author     Lee Garrett <lgarrett@rocketjump.eu>  2021-11-17 20:15:37 +0100
committer  Lee Garrett <lgarrett@rocketjump.eu>  2021-11-17 20:15:37 +0100
commit     b1739f3e93dadd7d8fa794644ceedc24bddc8388 (patch)
tree       193d287510fd44d67857e2d6b6bfcbb2b495c60a /test
parent     13e2c2e94d3559b85a7d813d98e9835b891b0a9f (diff)
download   debian-ansible-core-b1739f3e93dadd7d8fa794644ceedc24bddc8388.zip

New upstream version 2.12.0
Diffstat (limited to 'test')
-rw-r--r--  test/ansible_test/Makefile  2
-rw-r--r--  test/integration/targets/adhoc/aliases  1
-rw-r--r--  test/integration/targets/ansiballz_python/aliases  1
-rw-r--r--  test/integration/targets/ansible-doc/aliases  1
-rw-r--r--  test/integration/targets/ansible-doc/fakemodule.output  4
-rw-r--r--  test/integration/targets/ansible-doc/randommodule-text.output  105
-rwxr-xr-x  test/integration/targets/ansible-doc/runme.sh  8
-rw-r--r--  test/integration/targets/ansible-galaxy-collection-scm/aliases  3
-rw-r--r--  test/integration/targets/ansible-galaxy-collection/aliases  1
-rw-r--r--  test/integration/targets/ansible-galaxy-collection/tasks/install.yml  4
-rw-r--r--  test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml  6
-rw-r--r--  test/integration/targets/ansible-galaxy-role/aliases  2
-rw-r--r--  test/integration/targets/ansible-galaxy/aliases  3
-rw-r--r--  test/integration/targets/ansible-inventory/aliases  1
-rw-r--r--  test/integration/targets/ansible-inventory/files/invalid_sample.yml  7
-rw-r--r--  test/integration/targets/ansible-inventory/files/valid_sample.yml  7
-rw-r--r--  test/integration/targets/ansible-inventory/tasks/main.yml  114
-rw-r--r--  test/integration/targets/ansible-pull/aliases (renamed from test/integration/targets/vault/aliases)  2
-rw-r--r--  test/integration/targets/ansible-pull/cleanup.yml (renamed from test/integration/targets/pull/cleanup.yml)  0
-rw-r--r--  test/integration/targets/ansible-pull/pull-integration-test/ansible.cfg (renamed from test/integration/targets/pull/pull-integration-test/ansible.cfg)  0
-rw-r--r--  test/integration/targets/ansible-pull/pull-integration-test/inventory (renamed from test/integration/targets/pull/pull-integration-test/inventory)  0
-rw-r--r--  test/integration/targets/ansible-pull/pull-integration-test/local.yml (renamed from test/integration/targets/pull/pull-integration-test/local.yml)  0
-rw-r--r--  test/integration/targets/ansible-pull/pull-integration-test/multi_play_1.yml (renamed from test/integration/targets/pull/pull-integration-test/multi_play_1.yml)  0
-rw-r--r--  test/integration/targets/ansible-pull/pull-integration-test/multi_play_2.yml (renamed from test/integration/targets/pull/pull-integration-test/multi_play_2.yml)  0
-rwxr-xr-x  test/integration/targets/ansible-pull/runme.sh (renamed from test/integration/targets/pull/runme.sh)  0
-rw-r--r--  test/integration/targets/ansible-pull/setup.yml (renamed from test/integration/targets/pull/setup.yml)  0
-rw-r--r--  test/integration/targets/ansible-runner/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-acme/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-acme/tasks/main.yml  7
-rw-r--r--  test/integration/targets/ansible-test-cloud-cs/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-cs/tasks/main.yml  8
-rw-r--r--  test/integration/targets/ansible-test-cloud-foreman/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml  6
-rw-r--r--  test/integration/targets/ansible-test-cloud-galaxy/aliases  4
-rw-r--r--  test/integration/targets/ansible-test-cloud-galaxy/tasks/main.yml  25
-rw-r--r--  test/integration/targets/ansible-test-cloud-httptester-windows/aliases  4
-rw-r--r--  test/integration/targets/ansible-test-cloud-httptester-windows/tasks/main.yml  15
-rw-r--r--  test/integration/targets/ansible-test-cloud-httptester/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-httptester/tasks/main.yml  15
-rw-r--r--  test/integration/targets/ansible-test-cloud-nios/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-nios/tasks/main.yml  10
-rw-r--r--  test/integration/targets/ansible-test-cloud-openshift/aliases  4
-rw-r--r--  test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml  6
-rw-r--r--  test/integration/targets/ansible-test-cloud-vcenter/aliases  3
-rw-r--r--  test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml  6
-rw-r--r--  test/integration/targets/ansible-test-docker/aliases  1
-rwxr-xr-x  test/integration/targets/ansible-test-docker/collection-tests/docker.sh  2
-rw-r--r--  test/integration/targets/ansible-test/aliases  2
-rw-r--r--  test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases  1
-rwxr-xr-x  test/integration/targets/ansible-test/collection-tests/coverage.sh  4
-rwxr-xr-x  test/integration/targets/ansible-test/collection-tests/venv.sh  4
-rw-r--r--  test/integration/targets/ansible-vault/aliases (renamed from test/integration/targets/pull/aliases)  2
-rw-r--r--  test/integration/targets/ansible-vault/empty-password (renamed from test/integration/targets/vault/empty-password)  0
-rw-r--r--  test/integration/targets/ansible-vault/encrypted-vault-password (renamed from test/integration/targets/vault/encrypted-vault-password)  0
-rw-r--r--  test/integration/targets/ansible-vault/encrypted_file_encrypted_var_password (renamed from test/integration/targets/vault/encrypted_file_encrypted_var_password)  0
-rw-r--r--  test/integration/targets/ansible-vault/example1_password (renamed from test/integration/targets/vault/example1_password)  0
-rw-r--r--  test/integration/targets/ansible-vault/example2_password (renamed from test/integration/targets/vault/example2_password)  0
-rw-r--r--  test/integration/targets/ansible-vault/example3_password (renamed from test/integration/targets/vault/example3_password)  0
-rwxr-xr-x  test/integration/targets/ansible-vault/faux-editor.py (renamed from test/integration/targets/vault/faux-editor.py)  2
-rw-r--r--  test/integration/targets/ansible-vault/files/test_assemble/nonsecret.txt (renamed from test/integration/targets/vault/files/test_assemble/nonsecret.txt)  0
-rw-r--r--  test/integration/targets/ansible-vault/files/test_assemble/secret.vault (renamed from test/integration/targets/vault/files/test_assemble/secret.vault)  0
-rw-r--r--  test/integration/targets/ansible-vault/format_1_1_AES256.yml (renamed from test/integration/targets/vault/format_1_1_AES256.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/format_1_2_AES256.yml (renamed from test/integration/targets/vault/format_1_2_AES256.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/host_vars/myhost.yml (renamed from test/integration/targets/vault/host_vars/myhost.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/host_vars/testhost.yml (renamed from test/integration/targets/vault/host_vars/testhost.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/README.md (renamed from test/integration/targets/vault/invalid_format/README.md)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml (renamed from test/integration/targets/vault/invalid_format/broken-group-vars-tasks.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/broken-host-vars-tasks.yml (renamed from test/integration/targets/vault/invalid_format/broken-host-vars-tasks.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/group_vars/broken-group-vars.yml (renamed from test/integration/targets/vault/invalid_format/group_vars/broken-group-vars.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/host_vars/broken-host-vars.example.com/vars (renamed from test/integration/targets/vault/invalid_format/host_vars/broken-host-vars.example.com/vars)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/inventory (renamed from test/integration/targets/vault/invalid_format/inventory)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/original-broken-host-vars (renamed from test/integration/targets/vault/invalid_format/original-broken-host-vars)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/original-group-vars.yml (renamed from test/integration/targets/vault/invalid_format/original-group-vars.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/some-vars (renamed from test/integration/targets/vault/invalid_format/some-vars)  0
-rw-r--r--  test/integration/targets/ansible-vault/invalid_format/vault-secret (renamed from test/integration/targets/vault/invalid_format/vault-secret)  0
-rw-r--r--  test/integration/targets/ansible-vault/inventory.toml (renamed from test/integration/targets/vault/inventory.toml)  0
-rwxr-xr-x  test/integration/targets/ansible-vault/password-script.py (renamed from test/integration/targets/vault/password-script.py)  2
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault/tasks/main.yml (renamed from test/integration/targets/vault/roles/test_vault/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault/vars/main.yml (renamed from test/integration/targets/vault/roles/test_vault/vars/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_embedded/tasks/main.yml (renamed from test/integration/targets/vault/roles/test_vault_embedded/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_embedded/vars/main.yml (renamed from test/integration/targets/vault/roles/test_vault_embedded/vars/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/tasks/main.yml (renamed from test/integration/targets/vault/roles/test_vault_embedded_ids/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/vars/main.yml (renamed from test/integration/targets/vault/roles/test_vault_embedded_ids/vars/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/README.md (renamed from test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/README.md)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml (renamed from test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/vars/main.yml (renamed from test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/vars/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vaulted_template/tasks/main.yml (renamed from test/integration/targets/vault/roles/test_vaulted_template/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/roles/test_vaulted_template/templates/vaulted_template.j2 (renamed from test/integration/targets/vault/roles/test_vaulted_template/templates/vaulted_template.j2)  0
-rwxr-xr-x  test/integration/targets/ansible-vault/runme.sh (renamed from test/integration/targets/vault/runme.sh)  0
-rw-r--r--  test/integration/targets/ansible-vault/single_vault_as_string.yml (renamed from test/integration/targets/vault/single_vault_as_string.yml)  2
-rwxr-xr-x  test/integration/targets/ansible-vault/test-vault-client.py (renamed from test/integration/targets/vault/test-vault-client.py)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_dangling_temp.yml (renamed from test/integration/targets/vault/test_dangling_temp.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_utf8_value_in_filename.yml (renamed from test/integration/targets/vault/test_utf8_value_in_filename.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vault.yml (renamed from test/integration/targets/vault/test_vault.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vault_embedded.yml (renamed from test/integration/targets/vault/test_vault_embedded.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vault_embedded_ids.yml (renamed from test/integration/targets/vault/test_vault_embedded_ids.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vault_file_encrypted_embedded.yml (renamed from test/integration/targets/vault/test_vault_file_encrypted_embedded.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vaulted_inventory.yml (renamed from test/integration/targets/vault/test_vaulted_inventory.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vaulted_inventory_toml.yml (renamed from test/integration/targets/vault/test_vaulted_inventory_toml.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vaulted_template.yml (renamed from test/integration/targets/vault/test_vaulted_template.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/test_vaulted_utf8_value.yml (renamed from test/integration/targets/vault/test_vaulted_utf8_value.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/vault-café.yml (renamed from test/integration/targets/vault/vault-café.yml)  0
-rw-r--r--  test/integration/targets/ansible-vault/vault-password (renamed from test/integration/targets/vault/vault-password)  0
-rw-r--r--  test/integration/targets/ansible-vault/vault-password-ansible (renamed from test/integration/targets/vault/vault-password-ansible)  0
-rw-r--r--  test/integration/targets/ansible-vault/vault-password-wrong (renamed from test/integration/targets/vault/vault-password-wrong)  0
-rw-r--r--  test/integration/targets/ansible-vault/vault-secret.txt (renamed from test/integration/targets/vault/vault-secret.txt)  0
-rw-r--r--  test/integration/targets/ansible-vault/vaulted.inventory (renamed from test/integration/targets/vault/vaulted.inventory)  0
-rw-r--r--  test/integration/targets/ansible/aliases  2
-rwxr-xr-x  test/integration/targets/ansible/module_common_regex_regression.sh  15
-rwxr-xr-x  test/integration/targets/ansible/runme.sh  4
-rw-r--r--  test/integration/targets/any_errors_fatal/18602.yml  21
-rw-r--r--  test/integration/targets/any_errors_fatal/aliases  1
-rw-r--r--  test/integration/targets/any_errors_fatal/on_includes.yml  2
-rw-r--r--  test/integration/targets/apt/tasks/downgrade.yml  77
-rw-r--r--  test/integration/targets/apt/tasks/repo.yml  25
-rw-r--r--  test/integration/targets/args/aliases  1
-rw-r--r--  test/integration/targets/argspec/aliases  1
-rw-r--r--  test/integration/targets/assert/aliases  2
-rw-r--r--  test/integration/targets/async/tasks/main.yml  29
-rw-r--r--  test/integration/targets/async_extra_data/aliases  1
-rw-r--r--  test/integration/targets/become/aliases  1
-rw-r--r--  test/integration/targets/become/tasks/main.yml  6
-rw-r--r--  test/integration/targets/become_su/aliases  2
-rw-r--r--  test/integration/targets/become_unprivileged/aliases  2
-rw-r--r--  test/integration/targets/binary/aliases  1
-rw-r--r--  test/integration/targets/binary_modules_posix/aliases  1
-rw-r--r--  test/integration/targets/blocks/aliases  1
-rw-r--r--  test/integration/targets/blocks/main.yml  8
-rw-r--r--  test/integration/targets/blocks/nested_fail.yml  4
-rw-r--r--  test/integration/targets/blocks/nested_nested_fail.yml  4
-rw-r--r--  test/integration/targets/builtin_vars_prompt/aliases  1
-rw-r--r--  test/integration/targets/callback_default/aliases  1
-rwxr-xr-x  test/integration/targets/callback_default/runme.sh  7
-rw-r--r--  test/integration/targets/callback_default/test_async.yml  14
-rw-r--r--  test/integration/targets/changed_when/aliases  1
-rw-r--r--  test/integration/targets/changed_when/tasks/main.yml  12
-rw-r--r--  test/integration/targets/check_mode/aliases  1
-rw-r--r--  test/integration/targets/cli/aliases  1
-rw-r--r--  test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyCSMUOptional.cs  19
-rw-r--r--  test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyPSMUOptional.psm1  16
-rw-r--r--  test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_uses_optional.ps1  33
-rwxr-xr-x  test/integration/targets/collections/runme.sh  6
-rwxr-xr-x  test/integration/targets/collections/test_task_resolved_plugin.sh  48
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/action_plugins/legacy_action.py  14
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py  37
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/meta/runtime.yml  7
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/action/collection_action.py  14
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/modules/collection_module.py  29
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/fqcn.yml  14
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/library/legacy_module.py  29
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml  8
-rw-r--r--  test/integration/targets/collections/test_task_resolved_plugin/unqualified_and_collections_kw.yml  14
-rw-r--r--  test/integration/targets/collections/windows.yml  6
-rw-r--r--  test/integration/targets/collections_plugin_namespace/aliases  1
-rw-r--r--  test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/PSRel4.psm1  12
-rw-r--r--  test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/win_relative_optional.ps1  17
-rw-r--r--  test/integration/targets/collections_relative_imports/windows.yml  9
-rw-r--r--  test/integration/targets/collections_runtime_pythonpath/aliases  3
-rwxr-xr-x  test/integration/targets/collections_runtime_pythonpath/runme.sh  6
-rw-r--r--  test/integration/targets/command_nonexisting/aliases  3
-rw-r--r--  test/integration/targets/command_shell/tasks/main.yml  4
-rw-r--r--  test/integration/targets/common_network/aliases  1
-rw-r--r--  test/integration/targets/conditionals/aliases  1
-rw-r--r--  test/integration/targets/conditionals/play.yml  150
-rwxr-xr-x  test/integration/targets/conditionals/runme.sh  12
-rw-r--r--  test/integration/targets/conditionals/test_no_warnings.yml  18
-rw-r--r--  test/integration/targets/conditionals/test_warnings.yml  14
-rw-r--r--  test/integration/targets/conditionals/vars/main.yml  7
-rw-r--r--  test/integration/targets/config/aliases  1
-rw-r--r--  test/integration/targets/config/files/types.env  11
-rw-r--r--  test/integration/targets/config/files/types.ini  13
-rw-r--r--  test/integration/targets/config/files/types.vars  15
-rw-r--r--  test/integration/targets/config/files/types_dump.txt  8
-rw-r--r--  test/integration/targets/config/lookup_plugins/types.py  82
-rwxr-xr-x  test/integration/targets/config/runme.sh  16
-rw-r--r--  test/integration/targets/config/type_munging.cfg  8
-rw-r--r--  test/integration/targets/config/types.yml  25
-rw-r--r--  test/integration/targets/connection_delegation/aliases  1
-rwxr-xr-x  test/integration/targets/connection_delegation/runme.sh  2
-rw-r--r--  test/integration/targets/connection_paramiko_ssh/aliases  1
-rw-r--r--  test/integration/targets/connection_ssh/aliases  1
-rwxr-xr-x  test/integration/targets/connection_ssh/runme.sh  3
-rw-r--r--  test/integration/targets/connection_ssh/test_ssh_defaults.cfg  5
-rw-r--r--  test/integration/targets/connection_ssh/verify_config.yml  21
-rw-r--r--  test/integration/targets/controller/aliases  2
-rw-r--r--  test/integration/targets/controller/tasks/main.yml  9
-rw-r--r--  test/integration/targets/copy/tasks/main.yml  3
-rw-r--r--  test/integration/targets/copy/tasks/tests.yml  8
-rw-r--r--  test/integration/targets/cron/tasks/main.yml  20
-rw-r--r--  test/integration/targets/dataloader/aliases  1
-rw-r--r--  test/integration/targets/debug/aliases  1
-rw-r--r--  test/integration/targets/delegate_to/aliases  2
-rw-r--r--  test/integration/targets/dict_transformations/aliases  1
-rw-r--r--  test/integration/targets/dnf/tasks/cacheonly.yml  15
-rw-r--r--  test/integration/targets/dnf/tasks/dnf.yml  22
-rw-r--r--  test/integration/targets/dnf/tasks/main.yml  11
-rw-r--r--  test/integration/targets/dnf/vars/Fedora-34.yml  2
-rw-r--r--  test/integration/targets/dpkg_selections/tasks/main.yaml  2
-rw-r--r--  test/integration/targets/egg-info/aliases  1
-rw-r--r--  test/integration/targets/embedded_module/aliases  1
-rw-r--r--  test/integration/targets/environment/aliases  1
-rw-r--r--  test/integration/targets/error_from_connection/aliases  1
-rw-r--r--  test/integration/targets/facts_d/aliases  1
-rw-r--r--  test/integration/targets/facts_linux_network/aliases  1
-rw-r--r--  test/integration/targets/failed_when/aliases  1
-rw-r--r--  test/integration/targets/failed_when/tasks/main.yml  12
-rw-r--r--  test/integration/targets/fetch/aliases  1
-rw-r--r--  test/integration/targets/fetch/cleanup.yml  16
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/defaults/main.yml  1
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/handlers/main.yml  8
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/fail_on_missing.yml  53
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml  41
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/main.yml  146
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/normal.yml  38
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/setup.yml  45
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/tasks/symlink.yml  13
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/vars/Darwin.yml  3
-rw-r--r--  test/integration/targets/fetch/roles/fetch_tests/vars/default.yml  1
-rwxr-xr-x  test/integration/targets/fetch/runme.sh  26
-rw-r--r--  test/integration/targets/fetch/setup_unreadable_test.yml  40
-rw-r--r--  test/integration/targets/fetch/test_unreadable_with_stat.yml  36
-rw-r--r--  test/integration/targets/file/handlers/main.yml  1
-rw-r--r--  test/integration/targets/file/tasks/directory_as_dest.yml  2
-rw-r--r--  test/integration/targets/file/tasks/main.yml  5
-rw-r--r--  test/integration/targets/file/tasks/selinux_tests.yml  2
-rw-r--r--  test/integration/targets/file/tasks/state_link.yml  4
-rw-r--r--  test/integration/targets/filter_core/aliases  2
-rw-r--r--  test/integration/targets/filter_core/tasks/main.yml  49
-rw-r--r--  test/integration/targets/filter_encryption/aliases  1
-rw-r--r--  test/integration/targets/filter_encryption/base.yml  37
-rwxr-xr-x  test/integration/targets/filter_encryption/runme.sh  5
-rw-r--r--  test/integration/targets/filter_mathstuff/aliases  2
-rw-r--r--  test/integration/targets/filter_mathstuff/host_vars/localhost.yml  1
-rwxr-xr-x  test/integration/targets/filter_mathstuff/runme.sh  17
-rw-r--r--  test/integration/targets/filter_mathstuff/runme.yml  4
-rw-r--r--  test/integration/targets/filter_mathstuff/tasks/main.yml  36
-rw-r--r--  test/integration/targets/filter_mathstuff/vars/defined_later.yml  3
-rw-r--r--  test/integration/targets/filter_mathstuff/vars/main.yml  1
-rw-r--r--  test/integration/targets/filter_urls/aliases  2
-rw-r--r--  test/integration/targets/filter_urlsplit/aliases  2
-rw-r--r--  test/integration/targets/find/tasks/main.yml  116
-rw-r--r--  test/integration/targets/gathering/aliases  1
-rw-r--r--  test/integration/targets/gathering_facts/aliases  1
-rw-r--r--  test/integration/targets/gathering_facts/collections/ansible_collections/cisco/ios/plugins/modules/ios_facts.py  38
-rw-r--r--  test/integration/targets/gathering_facts/inventory  2
-rwxr-xr-x  test/integration/targets/gathering_facts/runme.sh  2
-rw-r--r--  test/integration/targets/gathering_facts/test_gathering_facts.yml  28
-rw-r--r--  test/integration/targets/gathering_facts/test_module_defaults.yml  51
-rw-r--r--  test/integration/targets/gathering_facts/test_prevent_injection.yml  2
-rw-r--r--  test/integration/targets/get_url/tasks/main.yml  29
-rw-r--r--  test/integration/targets/git/tasks/archive.yml  1
-rw-r--r--  test/integration/targets/git/tasks/main.yml  1
-rw-r--r--  test/integration/targets/git/tasks/missing_hostkey.yml  13
-rw-r--r--  test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml  78
-rw-r--r--  test/integration/targets/git/tasks/submodules.yml  26
-rw-r--r--  test/integration/targets/groupby_filter/aliases  1
-rw-r--r--  test/integration/targets/handler_race/aliases  3
-rw-r--r--  test/integration/targets/handlers/58841.yml  9
-rw-r--r--  test/integration/targets/handlers/aliases  3
-rw-r--r--  test/integration/targets/handlers/roles/import_template_handler_names/tasks/main.yml  11
-rw-r--r--  test/integration/targets/handlers/roles/template_handler_names/handlers/main.yml  5
-rw-r--r--  test/integration/targets/handlers/roles/template_handler_names/tasks/evaluation_time.yml  5
-rw-r--r--  test/integration/targets/handlers/roles/template_handler_names/tasks/lazy_evaluation.yml  5
-rw-r--r--  test/integration/targets/handlers/roles/test_handlers_include/handlers/main.yml  2
-rwxr-xr-x  test/integration/targets/handlers/runme.sh  26
-rw-r--r--  test/integration/targets/handlers/test_handlers_include.yml  2
-rw-r--r--  test/integration/targets/hardware_facts/aliases  1
-rw-r--r--  test/integration/targets/hash/aliases  1
-rw-r--r--  test/integration/targets/hosts_field/aliases  1
-rw-r--r--  test/integration/targets/ignore_errors/aliases  1
-rw-r--r--  test/integration/targets/ignore_unreachable/aliases  1
-rw-r--r--  test/integration/targets/import_tasks/aliases  2
-rw-r--r--  test/integration/targets/incidental_cloud_init_data_facts/aliases  1
-rw-r--r--  test/integration/targets/incidental_deploy_helper/aliases  1
-rw-r--r--  test/integration/targets/incidental_inventory_aws_ec2/aliases  1
-rwxr-xr-x  test/integration/targets/incidental_inventory_aws_ec2/runme.sh  4
-rw-r--r--  test/integration/targets/incidental_inventory_docker_swarm/aliases  3
-rw-r--r--  test/integration/targets/incidental_inventory_foreman/aliases  1
-rw-r--r--  test/integration/targets/incidental_inventory_foreman/inspect_cache.yml  4
-rwxr-xr-x  test/integration/targets/incidental_inventory_foreman/runme.sh  4
-rw-r--r--  test/integration/targets/incidental_ios_file/tasks/cli.yaml  2
-rw-r--r--  test/integration/targets/incidental_ios_file/tasks/main.yaml  2
-rw-r--r--  test/integration/targets/incidental_mongodb_parameter/aliases  1
-rw-r--r--  test/integration/targets/incidental_setup_docker/vars/RedHat-8.yml  1
-rw-r--r--  test/integration/targets/incidental_vyos_config/tasks/cli.yaml  8
-rw-r--r--  test/integration/targets/incidental_vyos_config/tasks/cli_config.yaml  4
-rw-r--r--  test/integration/targets/incidental_vyos_config/tasks/main.yaml  4
-rw-r--r--  test/integration/targets/incidental_vyos_lldp_interfaces/tasks/cli.yaml  2
-rw-r--r--  test/integration/targets/incidental_vyos_lldp_interfaces/tasks/main.yaml  2
-rw-r--r--  test/integration/targets/incidental_win_data_deduplication/tasks/main.yml  2
-rw-r--r--  test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml  2
-rw-r--r--  test/integration/targets/incidental_win_security_policy/aliases  2
-rw-r--r--  test/integration/targets/incidental_win_security_policy/library/test_win_security_policy.ps1  53
-rw-r--r--  test/integration/targets/incidental_win_security_policy/tasks/main.yml  41
-rw-r--r--  test/integration/targets/incidental_win_security_policy/tasks/tests.yml  186
-rw-r--r--  test/integration/targets/include_import/aliases  2
-rw-r--r--  test/integration/targets/include_import/include_role_omit/playbook.yml  12
-rw-r--r--  test/integration/targets/include_import/include_role_omit/roles/foo/tasks/main.yml  2
-rw-r--r--  test/integration/targets/include_import/playbook/test_templated_filenames.yml  47
-rw-r--r--  test/integration/targets/include_import/playbook/validate_templated_playbook.yml  5
-rw-r--r--  test/integration/targets/include_import/playbook/validate_templated_tasks.yml  1
-rw-r--r--  test/integration/targets/include_import/roles/role1/tasks/templated.yml  1
-rwxr-xr-x  test/integration/targets/include_import/runme.sh  9
-rw-r--r--  test/integration/targets/include_import/undefined_var/playbook.yml  3
-rw-r--r--  test/integration/targets/include_vars-ad-hoc/aliases  1
-rw-r--r--  test/integration/targets/include_vars/tasks/main.yml  53
-rw-r--r--  test/integration/targets/include_vars/vars/no_auto_unsafe.yml  1
-rw-r--r--  test/integration/targets/include_vars/vars2/hashes/hash1.yml  5
-rw-r--r--  test/integration/targets/include_vars/vars2/hashes/hash2.yml  5
-rw-r--r--  test/integration/targets/include_when_parent_is_dynamic/aliases  2
-rw-r--r--  test/integration/targets/include_when_parent_is_static/aliases  2
-rw-r--r--  test/integration/targets/includes/aliases  1
-rw-r--r--  test/integration/targets/includes/include_on_playbook_should_fail.yml  1
-rw-r--r--  test/integration/targets/includes/roles/test_includes/tasks/branch_toplevel.yml  10
-rw-r--r--  test/integration/targets/includes/roles/test_includes/tasks/main.yml  24
-rwxr-xr-x  test/integration/targets/includes/runme.sh  6
-rw-r--r--  test/integration/targets/includes/test_includes.yml  8
-rw-r--r--  test/integration/targets/includes_race/aliases  2
-rw-r--r--  test/integration/targets/infra/aliases  1
-rwxr-xr-x  test/integration/targets/infra/runme.sh  4
-rw-r--r--  test/integration/targets/interpreter_discovery_python/aliases  1
-rw-r--r--  test/integration/targets/interpreter_discovery_python/tasks/main.yml  4
-rw-r--r--  test/integration/targets/interpreter_discovery_python_delegate_facts/aliases  1
-rw-r--r--  test/integration/targets/inventory/aliases  1
-rw-r--r--  test/integration/targets/inventory_advanced_host_list/aliases (renamed from test/integration/targets/module_utils_respawn/aliases)  0
-rwxr-xr-x  test/integration/targets/inventory_advanced_host_list/runme.sh  36
-rw-r--r--  test/integration/targets/inventory_advanced_host_list/test_advanced_host_list.yml  9
-rw-r--r--  test/integration/targets/inventory_cache/aliases  1
-rw-r--r--  test/integration/targets/inventory_constructed/keyed_group_default_value.yml  5
-rw-r--r--  test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml  5
-rw-r--r--  test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml  5
-rw-r--r--  test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml  5
-rwxr-xr-x  test/integration/targets/inventory_constructed/runme.sh  29
-rw-r--r--  test/integration/targets/inventory_constructed/tag_inventory.yml  12
-rw-r--r--  test/integration/targets/inventory_yaml/aliases  1
-rw-r--r--  test/integration/targets/jinja2_native_types/aliases  1
-rw-r--r--  test/integration/targets/jinja_plugins/aliases  2
-rw-r--r--  test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter.py  11
-rw-r--r--  test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/good_collection_filter.py  13
-rw-r--r--  test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/bad_collection_test.py  11
-rw-r--r--  test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/good_collection_test.py  13
-rw-r--r--  test/integration/targets/jinja_plugins/filter_plugins/bad_filter.py  11
-rw-r--r--  test/integration/targets/jinja_plugins/filter_plugins/good_filter.py  13
-rw-r--r--  test/integration/targets/jinja_plugins/playbook.yml  10
-rw-r--r--  test/integration/targets/jinja_plugins/tasks/main.yml  22
-rw-r--r--  test/integration/targets/jinja_plugins/test_plugins/bad_test.py  11
-rw-r--r--  test/integration/targets/jinja_plugins/test_plugins/good_test.py  13
-rw-r--r--  test/integration/targets/json_cleanup/aliases  1
-rw-r--r--  test/integration/targets/limit_inventory/aliases  1
-rw-r--r--  test/integration/targets/lineinfile/meta/main.yml  1
-rw-r--r--  test/integration/targets/lineinfile/tasks/main.yml  279
-rw-r--r--  test/integration/targets/lineinfile/tasks/test_string01.yml  26
-rw-r--r--  test/integration/targets/lineinfile/tasks/test_string02.yml  28
-rw-r--r--  test/integration/targets/lookup_config/aliases  2
-rw-r--r--  test/integration/targets/lookup_config/tasks/main.yml  17
-rw-r--r--  test/integration/targets/lookup_csvfile/aliases  1
-rw-r--r--  test/integration/targets/lookup_csvfile/tasks/main.yml  30
-rw-r--r--  test/integration/targets/lookup_dict/aliases  2
-rw-r--r--  test/integration/targets/lookup_env/aliases  2
-rw-r--r--  test/integration/targets/lookup_file/aliases  2
-rw-r--r--  test/integration/targets/lookup_first_found/aliases  2
-rw-r--r--  test/integration/targets/lookup_first_found/tasks/main.yml  21
-rw-r--r--  test/integration/targets/lookup_indexed_items/aliases  2
-rw-r--r--  test/integration/targets/lookup_indexed_items/tasks/main.yml  16
-rw-r--r--  test/integration/targets/lookup_ini/aliases  1
-rw-r--r--  test/integration/targets/lookup_ini/lookup_case_check.properties  2
-rw-r--r--  test/integration/targets/lookup_ini/mysql.ini  8
-rwxr-xr-x  test/integration/targets/lookup_ini/runme.sh  3
-rw-r--r--  test/integration/targets/lookup_ini/test_allow_no_value.yml  23
-rw-r--r--  test/integration/targets/lookup_ini/test_case_sensitive.yml  31
-rw-r--r--  test/integration/targets/lookup_ini/test_errors.yml  32
-rw-r--r--  test/integration/targets/lookup_ini/test_ini.yml  4
-rw-r--r--  test/integration/targets/lookup_ini/test_lookup_properties.yml  12
-rw-r--r--  test/integration/targets/lookup_inventory_hostnames/aliases  1
-rw-r--r--  test/integration/targets/lookup_items/aliases  2
-rw-r--r--  test/integration/targets/lookup_lines/aliases  2
-rw-r--r--  test/integration/targets/lookup_list/aliases  2
-rw-r--r--  test/integration/targets/lookup_nested/aliases  2
-rw-r--r--  test/integration/targets/lookup_password/aliases  2
-rw-r--r--  test/integration/targets/lookup_pipe/aliases  2
-rw-r--r--  test/integration/targets/lookup_random_choice/aliases  2
-rw-r--r--  test/integration/targets/lookup_sequence/aliases  2
-rw-r--r--  test/integration/targets/lookup_sequence/tasks/main.yml  135
-rw-r--r--  test/integration/targets/lookup_subelements/aliases  2
-rw-r--r--  test/integration/targets/lookup_template/aliases  2
-rw-r--r--  test/integration/targets/lookup_template/tasks/main.yml  8
-rw-r--r--  test/integration/targets/lookup_template/templates/hello_comment.txt  2
-rw-r--r--  test/integration/targets/lookup_together/aliases  2
-rw-r--r--  test/integration/targets/lookup_together/tasks/main.yml  15
-rw-r--r--  test/integration/targets/lookup_unvault/aliases  1
-rw-r--r--  test/integration/targets/lookup_url/aliases  2
-rw-r--r--  test/integration/targets/lookup_varnames/aliases  1
-rw-r--r--  test/integration/targets/lookup_vars/aliases  2
-rw-r--r--  test/integration/targets/lookup_vars/tasks/main.yml  40
-rw-r--r--  test/integration/targets/loop_control/aliases  1
-rw-r--r--  test/integration/targets/loops/aliases  2
-rw-r--r--  test/integration/targets/meta_tasks/aliases  1
-rwxr-xr-x  test/integration/targets/meta_tasks/runme.sh  15
-rw-r--r--  test/integration/targets/meta_tasks/test_end_batch.yml  13
-rw-r--r--  test/integration/targets/meta_tasks/test_end_play_serial_one.yml  13
-rw-r--r--  test/integration/targets/missing_required_lib/aliases  1
-rw-r--r--  test/integration/targets/module_defaults/action_plugins/debug.py  80
-rw-r--r--  test/integration/targets/module_defaults/aliases  1
-rw-r--r--  test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/meta/runtime.yml  30
-rw-r--r--  test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/metadata.py  45
-rw-r--r--  test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/ping.py  83
-rw-r--r--  test/integration/targets/module_defaults/library/legacy_ping.py  83
-rwxr-xr-x  test/integration/targets/module_defaults/runme.sh  4
-rw-r--r--  test/integration/targets/module_defaults/tasks/main.yml  2
-rw-r--r--  test/integration/targets/module_defaults/templates/test_metadata_warning.yml.j2  8
-rw-r--r--  test/integration/targets/module_defaults/test_action_group_metadata.yml  123
-rw-r--r--  test/integration/targets/module_defaults/test_action_groups.yml  132
-rw-r--r--  test/integration/targets/module_defaults/test_defaults.yml  52
-rw-r--r--  test/integration/targets/module_no_log/aliases  2
-rw-r--r--  test/integration/targets/module_precedence/aliases  1
-rw-r--r--  test/integration/targets/module_tracebacks/aliases  2
-rw-r--r--  test/integration/targets/module_utils/aliases  1
-rw-r--r--  test/integration/targets/module_utils/module_utils_basic_setcwd.yml  8
-rw-r--r--  test/integration/targets/module_utils/module_utils_test.yml  4
-rw-r--r--  test/integration/targets/module_utils/module_utils_test_no_log.yml  3
-rw-r--r--  test/integration/targets/module_utils/module_utils_vvvvv.yml  3
-rw-r--r--  test/integration/targets/module_utils_common.respawn/aliases  1
-rw-r--r--  test/integration/targets/module_utils_common.respawn/library/respawnme.py (renamed from test/integration/targets/module_utils_respawn/library/respawnme.py)  0
-rw-r--r--  test/integration/targets/module_utils_common.respawn/tasks/main.yml (renamed from test/integration/targets/module_utils_respawn/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/module_utils_distro/aliases  2
-rw-r--r--  test/integration/targets/module_utils_distro/meta/main.yml  2
-rwxr-xr-x  test/integration/targets/module_utils_distro/runme.sh  24
-rw-r--r--  test/integration/targets/module_utils_facts.system.selinux/aliases (renamed from test/integration/targets/module_utils_selinux/aliases)  1
-rw-r--r--  test/integration/targets/module_utils_facts.system.selinux/tasks/main.yml (renamed from test/integration/targets/module_utils_selinux/tasks/main.yml)  0
-rw-r--r--  test/integration/targets/module_utils_facts.system.selinux/tasks/selinux.yml (renamed from test/integration/targets/module_utils_selinux/tasks/selinux.yml)  0
-rw-r--r--  test/integration/targets/no_log/aliases  1
-rw-r--r--  test/integration/targets/noexec/aliases  1
-rw-r--r--  test/integration/targets/old_style_cache_plugins/aliases  4
-rw-r--r--  test/integration/targets/old_style_cache_plugins/cleanup.yml  41
-rw-r--r--  test/integration/targets/old_style_cache_plugins/inspect_cache.yml  36
-rw-r--r--  test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py  147
-rw-r--r--  test/integration/targets/old_style_cache_plugins/plugins/cache/legacy_redis.py (renamed from test/integration/targets/old_style_cache_plugins/plugins/cache/redis.py)  0
-rwxr-xr-x  test/integration/targets/old_style_cache_plugins/runme.sh  91
-rw-r--r--  test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml  51
-rw-r--r--  test/integration/targets/old_style_cache_plugins/test_fact_gathering.yml  16
-rw-r--r--  test/integration/targets/old_style_cache_plugins/test_inventory_cache.yml  45
-rw-r--r--  test/integration/targets/old_style_modules_posix/aliases  1
-rw-r--r--  test/integration/targets/omit/aliases  1
-rw-r--r--  test/integration/targets/order/aliases  1
-rw-r--r--  test/integration/targets/package/tasks/main.yml  104
-rw-r--r--  test/integration/targets/parsing/aliases  1
-rw-r--r--  test/integration/targets/path_lookups/aliases  1
-rw-r--r--  test/integration/targets/path_lookups/play.yml  10
-rw-r--r--  test/integration/targets/path_with_comma_in_inventory/aliases  1
-rw-r--r--  test/integration/targets/pause/aliases  2
-rw-r--r--  test/integration/targets/pip/tasks/pip.yml  55
-rw-r--r--  test/integration/targets/pkg_resources/aliases  1
-rw-r--r--  test/integration/targets/play_iterator/aliases  1
-rw-r--r--  test/integration/targets/playbook/aliases  1
-rw-r--r--  test/integration/targets/playbook/empty.yml  1
-rw-r--r--  test/integration/targets/playbook/empty_hosts.yml  4
-rw-r--r--  test/integration/targets/playbook/malformed_post_tasks.yml  2
-rw-r--r--  test/integration/targets/playbook/malformed_pre_tasks.yml  2
-rw-r--r--  test/integration/targets/playbook/malformed_roles.yml  2
-rw-r--r--  test/integration/targets/playbook/malformed_tasks.yml  2
-rw-r--r--  test/integration/targets/playbook/malformed_vars_prompt.yml  3
-rw-r--r--  test/integration/targets/playbook/old_style_role.yml  3
-rw-r--r--  test/integration/targets/playbook/remote_user_and_user.yml  6
-rw-r--r--  test/integration/targets/playbook/roles_null.yml  3
-rwxr-xr-x  test/integration/targets/playbook/runme.sh  83
-rw-r--r--  test/integration/targets/playbook/some_vars.yml  2
-rw-r--r--  test/integration/targets/playbook/user.yml  23
-rw-r--r--  test/integration/targets/playbook/vars_files_null.yml  3
-rw-r--r--  test/integration/targets/playbook/vars_files_string.yml  6
-rw-r--r--  test/integration/targets/playbook/vars_prompt_null.yml  3
-rw-r--r--  test/integration/targets/plugin_config_for_inventory/aliases  1
-rw-r--r--  test/integration/targets/plugin_filtering/aliases  1
-rw-r--r--  test/integration/targets/plugin_loader/aliases  1
-rw-r--r--  test/integration/targets/plugin_namespace/aliases  1
-rw-r--r--  test/integration/targets/prepare_http_tests/tasks/kerberos.yml  4
-rw-r--r--  test/integration/targets/rel_plugin_loading/aliases  1
-rw-r--r--  test/integration/targets/remote_tmp/aliases  2
-rw-r--r--  test/integration/targets/remote_tmp/playbook.yml  14
-rwxr-xr-x  test/integration/targets/remote_tmp/runme.sh  2
-rw-r--r--  test/integration/targets/retry_task_name_in_callback/aliases  1
-rw-r--r--  test/integration/targets/roles/aliases  1
-rw-r--r--  test/integration/targets/roles/no_dupes.yml  10
-rwxr-xr-x  test/integration/targets/roles/runme.sh  3
-rw-r--r--  test/integration/targets/roles_arg_spec/aliases  1
-rw-r--r--  test/integration/targets/roles_var_inheritance/aliases  2
-rw-r--r--  test/integration/targets/roles_var_inheritance/play.yml  4
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/A/meta/main.yml  4
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/B/meta/main.yml  4
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/child_nested_dep/vars/main.yml  1
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/common_dep/meta/main.yml  4
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/common_dep/vars/main.yml  1
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/nested_dep/meta/main.yml  3
-rw-r--r--  test/integration/targets/roles_var_inheritance/roles/nested_dep/tasks/main.yml  5
-rwxr-xr-x  test/integration/targets/roles_var_inheritance/runme.sh  9
-rw-r--r--  test/integration/targets/rpm_key/tasks/main.yaml  2
-rw-r--r--  test/integration/targets/run_modules/aliases  1
-rw-r--r--  test/integration/targets/set_fact/aliases  2
-rwxr-xr-x  test/integration/targets/set_fact/runme.sh  3
-rw-r--r--  test/integration/targets/set_fact/set_fact_auto_unsafe.yml  10
-rw-r--r--  test/integration/targets/set_stats/aliases  2
-rwxr-xr-x  test/integration/targets/set_stats/runme.sh  13
-rw-r--r--  test/integration/targets/set_stats/test_aggregate.yml  13
-rw-r--r--  test/integration/targets/set_stats/test_simple.yml  79
-rw-r--r--  test/integration/targets/setup_cron/defaults/main.yml  2
-rw-r--r--  test/integration/targets/setup_cron/meta/main.yml  2
-rw-r--r--  test/integration/targets/setup_cron/tasks/main.yml  12
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml  3
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml  12
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml  3
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml  3
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml  3
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml  3
-rw-r--r--  test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml  6
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml  4
-rw-r--r--  test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml  4
-rw-r--r--  test/integration/targets/setup_remote_tmp_dir/defaults/main.yml  2
-rw-r--r--  test/integration/targets/setup_remote_tmp_dir/handlers/main.yml  2
-rw-r--r--  test/integration/targets/setup_remote_tmp_dir/tasks/default.yml  1
-rw-r--r--  test/integration/targets/setup_rpm_repo/files/create-repo.py  71
-rw-r--r--  test/integration/targets/special_vars/aliases  1
-rw-r--r--  test/integration/targets/special_vars_hosts/aliases  2
-rw-r--r--  test/integration/targets/special_vars_hosts/inventory  3
-rw-r--r--  test/integration/targets/special_vars_hosts/playbook.yml  53
-rwxr-xr-x  test/integration/targets/special_vars_hosts/runme.sh  7
-rw-r--r--  test/integration/targets/split/aliases  2
-rw-r--r--  test/integration/targets/split/tasks/main.yml  30
-rw-r--r--  test/integration/targets/subversion/roles/subversion/defaults/main.yml  5
-rw-r--r--  test/integration/targets/subversion/roles/subversion/tasks/setup.yml  18
-rwxr-xr-x  test/integration/targets/subversion/runme.sh  15
-rw-r--r--  test/integration/targets/subversion/runme.yml  2
-rw-r--r--  test/integration/targets/tags/aliases  2
-rw-r--r--  test/integration/targets/task_ordering/aliases  1
-rw-r--r--  test/integration/targets/task_ordering/tasks/main.yml  2
-rw-r--r--  test/integration/targets/tasks/aliases  1
-rw-r--r--  test/integration/targets/tempfile/aliases  1
-rw-r--r--  test/integration/targets/tempfile/meta/main.yml  2
-rw-r--r--  test/integration/targets/tempfile/tasks/main.yml  63
-rw-r--r--  test/integration/targets/template/aliases  2
-rw-r--r--  test/integration/targets/template/files/custom_comment_string.expected  2
-rwxr-xr-x  test/integration/targets/template/runme.sh  4
-rw-r--r--  test/integration/targets/template/tasks/main.yml  22
-rw-r--r--  test/integration/targets/template/templates/custom_comment_string.j2  3
-rw-r--r--  test/integration/targets/template_jinja2_latest/aliases  2
-rw-r--r--  test/integration/targets/template_jinja2_non_native/aliases  1
-rw-r--r--  test/integration/targets/templating_lookups/aliases  2
-rw-r--r--  test/integration/targets/templating_settings/aliases  1
-rw-r--r--  test/integration/targets/test_core/aliases  1
-rw-r--r--  test/integration/targets/test_files/aliases  1
-rw-r--r--  test/integration/targets/test_mathstuff/aliases  1
-rw-r--r--  test/integration/targets/throttle/aliases  1
-rw-r--r--  test/integration/targets/unarchive/tasks/test_download.yml  66
-rw-r--r--  test/integration/targets/unarchive/tasks/test_include.yml  3
-rw-r--r--  test/integration/targets/unarchive/tasks/test_owner_group.yml  2
-rw-r--r--  test/integration/targets/unarchive/tasks/test_tar_gz_owner_group.yml  2
-rw-r--r--  test/integration/targets/unarchive/tasks/test_unprivileged_user.yml  4
-rw-r--r--  test/integration/targets/undefined/aliases  1
-rw-r--r--  test/integration/targets/undefined/tasks/main.yml  3
-rw-r--r--  test/integration/targets/unicode/aliases  1
-rw-r--r--  test/integration/targets/unsafe_writes/aliases  2
-rw-r--r--  test/integration/targets/unsafe_writes/basic.yml  21
-rwxr-xr-x  test/integration/targets/unsafe_writes/runme.sh  6
-rw-r--r--  test/integration/targets/until/aliases  1
-rw-r--r--  test/integration/targets/unvault/aliases  1
-rw-r--r--  test/integration/targets/uri/meta/main.yml  1
-rw-r--r--  test/integration/targets/uri/tasks/main.yml  54
-rw-r--r--  test/integration/targets/user/tasks/main.yml  2
-rw-r--r--  test/integration/targets/user/tasks/test_umask.yml  57
-rw-r--r--  test/integration/targets/var_blending/aliases  1
-rw-r--r--  test/integration/targets/var_precedence/aliases  1
-rw-r--r--  test/integration/targets/var_reserved/aliases  1
-rw-r--r--  test/integration/targets/var_templating/aliases  1
-rwxr-xr-x  test/integration/targets/vault/runme_change_pip_installed.sh  27
-rw-r--r--  test/integration/targets/want_json_modules_posix/aliases  1
-rw-r--r--  test/integration/targets/yaml_parsing/aliases  1
-rw-r--r--  test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py  25
-rw-r--r--  test/integration/targets/yum/tasks/cacheonly.yml  16
-rw-r--r--  test/integration/targets/yum/tasks/main.yml  13
-rw-r--r--  test/integration/targets/yum/tasks/multiarch.yml  154
-rw-r--r--  test/integration/targets/yum/tasks/repo.yml  24
-rw-r--r--  test/integration/targets/yum/tasks/yum.yml  15
-rw-r--r--  test/integration/targets/yum/vars/main.yml  1
-rw-r--r--  test/integration/targets/yum_repository/tasks/main.yml  24
-rwxr-xr-x  test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py  28
-rw-r--r--  test/lib/ansible_test/_data/completion/docker.txt  25
-rw-r--r--  test/lib/ansible_test/_data/completion/network.txt  4
-rw-r--r--  test/lib/ansible_test/_data/completion/remote.txt  16
-rw-r--r--  test/lib/ansible_test/_data/completion/windows.txt  9
-rw-r--r--  test/lib/ansible_test/_data/cryptography-constraints.txt  3
l---------  test/lib/ansible_test/_data/injector/ansible  1
l---------  test/lib/ansible_test/_data/injector/ansible-config  1
l---------  test/lib/ansible_test/_data/injector/ansible-connection  1
l---------  test/lib/ansible_test/_data/injector/ansible-console  1
l---------  test/lib/ansible_test/_data/injector/ansible-doc  1
l---------  test/lib/ansible_test/_data/injector/ansible-galaxy  1
l---------  test/lib/ansible_test/_data/injector/ansible-inventory  1
l---------  test/lib/ansible_test/_data/injector/ansible-playbook  1
l---------  test/lib/ansible_test/_data/injector/ansible-pull  1
l---------  test/lib/ansible_test/_data/injector/ansible-test  1
l---------  test/lib/ansible_test/_data/injector/ansible-vault  1
l---------  test/lib/ansible_test/_data/injector/importer.py  1
l---------  test/lib/ansible_test/_data/injector/pytest  1
-rw-r--r--  test/lib/ansible_test/_data/injector/virtualenv-isolated.sh  18
-rw-r--r--  test/lib/ansible_test/_data/inventory  6
-rw-r--r--  test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml  21
-rw-r--r--  test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml  8
-rw-r--r--  test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml  9
-rw-r--r--  test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml  10
-rw-r--r--  test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml  23
-rw-r--r--  test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml  12
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml  11
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml  21
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1  34
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml  7
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1  37
-rw-r--r--  test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml  7
-rw-r--r--  test/lib/ansible_test/_data/requirements/ansible-test.txt  6
-rw-r--r--  test/lib/ansible_test/_data/requirements/ansible.txt (renamed from test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt)  0
-rw-r--r--  test/lib/ansible_test/_data/requirements/constraints.txt  32
-rw-r--r--  test/lib/ansible_test/_data/requirements/coverage.txt  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt  3
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt  39
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt  2
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt  2
-rw-r--r--  test/lib/ansible_test/_data/requirements/integration.txt  7
-rw-r--r--  test/lib/ansible_test/_data/requirements/network-integration.txt  7
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt  10
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.changelog.txt  8
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.import.txt  2
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt  2
-rw-r--r-- [-rwxr-xr-x]  test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 (renamed from test/lib/ansible_test/_data/requirements/sanity.ps1)  1
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.pylint.txt  12
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt  2
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt  7
-rw-r--r--  test/lib/ansible_test/_data/requirements/sanity.yamllint.txt  4
-rw-r--r--  test/lib/ansible_test/_data/requirements/units.txt  5
-rw-r--r--  test/lib/ansible_test/_data/requirements/windows-integration.txt  6
-rwxr-xr-x  test/lib/ansible_test/_data/sanity/compile/compile.py  41
-rw-r--r--  test/lib/ansible_test/_data/setup/docker.sh  13
-rw-r--r--  test/lib/ansible_test/_data/setup/remote.sh  185
-rw-r--r--  test/lib/ansible_test/_data/setup/ssh-keys.sh  35
-rw-r--r--  test/lib/ansible_test/_data/setup/windows-httptester.ps1  229
-rwxr-xr-x  test/lib/ansible_test/_data/versions.py  20
-rw-r--r--  test/lib/ansible_test/_internal/__init__.py  102
-rw-r--r--  test/lib/ansible_test/_internal/ansible_util.py  112
-rw-r--r--  test/lib/ansible_test/_internal/become.py  52
-rw-r--r--  test/lib/ansible_test/_internal/bootstrap.py  95
-rw-r--r--  test/lib/ansible_test/_internal/cache.py  33
-rw-r--r--  test/lib/ansible_test/_internal/ci/__init__.py  35
-rw-r--r--  test/lib/ansible_test/_internal/ci/azp.py  16
-rw-r--r--  test/lib/ansible_test/_internal/ci/local.py  16
-rw-r--r--  test/lib/ansible_test/_internal/ci/shippable.py  269
-rw-r--r--  test/lib/ansible_test/_internal/classification/__init__.py (renamed from test/lib/ansible_test/_internal/classification.py)  185
-rw-r--r--  test/lib/ansible_test/_internal/classification/common.py  26
-rw-r--r--  test/lib/ansible_test/_internal/classification/csharp.py (renamed from test/lib/ansible_test/_internal/csharp_import_analysis.py)  44
-rw-r--r--  test/lib/ansible_test/_internal/classification/powershell.py (renamed from test/lib/ansible_test/_internal/powershell_import_analysis.py)  42
-rw-r--r--  test/lib/ansible_test/_internal/classification/python.py (renamed from test/lib/ansible_test/_internal/import_analysis.py)  82
-rw-r--r--  test/lib/ansible_test/_internal/cli.py  1224
-rw-r--r--  test/lib/ansible_test/_internal/cli/__init__.py  55
-rw-r--r--  test/lib/ansible_test/_internal/cli/actions.py  90
-rw-r--r--  test/lib/ansible_test/_internal/cli/argparsing/__init__.py  263
-rw-r--r--  test/lib/ansible_test/_internal/cli/argparsing/actions.py  18
-rw-r--r--  test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py  124
-rw-r--r--  test/lib/ansible_test/_internal/cli/argparsing/parsers.py  581
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/__init__.py  240
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py  85
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py  28
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py  48
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py  49
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py  48
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py  76
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py  49
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py  65
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/combine.py  48
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/erase.py  36
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/html.py  42
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/report.py  60
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/coverage/xml.py  42
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/env.py  63
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/integration/__init__.py  161
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/integration/network.py  81
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/integration/posix.py  50
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/integration/windows.py  50
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/sanity.py  119
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/shell.py  47
-rw-r--r--  test/lib/ansible_test/_internal/cli/commands/units.py  65
-rw-r--r--  test/lib/ansible_test/_internal/cli/compat.py  482
-rw-r--r--  test/lib/ansible_test/_internal/cli/completers.py  26
-rw-r--r--  test/lib/ansible_test/_internal/cli/converters.py  20
-rw-r--r--  test/lib/ansible_test/_internal/cli/environments.py  574
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/__init__.py  303
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py  73
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/helpers.py  59
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py  310
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py  213
-rw-r--r--  test/lib/ansible_test/_internal/cli/parsers/value_parsers.py  172
-rw-r--r--  test/lib/ansible_test/_internal/cloud/acme.py  193
-rw-r--r--  test/lib/ansible_test/_internal/cloud/cloudscale.py  80
-rw-r--r--  test/lib/ansible_test/_internal/cloud/cs.py  299
-rw-r--r--  test/lib/ansible_test/_internal/cloud/foreman.py  191
-rw-r--r--  test/lib/ansible_test/_internal/cloud/nios.py  193
-rw-r--r--  test/lib/ansible_test/_internal/cloud/openshift.py  236
-rw-r--r--  test/lib/ansible_test/_internal/cloud/scaleway.py  72
-rw-r--r--  test/lib/ansible_test/_internal/cloud/vcenter.py  232
-rw-r--r--  test/lib/ansible_test/_internal/cloud/vultr.py  71
-rw-r--r--  test/lib/ansible_test/_internal/commands/__init__.py  2
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/__init__.py (renamed from test/lib/ansible_test/_internal/coverage/__init__.py)  94
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/__init__.py)  8
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/__init__.py)  14
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/combine.py)  19
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/expand.py)  23
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/filter.py)  21
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/generate.py)  33
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py (renamed from test/lib/ansible_test/_internal/coverage/analyze/targets/missing.py)  23
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/combine.py (renamed from test/lib/ansible_test/_internal/coverage/combine.py)  146
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/erase.py  43
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/html.py (renamed from test/lib/ansible_test/_internal/coverage/html.py)  32
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/report.py (renamed from test/lib/ansible_test/_internal/coverage/report.py)  46
-rw-r--r--  test/lib/ansible_test/_internal/commands/coverage/xml.py (renamed from test/lib/ansible_test/_internal/coverage/xml.py)  51
-rw-r--r--  test/lib/ansible_test/_internal/commands/env/__init__.py (renamed from test/lib/ansible_test/_internal/env.py)  157
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/__init__.py  950
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py (renamed from test/lib/ansible_test/_internal/cloud/__init__.py)  309
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/acme.py  79
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/aws.py (renamed from test/lib/ansible_test/_internal/cloud/aws.py)  76
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/azure.py (renamed from test/lib/ansible_test/_internal/cloud/azure.py)  102
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py  62
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/cs.py  174
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py  55
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py  94
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py (renamed from test/lib/ansible_test/_internal/cloud/galaxy.py)  173
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py (renamed from test/lib/ansible_test/_internal/cloud/gcp.py)  39
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py (renamed from test/lib/ansible_test/_internal/cloud/hcloud.py)  80
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py  92
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/nios.py  97
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py (renamed from test/lib/ansible_test/_internal/cloud/opennebula.py)  40
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py  114
-rw-r--r--  test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py  56
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py138
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py55
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/coverage.py416
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/filters.py273
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/network.py73
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/posix.py48
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/windows.py77
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/__init__.py (renamed from test/lib/ansible_test/_internal/sanity/__init__.py)564
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py (renamed from test/lib/ansible_test/_internal/sanity/ansible_doc.py)48
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py (renamed from test/lib/ansible_test/_internal/sanity/bin_symlinks.py)42
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/compile.py (renamed from test/lib/ansible_test/_internal/sanity/compile.py)49
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ignores.py (renamed from test/lib/ansible_test/_internal/sanity/ignores.py)19
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/import.py184
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py (renamed from test/lib/ansible_test/_internal/sanity/integration_aliases.py)126
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pep8.py (renamed from test/lib/ansible_test/_internal/sanity/pep8.py)36
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pslint.py (renamed from test/lib/ansible_test/_internal/sanity/pslint.py)36
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pylint.py (renamed from test/lib/ansible_test/_internal/sanity/pylint.py)83
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py (renamed from test/lib/ansible_test/_internal/sanity/sanity_docs.py)22
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/shellcheck.py (renamed from test/lib/ansible_test/_internal/sanity/shellcheck.py)28
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/validate_modules.py (renamed from test/lib/ansible_test/_internal/sanity/validate_modules.py)48
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/yamllint.py (renamed from test/lib/ansible_test/_internal/sanity/yamllint.py)63
-rw-r--r--test/lib/ansible_test/_internal/commands/shell/__init__.py89
-rw-r--r--test/lib/ansible_test/_internal/commands/units/__init__.py334
-rw-r--r--test/lib/ansible_test/_internal/compat/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/compat/packaging.py16
-rw-r--r--test/lib/ansible_test/_internal/compat/yaml.py20
-rw-r--r--test/lib/ansible_test/_internal/completion.py226
-rw-r--r--test/lib/ansible_test/_internal/config.py314
-rw-r--r--test/lib/ansible_test/_internal/connections.py246
l---------[-rw-r--r--]test/lib/ansible_test/_internal/constants.py11
-rw-r--r--test/lib/ansible_test/_internal/containers.py835
-rw-r--r--test/lib/ansible_test/_internal/content_config.py151
-rw-r--r--test/lib/ansible_test/_internal/core_ci.py191
-rw-r--r--test/lib/ansible_test/_internal/coverage/erase.py27
-rw-r--r--test/lib/ansible_test/_internal/coverage_util.py117
-rw-r--r--test/lib/ansible_test/_internal/data.py70
-rw-r--r--test/lib/ansible_test/_internal/delegation.py693
-rw-r--r--test/lib/ansible_test/_internal/diff.py82
-rw-r--r--test/lib/ansible_test/_internal/docker_util.py525
-rw-r--r--test/lib/ansible_test/_internal/encoding.py5
-rw-r--r--test/lib/ansible_test/_internal/executor.py2225
-rw-r--r--test/lib/ansible_test/_internal/git.py79
-rw-r--r--test/lib/ansible_test/_internal/host_configs.py491
-rw-r--r--test/lib/ansible_test/_internal/host_profiles.py761
-rw-r--r--test/lib/ansible_test/_internal/http.py79
-rw-r--r--test/lib/ansible_test/_internal/init.py3
-rw-r--r--test/lib/ansible_test/_internal/integration/__init__.py349
-rw-r--r--test/lib/ansible_test/_internal/inventory.py170
-rw-r--r--test/lib/ansible_test/_internal/io.py25
l---------test/lib/ansible_test/_internal/junit_xml.py1
-rw-r--r--test/lib/ansible_test/_internal/manage_ci.py401
-rw-r--r--test/lib/ansible_test/_internal/metadata.py66
-rw-r--r--test/lib/ansible_test/_internal/payload.py24
-rw-r--r--test/lib/ansible_test/_internal/provider/__init__.py21
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/ansible.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/collection.py9
-rw-r--r--test/lib/ansible_test/_internal/provider/source/__init__.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/git.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/installed.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/unversioned.py6
-rw-r--r--test/lib/ansible_test/_internal/provisioning.py196
-rw-r--r--test/lib/ansible_test/_internal/pypi_proxy.py178
-rw-r--r--test/lib/ansible_test/_internal/python_requirements.py482
-rw-r--r--test/lib/ansible_test/_internal/sanity/import.py218
-rw-r--r--test/lib/ansible_test/_internal/ssh.py258
-rw-r--r--test/lib/ansible_test/_internal/target.py277
-rw-r--r--test/lib/ansible_test/_internal/test.py309
-rw-r--r--test/lib/ansible_test/_internal/thread.py37
-rw-r--r--test/lib/ansible_test/_internal/timeout.py93
-rw-r--r--test/lib/ansible_test/_internal/types.py32
-rw-r--r--test/lib/ansible_test/_internal/units/__init__.py159
-rw-r--r--test/lib/ansible_test/_internal/util.py462
-rw-r--r--test/lib/ansible_test/_internal/util_common.py441
-rw-r--r--test/lib/ansible_test/_internal/venv.py114
-rw-r--r--test/lib/ansible_test/_util/__init__.py3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/changelog.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/changelog.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/empty-init.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/empty-init.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json)1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/line-endings.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/line-endings.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json)1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-assert.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-assert.py)5
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py)2
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py)5
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py)7
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/shebang.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/shebang.py)5
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/symlinks.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/symlinks.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json (renamed from test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py (renamed from test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py (renamed from test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt (renamed from test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 (renamed from test/lib/ansible_test/_data/sanity/pslint/pslint.ps1)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 (renamed from test/lib/ansible_test/_data/sanity/pslint/settings.psd1)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg (renamed from test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg)13
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg (renamed from test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg)22
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg55
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg (renamed from test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg)2
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg (renamed from test/lib/ansible_test/_data/sanity/pylint/config/default.cfg)7
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py (renamed from test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py)10
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py (renamed from test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py)12
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py (renamed from test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py)82
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt (renamed from test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt)0
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/main.py)1
l---------test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate-modules)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py)113
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py)4
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1)1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py)61
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py (renamed from test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py)15
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml (renamed from test/lib/ansible_test/_data/sanity/yamllint/config/default.yml)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml (renamed from test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml (renamed from test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml)0
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py (renamed from test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py)75
-rw-r--r--test/lib/ansible_test/_util/controller/tools/collection_detail.py (renamed from test/lib/ansible_test/_data/collection_detail.py)0
-rw-r--r--test/lib/ansible_test/_util/controller/tools/coverage_stub.ps138
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/sslcheck.py (renamed from test/lib/ansible_test/_data/sslcheck.py)1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py (renamed from test/lib/ansible_test/_data/virtualenvcheck.py)1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/yamlcheck.py (renamed from test/lib/ansible_test/_data/yamlcheck.py)3
-rw-r--r--test/lib/ansible_test/_util/target/__init__.py3
-rwxr-xr-xtest/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py42
-rw-r--r--test/lib/ansible_test/_util/target/common/__init__.py3
-rw-r--r--test/lib/ansible_test/_util/target/common/constants.py62
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/target/injector/python.py (renamed from test/lib/ansible_test/_data/injector/python.py)17
-rw-r--r--test/lib/ansible_test/_util/target/injector/virtualenv.sh (renamed from test/lib/ansible_test/_data/injector/virtualenv.sh)2
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py31
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py107
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py1067
-rw-r--r--test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py37
-rw-r--r--test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py (renamed from test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py)12
-rw-r--r--test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py (renamed from test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py)0
-rw-r--r--test/lib/ansible_test/_util/target/sanity/compile/compile.py47
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/target/sanity/import/importer.py (renamed from test/lib/ansible_test/_data/sanity/import/importer.py)66
-rw-r--r--test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py (renamed from test/lib/ansible_test/_data/sanity/import/yaml_to_json.py)1
-rw-r--r--test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 (renamed from test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1)0
-rw-r--r--test/lib/ansible_test/_util/target/setup/bootstrap.sh323
-rw-r--r--test/lib/ansible_test/_util/target/setup/quiet_pip.py (renamed from test/lib/ansible_test/_data/quiet_pip.py)1
-rw-r--r--test/lib/ansible_test/_util/target/setup/requirements.py252
-rw-r--r--test/lib/ansible_test/config/config.yml41
-rw-r--r--test/sanity/code-smell/ansible-requirements.json2
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/ansible-requirements.py3
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.json9
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.py44
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/configure-remoting-ps1.py3
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/deprecated-config.py3
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.txt7
-rw-r--r--test/sanity/code-smell/docs-build.json1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/docs-build.py1
-rw-r--r--test/sanity/code-smell/docs-build.requirements.txt56
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/no-unwanted-files.py1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/obsolete-files.py1
-rw-r--r--test/sanity/code-smell/package-data.json1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/package-data.py8
-rw-r--r--test/sanity/code-smell/package-data.requirements.txt20
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/release-names.py1
-rw-r--r--test/sanity/code-smell/release-names.requirements.txt2
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/required-and-default-attributes.py1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/rstcheck.py1
-rw-r--r--test/sanity/code-smell/rstcheck.requirements.txt29
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/test-constraints.py15
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/update-bundled.py6
-rw-r--r--test/sanity/code-smell/update-bundled.requirements.txt5
-rw-r--r--test/sanity/ignore.txt182
-rw-r--r--test/support/integration/plugins/inventory/foreman.py2
-rw-r--r--test/support/integration/plugins/module_utils/aws/core.py2
-rw-r--r--test/support/integration/plugins/module_utils/crypto.py2
-rw-r--r--test/support/integration/plugins/module_utils/docker/common.py2
-rw-r--r--test/support/integration/plugins/module_utils/postgres.py2
-rw-r--r--test/support/integration/plugins/modules/docker_swarm.py5
-rw-r--r--test/support/integration/plugins/modules/ec2.py2
-rw-r--r--test/support/integration/plugins/modules/htpasswd.py2
-rw-r--r--test/support/integration/plugins/modules/mongodb_user.py2
-rw-r--r--test/support/integration/plugins/modules/x509_crl.py2
-rw-r--r--test/support/integration/plugins/modules/x509_crl_info.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/action/ios.py2
-rw-r--r--test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/action/vyos.py2
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1518
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1219
-rw-r--r--test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.py155
-rw-r--r--test/support/windows-integration/plugins/modules/win_security_policy.ps1196
-rw-r--r--test/support/windows-integration/plugins/modules/win_security_policy.py126
-rw-r--r--test/units/_vendor/__init__.py0
-rw-r--r--test/units/_vendor/test_vendor.py2
-rw-r--r--test/units/ansible_test/ci/test_shippable.py31
-rw-r--r--test/units/ansible_test/test_docker_util.py131
-rw-r--r--test/units/cli/test_adhoc.py2
-rw-r--r--test/units/cli/test_doc.py5
-rw-r--r--test/units/executor/module_common/test_recursive_finder.py1
-rw-r--r--test/units/executor/test_interpreter_discovery.py5
-rw-r--r--test/units/executor/test_task_executor.py4
-rw-r--r--test/units/galaxy/test_collection.py45
-rw-r--r--test/units/galaxy/test_collection_install.py57
-rw-r--r--test/units/galaxy/test_role_install.py151
-rw-r--r--test/units/galaxy/test_token.py43
-rw-r--r--test/units/mock/loader.py8
-rw-r--r--test/units/module_utils/basic/test_argument_spec.py5
-rw-r--r--test/units/module_utils/basic/test_deprecate_warn.py6
-rw-r--r--test/units/module_utils/basic/test_exit_json.py25
-rw-r--r--test/units/module_utils/basic/test_heuristic_log_sanitize.py3
-rw-r--r--test/units/module_utils/basic/test_imports.py6
-rw-r--r--test/units/module_utils/basic/test_platform_distribution.py11
-rw-r--r--test/units/module_utils/basic/test_run_command.py13
-rw-r--r--test/units/module_utils/common/arg_spec/test_module_validate.py6
-rw-r--r--test/units/module_utils/common/arg_spec/test_validate_valid.py2
-rw-r--r--test/units/module_utils/common/test_dict_transformations.py118
-rw-r--r--test/units/module_utils/common/test_locale.py42
-rw-r--r--test/units/module_utils/common/test_network.py11
-rw-r--r--test/units/module_utils/common/test_removed.py62
-rw-r--r--test/units/module_utils/common/test_sys_info.py32
-rw-r--r--test/units/module_utils/common/text/converters/test_to_str.py11
-rw-r--r--test/units/module_utils/common/validation/test_check_missing_parameters.py35
-rw-r--r--test/units/module_utils/common/validation/test_check_required_by.py98
-rw-r--r--test/units/module_utils/common/validation/test_check_required_if.py79
-rw-r--r--test/units/module_utils/common/validation/test_check_required_one_of.py47
-rw-r--r--test/units/module_utils/common/warnings/test_deprecate.py23
-rw-r--r--test/units/module_utils/common/warnings/test_warn.py2
-rw-r--r--test/units/module_utils/facts/network/test_fc_wwn.py47
-rw-r--r--test/units/module_utils/facts/test_collectors.py40
-rw-r--r--test/units/module_utils/facts/test_date_time.py2
-rw-r--r--test/units/module_utils/test_distro.py5
-rw-r--r--test/units/module_utils/urls/test_channel_binding.py2
-rw-r--r--test/units/module_utils/urls/test_fetch_url.py4
-rw-r--r--test/units/modules/test_apt_key.py32
-rw-r--r--test/units/modules/test_async_wrapper.py5
-rw-r--r--test/units/modules/test_hostname.py35
-rw-r--r--test/units/modules/test_iptables.py10
-rw-r--r--test/units/modules/test_pip.py2
-rw-r--r--test/units/modules/test_service.py70
-rw-r--r--test/units/modules/test_yum.py15
-rw-r--r--test/units/parsing/vault/test_vault.py71
-rw-r--r--test/units/parsing/vault/test_vault_editor.py13
-rw-r--r--test/units/parsing/yaml/test_dumper.py20
-rw-r--r--test/units/playbook/test_helpers.py30
-rw-r--r--test/units/playbook/test_play.py373
-rw-r--r--test/units/playbook/test_play_context.py17
-rw-r--r--test/units/plugins/action/test_gather_facts.py46
-rw-r--r--test/units/plugins/become/test_su.py36
-rw-r--r--test/units/plugins/become/test_sudo.py39
-rw-r--r--test/units/plugins/cache/test_cache.py98
-rw-r--r--test/units/plugins/callback/test_callback.py20
-rw-r--r--test/units/plugins/connection/test_connection.py14
-rw-r--r--test/units/plugins/connection/test_ssh.py21
-rw-r--r--test/units/plugins/filter/test_core.py2
-rw-r--r--test/units/plugins/inventory/test_constructed.py130
-rw-r--r--test/units/plugins/lookup/test_ini.py3
-rw-r--r--test/units/plugins/lookup/test_password.py109
-rw-r--r--test/units/requirements.txt8
-rw-r--r--test/units/utils/test_encrypt.py68
-rw-r--r--test/units/utils/test_unsafe_proxy.py11
-rw-r--r--test/units/utils/test_version.py2
1027 files changed, 27289 insertions, 13940 deletions
diff --git a/test/ansible_test/Makefile b/test/ansible_test/Makefile
index 7fb2a007..2d85e3da 100644
--- a/test/ansible_test/Makefile
+++ b/test/ansible_test/Makefile
@@ -10,4 +10,4 @@ unit:
.PHONY: validate-modules-unit
validate-modules-unit:
- PYTHONPATH=$(abspath ${CURDIR}/../lib/ansible_test/_data/sanity/validate-modules):$(abspath ${CURDIR}/../../lib) pytest validate-modules-unit ${FLAGS}
+ PYTHONPATH=$(abspath ${CURDIR}/../lib/ansible_test/_util/controller/sanity/validate-modules):$(abspath ${CURDIR}/../../lib) pytest validate-modules-unit ${FLAGS}
diff --git a/test/integration/targets/adhoc/aliases b/test/integration/targets/adhoc/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/adhoc/aliases
+++ b/test/integration/targets/adhoc/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/ansiballz_python/aliases b/test/integration/targets/ansiballz_python/aliases
index f8e28c7e..e2c8fd39 100644
--- a/test/integration/targets/ansiballz_python/aliases
+++ b/test/integration/targets/ansiballz_python/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
skip/aix
+context/target
diff --git a/test/integration/targets/ansible-doc/aliases b/test/integration/targets/ansible-doc/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/ansible-doc/aliases
+++ b/test/integration/targets/ansible-doc/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/ansible-doc/fakemodule.output b/test/integration/targets/ansible-doc/fakemodule.output
index 01070fd5..5548ad5e 100644
--- a/test/integration/targets/ansible-doc/fakemodule.output
+++ b/test/integration/targets/ansible-doc/fakemodule.output
@@ -2,6 +2,8 @@
this is a fake module
+ADDED IN: version 1.0.0 of testns.testcol
+
OPTIONS (= is mandatory):
- _notreal
@@ -12,5 +14,3 @@ OPTIONS (= is mandatory):
AUTHOR: me
SHORT_DESCIPTION: fake module
-
-VERSION_ADDED_COLLECTION: testns.testcol
diff --git a/test/integration/targets/ansible-doc/randommodule-text.output b/test/integration/targets/ansible-doc/randommodule-text.output
new file mode 100644
index 00000000..24327a59
--- /dev/null
+++ b/test/integration/targets/ansible-doc/randommodule-text.output
@@ -0,0 +1,105 @@
+> TESTNS.TESTCOL.RANDOMMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py)
+
+ A random module.
+
+ADDED IN: version 1.0.0 of testns.testcol
+
+DEPRECATED:
+
+ Reason: Test deprecation
+ Will be removed in: Ansible 3.0.0
+ Alternatives: Use some other module
+
+
+OPTIONS (= is mandatory):
+
+- sub
+ Suboptions.
+ [Default: (null)]
+ set_via:
+ env:
+ - deprecated:
+ alternative: none
+ removed_in: 2.0.0
+ version: 2.0.0
+ why: Test deprecation
+ name: TEST_ENV
+
+ type: dict
+
+ OPTIONS:
+
+ - subtest2
+ Another suboption.
+ [Default: (null)]
+ type: float
+ added in: version 1.1.0
+
+
+
+ SUBOPTIONS:
+
+ - subtest
+ A suboption.
+ [Default: (null)]
+ type: int
+ added in: version 1.1.0 of testns.testcol
+
+
+- test
+ Some text.
+ [Default: (null)]
+ type: str
+ added in: version 1.2.0 of testns.testcol
+
+
+- testcol2option
+ An option taken from testcol2
+ [Default: (null)]
+ type: str
+ added in: version 1.0.0 of testns.testcol2
+
+
+- testcol2option2
+ Another option taken from testcol2
+ [Default: (null)]
+ type: str
+
+
+AUTHOR: Ansible Core Team
+
+EXAMPLES:
+
+
+
+
+RETURN VALUES:
+- a_first
+ A first result.
+
+ returned: success
+ type: str
+
+- m_middle
+ This should be in the middle.
+ Has some more data
+
+ returned: success and 1st of month
+ type: dict
+
+ CONTAINS:
+
+ - suboption
+ A suboption.
+ (Choices: ARF, BARN, c_without_capital_first_letter)
+ type: str
+ added in: version 1.4.0 of testns.testcol
+
+
+- z_last
+ A last result.
+
+ returned: success
+ type: str
+ added in: version 1.3.0 of testns.testcol
+
diff --git a/test/integration/targets/ansible-doc/runme.sh b/test/integration/targets/ansible-doc/runme.sh
index 4f40d7c3..549a341b 100755
--- a/test/integration/targets/ansible-doc/runme.sh
+++ b/test/integration/targets/ansible-doc/runme.sh
@@ -19,6 +19,11 @@ current_out="$(ansible-doc --playbook-dir ./ testns.testcol.fakemodule | sed '1
expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.FAKEMODULE\).*(.*)$/\1/' fakemodule.output)"
test "$current_out" == "$expected_out"
+# we use sed to strip the module path from the first line
+current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/')"
+expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' randommodule-text.output)"
+test "$current_out" == "$expected_out"
+
# ensure we do work with valid collection name for list
ansible-doc --list testns.testcol --playbook-dir ./ 2>&1 | grep -v "Invalid collection pattern"
@@ -94,3 +99,6 @@ test "$current_out" == "$expected_out"
current_out="$(ansible-doc --json --playbook-dir ./ -t vars testns.testcol.noop_vars_plugin | sed 's/ *$//' | sed 's/ *"filename": "[^"]*",$//')"
expected_out="$(sed 's/ *"filename": "[^"]*",$//' noop_vars_plugin.output)"
test "$current_out" == "$expected_out"
+
+# just ensure it runs
+ANSIBLE_LIBRARY='./nolibrary' ansible-doc --metadata-dump --playbook-dir /dev/null
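An aside on the comparison pattern in the hunk above: the banner line of ansible-doc's text output embeds the absolute path of the plugin on disk, which varies per checkout, so both sides are normalized before comparing. A minimal standalone sketch of the same idea, using the module and fixture names from this hunk:

    # strip the checkout-specific path from the banner line on both sides,
    # then compare the normalized text
    ansible-doc --playbook-dir ./ testns.testcol.randommodule \
        | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' > current.txt
    sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' randommodule-text.output > expected.txt
    diff -u expected.txt current.txt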
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/aliases b/test/integration/targets/ansible-galaxy-collection-scm/aliases
index 9c34b360..498fedd5 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/aliases
+++ b/test/integration/targets/ansible-galaxy-collection-scm/aliases
@@ -1,3 +1,2 @@
shippable/posix/group4
-skip/aix
-skip/python2.6 # ansible-galaxy uses tarfile with features not available until 2.7
+context/controller
diff --git a/test/integration/targets/ansible-galaxy-collection/aliases b/test/integration/targets/ansible-galaxy-collection/aliases
index e501bce5..6c57208a 100644
--- a/test/integration/targets/ansible-galaxy-collection/aliases
+++ b/test/integration/targets/ansible-galaxy-collection/aliases
@@ -1,3 +1,4 @@
shippable/galaxy/group1
shippable/galaxy/smoketest
cloud/galaxy
+context/controller
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
index 7d66be2f..ad10bff8 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
@@ -214,7 +214,7 @@
state: absent
- assert:
- that: expected_error in error
+ that: error == expected_error
vars:
reset_color: '\x1b\[0m'
color: '\x1b\[[0-9];[0-9]{2}m'
@@ -260,7 +260,7 @@
- debug: msg="Expected - {{ expected_error }}"
- assert:
- that: expected_error in error
+ that: error == expected_error
always:
- name: clean up collection skeleton and artifact
file:
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml b/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
index b49f1eec..893ea803 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/upgrade.yml
@@ -124,9 +124,9 @@
that:
- '"parent_dep.parent_collection:1.1.0 was installed successfully" in result.stdout_lines'
- (metadata.results[0].content | b64decode | from_json).collection_info.version == '1.1.0'
- - "\"Skipping 'child_dep.child_collection:0.9.9' as it is already installed\" in result.stdout_lines"
+ - "\"'child_dep.child_collection:0.9.9' is already installed, skipping.\" in result.stdout_lines"
- (metadata.results[1].content | b64decode | from_json).collection_info.version == '0.9.9'
- - "\"Skipping 'child_dep.child_dep2:1.2.2' as it is already installed\" in result.stdout_lines"
+ - "\"'child_dep.child_dep2:1.2.2' is already installed, skipping.\" in result.stdout_lines"
- (metadata.results[2].content | b64decode | from_json).collection_info.version == '1.2.2'
##### Updating collections with --upgrade
@@ -187,7 +187,7 @@
register: result
- assert:
- that: "\"Skipping 'namespace1.name1:1.1.0-beta.1' as it is already installed\" in result.stdout"
+ that: "\"'namespace1.name1:1.1.0-beta.1' is already installed, skipping.\" in result.stdout"
# With deps
diff --git a/test/integration/targets/ansible-galaxy-role/aliases b/test/integration/targets/ansible-galaxy-role/aliases
index 62548acd..498fedd5 100644
--- a/test/integration/targets/ansible-galaxy-role/aliases
+++ b/test/integration/targets/ansible-galaxy-role/aliases
@@ -1,2 +1,2 @@
shippable/posix/group4
-skip/python2.6 # build uses tarfile with features not available until 2.7
+context/controller
diff --git a/test/integration/targets/ansible-galaxy/aliases b/test/integration/targets/ansible-galaxy/aliases
index 48ed7d60..275bdbfd 100644
--- a/test/integration/targets/ansible-galaxy/aliases
+++ b/test/integration/targets/ansible-galaxy/aliases
@@ -1,4 +1,3 @@
destructive
shippable/posix/group4
-skip/python2.6 # build uses tarfile with features not available until 2.7
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-inventory/aliases b/test/integration/targets/ansible-inventory/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/ansible-inventory/aliases
+++ b/test/integration/targets/ansible-inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/ansible-inventory/files/invalid_sample.yml b/test/integration/targets/ansible-inventory/files/invalid_sample.yml
new file mode 100644
index 00000000..f7bbe0cf
--- /dev/null
+++ b/test/integration/targets/ansible-inventory/files/invalid_sample.yml
@@ -0,0 +1,7 @@
+all:
+ children:
+ somegroup:
+ hosts:
+ something:
+ 7.2: bar
+ ungrouped: {}
diff --git a/test/integration/targets/ansible-inventory/files/valid_sample.yml b/test/integration/targets/ansible-inventory/files/valid_sample.yml
new file mode 100644
index 00000000..477f82f2
--- /dev/null
+++ b/test/integration/targets/ansible-inventory/files/valid_sample.yml
@@ -0,0 +1,7 @@
+all:
+ children:
+ somegroup:
+ hosts:
+ something:
+ foo: bar
+ ungrouped: {}
\ No newline at end of file
diff --git a/test/integration/targets/ansible-inventory/tasks/main.yml b/test/integration/targets/ansible-inventory/tasks/main.yml
index 0ab09c07..685cad88 100644
--- a/test/integration/targets/ansible-inventory/tasks/main.yml
+++ b/test/integration/targets/ansible-inventory/tasks/main.yml
@@ -1,3 +1,111 @@
+- name: "No command supplied"
+ command: ansible-inventory
+ ignore_errors: true
+ register: result
+
+- assert:
+ that:
+ - result is failed
+ - '"ERROR! No action selected, at least one of --host, --graph or --list needs to be specified." in result.stderr'
+
+- name: "test option: --list --export"
+ command: ansible-inventory --list --export
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --list --yaml --export"
+ command: ansible-inventory --list --yaml --export
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --list --output"
+ command: ansible-inventory --list --output junk.txt
+ register: result
+
+- name: stat output file
+ stat:
+ path: junk.txt
+ register: st
+
+- assert:
+ that:
+ - result is succeeded
+ - st.stat.exists
+
+- name: "test option: --graph"
+ command: ansible-inventory --graph
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --graph --vars"
+ command: ansible-inventory --graph --vars
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --graph with bad pattern"
+ command: ansible-inventory --graph invalid
+ ignore_errors: true
+ register: result
+
+- assert:
+ that:
+ - result is failed
+ - '"ERROR! Pattern must be valid group name when using --graph" in result.stderr'
+
+- name: "test option: --host localhost"
+ command: ansible-inventory --host localhost
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --host with invalid host"
+ command: ansible-inventory --host invalid
+ ignore_errors: true
+ register: result
+
+- assert:
+ that:
+ - result is failed
+ - '"ERROR! Could not match supplied host pattern, ignoring: invalid" in result.stderr'
+
+- name: Install toml package
+ pip:
+ name:
+ - toml
+ state: present
+
+- name: "test option: --toml with valid group name"
+ command: ansible-inventory --list --toml -i {{ role_path }}/files/valid_sample.yml
+ register: result
+
+- assert:
+ that:
+ - result is succeeded
+
+- name: "test option: --toml with invalid group name"
+ command: ansible-inventory --list --toml -i {{ role_path }}/files/invalid_sample.yml
+ ignore_errors: true
+ register: result
+
+- assert:
+ that:
+ - result is failed
+ - '"ERROR! The source inventory contains a non-string key" in result.stderr'
+
- name: "test json output with unicode characters"
command: ansible-inventory --list -i {{ role_path }}/files/unicode.yml
register: result
@@ -47,12 +155,6 @@
state: absent
- block:
- - name: Install toml package
- pip:
- name:
- - toml
- state: present
-
- name: "test toml output with unicode characters"
command: ansible-inventory --list --toml -i {{ role_path }}/files/unicode.yml
register: result
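For reference, a sketch of the CLI surface these new tasks exercise, run directly from a shell; the inventory argument is the sample file added above:

    ansible-inventory --list --export            # JSON, with --export normalization
    ansible-inventory --list --yaml --export     # the same data rendered as YAML
    ansible-inventory --list --output junk.txt   # write the JSON to a file
    ansible-inventory --graph                    # print the group tree
    ansible-inventory --graph --vars             # group tree including variables
    ansible-inventory --host localhost           # variables for a single host
    # TOML output requires the 'toml' Python package to be installed first
    ansible-inventory --list --toml -i files/valid_sample.yml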
diff --git a/test/integration/targets/vault/aliases b/test/integration/targets/ansible-pull/aliases
index 757c9966..8278ec8b 100644
--- a/test/integration/targets/vault/aliases
+++ b/test/integration/targets/ansible-pull/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/pull/cleanup.yml b/test/integration/targets/ansible-pull/cleanup.yml
index 68686964..68686964 100644
--- a/test/integration/targets/pull/cleanup.yml
+++ b/test/integration/targets/ansible-pull/cleanup.yml
diff --git a/test/integration/targets/pull/pull-integration-test/ansible.cfg b/test/integration/targets/ansible-pull/pull-integration-test/ansible.cfg
index f8fc6cdb..f8fc6cdb 100644
--- a/test/integration/targets/pull/pull-integration-test/ansible.cfg
+++ b/test/integration/targets/ansible-pull/pull-integration-test/ansible.cfg
diff --git a/test/integration/targets/pull/pull-integration-test/inventory b/test/integration/targets/ansible-pull/pull-integration-test/inventory
index 72644cef..72644cef 100644
--- a/test/integration/targets/pull/pull-integration-test/inventory
+++ b/test/integration/targets/ansible-pull/pull-integration-test/inventory
diff --git a/test/integration/targets/pull/pull-integration-test/local.yml b/test/integration/targets/ansible-pull/pull-integration-test/local.yml
index d358ee86..d358ee86 100644
--- a/test/integration/targets/pull/pull-integration-test/local.yml
+++ b/test/integration/targets/ansible-pull/pull-integration-test/local.yml
diff --git a/test/integration/targets/pull/pull-integration-test/multi_play_1.yml b/test/integration/targets/ansible-pull/pull-integration-test/multi_play_1.yml
index 0ec0da6b..0ec0da6b 100644
--- a/test/integration/targets/pull/pull-integration-test/multi_play_1.yml
+++ b/test/integration/targets/ansible-pull/pull-integration-test/multi_play_1.yml
diff --git a/test/integration/targets/pull/pull-integration-test/multi_play_2.yml b/test/integration/targets/ansible-pull/pull-integration-test/multi_play_2.yml
index 1fe5a584..1fe5a584 100644
--- a/test/integration/targets/pull/pull-integration-test/multi_play_2.yml
+++ b/test/integration/targets/ansible-pull/pull-integration-test/multi_play_2.yml
diff --git a/test/integration/targets/pull/runme.sh b/test/integration/targets/ansible-pull/runme.sh
index 347971a4..347971a4 100755
--- a/test/integration/targets/pull/runme.sh
+++ b/test/integration/targets/ansible-pull/runme.sh
diff --git a/test/integration/targets/pull/setup.yml b/test/integration/targets/ansible-pull/setup.yml
index ebd5a1c0..ebd5a1c0 100644
--- a/test/integration/targets/pull/setup.yml
+++ b/test/integration/targets/ansible-pull/setup.yml
diff --git a/test/integration/targets/ansible-runner/aliases b/test/integration/targets/ansible-runner/aliases
index 42d2022b..17ae2d5e 100644
--- a/test/integration/targets/ansible-runner/aliases
+++ b/test/integration/targets/ansible-runner/aliases
@@ -1,6 +1,5 @@
shippable/posix/group3
-skip/python2 # ansible-runner is for controller and deprecated python2 support
-skip/aix
+context/controller
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/ansible-test-cloud-acme/aliases b/test/integration/targets/ansible-test-cloud-acme/aliases
new file mode 100644
index 00000000..db3ab680
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-acme/aliases
@@ -0,0 +1,3 @@
+cloud/acme
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-acme/tasks/main.yml b/test/integration/targets/ansible-test-cloud-acme/tasks/main.yml
new file mode 100644
index 00000000..42ebc284
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-acme/tasks/main.yml
@@ -0,0 +1,7 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://{{ acme_host }}:5000/
+ - https://{{ acme_host }}:14000/dir
diff --git a/test/integration/targets/ansible-test-cloud-cs/aliases b/test/integration/targets/ansible-test-cloud-cs/aliases
new file mode 100644
index 00000000..cf43ff1e
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-cs/aliases
@@ -0,0 +1,3 @@
+cloud/cs
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-cs/tasks/main.yml b/test/integration/targets/ansible-test-cloud-cs/tasks/main.yml
new file mode 100644
index 00000000..3b219c7e
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-cs/tasks/main.yml
@@ -0,0 +1,8 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ register: this
+ failed_when: "this.status != 401" # authentication is required, but not provided (requests must be signed)
+ with_items:
+ - "{{ ansible_env.CLOUDSTACK_ENDPOINT }}"
diff --git a/test/integration/targets/ansible-test-cloud-foreman/aliases b/test/integration/targets/ansible-test-cloud-foreman/aliases
new file mode 100644
index 00000000..a4bdcea6
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-foreman/aliases
@@ -0,0 +1,3 @@
+cloud/foreman
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml b/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml
new file mode 100644
index 00000000..4170d83e
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-foreman/tasks/main.yml
@@ -0,0 +1,6 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://{{ ansible_env.FOREMAN_HOST }}:{{ ansible_env.FOREMAN_PORT }}/ping
diff --git a/test/integration/targets/ansible-test-cloud-galaxy/aliases b/test/integration/targets/ansible-test-cloud-galaxy/aliases
new file mode 100644
index 00000000..6c57208a
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-galaxy/aliases
@@ -0,0 +1,4 @@
+shippable/galaxy/group1
+shippable/galaxy/smoketest
+cloud/galaxy
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-galaxy/tasks/main.yml b/test/integration/targets/ansible-test-cloud-galaxy/tasks/main.yml
new file mode 100644
index 00000000..8ae15ea5
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-galaxy/tasks/main.yml
@@ -0,0 +1,25 @@
+# The pulp container has a long start up time.
+# The first task to interact with pulp needs to wait until it responds appropriately.
+- name: Wait for Pulp API
+ uri:
+ url: '{{ pulp_api }}/pulp/api/v3/distributions/ansible/ansible/'
+ user: '{{ pulp_user }}'
+ password: '{{ pulp_password }}'
+ force_basic_auth: true
+ register: this
+ until: this is successful
+ delay: 1
+ retries: 60
+
+- name: Verify Galaxy NG server
+ uri:
+ url: "{{ galaxy_ng_server }}"
+ user: '{{ pulp_user }}'
+ password: '{{ pulp_password }}'
+ force_basic_auth: true
+
+- name: Verify Pulp server
+ uri:
+ url: "{{ pulp_server }}"
+ status_code:
+ - 404 # endpoint responds without authentication
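The until/retries/delay combination above polls the API once per second for up to a minute before the first real interaction with pulp. A rough shell equivalent of that wait loop, assuming PULP_API, PULP_USER and PULP_PASSWORD hold the same values as the task variables:

    for _ in $(seq 1 60); do
        # -f makes curl fail on HTTP error responses, so the loop keeps polling
        if curl -fsu "${PULP_USER}:${PULP_PASSWORD}" \
                "${PULP_API}/pulp/api/v3/distributions/ansible/ansible/" > /dev/null; then
            break
        fi
        sleep 1
    done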
diff --git a/test/integration/targets/ansible-test-cloud-httptester-windows/aliases b/test/integration/targets/ansible-test-cloud-httptester-windows/aliases
new file mode 100644
index 00000000..f45a1623
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-httptester-windows/aliases
@@ -0,0 +1,4 @@
+cloud/httptester
+windows
+shippable/windows/group1
+context/target
diff --git a/test/integration/targets/ansible-test-cloud-httptester-windows/tasks/main.yml b/test/integration/targets/ansible-test-cloud-httptester-windows/tasks/main.yml
new file mode 100644
index 00000000..a78b28ca
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-httptester-windows/tasks/main.yml
@@ -0,0 +1,15 @@
+- name: Verify HTTPTESTER environment variable
+ assert:
+ that:
+ - "lookup('env', 'HTTPTESTER') == '1'"
+
+- name: Verify endpoints respond
+ ansible.windows.win_uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://ansible.http.tests/
+ - https://ansible.http.tests/
+ - https://sni1.ansible.http.tests/
+ - https://fail.ansible.http.tests/
+ - https://self-signed.ansible.http.tests/
diff --git a/test/integration/targets/ansible-test-cloud-httptester/aliases b/test/integration/targets/ansible-test-cloud-httptester/aliases
new file mode 100644
index 00000000..eb5f7080
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-httptester/aliases
@@ -0,0 +1,3 @@
+needs/httptester # using legacy alias for testing purposes
+shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-httptester/tasks/main.yml b/test/integration/targets/ansible-test-cloud-httptester/tasks/main.yml
new file mode 100644
index 00000000..16b632f3
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-httptester/tasks/main.yml
@@ -0,0 +1,15 @@
+- name: Verify HTTPTESTER environment variable
+ assert:
+ that:
+ - "lookup('env', 'HTTPTESTER') == '1'"
+
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://ansible.http.tests/
+ - https://ansible.http.tests/
+ - https://sni1.ansible.http.tests/
+ - https://fail.ansible.http.tests/
+ - https://self-signed.ansible.http.tests/
diff --git a/test/integration/targets/ansible-test-cloud-nios/aliases b/test/integration/targets/ansible-test-cloud-nios/aliases
new file mode 100644
index 00000000..136344a9
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-nios/aliases
@@ -0,0 +1,3 @@
+cloud/nios
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-nios/tasks/main.yml b/test/integration/targets/ansible-test-cloud-nios/tasks/main.yml
new file mode 100644
index 00000000..b4d447d7
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-nios/tasks/main.yml
@@ -0,0 +1,10 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ url_username: "{{ nios_provider.username }}"
+ url_password: "{{ nios_provider.password }}"
+ validate_certs: no
+ register: this
+ failed_when: "this.status != 404" # authentication succeeded, but the requested path was not found
+ with_items:
+ - https://{{ nios_provider.host }}/
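As with the CloudStack check earlier, the failed_when override inverts the usual success criterion: here a 404 means authentication succeeded and only the requested path is unknown. A hedged curl sketch of the same probe, with NIOS_HOST, NIOS_USERNAME and NIOS_PASSWORD standing in for the task variables:

    # -k mirrors validate_certs: no; -w prints only the HTTP status code
    status="$(curl -ksu "${NIOS_USERNAME}:${NIOS_PASSWORD}" \
        -o /dev/null -w '%{http_code}' "https://${NIOS_HOST}/")"
    test "${status}" -eq 404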
diff --git a/test/integration/targets/ansible-test-cloud-openshift/aliases b/test/integration/targets/ansible-test-cloud-openshift/aliases
new file mode 100644
index 00000000..6e32db7b
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-openshift/aliases
@@ -0,0 +1,4 @@
+cloud/openshift
+shippable/generic/group1
+disabled # disabled due to requirements conflict: botocore 1.20.6 has requirement urllib3<1.27,>=1.25.4, but you have urllib3 1.24.3.
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml b/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml
new file mode 100644
index 00000000..c3b51904
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-openshift/tasks/main.yml
@@ -0,0 +1,6 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - https://openshift-origin:8443/
diff --git a/test/integration/targets/ansible-test-cloud-vcenter/aliases b/test/integration/targets/ansible-test-cloud-vcenter/aliases
new file mode 100644
index 00000000..0cd8ad20
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-vcenter/aliases
@@ -0,0 +1,3 @@
+cloud/vcenter
+shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml b/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml
new file mode 100644
index 00000000..49e5c16a
--- /dev/null
+++ b/test/integration/targets/ansible-test-cloud-vcenter/tasks/main.yml
@@ -0,0 +1,6 @@
+- name: Verify endpoints respond
+ uri:
+ url: "{{ item }}"
+ validate_certs: no
+ with_items:
+ - http://{{ vcenter_hostname }}:5000/ # control endpoint for the simulator
diff --git a/test/integration/targets/ansible-test-docker/aliases b/test/integration/targets/ansible-test-docker/aliases
index d1284cf7..a862ab8b 100644
--- a/test/integration/targets/ansible-test-docker/aliases
+++ b/test/integration/targets/ansible-test-docker/aliases
@@ -1 +1,2 @@
shippable/generic/group1 # Runs in the default test container so access to tools like pwsh
+context/controller
diff --git a/test/integration/targets/ansible-test-docker/collection-tests/docker.sh b/test/integration/targets/ansible-test-docker/collection-tests/docker.sh
index e0e34290..69372245 100755
--- a/test/integration/targets/ansible-test-docker/collection-tests/docker.sh
+++ b/test/integration/targets/ansible-test-docker/collection-tests/docker.sh
@@ -7,7 +7,7 @@ cd "${WORK_DIR}/ansible_collections/ns/col"
# common args for all tests
# because we are running in shippable/generic/, we are already in the default docker container
-common=(--python "${ANSIBLE_TEST_PYTHON_VERSION}" --color --truncate 0 "${@}")
+common=(--python "${ANSIBLE_TEST_PYTHON_VERSION}" --venv --venv-system-site-packages --color --truncate 0 "${@}")
# prime the venv to work around an issue with PyYAML detection in ansible-test
ansible-test sanity "${common[@]}" --test ignores
diff --git a/test/integration/targets/ansible-test/aliases b/test/integration/targets/ansible-test/aliases
index f8e28c7e..13e01f0c 100644
--- a/test/integration/targets/ansible-test/aliases
+++ b/test/integration/targets/ansible-test/aliases
@@ -1,2 +1,2 @@
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases b/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases
new file mode 100644
index 00000000..1af1cf90
--- /dev/null
+++ b/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases
@@ -0,0 +1 @@
+context/controller
diff --git a/test/integration/targets/ansible-test/collection-tests/coverage.sh b/test/integration/targets/ansible-test/collection-tests/coverage.sh
index 033a9836..221ae66a 100755
--- a/test/integration/targets/ansible-test/collection-tests/coverage.sh
+++ b/test/integration/targets/ansible-test/collection-tests/coverage.sh
@@ -7,8 +7,8 @@ cd "${WORK_DIR}/ansible_collections/ns/col"
# rename the sanity ignore file to match the current ansible version and update import ignores with the python version
ansible_version="$(python -c 'import ansible.release; print(".".join(ansible.release.__version__.split(".")[:2]))')"
-if [ "${ANSIBLE_TEST_PYTHON_VERSION}" == "2.6" ]; then
- # Non-module/module_utils plugins are not checked on this remote-only Python versions
+if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^3\.[567] ]]; then
+ # Non-module/module_utils plugins are not checked on these remote-only Python versions
sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" | grep -v 'plugins/[^m].* import' > "tests/sanity/ignore-${ansible_version}.txt"
else
sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" > "tests/sanity/ignore-${ansible_version}.txt"
diff --git a/test/integration/targets/ansible-test/collection-tests/venv.sh b/test/integration/targets/ansible-test/collection-tests/venv.sh
index ba0d2628..42dbfde4 100755
--- a/test/integration/targets/ansible-test/collection-tests/venv.sh
+++ b/test/integration/targets/ansible-test/collection-tests/venv.sh
@@ -7,8 +7,8 @@ cd "${WORK_DIR}/ansible_collections/ns/col"
# rename the sanity ignore file to match the current ansible version and update import ignores with the python version
ansible_version="$(python -c 'import ansible.release; print(".".join(ansible.release.__version__.split(".")[:2]))')"
-if [ "${ANSIBLE_TEST_PYTHON_VERSION}" == "2.6" ]; then
- # Non-module/module_utils plugins are not checked on this remote-only Python versions
+if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^3\.[567] ]]; then
+ # Non-module/module_utils plugins are not checked on these remote-only Python versions
sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" | grep -v 'plugins/[^m].* import' > "tests/sanity/ignore-${ansible_version}.txt"
else
sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" > "tests/sanity/ignore-${ansible_version}.txt"
diff --git a/test/integration/targets/pull/aliases b/test/integration/targets/ansible-vault/aliases
index 757c9966..8278ec8b 100644
--- a/test/integration/targets/pull/aliases
+++ b/test/integration/targets/ansible-vault/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/vault/empty-password b/test/integration/targets/ansible-vault/empty-password
index e69de29b..e69de29b 100644
--- a/test/integration/targets/vault/empty-password
+++ b/test/integration/targets/ansible-vault/empty-password
diff --git a/test/integration/targets/vault/encrypted-vault-password b/test/integration/targets/ansible-vault/encrypted-vault-password
index 7aa4e4be..7aa4e4be 100644
--- a/test/integration/targets/vault/encrypted-vault-password
+++ b/test/integration/targets/ansible-vault/encrypted-vault-password
diff --git a/test/integration/targets/vault/encrypted_file_encrypted_var_password b/test/integration/targets/ansible-vault/encrypted_file_encrypted_var_password
index 57bc06e3..57bc06e3 100644
--- a/test/integration/targets/vault/encrypted_file_encrypted_var_password
+++ b/test/integration/targets/ansible-vault/encrypted_file_encrypted_var_password
diff --git a/test/integration/targets/vault/example1_password b/test/integration/targets/ansible-vault/example1_password
index e723c8f9..e723c8f9 100644
--- a/test/integration/targets/vault/example1_password
+++ b/test/integration/targets/ansible-vault/example1_password
diff --git a/test/integration/targets/vault/example2_password b/test/integration/targets/ansible-vault/example2_password
index 7b010f87..7b010f87 100644
--- a/test/integration/targets/vault/example2_password
+++ b/test/integration/targets/ansible-vault/example2_password
diff --git a/test/integration/targets/vault/example3_password b/test/integration/targets/ansible-vault/example3_password
index f5bc5a8c..f5bc5a8c 100644
--- a/test/integration/targets/vault/example3_password
+++ b/test/integration/targets/ansible-vault/example3_password
diff --git a/test/integration/targets/vault/faux-editor.py b/test/integration/targets/ansible-vault/faux-editor.py
index 68f62590..b67c7475 100755
--- a/test/integration/targets/vault/faux-editor.py
+++ b/test/integration/targets/ansible-vault/faux-editor.py
@@ -14,7 +14,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
-# https://docs.ansible.com/playbooks_vault.html for more details.
+# https://docs.ansible.com/ansible/latest/user_guide/vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/integration/targets/vault/files/test_assemble/nonsecret.txt b/test/integration/targets/ansible-vault/files/test_assemble/nonsecret.txt
index 320b6b4c..320b6b4c 100644
--- a/test/integration/targets/vault/files/test_assemble/nonsecret.txt
+++ b/test/integration/targets/ansible-vault/files/test_assemble/nonsecret.txt
diff --git a/test/integration/targets/vault/files/test_assemble/secret.vault b/test/integration/targets/ansible-vault/files/test_assemble/secret.vault
index fd278564..fd278564 100644
--- a/test/integration/targets/vault/files/test_assemble/secret.vault
+++ b/test/integration/targets/ansible-vault/files/test_assemble/secret.vault
diff --git a/test/integration/targets/vault/format_1_1_AES256.yml b/test/integration/targets/ansible-vault/format_1_1_AES256.yml
index 5616605e..5616605e 100644
--- a/test/integration/targets/vault/format_1_1_AES256.yml
+++ b/test/integration/targets/ansible-vault/format_1_1_AES256.yml
diff --git a/test/integration/targets/vault/format_1_2_AES256.yml b/test/integration/targets/ansible-vault/format_1_2_AES256.yml
index 1e3795fb..1e3795fb 100644
--- a/test/integration/targets/vault/format_1_2_AES256.yml
+++ b/test/integration/targets/ansible-vault/format_1_2_AES256.yml
diff --git a/test/integration/targets/vault/host_vars/myhost.yml b/test/integration/targets/ansible-vault/host_vars/myhost.yml
index 1434ec15..1434ec15 100644
--- a/test/integration/targets/vault/host_vars/myhost.yml
+++ b/test/integration/targets/ansible-vault/host_vars/myhost.yml
diff --git a/test/integration/targets/vault/host_vars/testhost.yml b/test/integration/targets/ansible-vault/host_vars/testhost.yml
index b3e569ad..b3e569ad 100644
--- a/test/integration/targets/vault/host_vars/testhost.yml
+++ b/test/integration/targets/ansible-vault/host_vars/testhost.yml
diff --git a/test/integration/targets/vault/invalid_format/README.md b/test/integration/targets/ansible-vault/invalid_format/README.md
index cbbc07a9..cbbc07a9 100644
--- a/test/integration/targets/vault/invalid_format/README.md
+++ b/test/integration/targets/ansible-vault/invalid_format/README.md
diff --git a/test/integration/targets/vault/invalid_format/broken-group-vars-tasks.yml b/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml
index 71dbacc0..71dbacc0 100644
--- a/test/integration/targets/vault/invalid_format/broken-group-vars-tasks.yml
+++ b/test/integration/targets/ansible-vault/invalid_format/broken-group-vars-tasks.yml
diff --git a/test/integration/targets/vault/invalid_format/broken-host-vars-tasks.yml b/test/integration/targets/ansible-vault/invalid_format/broken-host-vars-tasks.yml
index 9afbd58e..9afbd58e 100644
--- a/test/integration/targets/vault/invalid_format/broken-host-vars-tasks.yml
+++ b/test/integration/targets/ansible-vault/invalid_format/broken-host-vars-tasks.yml
diff --git a/test/integration/targets/vault/invalid_format/group_vars/broken-group-vars.yml b/test/integration/targets/ansible-vault/invalid_format/group_vars/broken-group-vars.yml
index 5f477431..5f477431 100644
--- a/test/integration/targets/vault/invalid_format/group_vars/broken-group-vars.yml
+++ b/test/integration/targets/ansible-vault/invalid_format/group_vars/broken-group-vars.yml
diff --git a/test/integration/targets/vault/invalid_format/host_vars/broken-host-vars.example.com/vars b/test/integration/targets/ansible-vault/invalid_format/host_vars/broken-host-vars.example.com/vars
index 2d309eb5..2d309eb5 100644
--- a/test/integration/targets/vault/invalid_format/host_vars/broken-host-vars.example.com/vars
+++ b/test/integration/targets/ansible-vault/invalid_format/host_vars/broken-host-vars.example.com/vars
diff --git a/test/integration/targets/vault/invalid_format/inventory b/test/integration/targets/ansible-vault/invalid_format/inventory
index e6e259a4..e6e259a4 100644
--- a/test/integration/targets/vault/invalid_format/inventory
+++ b/test/integration/targets/ansible-vault/invalid_format/inventory
diff --git a/test/integration/targets/vault/invalid_format/original-broken-host-vars b/test/integration/targets/ansible-vault/invalid_format/original-broken-host-vars
index 6be696b5..6be696b5 100644
--- a/test/integration/targets/vault/invalid_format/original-broken-host-vars
+++ b/test/integration/targets/ansible-vault/invalid_format/original-broken-host-vars
diff --git a/test/integration/targets/vault/invalid_format/original-group-vars.yml b/test/integration/targets/ansible-vault/invalid_format/original-group-vars.yml
index 817557be..817557be 100644
--- a/test/integration/targets/vault/invalid_format/original-group-vars.yml
+++ b/test/integration/targets/ansible-vault/invalid_format/original-group-vars.yml
diff --git a/test/integration/targets/vault/invalid_format/some-vars b/test/integration/targets/ansible-vault/invalid_format/some-vars
index e841a262..e841a262 100644
--- a/test/integration/targets/vault/invalid_format/some-vars
+++ b/test/integration/targets/ansible-vault/invalid_format/some-vars
diff --git a/test/integration/targets/vault/invalid_format/vault-secret b/test/integration/targets/ansible-vault/invalid_format/vault-secret
index 4406e35c..4406e35c 100644
--- a/test/integration/targets/vault/invalid_format/vault-secret
+++ b/test/integration/targets/ansible-vault/invalid_format/vault-secret
diff --git a/test/integration/targets/vault/inventory.toml b/test/integration/targets/ansible-vault/inventory.toml
index d97ed398..d97ed398 100644
--- a/test/integration/targets/vault/inventory.toml
+++ b/test/integration/targets/ansible-vault/inventory.toml
diff --git a/test/integration/targets/vault/password-script.py b/test/integration/targets/ansible-vault/password-script.py
index c47fdfb9..1b7f02be 100755
--- a/test/integration/targets/vault/password-script.py
+++ b/test/integration/targets/ansible-vault/password-script.py
@@ -14,7 +14,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
-# https://docs.ansible.com/playbooks_vault.html for more details.
+# https://docs.ansible.com/ansible/latest/user_guide/vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/integration/targets/vault/roles/test_vault/tasks/main.yml b/test/integration/targets/ansible-vault/roles/test_vault/tasks/main.yml
index 4e5551d9..4e5551d9 100644
--- a/test/integration/targets/vault/roles/test_vault/tasks/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault/tasks/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault/vars/main.yml b/test/integration/targets/ansible-vault/roles/test_vault/vars/main.yml
index cfac107a..cfac107a 100644
--- a/test/integration/targets/vault/roles/test_vault/vars/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault/vars/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_embedded/tasks/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_embedded/tasks/main.yml
index eba93896..eba93896 100644
--- a/test/integration/targets/vault/roles/test_vault_embedded/tasks/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_embedded/tasks/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_embedded/vars/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_embedded/vars/main.yml
index 54e6004f..54e6004f 100644
--- a/test/integration/targets/vault/roles/test_vault_embedded/vars/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_embedded/vars/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_embedded_ids/tasks/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/tasks/main.yml
index 9aeaf240..9aeaf240 100644
--- a/test/integration/targets/vault/roles/test_vault_embedded_ids/tasks/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/tasks/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_embedded_ids/vars/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/vars/main.yml
index 9c8fa4b2..9c8fa4b2 100644
--- a/test/integration/targets/vault/roles/test_vault_embedded_ids/vars/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_embedded_ids/vars/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/README.md b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/README.md
index 4a75cece..4a75cece 100644
--- a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/README.md
+++ b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/README.md
diff --git a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml
index e09004a1..e09004a1 100644
--- a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/tasks/main.yml
diff --git a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/vars/main.yml b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/vars/main.yml
index 89cc4a0f..89cc4a0f 100644
--- a/test/integration/targets/vault/roles/test_vault_file_encrypted_embedded/vars/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vault_file_encrypted_embedded/vars/main.yml
diff --git a/test/integration/targets/vault/roles/test_vaulted_template/tasks/main.yml b/test/integration/targets/ansible-vault/roles/test_vaulted_template/tasks/main.yml
index b4af5efc..b4af5efc 100644
--- a/test/integration/targets/vault/roles/test_vaulted_template/tasks/main.yml
+++ b/test/integration/targets/ansible-vault/roles/test_vaulted_template/tasks/main.yml
diff --git a/test/integration/targets/vault/roles/test_vaulted_template/templates/vaulted_template.j2 b/test/integration/targets/ansible-vault/roles/test_vaulted_template/templates/vaulted_template.j2
index af9c3eb1..af9c3eb1 100644
--- a/test/integration/targets/vault/roles/test_vaulted_template/templates/vaulted_template.j2
+++ b/test/integration/targets/ansible-vault/roles/test_vaulted_template/templates/vaulted_template.j2
diff --git a/test/integration/targets/vault/runme.sh b/test/integration/targets/ansible-vault/runme.sh
index e3b21d7f..e3b21d7f 100755
--- a/test/integration/targets/vault/runme.sh
+++ b/test/integration/targets/ansible-vault/runme.sh
diff --git a/test/integration/targets/vault/single_vault_as_string.yml b/test/integration/targets/ansible-vault/single_vault_as_string.yml
index 1eb17d04..ca147b0b 100644
--- a/test/integration/targets/vault/single_vault_as_string.yml
+++ b/test/integration/targets/ansible-vault/single_vault_as_string.yml
@@ -27,7 +27,7 @@
- vaulted_value|forceescape == 'foo bar'
- vaulted_value|first == 'f'
- "'%s'|format(vaulted_value) == 'foo bar'"
- - vaulted_value|indent(indentfirst=True) == ' foo bar'
+ - vaulted_value|indent(first=True) == ' foo bar'
- vaulted_value.split() == ['foo', 'bar']
- vaulted_value|join('-') == 'f-o-o- -b-a-r'
- vaulted_value|last == 'r'
diff --git a/test/integration/targets/vault/test-vault-client.py b/test/integration/targets/ansible-vault/test-vault-client.py
index ee461887..ee461887 100755
--- a/test/integration/targets/vault/test-vault-client.py
+++ b/test/integration/targets/ansible-vault/test-vault-client.py
diff --git a/test/integration/targets/vault/test_dangling_temp.yml b/test/integration/targets/ansible-vault/test_dangling_temp.yml
index 71a9d73a..71a9d73a 100644
--- a/test/integration/targets/vault/test_dangling_temp.yml
+++ b/test/integration/targets/ansible-vault/test_dangling_temp.yml
diff --git a/test/integration/targets/vault/test_utf8_value_in_filename.yml b/test/integration/targets/ansible-vault/test_utf8_value_in_filename.yml
index 9bd394dc..9bd394dc 100644
--- a/test/integration/targets/vault/test_utf8_value_in_filename.yml
+++ b/test/integration/targets/ansible-vault/test_utf8_value_in_filename.yml
diff --git a/test/integration/targets/vault/test_vault.yml b/test/integration/targets/ansible-vault/test_vault.yml
index 7f8ed115..7f8ed115 100644
--- a/test/integration/targets/vault/test_vault.yml
+++ b/test/integration/targets/ansible-vault/test_vault.yml
diff --git a/test/integration/targets/vault/test_vault_embedded.yml b/test/integration/targets/ansible-vault/test_vault_embedded.yml
index ee9739f8..ee9739f8 100644
--- a/test/integration/targets/vault/test_vault_embedded.yml
+++ b/test/integration/targets/ansible-vault/test_vault_embedded.yml
diff --git a/test/integration/targets/vault/test_vault_embedded_ids.yml b/test/integration/targets/ansible-vault/test_vault_embedded_ids.yml
index 23ebbb96..23ebbb96 100644
--- a/test/integration/targets/vault/test_vault_embedded_ids.yml
+++ b/test/integration/targets/ansible-vault/test_vault_embedded_ids.yml
diff --git a/test/integration/targets/vault/test_vault_file_encrypted_embedded.yml b/test/integration/targets/ansible-vault/test_vault_file_encrypted_embedded.yml
index 685d20ef..685d20ef 100644
--- a/test/integration/targets/vault/test_vault_file_encrypted_embedded.yml
+++ b/test/integration/targets/ansible-vault/test_vault_file_encrypted_embedded.yml
diff --git a/test/integration/targets/vault/test_vaulted_inventory.yml b/test/integration/targets/ansible-vault/test_vaulted_inventory.yml
index 06b6582b..06b6582b 100644
--- a/test/integration/targets/vault/test_vaulted_inventory.yml
+++ b/test/integration/targets/ansible-vault/test_vaulted_inventory.yml
diff --git a/test/integration/targets/vault/test_vaulted_inventory_toml.yml b/test/integration/targets/ansible-vault/test_vaulted_inventory_toml.yml
index f6e2c5d6..f6e2c5d6 100644
--- a/test/integration/targets/vault/test_vaulted_inventory_toml.yml
+++ b/test/integration/targets/ansible-vault/test_vaulted_inventory_toml.yml
diff --git a/test/integration/targets/vault/test_vaulted_template.yml b/test/integration/targets/ansible-vault/test_vaulted_template.yml
index b495211d..b495211d 100644
--- a/test/integration/targets/vault/test_vaulted_template.yml
+++ b/test/integration/targets/ansible-vault/test_vaulted_template.yml
diff --git a/test/integration/targets/vault/test_vaulted_utf8_value.yml b/test/integration/targets/ansible-vault/test_vaulted_utf8_value.yml
index 63b602b1..63b602b1 100644
--- a/test/integration/targets/vault/test_vaulted_utf8_value.yml
+++ b/test/integration/targets/ansible-vault/test_vaulted_utf8_value.yml
diff --git a/test/integration/targets/vault/vault-café.yml b/test/integration/targets/ansible-vault/vault-café.yml
index 0d179aec..0d179aec 100644
--- a/test/integration/targets/vault/vault-café.yml
+++ b/test/integration/targets/ansible-vault/vault-café.yml
diff --git a/test/integration/targets/vault/vault-password b/test/integration/targets/ansible-vault/vault-password
index 96973929..96973929 100644
--- a/test/integration/targets/vault/vault-password
+++ b/test/integration/targets/ansible-vault/vault-password
diff --git a/test/integration/targets/vault/vault-password-ansible b/test/integration/targets/ansible-vault/vault-password-ansible
index 90d40550..90d40550 100644
--- a/test/integration/targets/vault/vault-password-ansible
+++ b/test/integration/targets/ansible-vault/vault-password-ansible
diff --git a/test/integration/targets/vault/vault-password-wrong b/test/integration/targets/ansible-vault/vault-password-wrong
index 50e2efad..50e2efad 100644
--- a/test/integration/targets/vault/vault-password-wrong
+++ b/test/integration/targets/ansible-vault/vault-password-wrong
diff --git a/test/integration/targets/vault/vault-secret.txt b/test/integration/targets/ansible-vault/vault-secret.txt
index b6bc9bfb..b6bc9bfb 100644
--- a/test/integration/targets/vault/vault-secret.txt
+++ b/test/integration/targets/ansible-vault/vault-secret.txt
diff --git a/test/integration/targets/vault/vaulted.inventory b/test/integration/targets/ansible-vault/vaulted.inventory
index 1ed258b6..1ed258b6 100644
--- a/test/integration/targets/vault/vaulted.inventory
+++ b/test/integration/targets/ansible-vault/vaulted.inventory
diff --git a/test/integration/targets/ansible/aliases b/test/integration/targets/ansible/aliases
index f71c8117..498fedd5 100644
--- a/test/integration/targets/ansible/aliases
+++ b/test/integration/targets/ansible/aliases
@@ -1,2 +1,2 @@
shippable/posix/group4
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible/module_common_regex_regression.sh b/test/integration/targets/ansible/module_common_regex_regression.sh
new file mode 100755
index 00000000..4869f4f0
--- /dev/null
+++ b/test/integration/targets/ansible/module_common_regex_regression.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+# #74270 -- ensure we escape directory names before passing to re.compile()
+# particularly in module_common.
+
+set -eux
+
+lib_path=$(python -c 'import os, ansible; print(os.path.dirname(os.path.dirname(ansible.__file__)))')
+bad_dir="${OUTPUT_DIR}/ansi[ble"
+
+mkdir "${bad_dir}"
+cp -a "${lib_path}" "${bad_dir}"
+
+PYTHONPATH="${bad_dir}/lib" ansible -m ping localhost -i ../../inventory "$@"
+rm -rf "${bad_dir}"
diff --git a/test/integration/targets/ansible/runme.sh b/test/integration/targets/ansible/runme.sh
index fc79e33e..e9e72a9f 100755
--- a/test/integration/targets/ansible/runme.sh
+++ b/test/integration/targets/ansible/runme.sh
@@ -80,3 +80,7 @@ if ansible-playbook -i ../../inventory --extra-vars ./vars.yml playbook.yml; the
fi
ansible-playbook -i ../../inventory --extra-vars @./vars.yml playbook.yml
+
+# #74270 -- ensure we escape directory names before passing to re.compile()
+# particularly in module_common.
+bash module_common_regex_regression.sh
diff --git a/test/integration/targets/any_errors_fatal/18602.yml b/test/integration/targets/any_errors_fatal/18602.yml
deleted file mode 100644
index 66bcb88b..00000000
--- a/test/integration/targets/any_errors_fatal/18602.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
- - hosts: localhost
- any_errors_fatal: true
- tasks:
- - block:
- - debug: msg='i execute normally'
- - name: EXPECTED FAILURE primary block command
- command: /bin/false
- - debug: msg='i never execute, cause ERROR!'
- rescue:
- - name: rescue block debug
- debug: msg='I caught an error'
- - name: EXPECTED FAILURE rescue block command
- command: /bin/false
- - debug: msg='I also never execute :-('
- always:
- - name: A debug task in the always block
- debug: msg="this always executes"
-
- - set_fact:
- always_ran: true
diff --git a/test/integration/targets/any_errors_fatal/aliases b/test/integration/targets/any_errors_fatal/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/any_errors_fatal/aliases
+++ b/test/integration/targets/any_errors_fatal/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/any_errors_fatal/on_includes.yml b/test/integration/targets/any_errors_fatal/on_includes.yml
index 981d9f46..cbc51cb2 100644
--- a/test/integration/targets/any_errors_fatal/on_includes.yml
+++ b/test/integration/targets/any_errors_fatal/on_includes.yml
@@ -4,4 +4,4 @@
hosts: testhost,testhost2
any_errors_fatal: True
tasks:
- - include: test_fatal.yml
+ - import_tasks: test_fatal.yml
diff --git a/test/integration/targets/apt/tasks/downgrade.yml b/test/integration/targets/apt/tasks/downgrade.yml
new file mode 100644
index 00000000..896b644d
--- /dev/null
+++ b/test/integration/targets/apt/tasks/downgrade.yml
@@ -0,0 +1,77 @@
+- block:
+ - name: Disable ubuntu repos so system packages are not upgraded and the testing environment is unchanged
+ command: mv /etc/apt/sources.list /etc/apt/sources.list.backup
+
+ - name: install latest foo
+ apt:
+ name: foo
+ state: latest
+ allow_unauthenticated: yes
+
+ - name: check foo version
+ shell: dpkg -s foo | grep Version | awk '{print $2}'
+ register: apt_downgrade_foo_version
+
+ - name: ensure the correct version of foo has been installed
+ assert:
+ that:
+ - "'1.0.1' in apt_downgrade_foo_version.stdout"
+
+ - name: try to downgrade foo
+ apt:
+ name: foo=1.0.0
+ state: present
+ allow_unauthenticated: yes
+ ignore_errors: yes
+ register: apt_downgrade_foo_fail
+
+ - name: verify failure of downgrading without allow downgrade flag
+ assert:
+ that:
+ - apt_downgrade_foo_fail is failed
+
+ - name: try to downgrade foo with flag
+ apt:
+ name: foo=1.0.0
+ state: present
+ allow_downgrade: yes
+ allow_unauthenticated: yes
+ register: apt_downgrade_foo_succeed
+
+ - name: verify success of downgrading with allow downgrade flag
+ assert:
+ that:
+ - apt_downgrade_foo_succeed is success
+
+ - name: check foo version
+ shell: dpkg -s foo | grep Version | awk '{print $2}'
+ register: apt_downgrade_foo_version
+
+ - name: check that version downgraded correctly
+ assert:
+ that:
+ - "'1.0.0' in apt_downgrade_foo_version.stdout"
+ - "{{ apt_downgrade_foo_version.changed }}"
+
+ - name: downgrade foo with flag again
+ apt:
+ name: foo=1.0.0
+ state: present
+ allow_downgrade: yes
+ allow_unauthenticated: yes
+ register: apt_downgrade_second_downgrade
+
+ - name: check that nothing has changed (idempotent)
+ assert:
+ that:
+ - "apt_downgrade_second_downgrade.changed == false"
+
+ always:
+ - name: Clean up
+ apt:
+ pkg: foo,foobar
+ state: absent
+ autoclean: yes
+
+ - name: Restore ubuntu repos
+ command: mv /etc/apt/sources.list.backup /etc/apt/sources.list
diff --git a/test/integration/targets/apt/tasks/repo.yml b/test/integration/targets/apt/tasks/repo.yml
index e1863f38..8269452a 100644
--- a/test/integration/targets/apt/tasks/repo.yml
+++ b/test/integration/targets/apt/tasks/repo.yml
@@ -210,6 +210,8 @@
- name: Restore ubuntu repos
command: mv /etc/apt/sources.list.backup /etc/apt/sources.list
+- name: Downgrades
+ import_tasks: "downgrade.yml"
- name: Upgrades
block:
@@ -263,3 +265,26 @@
state: absent
when:
- aptitude_status.stdout.find('ii') == -1
+
+- block:
+ - name: Install the foo package with diff=yes
+ apt:
+ name: foo
+ allow_unauthenticated: yes
+ diff: yes
+ register: apt_result
+
+ - debug:
+ var: apt_result
+
+ - name: Check the content of diff.prepared
+ assert:
+ that:
+ - apt_result is success
+ - "'The following NEW packages will be installed:\n foo' in apt_result.diff.prepared"
+ always:
+ - name: Clean up
+ apt:
+ name: foo
+ state: absent
+ allow_unauthenticated: yes
diff --git a/test/integration/targets/args/aliases b/test/integration/targets/args/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/args/aliases
+++ b/test/integration/targets/args/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/argspec/aliases b/test/integration/targets/argspec/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/argspec/aliases
+++ b/test/integration/targets/argspec/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/assert/aliases b/test/integration/targets/assert/aliases
index 757c9966..10179323 100644
--- a/test/integration/targets/assert/aliases
+++ b/test/integration/targets/assert/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/async/tasks/main.yml b/test/integration/targets/async/tasks/main.yml
index c8c12f6d..05c789e6 100644
--- a/test/integration/targets/async/tasks/main.yml
+++ b/test/integration/targets/async/tasks/main.yml
@@ -244,26 +244,6 @@
path: '{{ custom_async_tmp }}'
state: absent
- - name: run async task with custom dir - deprecated format
- command: sleep 1
- register: async_custom_dir_dep
- async: 5
- poll: 1
- environment:
- ANSIBLE_ASYNC_DIR: '{{ custom_async_tmp }}'
-
- - name: check if the async temp dir is created - deprecated format
- stat:
- path: '{{ custom_async_tmp }}'
- register: async_custom_dir_dep_result
-
- - name: assert run async task with custom dir - deprecated format
- assert:
- that:
- - async_custom_dir_dep is successful
- - async_custom_dir_dep is finished
- - async_custom_dir_dep_result.stat.exists
-
- name: remove custom async dir after deprecation test
file:
path: '{{ custom_async_tmp }}'
@@ -290,13 +270,6 @@
vars:
ansible_async_dir: '{{ custom_async_tmp }}'
- - name: get async status with custom dir - deprecated format
- async_status:
- jid: '{{ async_fandf_custom_dir.ansible_job_id }}'
- register: async_fandf_custom_dir_dep_result
- environment:
- ANSIBLE_ASYNC_DIR: '{{ custom_async_tmp }}'
-
- name: assert run fire and forget async task with custom dir
assert:
that:
@@ -304,7 +277,6 @@
- async_fandf_custom_dir_fail is failed
- async_fandf_custom_dir_fail.msg == "could not find job"
- async_fandf_custom_dir_result is successful
- - async_fandf_custom_dir_dep_result is successful
always:
- name: remove custom tmp dir after test
@@ -320,6 +292,7 @@
- name: run async poll callback test playbook
command: ansible-playbook {{ role_path }}/callback_test.yml
+ delegate_to: localhost
register: callback_output
- assert:
diff --git a/test/integration/targets/async_extra_data/aliases b/test/integration/targets/async_extra_data/aliases
index 70a7b7a9..7bd941e6 100644
--- a/test/integration/targets/async_extra_data/aliases
+++ b/test/integration/targets/async_extra_data/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/target
diff --git a/test/integration/targets/become/aliases b/test/integration/targets/become/aliases
index 3a07aab3..ad691e7d 100644
--- a/test/integration/targets/become/aliases
+++ b/test/integration/targets/become/aliases
@@ -1,3 +1,4 @@
destructive
shippable/posix/group1
skip/aix
+context/target
diff --git a/test/integration/targets/become/tasks/main.yml b/test/integration/targets/become/tasks/main.yml
index 3feb5cc7..b4c7b601 100644
--- a/test/integration/targets/become/tasks/main.yml
+++ b/test/integration/targets/become/tasks/main.yml
@@ -1,5 +1,5 @@
- include_vars: default.yml
-- include: default.yml
-- include: sudo.yml
-- include: su.yml
+- import_tasks: default.yml
+- import_tasks: sudo.yml
+- import_tasks: su.yml
diff --git a/test/integration/targets/become_su/aliases b/test/integration/targets/become_su/aliases
index 3a07aab3..f3e45b5e 100644
--- a/test/integration/targets/become_su/aliases
+++ b/test/integration/targets/become_su/aliases
@@ -1,3 +1,3 @@
destructive
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/become_unprivileged/aliases b/test/integration/targets/become_unprivileged/aliases
index c96617f6..c97d2f98 100644
--- a/test/integration/targets/become_unprivileged/aliases
+++ b/test/integration/targets/become_unprivileged/aliases
@@ -1,5 +1,5 @@
destructive
shippable/posix/group1
-skip/aix
needs/ssh
needs/root
+context/controller
diff --git a/test/integration/targets/binary/aliases b/test/integration/targets/binary/aliases
index 765b70da..6452e6d4 100644
--- a/test/integration/targets/binary/aliases
+++ b/test/integration/targets/binary/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/target
diff --git a/test/integration/targets/binary_modules_posix/aliases b/test/integration/targets/binary_modules_posix/aliases
index 2c6e4a07..2cfe7ea8 100644
--- a/test/integration/targets/binary_modules_posix/aliases
+++ b/test/integration/targets/binary_modules_posix/aliases
@@ -1,2 +1,3 @@
shippable/posix/group3
needs/target/binary_modules
+context/target
diff --git a/test/integration/targets/blocks/aliases b/test/integration/targets/blocks/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/blocks/aliases
+++ b/test/integration/targets/blocks/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/blocks/main.yml b/test/integration/targets/blocks/main.yml
index 012d5ab2..efe358a3 100644
--- a/test/integration/targets/blocks/main.yml
+++ b/test/integration/targets/blocks/main.yml
@@ -96,8 +96,8 @@
tasks:
- block:
- name: include fail.yml in tasks
- include: fail.yml
- args:
+ import_tasks: fail.yml
+ vars:
msg: "failed from tasks"
- name: tasks flag should not be set after failure
set_fact:
@@ -106,8 +106,8 @@
- set_fact:
rescue_run_after_include_fail: true
- name: include fail.yml in rescue
- include: fail.yml
- args:
+ import_tasks: fail.yml
+ vars:
msg: "failed from rescue"
- name: flag should not be set after failure in rescue
set_fact:
diff --git a/test/integration/targets/blocks/nested_fail.yml b/test/integration/targets/blocks/nested_fail.yml
index 31ae870e..12e33cb4 100644
--- a/test/integration/targets/blocks/nested_fail.yml
+++ b/test/integration/targets/blocks/nested_fail.yml
@@ -1,3 +1,3 @@
-- include: fail.yml
- args:
+- import_tasks: fail.yml
+ vars:
msg: "nested {{msg}}"
diff --git a/test/integration/targets/blocks/nested_nested_fail.yml b/test/integration/targets/blocks/nested_nested_fail.yml
index e9a050fb..f63fa5ce 100644
--- a/test/integration/targets/blocks/nested_nested_fail.yml
+++ b/test/integration/targets/blocks/nested_nested_fail.yml
@@ -1,3 +1,3 @@
-- include: nested_fail.yml
- args:
+- import_tasks: nested_fail.yml
+ vars:
msg: "nested {{msg}}"
diff --git a/test/integration/targets/builtin_vars_prompt/aliases b/test/integration/targets/builtin_vars_prompt/aliases
index 4317d112..4b94ea15 100644
--- a/test/integration/targets/builtin_vars_prompt/aliases
+++ b/test/integration/targets/builtin_vars_prompt/aliases
@@ -1,3 +1,4 @@
setup/always/setup_passlib
setup/always/setup_pexpect
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/callback_default/aliases b/test/integration/targets/callback_default/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/callback_default/aliases
+++ b/test/integration/targets/callback_default/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh
index b5c98ef7..f9b60b6b 100755
--- a/test/integration/targets/callback_default/runme.sh
+++ b/test/integration/targets/callback_default/runme.sh
@@ -125,6 +125,13 @@ export ANSIBLE_CHECK_MODE_MARKERS=0
run_test default
+# Check for async output
+# NOTE: the BRE interval \{1,\} (one or more digits) works in both BSD and GNU grep
+ansible-playbook -i inventory test_async.yml 2>&1 | tee async_test.out
+grep "ASYNC OK .* jid=[0-9]\{1,\}" async_test.out
+grep "ASYNC FAILED .* jid=[0-9]\{1,\}" async_test.out
+rm -f async_test.out
+
# Hide skipped
export ANSIBLE_DISPLAY_SKIPPED_HOSTS=0
diff --git a/test/integration/targets/callback_default/test_async.yml b/test/integration/targets/callback_default/test_async.yml
new file mode 100644
index 00000000..57294a4c
--- /dev/null
+++ b/test/integration/targets/callback_default/test_async.yml
@@ -0,0 +1,14 @@
+---
+- hosts: testhost
+ gather_facts: no
+ tasks:
+ - name: test success async output
+ command: sleep 1
+ async: 10
+ poll: 1
+
+ - name: test failure async output
+ command: sleep 10
+ async: 1
+ poll: 1
+ ignore_errors: yes
diff --git a/test/integration/targets/changed_when/aliases b/test/integration/targets/changed_when/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/changed_when/aliases
+++ b/test/integration/targets/changed_when/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/changed_when/tasks/main.yml b/test/integration/targets/changed_when/tasks/main.yml
index 7b997189..4f0a8747 100644
--- a/test/integration/targets/changed_when/tasks/main.yml
+++ b/test/integration/targets/changed_when/tasks/main.yml
@@ -59,3 +59,15 @@
assert:
that:
- groupby is not changed
+
+- name: invalid conditional
+ command: echo foo
+ changed_when: boomboomboom
+ register: invalid_conditional
+ ignore_errors: true
+
+- assert:
+ that:
+ - invalid_conditional is failed
+ - invalid_conditional.stdout is defined
+ - invalid_conditional.changed_when_result is contains('boomboomboom')
diff --git a/test/integration/targets/check_mode/aliases b/test/integration/targets/check_mode/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/check_mode/aliases
+++ b/test/integration/targets/check_mode/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/cli/aliases b/test/integration/targets/cli/aliases
index a8816e11..c73d4253 100644
--- a/test/integration/targets/cli/aliases
+++ b/test/integration/targets/cli/aliases
@@ -3,3 +3,4 @@ needs/root
needs/ssh
needs/target/setup_pexpect
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyCSMUOptional.cs b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyCSMUOptional.cs
new file mode 100644
index 00000000..0a3e758c
--- /dev/null
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyCSMUOptional.cs
@@ -0,0 +1,19 @@
+using System;
+
+using ansible_collections.testns.testcoll.plugins.module_utils.AnotherCSMU;
+using ansible_collections.testns.testcoll.plugins.module_utils.subpkg.subcs;
+
+//TypeAccelerator -Name MyCSMU -TypeName CustomThing
+
+namespace ansible_collections.testns.testcoll.plugins.module_utils.MyCSMU
+{
+ public class CustomThing
+ {
+ public static string HelloWorld()
+ {
+ string res1 = AnotherThing.CallMe();
+ string res2 = NestedUtil.HelloWorld();
+ return String.Format("Hello from user_mu collection-hosted MyCSMUOptional, also {0} and {1}", res1, res2);
+ }
+ }
+}
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyPSMUOptional.psm1 b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyPSMUOptional.psm1
new file mode 100644
index 00000000..1e361598
--- /dev/null
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/module_utils/MyPSMUOptional.psm1
@@ -0,0 +1,16 @@
+#AnsibleRequires -CSharpUtil Ansible.Invalid -Optional
+#AnsibleRequires -Powershell Ansible.ModuleUtils.Invalid -Optional
+#AnsibleRequires -CSharpUtil ansible_collections.testns.testcoll.plugins.module_utils.invalid -Optional
+#AnsibleRequires -CSharpUtil ansible_collections.testns.testcoll.plugins.module_utils.invalid.invalid -Optional
+#AnsibleRequires -Powershell ansible_collections.testns.testcoll.plugins.module_utils.invalid -Optional
+#AnsibleRequires -Powershell ansible_collections.testns.testcoll.plugins.module_utils.invalid.invalid -Optional
+
+Function Invoke-FromUserPSMU {
+ <#
+ .SYNOPSIS
+ Test function
+ #>
+ return "from optional user_mu"
+}
+
+Export-ModuleMember -Function Invoke-FromUserPSMU
diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_uses_optional.ps1 b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_uses_optional.ps1
new file mode 100644
index 00000000..c44dcfea
--- /dev/null
+++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/modules/win_uses_optional.ps1
@@ -0,0 +1,33 @@
+#!powershell
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Test builtin C# still works with -Optional
+#AnsibleRequires -CSharpUtil Ansible.Basic -Optional
+
+# Test no failure when importing an invalid builtin C# and pwsh util with -Optional
+#AnsibleRequires -CSharpUtil Ansible.Invalid -Optional
+#AnsibleRequires -PowerShell Ansible.ModuleUtils.Invalid -Optional
+
+# Test valid module_util still works with -Optional
+#AnsibleRequires -CSharpUtil ansible_collections.testns.testcoll.plugins.module_utils.MyCSMUOptional -Optional
+#AnsibleRequires -Powershell ansible_collections.testns.testcoll.plugins.module_utils.MyPSMUOptional -Optional
+
+# Test no failure when importing an invalid collection C# and pwsh util with -Optional
+#AnsibleRequires -CSharpUtil ansible_collections.testns.testcoll.plugins.module_utils.invalid -Optional
+#AnsibleRequires -CSharpUtil ansible_collections.testns.testcoll.plugins.module_utils.invalid.invalid -Optional
+#AnsibleRequires -Powershell ansible_collections.testns.testcoll.plugins.module_utils.invalid -Optional
+#AnsibleRequires -Powershell ansible_collections.testns.testcoll.plugins.module_utils.invalid.invalid -Optional
+
+$spec = @{
+ options = @{
+ data = @{ type = "str"; default = "called $(Invoke-FromUserPSMU)" }
+ }
+ supports_check_mode = $true
+}
+$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
+
+$module.Result.data = $module.Params.data
+$module.Result.csharp = [MyCSMU]::HelloWorld()
+
+$module.ExitJson()
diff --git a/test/integration/targets/collections/runme.sh b/test/integration/targets/collections/runme.sh
index 1e9584ff..5a5261bb 100755
--- a/test/integration/targets/collections/runme.sh
+++ b/test/integration/targets/collections/runme.sh
@@ -8,10 +8,6 @@ export ANSIBLE_GATHER_SUBSET=minimal
export ANSIBLE_HOST_PATTERN_MISMATCH=error
unset ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH
-# FUTURE: just use INVENTORY_PATH as-is once ansible-test sets the right dir
-ipath=../../$(basename "${INVENTORY_PATH:-../../inventory}")
-export INVENTORY_PATH="$ipath"
-
# ensure we can call collection module
ansible localhost -m testns.testcoll.testmodule
@@ -137,3 +133,5 @@ if [[ "$(grep -wc "dynamic_host_a" "$CACHEFILE")" -ne "0" ]]; then
fi
./vars_plugin_tests.sh
+
+./test_task_resolved_plugin.sh
diff --git a/test/integration/targets/collections/test_task_resolved_plugin.sh b/test/integration/targets/collections/test_task_resolved_plugin.sh
new file mode 100755
index 00000000..444b4f11
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+
+set -eux
+
+export ANSIBLE_CALLBACKS_ENABLED=display_resolved_action
+
+ansible-playbook test_task_resolved_plugin/unqualified.yml "$@" | tee out.txt
+action_resolution=(
+ "legacy_action == legacy_action"
+ "legacy_module == legacy_module"
+ "debug == ansible.builtin.debug"
+ "ping == ansible.builtin.ping"
+)
+for result in "${action_resolution[@]}"; do
+ grep -q -e "$result" out.txt
+done
+
+ansible-playbook test_task_resolved_plugin/unqualified_and_collections_kw.yml "$@" | tee out.txt
+action_resolution=(
+ "legacy_action == legacy_action"
+ "legacy_module == legacy_module"
+ "debug == ansible.builtin.debug"
+ "ping == ansible.builtin.ping"
+ "collection_action == test_ns.test_coll.collection_action"
+ "collection_module == test_ns.test_coll.collection_module"
+ "formerly_action == test_ns.test_coll.collection_action"
+ "formerly_module == test_ns.test_coll.collection_module"
+)
+for result in "${action_resolution[@]}"; do
+ grep -q -e "$result" out.txt
+done
+
+ansible-playbook test_task_resolved_plugin/fqcn.yml "$@" | tee out.txt
+action_resolution=(
+ "ansible.legacy.legacy_action == legacy_action"
+ "ansible.legacy.legacy_module == legacy_module"
+ "ansible.legacy.debug == ansible.builtin.debug"
+ "ansible.legacy.ping == ansible.builtin.ping"
+ "ansible.builtin.debug == ansible.builtin.debug"
+ "ansible.builtin.ping == ansible.builtin.ping"
+ "test_ns.test_coll.collection_action == test_ns.test_coll.collection_action"
+ "test_ns.test_coll.collection_module == test_ns.test_coll.collection_module"
+ "test_ns.test_coll.formerly_action == test_ns.test_coll.collection_action"
+ "test_ns.test_coll.formerly_module == test_ns.test_coll.collection_module"
+)
+for result in "${action_resolution[@]}"; do
+ grep -q -e "$result" out.txt
+done
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/action_plugins/legacy_action.py b/test/integration/targets/collections/test_task_resolved_plugin/action_plugins/legacy_action.py
new file mode 100644
index 00000000..fa4d514b
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/action_plugins/legacy_action.py
@@ -0,0 +1,14 @@
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.plugins.action import ActionBase
+
+
+class ActionModule(ActionBase):
+ TRANSFERS_FILES = False
+ _VALID_ARGS = frozenset()
+
+ def run(self, tmp=None, task_vars=None):
+ return {'changed': False}
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py
new file mode 100644
index 00000000..23cce104
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/callback_plugins/display_resolved_action.py
@@ -0,0 +1,37 @@
+# (c) 2020 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ name: display_resolved_action
+ type: aggregate
+ short_description: Displays the requested and resolved actions at the end of a playbook.
+ description:
+ - Displays the requested and resolved actions in the format "requested == resolved".
+ requirements:
+ - Enable in configuration.
+'''
+
+from ansible import constants as C
+from ansible.plugins.callback import CallbackBase
+
+
+class CallbackModule(CallbackBase):
+
+ CALLBACK_VERSION = 2.0
+ CALLBACK_TYPE = 'aggregate'
+ CALLBACK_NAME = 'display_resolved_action'
+ CALLBACK_NEEDS_ENABLED = True
+
+ def __init__(self, *args, **kwargs):
+ super(CallbackModule, self).__init__(*args, **kwargs)
+ self.requested_to_resolved = {}
+
+ def v2_playbook_on_task_start(self, task, is_conditional):
+ self.requested_to_resolved[task.action] = task.resolved_action
+
+ def v2_playbook_on_stats(self, stats):
+ for requested, resolved in self.requested_to_resolved.items():
+ self._display.display("%s == %s" % (requested, resolved), screen_only=True)
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/meta/runtime.yml b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/meta/runtime.yml
new file mode 100644
index 00000000..8c27dba0
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/meta/runtime.yml
@@ -0,0 +1,7 @@
+plugin_routing:
+ modules:
+ formerly_module:
+ redirect: test_ns.test_coll.collection_module
+ action:
+ formerly_action:
+ redirect: test_ns.test_coll.collection_action
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/action/collection_action.py b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/action/collection_action.py
new file mode 100644
index 00000000..fa4d514b
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/action/collection_action.py
@@ -0,0 +1,14 @@
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.plugins.action import ActionBase
+
+
+class ActionModule(ActionBase):
+ TRANSFERS_FILES = False
+ _VALID_ARGS = frozenset()
+
+ def run(self, tmp=None, task_vars=None):
+ return {'changed': False}
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/modules/collection_module.py b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/modules/collection_module.py
new file mode 100644
index 00000000..8f312263
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/collections/ansible_collections/test_ns/test_coll/plugins/modules/collection_module.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: collection_module
+short_description: A module to test a task's resolved action name.
+description: A module to test a task's resolved action name.
+options: {}
+author: Ansible Core Team
+notes:
+ - Supports C(check_mode).
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(supports_check_mode=True, argument_spec={})
+ module.exit_json(changed=False)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/fqcn.yml b/test/integration/targets/collections/test_task_resolved_plugin/fqcn.yml
new file mode 100644
index 00000000..ab9e9259
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/fqcn.yml
@@ -0,0 +1,14 @@
+---
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - ansible.legacy.legacy_action:
+ - ansible.legacy.legacy_module:
+ - ansible.legacy.debug:
+ - ansible.legacy.ping:
+ - ansible.builtin.debug:
+ - ansible.builtin.ping:
+ - test_ns.test_coll.collection_action:
+ - test_ns.test_coll.collection_module:
+ - test_ns.test_coll.formerly_action:
+ - test_ns.test_coll.formerly_module:
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/library/legacy_module.py b/test/integration/targets/collections/test_task_resolved_plugin/library/legacy_module.py
new file mode 100644
index 00000000..4fd75871
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/library/legacy_module.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: legacy_module
+short_description: A module to test a task's resolved action name.
+description: A module to test a task's resolved action name.
+options: {}
+author: Ansible Core Team
+notes:
+ - Supports C(check_mode).
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(supports_check_mode=True, argument_spec={})
+ module.exit_json(changed=False)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml b/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml
new file mode 100644
index 00000000..076b8cc7
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/unqualified.yml
@@ -0,0 +1,8 @@
+---
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - legacy_action:
+ - legacy_module:
+ - debug:
+ - ping:
diff --git a/test/integration/targets/collections/test_task_resolved_plugin/unqualified_and_collections_kw.yml b/test/integration/targets/collections/test_task_resolved_plugin/unqualified_and_collections_kw.yml
new file mode 100644
index 00000000..5af4eda9
--- /dev/null
+++ b/test/integration/targets/collections/test_task_resolved_plugin/unqualified_and_collections_kw.yml
@@ -0,0 +1,14 @@
+---
+- hosts: localhost
+ gather_facts: no
+ collections:
+ - test_ns.test_coll
+ tasks:
+ - legacy_action:
+ - legacy_module:
+ - debug:
+ - ping:
+ - collection_action:
+ - collection_module:
+ - formerly_action:
+ - formerly_module:
diff --git a/test/integration/targets/collections/windows.yml b/test/integration/targets/collections/windows.yml
index 4bdfb0ed..cf98ca1e 100644
--- a/test/integration/targets/collections/windows.yml
+++ b/test/integration/targets/collections/windows.yml
@@ -12,6 +12,9 @@
- testns.testcoll.win_uses_coll_csmu:
register: uses_coll_csmu
+ - testns.testcoll.win_uses_optional:
+ register: uses_coll_optional
+
- assert:
that:
- selfcontained_out.source == 'user'
@@ -26,3 +29,6 @@
- "'Hello from subpkg.subcs' in uses_coll_csmu.ping"
- uses_coll_csmu.subpkg == 'Hello from subpkg.subcs'
- uses_coll_csmu.type_accelerator == uses_coll_csmu.ping
+ # win_uses_optional
+ - uses_coll_optional.data == "called from optional user_mu"
+ - uses_coll_optional.csharp == "Hello from user_mu collection-hosted MyCSMUOptional, also Hello from nested user-collection-hosted AnotherCSMU and Hello from subpkg.subcs"
diff --git a/test/integration/targets/collections_plugin_namespace/aliases b/test/integration/targets/collections_plugin_namespace/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/collections_plugin_namespace/aliases
+++ b/test/integration/targets/collections_plugin_namespace/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/PSRel4.psm1 b/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/PSRel4.psm1
new file mode 100644
index 00000000..bcb5ec19
--- /dev/null
+++ b/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/PSRel4.psm1
@@ -0,0 +1,12 @@
+#AnsibleRequires -CSharpUtil .sub_pkg.CSRel5 -Optional
+#AnsibleRequires -PowerShell .sub_pkg.PSRelInvalid -Optional
+
+Function Invoke-FromPSRel4 {
+ <#
+ .SYNOPSIS
+ Test function
+ #>
+ return "Invoke-FromPSRel4"
+}
+
+Export-ModuleMember -Function Invoke-FromPSRel4
diff --git a/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/win_relative_optional.ps1 b/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/win_relative_optional.ps1
new file mode 100644
index 00000000..9086ca42
--- /dev/null
+++ b/test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/win_relative_optional.ps1
@@ -0,0 +1,17 @@
+#!powershell
+
+#AnsibleRequires -CSharpUtil Ansible.Basic -Optional
+#AnsibleRequires -PowerShell ..module_utils.PSRel4 -optional
+
+# These do not exist
+#AnsibleRequires -CSharpUtil ..invalid_package.name -Optional
+#AnsibleRequires -CSharpUtil ..module_utils.InvalidName -optional
+#AnsibleRequires -PowerShell ..invalid_package.pwsh_name -optional
+#AnsibleRequires -PowerShell ..module_utils.InvalidPwshName -Optional
+
+
+$module = [Ansible.Basic.AnsibleModule]::Create($args, @{})
+
+$module.Result.data = Invoke-FromPSRel4
+
+$module.ExitJson()
diff --git a/test/integration/targets/collections_relative_imports/windows.yml b/test/integration/targets/collections_relative_imports/windows.yml
index aa6badfa..3a3c5488 100644
--- a/test/integration/targets/collections_relative_imports/windows.yml
+++ b/test/integration/targets/collections_relative_imports/windows.yml
@@ -9,3 +9,12 @@
assert:
that:
- win_relative.data == 'CSRel4.Invoke() -> Invoke-FromPSRel3 -> Invoke-FromPSRel2 -> Invoke-FromPSRel1'
+
+ - name: test out relative imports on Windows modules with optional import
+ my_ns.my_col.win_relative_optional:
+ register: win_relative_optional
+
+ - name: assert relative imports on Windows modules with optional import
+ assert:
+ that:
+ - win_relative_optional.data == 'Invoke-FromPSRel4'
diff --git a/test/integration/targets/collections_runtime_pythonpath/aliases b/test/integration/targets/collections_runtime_pythonpath/aliases
index 0a772ad7..498fedd5 100644
--- a/test/integration/targets/collections_runtime_pythonpath/aliases
+++ b/test/integration/targets/collections_runtime_pythonpath/aliases
@@ -1,3 +1,2 @@
shippable/posix/group4
-skip/python2.6
-skip/aix
+context/controller
diff --git a/test/integration/targets/collections_runtime_pythonpath/runme.sh b/test/integration/targets/collections_runtime_pythonpath/runme.sh
index 654104a1..38c6c64f 100755
--- a/test/integration/targets/collections_runtime_pythonpath/runme.sh
+++ b/test/integration/targets/collections_runtime_pythonpath/runme.sh
@@ -25,19 +25,19 @@ ansible \
=== Test that the module \
gets picked up if installed \
into site-packages ===
-python -m pip.__main__ install pep517
+python -m pip install pep517
( # Build a binary Python dist (a wheel) using PEP517:
cp -r ansible-collection-python-dist-boo "${OUTPUT_DIR}/"
cd "${OUTPUT_DIR}/ansible-collection-python-dist-boo"
python -m pep517.build --binary --out-dir dist .
)
# Install a pre-built dist with pip:
-python -m pip.__main__ install \
+python -m pip install \
--no-index \
-f "${OUTPUT_DIR}/ansible-collection-python-dist-boo/dist/" \
--only-binary=ansible-collections.python.dist \
ansible-collections.python.dist
-python -m pip.__main__ show ansible-collections.python.dist
+python -m pip show ansible-collections.python.dist
ansible \
-m python.dist.boo \
-a 'name=Frodo' \
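The switch from "python -m pip.__main__" to "python -m pip" drops an old workaround: directly importing pip's __main__ module was needed on Python 2.6, where -m could not run packages, and the same change removes the skip/python2.6 alias above. On supported interpreters the two spellings are equivalent, since -m itself imports the package and runs its __main__ module. A rough sketch of what -m does under the hood (illustrative only, not part of the test):

    # approximately what "python -m pip <args>" executes
    import runpy

    runpy.run_module("pip", run_name="__main__", alter_sys=True)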
diff --git a/test/integration/targets/command_nonexisting/aliases b/test/integration/targets/command_nonexisting/aliases
index e2dcf795..90ea9e12 100644
--- a/test/integration/targets/command_nonexisting/aliases
+++ b/test/integration/targets/command_nonexisting/aliases
@@ -1 +1,2 @@
-shippable/posix/group2 \ No newline at end of file
+shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/command_shell/tasks/main.yml b/test/integration/targets/command_shell/tasks/main.yml
index 653b0059..aad63c0d 100644
--- a/test/integration/targets/command_shell/tasks/main.yml
+++ b/test/integration/targets/command_shell/tasks/main.yml
@@ -504,11 +504,11 @@
when: ansible_facts.python_version is version('3', '>=')
- name: run command with strip
- command: '{{ ansible_playbook_python}} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
+ command: '{{ ansible_python_interpreter }} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
register: command_strip
- name: run command without strip
- command: '{{ ansible_playbook_python}} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
+ command: '{{ ansible_python_interpreter }} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
args:
strip_empty_ends: no
register: command_no_strip
diff --git a/test/integration/targets/common_network/aliases b/test/integration/targets/common_network/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/common_network/aliases
+++ b/test/integration/targets/common_network/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/conditionals/aliases b/test/integration/targets/conditionals/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/conditionals/aliases
+++ b/test/integration/targets/conditionals/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/conditionals/play.yml b/test/integration/targets/conditionals/play.yml
index c6bb3815..455818c9 100644
--- a/test/integration/targets/conditionals/play.yml
+++ b/test/integration/targets/conditionals/play.yml
@@ -6,10 +6,6 @@
vars_files:
- vars/main.yml
tasks:
- - name: set conditional bare vars status
- set_fact:
- bare: "{{lookup('config', 'CONDITIONAL_BARE_VARS')|bool}}"
-
- name: test conditional '=='
shell: echo 'testing'
when: 1 == 1
@@ -164,6 +160,136 @@
- "result.stdout == 'testing'"
- "result.rc == 0"
+ - name: not test bare conditional
+ shell: echo 'testing'
+ when: not test_bare
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: empty string is false
+ shell: echo 'testing'
+ when: string_lit_empty
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: not empty string is true
+ shell: echo 'testing'
+ when: not string_lit_empty
+ register: result
+
+ - name: assert ran
+ assert:
+ that:
+ - result is not skipped
+
+ - name: literal 0 is false
+ shell: echo 'testing'
+ when: int_lit_0
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: not literal 0 is true
+ shell: echo 'testing'
+ when: not int_lit_0
+ register: result
+
+ - name: assert ran
+ assert:
+ that:
+ - result is not skipped
+
+ - name: literal 1 is true
+ shell: echo 'testing'
+ when: int_lit_1
+ register: result
+
+ - name: assert ran
+ assert:
+ that:
+ - result is not skipped
+
+ - name: not literal 1 is false
+ shell: echo 'testing'
+ when: not int_lit_1
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: null is false
+ shell: echo 'testing'
+ when: lit_null
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: literal string "true" is true
+ shell: echo 'testing'
+ when: string_lit_true
+ register: result
+
+ - name: assert ran
+ assert:
+ that:
+ - result is not skipped
+
+ - name: not literal string "true" is false
+ shell: echo 'testing'
+ when: not string_lit_true
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
+ - name: literal string "false" is true (nonempty string)
+ shell: echo 'testing'
+ when: string_lit_false
+ register: result
+
+ - name: assert ran
+ assert:
+ that:
+ - result is not skipped
+
+ - name: not literal string "false" is false
+ shell: echo 'testing'
+ when: not string_lit_false
+ register: result
+
+ - name: assert did not run
+ assert:
+ that:
+ - result is skipped
+
- name: test conditional using a variable
shell: echo 'testing'
when: test_bare_var == 123
@@ -195,23 +321,13 @@
- debug: var={{item}}
loop:
- - bare
- result
- test_bare_nested_bad
- - name: assert that the bad nested conditional is skipped since 'bare' since 'string' template is resolved to 'false'
+ - name: assert that the bad nested conditional ran (it is a non-empty string, so truthy)
assert:
that:
- - result is skipped
-
- when: bare|bool
-
- - name: assert that the bad nested conditional did run since non bare 'string' is untemplated but 'trueish'
- assert:
- that:
- - result is skipped
- when: not bare|bool
- - result is changed
+ - result is not skipped
- name: test bad conditional based on nested variables with bool filter
shell: echo 'testing'
@@ -223,6 +339,7 @@
that:
- result is skipped
+
#-----------------------------------------------------------------------
# proper booleanification tests (issue #8629)
@@ -382,7 +499,6 @@
- name: Deal with multivar equality
tags: ['leveldiff']
- when: not bare|bool
vars:
toplevel_hash:
hash_var_one: justastring
diff --git a/test/integration/targets/conditionals/runme.sh b/test/integration/targets/conditionals/runme.sh
index 934443a5..4858fbf2 100755
--- a/test/integration/targets/conditionals/runme.sh
+++ b/test/integration/targets/conditionals/runme.sh
@@ -2,14 +2,4 @@
set -eux
-ANSIBLE_CONDITIONAL_BARE_VARS=1 ansible-playbook -i ../../inventory play.yml "$@"
-ANSIBLE_CONDITIONAL_BARE_VARS=0 ansible-playbook -i ../../inventory play.yml "$@"
-
-export ANSIBLE_CONDITIONAL_BARE_VARS=1
-export ANSIBLE_DEPRECATION_WARNINGS=True
-
-# No warnings for conditionals that are already type bool
-test "$(ansible-playbook -i ../../inventory test_no_warnings.yml "$@" 2>&1 | grep -c '\[DEPRECATION WARNING\]')" = 0
-
-# Warn for bare vars of other types since they may be interpreted differently when CONDITIONAL_BARE_VARS defaults to False
-test "$(ansible-playbook -i ../../inventory test_warnings.yml "$@" 2>&1 | grep -c '\[DEPRECATION WARNING\]')" = 2
+ansible-playbook -i ../../inventory play.yml "$@"
diff --git a/test/integration/targets/conditionals/test_no_warnings.yml b/test/integration/targets/conditionals/test_no_warnings.yml
deleted file mode 100644
index 93280447..00000000
--- a/test/integration/targets/conditionals/test_no_warnings.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-- hosts: testhost
- gather_facts: false
- vars:
- boolean_var: false
- nested:
- bool_var: false
- tasks:
- - name: Run tasks with previous warnings requesting the bool filter on type boolean vars
- block:
- - debug:
- when: boolean_var
- - debug:
- when: nested.bool_var
- - debug:
- when: double_interpolated
- vars:
- double_interpolated: "{{ other }}"
- other: false
diff --git a/test/integration/targets/conditionals/test_warnings.yml b/test/integration/targets/conditionals/test_warnings.yml
deleted file mode 100644
index 4186cd01..00000000
--- a/test/integration/targets/conditionals/test_warnings.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-- hosts: testhost
- gather_facts: false
- vars:
- str_boolean_var: 'false'
- tasks:
- - name: Run tasks with warnings for conditionals that will change in behavior depending on CONDITIONAL_BARE_VARS
- block:
- - debug:
- when: str_boolean_var
- - debug:
- when: double_interpolated
- vars:
- double_interpolated: other
- other: false
diff --git a/test/integration/targets/conditionals/vars/main.yml b/test/integration/targets/conditionals/vars/main.yml
index d6221478..2af6cee2 100644
--- a/test/integration/targets/conditionals/vars/main.yml
+++ b/test/integration/targets/conditionals/vars/main.yml
@@ -20,3 +20,10 @@ test_bare: true
test_bare_var: 123
test_bare_nested_good: "test_bare_var == 123"
test_bare_nested_bad: "{{test_bare_var}} == 321"
+
+string_lit_true: "true"
+string_lit_false: "false"
+string_lit_empty: ""
+lit_null: null
+int_lit_0: 0
+int_lit_1: 1
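These vars back the truthiness cases added to play.yml above: with the CONDITIONAL_BARE_VARS setting removed, a variable in when: is evaluated with plain Jinja2/Python truthiness, so the empty string, 0, and null are false, while any non-empty string, including "false", is true. A standalone illustration of that rule using Jinja2 directly (a sketch, not Ansible's actual conditional code path):

    from jinja2 import Environment

    env = Environment()
    cases = [
        ("string_lit_true", "true"),    # non-empty string -> truthy
        ("string_lit_false", "false"),  # still a non-empty string -> truthy
        ("string_lit_empty", ""),       # empty string -> falsy
        ("int_lit_0", 0),               # zero -> falsy
        ("lit_null", None),             # null -> falsy
    ]
    for name, value in cases:
        # compile the bare variable reference and evaluate its truthiness
        result = bool(env.compile_expression(name)(**{name: value}))
        print(name, "->", result)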
diff --git a/test/integration/targets/config/aliases b/test/integration/targets/config/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/config/aliases
+++ b/test/integration/targets/config/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/config/files/types.env b/test/integration/targets/config/files/types.env
new file mode 100644
index 00000000..b5fc43ee
--- /dev/null
+++ b/test/integration/targets/config/files/types.env
@@ -0,0 +1,11 @@
+# valid(list): does nothing, just for testing values
+ANSIBLE_TYPES_VALID=
+
+# mustunquote(list): does nothing, just for testing values
+ANSIBLE_TYPES_MUSTUNQUOTE=
+
+# notvalid(list): does nothing, just for testing values
+ANSIBLE_TYPES_NOTVALID=
+
+# totallynotvalid(list): does nothing, just for testing values
+ANSIBLE_TYPES_TOTALLYNOTVALID=
diff --git a/test/integration/targets/config/files/types.ini b/test/integration/targets/config/files/types.ini
new file mode 100644
index 00000000..c04b6d5a
--- /dev/null
+++ b/test/integration/targets/config/files/types.ini
@@ -0,0 +1,13 @@
+[list_values]
+# (list) does nothing, just for testing values
+mustunquote=
+
+# (list) does nothing, just for testing values
+notvalid=
+
+# (list) does nothing, just for testing values
+totallynotvalid=
+
+# (list) does nothing, just for testing values
+valid=
+
diff --git a/test/integration/targets/config/files/types.vars b/test/integration/targets/config/files/types.vars
new file mode 100644
index 00000000..d1427fc8
--- /dev/null
+++ b/test/integration/targets/config/files/types.vars
@@ -0,0 +1,15 @@
+# valid(list): does nothing, just for testing values
+ansible_types_valid: ''
+
+
+# mustunquote(list): does nothing, just for testing values
+ansible_types_mustunquote: ''
+
+
+# notvalid(list): does nothing, just for testing values
+ansible_types_notvalid: ''
+
+
+# totallynotvalid(list): does nothing, just for testing values
+ansible_types_totallynotvalid: ''
+
diff --git a/test/integration/targets/config/files/types_dump.txt b/test/integration/targets/config/files/types_dump.txt
new file mode 100644
index 00000000..2139f4d1
--- /dev/null
+++ b/test/integration/targets/config/files/types_dump.txt
@@ -0,0 +1,8 @@
+
+types:
+_____
+_terms(default) = None
+mustunquote(default) = None
+notvalid(default) = None
+totallynotvalid(default) = None
+valid(default) = None
diff --git a/test/integration/targets/config/lookup_plugins/types.py b/test/integration/targets/config/lookup_plugins/types.py
new file mode 100644
index 00000000..d3092296
--- /dev/null
+++ b/test/integration/targets/config/lookup_plugins/types.py
@@ -0,0 +1,82 @@
+# (c) 2021 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = """
+ name: types
+ author: Ansible Core Team
+ version_added: histerical
+ short_description: returns what you gave it
+ description:
+ - this is mostly a noop
+ options:
+ _terms:
+ description: stuff to pass through
+ valid:
+ description: does nothing, just for testing values
+ type: list
+ ini:
+ - section: list_values
+ key: valid
+ env:
+ - name: ANSIBLE_TYPES_VALID
+ vars:
+ - name: ansible_types_valid
+ mustunquote:
+ description: does nothing, just for testing values
+ type: list
+ ini:
+ - section: list_values
+ key: mustunquote
+ env:
+ - name: ANSIBLE_TYPES_MUSTUNQUOTE
+ vars:
+ - name: ansible_types_mustunquote
+ notvalid:
+ description: does nothing, just for testing values
+ type: list
+ ini:
+ - section: list_values
+ key: notvalid
+ env:
+ - name: ANSIBLE_TYPES_NOTVALID
+ vars:
+ - name: ansible_types_notvalid
+ totallynotvalid:
+ description: does nothing, just for testing values
+ type: list
+ ini:
+ - section: list_values
+ key: totallynotvalid
+ env:
+ - name: ANSIBLE_TYPES_TOTALLYNOTVALID
+ vars:
+ - name: ansible_types_totallynotvalid
+"""
+
+EXAMPLES = """
+- name: like some other plugins, this is mostly useless
+ debug: msg={{ q('types', [1,2,3])}}
+"""
+
+RETURN = """
+ _list:
+ description: basically the same as you fed in
+ type: list
+ elements: raw
+"""
+
+from ansible.plugins.lookup import LookupBase
+
+
+class LookupModule(LookupBase):
+
+ def run(self, terms, variables=None, **kwargs):
+
+ self.set_options(var_options=variables, direct=kwargs)
+
+ return terms
diff --git a/test/integration/targets/config/runme.sh b/test/integration/targets/config/runme.sh
index bbff6acc..76df44c4 100755
--- a/test/integration/targets/config/runme.sh
+++ b/test/integration/targets/config/runme.sh
@@ -21,3 +21,19 @@ ANSIBLE_CONFIG=inline_comment_ansible.cfg ansible-config dump --only-changed | g
# test the config option validation
ansible-playbook validation.yml "$@"
+
+# test types from config (just lists for now)
+ANSIBLE_CONFIG=type_munging.cfg ansible-playbook types.yml "$@"
+
+cleanup() {
+ rm -f files/*.new.*
+}
+
+trap 'cleanup' EXIT
+
+# check ansible-config init output for each format
+for format in "vars" "ini" "env"
+do
+ ANSIBLE_LOOKUP_PLUGINS=./ ansible-config init types -t lookup -f "${format}" > "files/types.new.${format}"
+ diff -u "files/types.${format}" "files/types.new.${format}"
+done
diff --git a/test/integration/targets/config/type_munging.cfg b/test/integration/targets/config/type_munging.cfg
new file mode 100644
index 00000000..d6aeaab6
--- /dev/null
+++ b/test/integration/targets/config/type_munging.cfg
@@ -0,0 +1,8 @@
+[defaults]
+nothing = here
+
+[list_values]
+valid = 1, 2, 3
+mustunquote = '1', '2', '3'
+notvalid = [1, 2, 3]
+totallynotvalid = ['1', '2', '3']
diff --git a/test/integration/targets/config/types.yml b/test/integration/targets/config/types.yml
new file mode 100644
index 00000000..650a96f6
--- /dev/null
+++ b/test/integration/targets/config/types.yml
@@ -0,0 +1,25 @@
+- hosts: localhost
+ gather_facts: false
+ tasks:
+ - name: ensure we got the lists we expected
+ block:
+ - name: initialize plugin
+ debug: msg={{ lookup('types', 'starting test') }}
+
+ - set_fact:
+ valid: '{{ lookup("config", "valid", plugin_type="lookup", plugin_name="types") }}'
+ mustunquote: '{{ lookup("config", "mustunquote", plugin_type="lookup", plugin_name="types") }}'
+ notvalid: '{{ lookup("config", "notvalid", plugin_type="lookup", plugin_name="types") }}'
+ totallynotvalid: '{{ lookup("config", "totallynotvalid", plugin_type="lookup", plugin_name="types") }}'
+
+ - assert:
+ that:
+ - 'valid|type_debug == "list"'
+ - 'mustunquote|type_debug == "list"'
+ - 'notvalid|type_debug == "list"'
+ - 'totallynotvalid|type_debug == "list"'
+ - valid[0]|int == 1
+ - mustunquote[0]|int == 1
+ - "notvalid[0] == '[1'"
+ # using 'and true' to avoid quote hell
+ - totallynotvalid[0] == "['1'" and True
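The assertions above pin down how the config loader coerces INI values to lists: values are split on commas and individually unquoted, not parsed as Python or YAML, so bracketed input survives as literal text and the first element of [1, 2, 3] comes back as the string '[1'. A hypothetical helper that mimics the asserted behavior (not the real ansible.config code):

    def _unquote(item):
        item = item.strip()
        # drop quotes only when the whole item is quoted
        if len(item) > 1 and item[0] == item[-1] and item[0] in "'\"":
            item = item[1:-1]
        return item

    def ini_to_list(raw):
        # naive comma split; no bracket or Python-literal parsing
        return [_unquote(part) for part in raw.split(",")]

    assert ini_to_list("1, 2, 3") == ["1", "2", "3"]
    assert ini_to_list("'1', '2', '3'") == ["1", "2", "3"]
    assert ini_to_list("[1, 2, 3]")[0] == "[1"
    assert ini_to_list("['1', '2', '3']")[0] == "['1'"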
diff --git a/test/integration/targets/connection_delegation/aliases b/test/integration/targets/connection_delegation/aliases
index 87caabdf..44e49e4f 100644
--- a/test/integration/targets/connection_delegation/aliases
+++ b/test/integration/targets/connection_delegation/aliases
@@ -1,4 +1,5 @@
shippable/posix/group1
+context/controller
skip/freebsd # No sshpass
skip/osx # No sshpass
skip/macos # No sshpass
diff --git a/test/integration/targets/connection_delegation/runme.sh b/test/integration/targets/connection_delegation/runme.sh
index eb26f7c5..4d507243 100755
--- a/test/integration/targets/connection_delegation/runme.sh
+++ b/test/integration/targets/connection_delegation/runme.sh
@@ -3,7 +3,7 @@
set -ux
echo "Checking if sshpass is present"
-which sshpass 2>&1 || exit 0
+command -v sshpass 2>&1 || exit 0
echo "sshpass is present, continuing with test"
sshpass -p my_password ansible-playbook -i inventory.ini test.yml -k "$@"
diff --git a/test/integration/targets/connection_paramiko_ssh/aliases b/test/integration/targets/connection_paramiko_ssh/aliases
index ad44392e..fd5b08a4 100644
--- a/test/integration/targets/connection_paramiko_ssh/aliases
+++ b/test/integration/targets/connection_paramiko_ssh/aliases
@@ -2,4 +2,3 @@ needs/ssh
shippable/posix/group3
needs/target/setup_paramiko
destructive # potentially installs/uninstalls OS packages via setup_paramiko
-skip/aix
diff --git a/test/integration/targets/connection_ssh/aliases b/test/integration/targets/connection_ssh/aliases
index 1d822b45..50fb8eb8 100644
--- a/test/integration/targets/connection_ssh/aliases
+++ b/test/integration/targets/connection_ssh/aliases
@@ -1,3 +1,2 @@
needs/ssh
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/connection_ssh/runme.sh b/test/integration/targets/connection_ssh/runme.sh
index 7e5953ed..cbadf1d5 100755
--- a/test/integration/targets/connection_ssh/runme.sh
+++ b/test/integration/targets/connection_ssh/runme.sh
@@ -68,3 +68,6 @@ ANSIBLE_SSH_TRANSFER_METHOD=piped ./posix.sh "$@"
# test config defaults override
ansible-playbook check_ssh_defaults.yml "$@" -i test_connection.inventory
+
+# ensure we can load from ini cfg
+ANSIBLE_CONFIG=./test_ssh_defaults.cfg ansible-playbook verify_config.yml "$@"
diff --git a/test/integration/targets/connection_ssh/test_ssh_defaults.cfg b/test/integration/targets/connection_ssh/test_ssh_defaults.cfg
new file mode 100644
index 00000000..362f9460
--- /dev/null
+++ b/test/integration/targets/connection_ssh/test_ssh_defaults.cfg
@@ -0,0 +1,5 @@
+[ssh_connection]
+ssh_common_args=fromconfig
+ssh_extra_args=fromconfig
+scp_extra_args=fromconfig
+sftp_extra_args=fromconfig
diff --git a/test/integration/targets/connection_ssh/verify_config.yml b/test/integration/targets/connection_ssh/verify_config.yml
new file mode 100644
index 00000000..0bf79586
--- /dev/null
+++ b/test/integration/targets/connection_ssh/verify_config.yml
@@ -0,0 +1,21 @@
+- hosts: localhost
+ gather_facts: false
+ vars:
+ ssh_configs:
+ - ssh_common_args
+ - ssh_extra_args
+ - sftp_extra_args
+ - scp_extra_args
+ tasks:
+ - debug:
+ msg: '{{item ~ ": " ~ lookup("config", item, plugin_type="connection", plugin_name="ssh")}}'
+ verbosity: 3
+ loop: '{{ssh_configs}}'
+ tags: [ configfile ]
+
+ - name: check config from file
+ assert:
+ that:
+ - 'lookup("config", item, plugin_type="connection", plugin_name="ssh") == "fromconfig"'
+ loop: '{{ssh_configs}}'
+ tags: [ configfile ]
diff --git a/test/integration/targets/controller/aliases b/test/integration/targets/controller/aliases
new file mode 100644
index 00000000..0ac86c92
--- /dev/null
+++ b/test/integration/targets/controller/aliases
@@ -0,0 +1,2 @@
+context/controller
+shippable/posix/group1
diff --git a/test/integration/targets/controller/tasks/main.yml b/test/integration/targets/controller/tasks/main.yml
new file mode 100644
index 00000000..354a593e
--- /dev/null
+++ b/test/integration/targets/controller/tasks/main.yml
@@ -0,0 +1,9 @@
+- name: Verify testhost is control host
+ stat:
+ path: "{{ output_dir }}"
+- name: Get control host details
+ setup:
+ register: control_host
+- name: Show control host details
+ debug:
+ msg: "{{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}"
diff --git a/test/integration/targets/copy/tasks/main.yml b/test/integration/targets/copy/tasks/main.yml
index e02c0232..bef182b8 100644
--- a/test/integration/targets/copy/tasks/main.yml
+++ b/test/integration/targets/copy/tasks/main.yml
@@ -27,6 +27,7 @@
chdir: '{{role_path}}/files/subdir/subdir1'
warn: no
with_dict: "{{ symlinks }}"
+ delegate_to: localhost
- name: Create remote unprivileged remote user
user:
@@ -78,6 +79,7 @@
when: ansible_os_family == 'RedHat' and ansible_selinux.get('mode') == 'enforcing'
- import_tasks: no_log.yml
+ delegate_to: localhost
- import_tasks: check_mode.yml
@@ -113,6 +115,7 @@
name: '{{ remote_unprivileged_user }}'
state: absent
remove: yes
+ force: yes
- name: Remove sudoers.d file
file:
diff --git a/test/integration/targets/copy/tasks/tests.yml b/test/integration/targets/copy/tasks/tests.yml
index be955317..fa4254c7 100644
--- a/test/integration/targets/copy/tasks/tests.yml
+++ b/test/integration/targets/copy/tasks/tests.yml
@@ -1489,13 +1489,13 @@
# src is a file, dest is a non-existent directory (2 levels of directories):
# using remote_src
# checks that dest is created
-- include: dest_in_non_existent_directories_remote_src.yml
+- include_tasks: file=dest_in_non_existent_directories_remote_src.yml
with_items:
- { src: 'foo.txt', dest: 'new_sub_dir1/sub_dir2/', check: 'new_sub_dir1/sub_dir2/foo.txt' }
# src is a file, dest is file in a non-existent directory: checks that a failure occurs
# using remote_src
-- include: src_file_dest_file_in_non_existent_dir_remote_src.yml
+- include_tasks: file=src_file_dest_file_in_non_existent_dir_remote_src.yml
with_items:
- 'new_sub_dir1/sub_dir2/foo.txt'
- 'new_sub_dir1/foo.txt'
@@ -1504,7 +1504,7 @@
# src is a file, dest is a non-existent directory (2 levels of directories):
# checks that dest is created
-- include: dest_in_non_existent_directories.yml
+- include_tasks: file=dest_in_non_existent_directories.yml
with_items:
- { src: 'foo.txt', dest: 'new_sub_dir1/sub_dir2/', check: 'new_sub_dir1/sub_dir2/foo.txt' }
- { src: 'subdir', dest: 'new_sub_dir1/sub_dir2/', check: 'new_sub_dir1/sub_dir2/subdir/bar.txt' }
@@ -1513,7 +1513,7 @@
- { src: 'subdir/', dest: 'new_sub_dir1/sub_dir2', check: 'new_sub_dir1/sub_dir2/bar.txt' }
# src is a file, dest is file in a non-existent directory: checks that a failure occurs
-- include: src_file_dest_file_in_non_existent_dir.yml
+- include_tasks: file=src_file_dest_file_in_non_existent_dir.yml
with_items:
- 'new_sub_dir1/sub_dir2/foo.txt'
- 'new_sub_dir1/foo.txt'
diff --git a/test/integration/targets/cron/tasks/main.yml b/test/integration/targets/cron/tasks/main.yml
index 899ec549..32e345d3 100644
--- a/test/integration/targets/cron/tasks/main.yml
+++ b/test/integration/targets/cron/tasks/main.yml
@@ -110,8 +110,9 @@
- assert:
that: remove_task_idempotence is not changed
-- name: Check that removing a cron task with cron_file and without specifying an user is allowed (#58493)
+- name: Check that removing a cron task with cron_file and without specifying a user is allowed (#58493)
cron:
+ name: test cron task
cron_file: unexistent_cron_file
state: absent
register: remove_cron_file
@@ -214,7 +215,24 @@
- assert:
that: not cron_file_stats.stat.exists
+- name: System cron tab cannot be managed
+ when: ansible_distribution != 'Alpine'
+ block:
+ - name: Add cron job
+ cron:
+ cron_file: "{{ system_crontab }}"
+ user: root
+ name: "integration test cron"
+ job: 'ls'
+ ignore_errors: yes
+ register: result
+
+ - assert:
+ that: "result.msg == 'Will not manage /etc/crontab via cron_file, see documentation.'"
+
+# TODO: restrict other root crontab locations
- name: System cron tab does not get removed
+ when: ansible_distribution == 'Alpine'
block:
- name: Add cron job
cron:
diff --git a/test/integration/targets/dataloader/aliases b/test/integration/targets/dataloader/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/dataloader/aliases
+++ b/test/integration/targets/dataloader/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/debug/aliases b/test/integration/targets/debug/aliases
index a6dafcf8..97c468e5 100644
--- a/test/integration/targets/debug/aliases
+++ b/test/integration/targets/debug/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/delegate_to/aliases b/test/integration/targets/delegate_to/aliases
index b8e973da..d6bb651c 100644
--- a/test/integration/targets/delegate_to/aliases
+++ b/test/integration/targets/delegate_to/aliases
@@ -1,4 +1,4 @@
shippable/posix/group3
needs/ssh
needs/root # only on macOS and FreeBSD to configure network interfaces
-skip/aix
+context/controller
diff --git a/test/integration/targets/dict_transformations/aliases b/test/integration/targets/dict_transformations/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/dict_transformations/aliases
+++ b/test/integration/targets/dict_transformations/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/dnf/tasks/cacheonly.yml b/test/integration/targets/dnf/tasks/cacheonly.yml
new file mode 100644
index 00000000..a5c84a37
--- /dev/null
+++ b/test/integration/targets/dnf/tasks/cacheonly.yml
@@ -0,0 +1,15 @@
+---
+- name: Test cacheonly (clean before testing)
+ command: dnf clean all
+
+- name: Try installing from cache where it has been cleaned
+ dnf:
+ name: sos
+ state: latest
+ cacheonly: true
+ register: dnf_result
+
+- name: Verify dnf has not changed
+ assert:
+ that:
+ - "not dnf_result is changed"
diff --git a/test/integration/targets/dnf/tasks/dnf.yml b/test/integration/targets/dnf/tasks/dnf.yml
index 7e6a8d8f..bf1ea848 100644
--- a/test/integration/targets/dnf/tasks/dnf.yml
+++ b/test/integration/targets/dnf/tasks/dnf.yml
@@ -700,7 +700,7 @@
content: |
[main]
exclude=lsof*
- dest: '{{ output_dir }}/test-dnf.conf'
+ dest: '{{ remote_tmp_dir }}/test-dnf.conf'
register: test_dnf_copy
- block:
@@ -728,7 +728,7 @@
always:
- name: remove exclude lsof conf file
file:
- path: '{{ output_dir }}/test-dnf.conf'
+ path: '{{ remote_tmp_dir }}/test-dnf.conf'
state: absent
# end test case where disable_excludes is supported
@@ -816,3 +816,21 @@
that:
- nonexisting is success
- nonexisting.msg == 'Nothing to do'
+
+# This block runs only on RHEL, which is tested with --remote; there the .mo
+# language files for dnf are present, unlike in the --docker images.
+- when: ansible_distribution == 'RedHat'
+ block:
+ - dnf:
+ name: langpacks-ja
+ state: present
+
+ - dnf:
+ name: nginx-mod*
+ state: absent
+ environment:
+ LANG: ja_JP.UTF-8
+ always:
+ - dnf:
+ name: langpacks-ja
+ state: absent
diff --git a/test/integration/targets/dnf/tasks/main.yml b/test/integration/targets/dnf/tasks/main.yml
index 51ab7d20..d66a0653 100644
--- a/test/integration/targets/dnf/tasks/main.yml
+++ b/test/integration/targets/dnf/tasks/main.yml
@@ -63,6 +63,15 @@
# TODO: Construct our own instance where 'nobest' applies, so we can stop using
# a third-party repo to test this behavior.
+#
+# This fails due to conflicts on Fedora 34, but we can nuke this entirely once
+# #74224 lands, because it covers nobest cases.
- include_tasks: nobest.yml
- when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('24', '>=')) or
+ when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('24', '>=') and
+ ansible_distribution_major_version is version('34', '!=')) or
+ (ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
+
+
+- include_tasks: cacheonly.yml
+ when: (ansible_distribution == 'Fedora' and ansible_distribution_major_version is version('23', '>=')) or
(ansible_distribution in ['RedHat', 'CentOS'] and ansible_distribution_major_version is version('8', '>='))
diff --git a/test/integration/targets/dnf/vars/Fedora-34.yml b/test/integration/targets/dnf/vars/Fedora-34.yml
new file mode 100644
index 00000000..859059f1
--- /dev/null
+++ b/test/integration/targets/dnf/vars/Fedora-34.yml
@@ -0,0 +1,2 @@
+astream_name: '@httpd:2.4/common'
+astream_name_no_stream: '@httpd/common'
diff --git a/test/integration/targets/dpkg_selections/tasks/main.yaml b/test/integration/targets/dpkg_selections/tasks/main.yaml
index 6abd1dec..abf9fa1b 100644
--- a/test/integration/targets/dpkg_selections/tasks/main.yaml
+++ b/test/integration/targets/dpkg_selections/tasks/main.yaml
@@ -1,3 +1,3 @@
---
- - include: 'dpkg_selections.yaml'
+ - include_tasks: file='dpkg_selections.yaml'
when: ansible_distribution in ('Ubuntu', 'Debian')
diff --git a/test/integration/targets/egg-info/aliases b/test/integration/targets/egg-info/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/egg-info/aliases
+++ b/test/integration/targets/egg-info/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/embedded_module/aliases b/test/integration/targets/embedded_module/aliases
index 765b70da..6452e6d4 100644
--- a/test/integration/targets/embedded_module/aliases
+++ b/test/integration/targets/embedded_module/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/target
diff --git a/test/integration/targets/environment/aliases b/test/integration/targets/environment/aliases
index b5983214..a3ada117 100644
--- a/test/integration/targets/environment/aliases
+++ b/test/integration/targets/environment/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/target
diff --git a/test/integration/targets/error_from_connection/aliases b/test/integration/targets/error_from_connection/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/error_from_connection/aliases
+++ b/test/integration/targets/error_from_connection/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/facts_d/aliases b/test/integration/targets/facts_d/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/facts_d/aliases
+++ b/test/integration/targets/facts_d/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/facts_linux_network/aliases b/test/integration/targets/facts_linux_network/aliases
index 21a4e907..703c532e 100644
--- a/test/integration/targets/facts_linux_network/aliases
+++ b/test/integration/targets/facts_linux_network/aliases
@@ -3,3 +3,4 @@ shippable/posix/group2
skip/freebsd
skip/osx
skip/macos
+context/controller
diff --git a/test/integration/targets/failed_when/aliases b/test/integration/targets/failed_when/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/failed_when/aliases
+++ b/test/integration/targets/failed_when/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/failed_when/tasks/main.yml b/test/integration/targets/failed_when/tasks/main.yml
index 3f8ae545..1b10bef1 100644
--- a/test/integration/targets/failed_when/tasks/main.yml
+++ b/test/integration/targets/failed_when/tasks/main.yml
@@ -66,3 +66,15 @@
that:
- "'failed' in result and not result.failed"
- "'failed_when_result' in result and not result.failed_when_result"
+
+- name: invalid conditional
+ command: echo foo
+ failed_when: boomboomboom
+ register: invalid_conditional
+ ignore_errors: true
+
+- assert:
+ that:
+ - invalid_conditional is failed
+ - invalid_conditional.stdout is defined
+ - invalid_conditional.failed_when_result is contains('boomboomboom')
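The new case exercises failed_when with an expression that references an undefined name: the task must fail and report the offending name in failed_when_result rather than aborting the run. Roughly the same failure can be reproduced with plain Jinja2 (a sketch, not Ansible's conditional implementation):

    from jinja2 import Environment, StrictUndefined

    env = Environment(undefined=StrictUndefined)
    expr = env.compile_expression("boomboomboom", undefined_to_none=False)
    try:
        bool(expr())  # truth-testing a StrictUndefined raises
    except Exception as exc:
        print(exc)    # 'boomboomboom' is undefined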
diff --git a/test/integration/targets/fetch/aliases b/test/integration/targets/fetch/aliases
index fb5d6faa..ff56593d 100644
--- a/test/integration/targets/fetch/aliases
+++ b/test/integration/targets/fetch/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/target/setup_remote_tmp_dir
+needs/ssh
diff --git a/test/integration/targets/fetch/cleanup.yml b/test/integration/targets/fetch/cleanup.yml
new file mode 100644
index 00000000..792b603c
--- /dev/null
+++ b/test/integration/targets/fetch/cleanup.yml
@@ -0,0 +1,16 @@
+- name: Cleanup user account
+ hosts: testhost
+
+ tasks:
+ - name: remove test user
+ user:
+ name: fetcher
+ state: absent
+ remove: yes
+ force: yes
+
+ - name: delete temporary directory
+ file:
+ path: "{{ remote_tmp_dir }}"
+ state: absent
+ no_log: yes
diff --git a/test/integration/targets/fetch/roles/fetch_tests/defaults/main.yml b/test/integration/targets/fetch/roles/fetch_tests/defaults/main.yml
new file mode 100644
index 00000000..f0b9cfc4
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/defaults/main.yml
@@ -0,0 +1 @@
+skip_cleanup: no
diff --git a/test/integration/targets/fetch/roles/fetch_tests/handlers/main.yml b/test/integration/targets/fetch/roles/fetch_tests/handlers/main.yml
new file mode 100644
index 00000000..c6c296af
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/handlers/main.yml
@@ -0,0 +1,8 @@
+- name: remove test user
+ user:
+ name: fetcher
+ state: absent
+ remove: yes
+ force: yes
+ become: yes
+ when: not skip_cleanup | bool
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/fail_on_missing.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/fail_on_missing.yml
new file mode 100644
index 00000000..d918aaeb
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/fail_on_missing.yml
@@ -0,0 +1,53 @@
+- name: Attempt to fetch a non-existent file - do not fail on missing
+ fetch:
+ src: "{{ remote_tmp_dir }}/doesnotexist"
+ dest: "{{ output_dir }}/fetched"
+ fail_on_missing: no
+ register: fetch_missing_nofail
+
+- name: Attempt to fetch a non-existent file - fail on missing
+ fetch:
+ src: "{{ remote_tmp_dir }}/doesnotexist"
+ dest: "{{ output_dir }}/fetched"
+ fail_on_missing: yes
+ register: fetch_missing
+ ignore_errors: yes
+
+- name: Attempt to fetch a non-existent file - fail on missing implicit
+ fetch:
+ src: "{{ remote_tmp_dir }}/doesnotexist"
+ dest: "{{ output_dir }}/fetched"
+ register: fetch_missing_implicit
+ ignore_errors: yes
+
+- name: Attempt to fetch a directory - should not fail but return a message
+ fetch:
+ src: "{{ remote_tmp_dir }}"
+ dest: "{{ output_dir }}/somedir"
+ fail_on_missing: no
+ register: fetch_dir
+
+- name: Attempt to fetch a directory - should fail
+ fetch:
+ src: "{{ remote_tmp_dir }}"
+ dest: "{{ output_dir }}/somedir"
+ fail_on_missing: yes
+ register: failed_fetch_dir
+ ignore_errors: yes
+
+- name: Check the results of the missing-file and directory fetch attempts
+ assert:
+ that:
+ - fetch_missing_nofail.msg is search('ignored')
+ - fetch_missing_nofail is not changed
+ - fetch_missing is failed
+ - fetch_missing is not changed
+ - fetch_missing.msg is search ('remote file does not exist')
+ - fetch_missing_implicit is failed
+ - fetch_missing_implicit is not changed
+ - fetch_missing_implicit.msg is search ('remote file does not exist')
+ - fetch_dir is not changed
+ - fetch_dir.msg is search('is a directory')
+ - failed_fetch_dir is failed
+ - failed_fetch_dir is not changed
+ - failed_fetch_dir.msg is search('is a directory')
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml
new file mode 100644
index 00000000..8a6b5b7b
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/failures.yml
@@ -0,0 +1,41 @@
+- name: Fetch with no parameters
+ fetch:
+ register: fetch_no_params
+ ignore_errors: yes
+
+- name: Fetch with incorrect source type
+ fetch:
+ src: [1, 2]
+ dest: "{{ output_dir }}/fetched"
+ register: fetch_incorrect_src
+ ignore_errors: yes
+
+- name: Try to fetch a file inside an inaccessible directory
+ fetch:
+ src: "{{ remote_tmp_dir }}/noaccess/file1"
+ dest: "{{ output_dir }}"
+ register: failed_fetch_no_access
+ become: yes
+ become_user: fetcher
+ become_method: su
+ ignore_errors: yes
+
+- name: Dest is an existing directory name without trailing slash and flat=yes, should fail
+ fetch:
+ src: "{{ remote_tmp_dir }}/orig"
+ dest: "{{ output_dir }}"
+ flat: yes
+ register: failed_fetch_dest_dir
+ ignore_errors: true
+
+- name: Ensure fetch failed
+ assert:
+ that:
+ - fetch_no_params is failed
+ - fetch_no_params.msg is search('src and dest are required')
+ - fetch_incorrect_src is failed
+ - fetch_incorrect_src.msg is search('Invalid type supplied for source')
+ - failed_fetch_no_access is failed
+ - failed_fetch_no_access.msg is search('file is not readable')
+ - failed_fetch_dest_dir is failed
+ - failed_fetch_dest_dir.msg is search('dest is an existing directory')
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/main.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/main.yml
index 267ae0f0..eefe95c8 100644
--- a/test/integration/targets/fetch/roles/fetch_tests/tasks/main.yml
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/main.yml
@@ -1,141 +1,5 @@
-# test code for the pip module
-# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
-
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-- name: create a file that we can use to fetch
- copy: content="test" dest={{ remote_tmp_dir }}/orig
-
-- name: fetch the test file
- fetch: src={{ remote_tmp_dir }}/orig dest={{ output_dir }}/fetched
- register: fetched
-
-- debug: var=fetched
-
-- name: Assert that we fetched correctly
- assert:
- that:
- - 'fetched["changed"] == True'
- - 'fetched["checksum"] == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"'
- - 'fetched["remote_checksum"] == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"'
- - 'lookup("file", output_dir + "/fetched/" + inventory_hostname + remote_tmp_dir + "/orig") == "test"'
-
-# TODO: check the become and non-become forms of fetch because in one form we'll do
-# the get method of the connection plugin and in the become case we'll use the
-# fetch module.
-
-- name: fetch a second time to show idempotence
- fetch: src={{ remote_tmp_dir }}/orig dest={{ output_dir }}/fetched
- register: fetched
-
-- name: Assert that the file was not fetched the second time
- assert:
- that:
- - 'fetched["changed"] == False'
- - 'fetched["checksum"] == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"'
-
-- name: attempt to fetch a non-existent file - do not fail on missing
- fetch: src={{ remote_tmp_dir }}/doesnotexist dest={{ output_dir }}/fetched fail_on_missing=False
- register: fetch_missing_nofail
-
-- name: check fetch missing no fail result
- assert:
- that:
- - "fetch_missing_nofail.msg"
- - "fetch_missing_nofail is not changed"
-
-- name: attempt to fetch a non-existent file - fail on missing
- fetch: src={{ remote_tmp_dir }}/doesnotexist dest={{ output_dir }}/fetched fail_on_missing=yes
- register: fetch_missing
- ignore_errors: true
-
-- name: check fetch missing with failure
- assert:
- that:
- - "fetch_missing is failed"
- - "fetch_missing.msg"
- - "fetch_missing is not changed"
-
-- name: attempt to fetch a non-existent file - fail on missing implicit
- fetch: src={{ remote_tmp_dir }}/doesnotexist dest={{ output_dir }}/fetched
- register: fetch_missing_implicit
- ignore_errors: true
-
-- name: check fetch missing with failure with implicit fail
- assert:
- that:
- - "fetch_missing_implicit is failed"
- - "fetch_missing_implicit.msg"
- - "fetch_missing_implicit is not changed"
-
-- name: attempt to fetch a directory - should not fail but return a message
- fetch: src={{ remote_tmp_dir }} dest={{ output_dir }}/somedir fail_on_missing=False
- register: fetch_dir
-
-- name: check fetch directory result
- assert:
- that:
- - "fetch_dir is not changed"
- - "fetch_dir.msg"
-
-- name: attempt to fetch a directory - should fail
- fetch: src={{ remote_tmp_dir }} dest={{ output_dir }}/somedir fail_on_missing=True
- register: failed_fetch_dir
- ignore_errors: true
-
-- name: check fetch directory result
- assert:
- that:
- - "failed_fetch_dir is failed"
- - "fetch_dir.msg"
-
-- name: create symlink to a file that we can fetch
- file:
- path: "{{ remote_tmp_dir }}/link"
- src: "{{ remote_tmp_dir }}/orig"
- state: "link"
-
-- name: fetch the file via a symlink
- fetch: src={{ remote_tmp_dir }}/link dest={{ output_dir }}/fetched-link
- register: fetched
-
-- debug: var=fetched
-
-- name: Assert that we fetched correctly
- assert:
- that:
- - 'fetched["changed"] == True'
- - 'fetched["checksum"] == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"'
- - 'fetched["remote_checksum"] == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"'
- - 'lookup("file", output_dir + "/fetched-link/" + inventory_hostname + remote_tmp_dir + "/link") == "test"'
-
-# TODO: check the become and non-become forms of fetch because in one form we'll do
-# the get method of the connection plugin and in the become case we'll use the
-# fetch module.
-
-- name: dest is an existing directory name without trailing slash and flat=yes, should fail
- fetch:
- src: "{{ remote_tmp_dir }}/orig"
- dest: "{{ output_dir }}"
- flat: yes
- register: failed_fetch_dest_dir
- ignore_errors: true
-
-- name: check that it indeed failed
- assert:
- that:
- - "failed_fetch_dest_dir is failed"
- - "failed_fetch_dest_dir.msg"
+- import_tasks: setup.yml
+- import_tasks: normal.yml
+- import_tasks: symlink.yml
+- import_tasks: fail_on_missing.yml
+- import_tasks: failures.yml
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/normal.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/normal.yml
new file mode 100644
index 00000000..6f3ab620
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/normal.yml
@@ -0,0 +1,38 @@
+- name: Fetch the test file
+ fetch: src={{ remote_tmp_dir }}/orig dest={{ output_dir }}/fetched
+ register: fetched
+
+- name: Fetch a second time to show no changes
+ fetch: src={{ remote_tmp_dir }}/orig dest={{ output_dir }}/fetched
+ register: fetched_again
+
+- name: Fetch the test file in check mode
+ fetch:
+ src: "{{ remote_tmp_dir }}/orig"
+ dest: "{{ output_dir }}/fetched"
+ check_mode: yes
+ register: fetch_check_mode
+
+- name: Fetch with dest ending in path sep
+ fetch:
+ src: "{{ remote_tmp_dir }}/orig"
+ dest: "{{ output_dir }}/"
+ flat: yes
+
+- name: Fetch with dest with relative path
+ fetch:
+ src: "{{ remote_tmp_dir }}/orig"
+ dest: "{{ output_dir[1:] }}"
+ flat: yes
+
+- name: Assert that we fetched correctly
+ assert:
+ that:
+ - fetched is changed
+ - fetched.checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
+ - fetched_again is not changed
+ - fetched_again.checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
+ - fetched.remote_checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
+ - lookup("file", output_dir + "/fetched/" + inventory_hostname + remote_tmp_dir + "/orig") == "test"
+ - fetch_check_mode is skipped
+ - fetch_check_mode.msg is search('not \(yet\) supported')
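The checksum these asserts compare against is simply the SHA-1 digest of the file content "test" written in setup.yml, which is easy to verify:

    import hashlib

    print(hashlib.sha1(b"test").hexdigest())
    # a94a8fe5ccb19ba61c4c0873d391e987982fbbd3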
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/setup.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/setup.yml
new file mode 100644
index 00000000..89b94c46
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/setup.yml
@@ -0,0 +1,45 @@
+- name: Include system specific variables
+ include_vars: "{{ lookup('first_found', params) }}"
+ vars:
+ params:
+ files:
+ - "{{ ansible_facts.system }}.yml"
+ - default.yml
+ paths:
+ - "{{ role_path }}/vars"
+
+- name: Work-around for locked users on Alpine
+ # see https://github.com/ansible/ansible/issues/68676
+ set_fact:
+ password: '*'
+ when: ansible_distribution == 'Alpine'
+
+- name: Create test user
+ user:
+ name: fetcher
+ create_home: yes
+ groups: "{{ _fetch_additional_groups | default(omit) }}"
+ append: "{{ True if _fetch_additional_groups else False }}"
+ password: "{{ password | default(omit) }}"
+ become: yes
+ notify:
+ - remove test user
+
+- name: Create a file that we can use to fetch
+ copy:
+ content: "test"
+ dest: "{{ remote_tmp_dir }}/orig"
+
+- name: Create symlink to a file that we can fetch
+ file:
+ path: "{{ remote_tmp_dir }}/link"
+ src: "{{ remote_tmp_dir }}/orig"
+ state: "link"
+
+- name: Create an inaccessible directory
+ file:
+ path: "{{ remote_tmp_dir }}/noaccess"
+ state: directory
+ mode: '0600'
+ owner: root
+ become: yes
diff --git a/test/integration/targets/fetch/roles/fetch_tests/tasks/symlink.yml b/test/integration/targets/fetch/roles/fetch_tests/tasks/symlink.yml
new file mode 100644
index 00000000..41d7b35a
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/tasks/symlink.yml
@@ -0,0 +1,13 @@
+- name: Fetch the file via a symlink
+ fetch:
+ src: "{{ remote_tmp_dir }}/link"
+ dest: "{{ output_dir }}/fetched-link"
+ register: fetched_symlink
+
+- name: Assert that we fetched correctly
+ assert:
+ that:
+ - fetched_symlink is changed
+ - fetched_symlink.checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
+ - fetched_symlink.remote_checksum == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
+ - 'lookup("file", output_dir + "/fetched-link/" + inventory_hostname + remote_tmp_dir + "/link") == "test"'
diff --git a/test/integration/targets/fetch/roles/fetch_tests/vars/Darwin.yml b/test/integration/targets/fetch/roles/fetch_tests/vars/Darwin.yml
new file mode 100644
index 00000000..0654b711
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/vars/Darwin.yml
@@ -0,0 +1,3 @@
+# macOS requires users to be in an additional group for ssh access
+
+_fetch_additional_groups: com.apple.access_ssh
diff --git a/test/integration/targets/fetch/roles/fetch_tests/vars/default.yml b/test/integration/targets/fetch/roles/fetch_tests/vars/default.yml
new file mode 100644
index 00000000..69d7958d
--- /dev/null
+++ b/test/integration/targets/fetch/roles/fetch_tests/vars/default.yml
@@ -0,0 +1 @@
+_fetch_additional_groups: []
diff --git a/test/integration/targets/fetch/runme.sh b/test/integration/targets/fetch/runme.sh
index 7e909dde..a508a0a6 100755
--- a/test/integration/targets/fetch/runme.sh
+++ b/test/integration/targets/fetch/runme.sh
@@ -2,11 +2,33 @@
set -eux
+function cleanup {
+ ansible-playbook -i "${INVENTORY_PATH}" cleanup.yml -e "output_dir=${OUTPUT_DIR}" -b "$@"
+ unset ANSIBLE_CACHE_PLUGIN
+ unset ANSIBLE_CACHE_PLUGIN_CONNECTION
+}
+
+trap 'cleanup "$@"' EXIT
+
# setup required roles
ln -s ../../setup_remote_tmp_dir roles/setup_remote_tmp_dir
# run old type role tests
-ansible-playbook -i ../../inventory run_fetch_tests.yml -e "output_dir=${OUTPUT_DIR}" -v "$@"
+ansible-playbook -i ../../inventory run_fetch_tests.yml -e "output_dir=${OUTPUT_DIR}" "$@"
+
+# run same test with become
+ansible-playbook -i ../../inventory run_fetch_tests.yml -e "output_dir=${OUTPUT_DIR}" -b "$@"
# run tests to avoid path injection from slurp when fetch uses become
-ansible-playbook -i ../../inventory injection/avoid_slurp_return.yml -e "output_dir=${OUTPUT_DIR}" -v "$@"
+ansible-playbook -i ../../inventory injection/avoid_slurp_return.yml -e "output_dir=${OUTPUT_DIR}" "$@"
+
+## Test unreadable file with stat. Requires running without become and as a user other than root.
+#
+# Use a local fact cache so the cached remote_tmp_dir fact persists across the playbook runs below
+export ANSIBLE_CACHE_PLUGIN=jsonfile
+export ANSIBLE_CACHE_PLUGIN_CONNECTION="${OUTPUT_DIR}/cache"
+# Create a non-root user account and configure SSH access for that account
+ansible-playbook -i "${INVENTORY_PATH}" setup_unreadable_test.yml -e "output_dir=${OUTPUT_DIR}" "$@"
+
+# Run the tests as the unprivileged user without become to test the use of the stat module from the fetch module
+ansible-playbook -i "${INVENTORY_PATH}" test_unreadable_with_stat.yml -e ansible_user=fetcher -e ansible_become=no -e "output_dir=${OUTPUT_DIR}" "$@"
diff --git a/test/integration/targets/fetch/setup_unreadable_test.yml b/test/integration/targets/fetch/setup_unreadable_test.yml
new file mode 100644
index 00000000..f4cc8c1e
--- /dev/null
+++ b/test/integration/targets/fetch/setup_unreadable_test.yml
@@ -0,0 +1,40 @@
+- name: Create a user account and configure ssh access
+ hosts: testhost
+ gather_facts: no
+
+ tasks:
+ - import_role:
+ name: fetch_tests
+ tasks_from: setup.yml
+ vars:
+ # Keep the remote temp dir and cache the remote_tmp_dir fact. The directory itself
+ # and the fact that contains the path are needed in a separate ansible-playbook run.
+ setup_remote_tmp_dir_skip_cleanup: yes
+ setup_remote_tmp_dir_cache_path: yes
+ skip_cleanup: yes
+
+ # This prevents ssh access. It is fixed in some container images but not all.
+ # https://github.com/ansible/distro-test-containers/pull/70
+ - name: Remove /run/nologin
+ file:
+ path: /run/nologin
+ state: absent
+
+ # Setup ssh access for the unprivileged user.
+ - name: Get home directory for temporary user
+ command: echo ~fetcher
+ register: fetcher_home
+
+ - name: Create .ssh dir
+ file:
+ path: "{{ fetcher_home.stdout }}/.ssh"
+ state: directory
+ owner: fetcher
+ mode: '0700'
+
+ - name: Configure authorized_keys
+ copy:
+ src: "~root/.ssh/authorized_keys"
+ dest: "{{ fetcher_home.stdout }}/.ssh/authorized_keys"
+ owner: fetcher
+ mode: '0600'
diff --git a/test/integration/targets/fetch/test_unreadable_with_stat.yml b/test/integration/targets/fetch/test_unreadable_with_stat.yml
new file mode 100644
index 00000000..c8a0145c
--- /dev/null
+++ b/test/integration/targets/fetch/test_unreadable_with_stat.yml
@@ -0,0 +1,36 @@
+# This playbook needs to be run as a non-root user without become. Under
+# those circumstances, the fetch module uses stat and not slurp.
+
+- name: Test unreadable file using stat
+ hosts: testhost
+ gather_facts: no
+
+ tasks:
+ - name: Check connectivity
+ command: whoami
+ register: whoami
+
+ - name: Verify user
+ assert:
+ that:
+ - whoami.stdout == 'fetcher'
+
+ - name: Try to fetch a file inside an inaccessible directory
+ fetch:
+ src: "{{ remote_tmp_dir }}/noaccess/file1"
+ dest: "{{ output_dir }}"
+ register: failed_fetch_no_access
+ ignore_errors: yes
+
+ - name: Try to fetch a file inside an inaccessible directory without fail_on_missing
+ fetch:
+ src: "{{ remote_tmp_dir }}/noaccess/file1"
+ dest: "{{ output_dir }}"
+ fail_on_missing: no
+ register: failed_fetch_no_access_fail_on_missing
+
+ - assert:
+ that:
+ - failed_fetch_no_access is failed
+ - failed_fetch_no_access.msg is search('Permission denied')
+ - failed_fetch_no_access_fail_on_missing.msg is search(', ignored')
diff --git a/test/integration/targets/file/handlers/main.yml b/test/integration/targets/file/handlers/main.yml
index b5040f6e..553f69ce 100644
--- a/test/integration/targets/file/handlers/main.yml
+++ b/test/integration/targets/file/handlers/main.yml
@@ -3,6 +3,7 @@
name: "{{ item }}"
state: absent
remove: yes
+ force: yes
loop:
- test1
- test_uid
diff --git a/test/integration/targets/file/tasks/directory_as_dest.yml b/test/integration/targets/file/tasks/directory_as_dest.yml
index 9b6ddb5d..85451e43 100644
--- a/test/integration/targets/file/tasks/directory_as_dest.yml
+++ b/test/integration/targets/file/tasks/directory_as_dest.yml
@@ -1,6 +1,6 @@
# File module tests for overwriting directories
- name: Initialize the test output dir
- include: initialize.yml
+ import_tasks: initialize.yml
# We need to make this more consistent:
# https://github.com/ansible/proposals/issues/111
diff --git a/test/integration/targets/file/tasks/main.yml b/test/integration/targets/file/tasks/main.yml
index 565afa02..c96beba3 100644
--- a/test/integration/targets/file/tasks/main.yml
+++ b/test/integration/targets/file/tasks/main.yml
@@ -91,7 +91,10 @@
- "file2_result.state == 'absent'"
- name: verify we can touch a file
- file: path={{output_dir}}/baz.txt state=touch
+ file:
+ path: "{{output_dir}}/baz.txt"
+ state: touch
+ mode: '0644'
register: file3_result
- name: verify that the file was marked as changed
diff --git a/test/integration/targets/file/tasks/selinux_tests.yml b/test/integration/targets/file/tasks/selinux_tests.yml
index 6a95c442..8efe8195 100644
--- a/test/integration/targets/file/tasks/selinux_tests.yml
+++ b/test/integration/targets/file/tasks/selinux_tests.yml
@@ -17,7 +17,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
- name: Initialize the test output dir
- include: initialize.yml
+ import_tasks: initialize.yml
- name: touch a file for testing
file: path={{output_dir}}/foo-se.txt state=touch
diff --git a/test/integration/targets/file/tasks/state_link.yml b/test/integration/targets/file/tasks/state_link.yml
index d84bb310..851b213e 100644
--- a/test/integration/targets/file/tasks/state_link.yml
+++ b/test/integration/targets/file/tasks/state_link.yml
@@ -1,7 +1,7 @@
# file module tests for dealing with symlinks (state=link)
- name: Initialize the test output dir
- include: initialize.yml
+ import_tasks: initialize.yml
#
# Basic absolute symlink to a file
@@ -181,6 +181,8 @@
user:
name: '{{ remote_unprivileged_user }}'
state: absent
+ force: yes
+ remove: yes
- name: Delete unprivileged user home and tempdir
file:
diff --git a/test/integration/targets/filter_core/aliases b/test/integration/targets/filter_core/aliases
index 1603f435..765b70da 100644
--- a/test/integration/targets/filter_core/aliases
+++ b/test/integration/targets/filter_core/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_core/tasks/main.yml b/test/integration/targets/filter_core/tasks/main.yml
index 8ab9d446..5a5d813f 100644
--- a/test/integration/targets/filter_core/tasks/main.yml
+++ b/test/integration/targets/filter_core/tasks/main.yml
@@ -283,6 +283,7 @@
multi_line: "{{ 'hello\nworld' | regex_search('^world', multiline=true) }}"
named_groups: "{{ 'goodbye' | regex_search('(?P<first>good)(?P<second>bye)', '\\g<second>', '\\g<first>') }}"
numbered_groups: "{{ 'goodbye' | regex_search('(good)(bye)', '\\2', '\\1') }}"
+ no_match_is_none_inline: "{{ 'hello' | regex_search('world') == none }}"
- name: regex_search unknown argument (failure expected)
set_fact:
@@ -299,6 +300,7 @@
- multi_line == 'world'
- named_groups == ['bye', 'good']
- numbered_groups == ['bye', 'good']
+ - no_match_is_none_inline
- failure is failed
- name: Verify to_bool
@@ -378,6 +380,18 @@
- "2|from_yaml == 2"
- "'---\nbananas: yellow\n---\napples: red'|from_yaml_all|list == [{'bananas': 'yellow'}, {'apples': 'red'}]"
- "2|from_yaml_all == 2"
+ - "unsafe_fruit|from_yaml == {'bananas': 'yellow', 'apples': 'red'}"
+ - "unsafe_fruit_all|from_yaml_all|list == [{'bananas': 'yellow'}, {'apples': 'red'}]"
+ vars:
+ unsafe_fruit: !unsafe |
+ ---
+ bananas: yellow
+ apples: red
+ unsafe_fruit_all: !unsafe |
+ ---
+ bananas: yellow
+ ---
+ apples: red
- name: Verify random raises on non-iterable input (failure expected)
set_fact:
@@ -435,7 +449,7 @@
- name: Verify password_hash
assert:
that:
- - "'what in the WORLD is up?'|password_hash|length == 106"
+ - "'what in the WORLD is up?'|password_hash|length == 120 or 'what in the WORLD is up?'|password_hash|length == 106"
# This throws a vastly different error on py2 vs py3, so we just check
# that it's a failure, not a substring of the exception.
- password_hash_1 is failed
@@ -480,6 +494,39 @@
- mandatory_2 is failed
- "mandatory_2.msg == 'You did not give me a variable. I am a sad wolf.'"
+- name: Verify undef throws if resolved
+ set_fact:
+ foo: '{{ fail_foo }}'
+ vars:
+ fail_foo: '{{ undef("Expected failure") }}'
+ ignore_errors: yes
+ register: fail_1
+
+- name: Setup fail_foo for overriding in test
+ block:
+ - name: Verify undef not executed if overridden
+ set_fact:
+ foo: '{{ fail_foo }}'
+ vars:
+ fail_foo: 'overridden value'
+ register: fail_2
+ vars:
+ fail_foo: '{{ undef(hint="Expected failure") }}'
+
+- name: Verify undef is inspectable
+ debug:
+ var: fail_foo
+ vars:
+ fail_foo: '{{ undef("Expected failure") }}'
+ register: fail_3
+
+- name: Verify undef
+ assert:
+ that:
+ - fail_1 is failed
+ - not (fail_2 is failed)
+ - not (fail_3 is failed)
+
- name: Verify comment
assert:
that:
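
The new no_match_is_none_inline case pins down the behavior the assertion relies on: when nothing matches, regex_search yields None rather than an empty string, so an inline comparison with none works. A short plain-Python illustration of the same semantics with the stdlib re module:

import re

# re.search returns None on no match; the filter surfaces that to Jinja as
# none instead of coercing it to an empty string.
assert re.search('world', 'hello') is None
# The multiline case above matches because '^' anchors per line here.
assert re.search('^world', 'hello\nworld', flags=re.MULTILINE) is not None
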
diff --git a/test/integration/targets/filter_encryption/aliases b/test/integration/targets/filter_encryption/aliases
new file mode 100644
index 00000000..765b70da
--- /dev/null
+++ b/test/integration/targets/filter_encryption/aliases
@@ -0,0 +1 @@
+shippable/posix/group2
diff --git a/test/integration/targets/filter_encryption/base.yml b/test/integration/targets/filter_encryption/base.yml
new file mode 100644
index 00000000..8bf25f77
--- /dev/null
+++ b/test/integration/targets/filter_encryption/base.yml
@@ -0,0 +1,37 @@
+- hosts: localhost
+ gather_facts: true
+ vars:
+ data: secret
+ dvault: '{{ "secret"|vault("test")}}'
+ password: test
+ s_32: '{{(2**31-1)}}'
+ s_64: '{{(2**63-1)}}'
+ vaultedstring_32: "$ANSIBLE_VAULT;1.2;AES256;filter_default\n33360a30386436633031333665316161303732656333373131373935623033393964633637346464\n6234613765313539306138373564366363306533356464613334320a666339363037303764636538\n3131633564326637303237313463613864626231\n"
+ vaultedstring_64: "$ANSIBLE_VAULT;1.2;AES256;filter_default\n33370a34333734353636633035656232613935353432656132646533346233326431346232616261\n6133383034376566366261316365633931356133633337396363370a376664386236313834326561\n6338373864623763613165366636633031303739\n"
+ vault: !vault |
+ $ANSIBLE_VAULT;1.1;AES256
+ 33323332333033383335333533383338333333303339333733323339333833303334333133313339
+ 33373339333133323331333833373335333933323338333633343338333133343334333733383334
+ 33333335333433383337333133303339333433353332333333363339333733363335333233303330
+ 3337333733353331333633313335333733373334333733320a373938666533366165653830313163
+ 62386564343438653437333564383664646538653364343138303831613039313232636437336530
+ 3438376662373764650a633366646563386335623161646262366137393635633464333265613938
+ 6661
+  # allow testing on 32-bit/64-bit limited archs; normally you could set a much higher bound for random (e.g. 2**256)
+ is_64: '{{ "64" in ansible_facts["architecture"] }}'
+ salt: '{{ is_64|bool|ternary(s_64, s_32)|random(seed=inventory_hostname)}}'
+ vaultedstring: '{{ is_64|bool|ternary(vaultedstring_64, vaultedstring_32) }}'
+
+ tasks:
+ - name: check vaulting
+ assert:
+ that:
+ - data|vault(password, salt=salt) == vaultedstring
+ - "data|vault(password, salt=salt)|type_debug != 'AnsibleVaultEncryptedUnicode'"
+ - "data|vault(password, salt=salt, wrap_object=True)|type_debug == 'AnsibleVaultEncryptedUnicode'"
+
+ - name: check unvaulting
+ assert:
+ that:
+ - vaultedstring|unvault(password) == data
+ - vault|unvault(password) == data
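
The salt plumbing in the vars above is what makes these assertions reproducible: seeding random with inventory_hostname yields the same salt on every run for a given host, and the 2**31-1 / 2**63-1 bounds keep the value representable on 32-bit and 64-bit platforms. A plain-Python sketch of the seeding idea (illustrative only, not Ansible's random filter):

import random

def deterministic_salt(hostname, is_64bit=True):
    # Mirrors the s_32/s_64 bounds above: 2**31-1 or 2**63-1.
    upper = 2 ** (63 if is_64bit else 31) - 1
    # Seeding with the hostname makes the "random" salt stable per host.
    return random.Random(hostname).randint(0, upper)

# Same host => same salt, which is what lets the play compare the vault
# filter's output against a precomputed vaulted string.
assert deterministic_salt('localhost') == deterministic_salt('localhost')
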
diff --git a/test/integration/targets/filter_encryption/runme.sh b/test/integration/targets/filter_encryption/runme.sh
new file mode 100755
index 00000000..41b30b1d
--- /dev/null
+++ b/test/integration/targets/filter_encryption/runme.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+set -eux
+
+ANSIBLE_GATHER_SUBSET='min' ansible-playbook base.yml "$@"
diff --git a/test/integration/targets/filter_mathstuff/aliases b/test/integration/targets/filter_mathstuff/aliases
index 1603f435..765b70da 100644
--- a/test/integration/targets/filter_mathstuff/aliases
+++ b/test/integration/targets/filter_mathstuff/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_mathstuff/host_vars/localhost.yml b/test/integration/targets/filter_mathstuff/host_vars/localhost.yml
new file mode 100644
index 00000000..1f5a9e03
--- /dev/null
+++ b/test/integration/targets/filter_mathstuff/host_vars/localhost.yml
@@ -0,0 +1 @@
+foo: test
diff --git a/test/integration/targets/filter_mathstuff/runme.sh b/test/integration/targets/filter_mathstuff/runme.sh
new file mode 100755
index 00000000..36503003
--- /dev/null
+++ b/test/integration/targets/filter_mathstuff/runme.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+set -eux
+
+export ANSIBLE_ROLES_PATH=../
+
+ansible-playbook runme.yml "$@"
+
+source virtualenv.sh
+
+# Install Jinja < 2.10 since we want to test the fallback to Ansible's custom
+# unique filter. Jinja < 2.10 does not have do_unique so we will trigger the
+# fallback.
+pip install 'jinja2 < 2.10'
+
+# Run the playbook again in the venv with Jinja < 2.10
+ansible-playbook runme.yml "$@"
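
The comment above relies on an import-guard fallback: Ansible prefers Jinja2's built-in do_unique and only uses its own unique implementation when that import fails, which is what the second playbook run in the Jinja < 2.10 venv exercises. A minimal sketch of the pattern, assuming Jinja2 is installed (illustrative names, not a verbatim copy of ansible/plugins/filter/mathstuff.py):

try:
    from jinja2.filters import do_unique  # only present in Jinja2 >= 2.10
    HAS_DO_UNIQUE = True
except ImportError:
    do_unique = None
    HAS_DO_UNIQUE = False


def unique(environment, items, case_sensitive=True):
    """Prefer Jinja2's filter; otherwise fall back to a local implementation."""
    if HAS_DO_UNIQUE:
        return list(do_unique(environment, items, case_sensitive=case_sensitive))
    # Fallback path: the venv run above lands here.
    seen = set()
    result = []
    for item in items:
        key = item if case_sensitive else str(item).lower()
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result
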
diff --git a/test/integration/targets/filter_mathstuff/runme.yml b/test/integration/targets/filter_mathstuff/runme.yml
new file mode 100644
index 00000000..a1eaef7a
--- /dev/null
+++ b/test/integration/targets/filter_mathstuff/runme.yml
@@ -0,0 +1,4 @@
+- hosts: localhost
+ gather_facts: false
+ roles:
+ - { role: filter_mathstuff }
diff --git a/test/integration/targets/filter_mathstuff/tasks/main.yml b/test/integration/targets/filter_mathstuff/tasks/main.yml
index 2a708be1..019f00e4 100644
--- a/test/integration/targets/filter_mathstuff/tasks/main.yml
+++ b/test/integration/targets/filter_mathstuff/tasks/main.yml
@@ -1,6 +1,6 @@
-- name: Verify unique's fallback's exception throwing for case_sensitive=True
+- name: Verify unique's fallback's exception throwing for case_sensitive=False
set_fact:
- unique_fallback_exc1: '{{ [{"foo": "bar", "moo": "cow"}]|unique(case_sensitive=True) }}'
+ unique_fallback_exc1: '{{ [{"foo": "bar", "moo": "cow"}]|unique(case_sensitive=False) }}'
ignore_errors: true
tags: unique
register: unique_fallback_exc1_res
@@ -67,6 +67,11 @@
- '[1,2,3]|intersect([3,2,1]) == [1,2,3]'
- '(1,2,3)|intersect((4,5,6))|list == []'
- '(1,2,3)|intersect((3,4,5,6))|list == [3]'
+ - '["a","A","b"]|intersect(["B","c","C"]) == []'
+ - '["a","A","b"]|intersect(["b","B","c","C"]) == ["b"]'
+ - '["a","A","b"]|intersect(["b","A","a"]) == ["a","A","b"]'
+ - '("a","A","b")|intersect(("B","c","C"))|list == []'
+ - '("a","A","b")|intersect(("b","B","c","C"))|list == ["b"]'
- name: Verify difference
tags: difference
@@ -77,6 +82,11 @@
- '[1,2,3]|difference([3,2,1]) == []'
- '(1,2,3)|difference((4,5,6))|list == [1,2,3]'
- '(1,2,3)|difference((3,4,5,6))|list == [1,2]'
+ - '["a","A","b"]|difference(["B","c","C"]) == ["a","A","b"]'
+ - '["a","A","b"]|difference(["b","B","c","C"]) == ["a","A"]'
+ - '["a","A","b"]|difference(["b","A","a"]) == []'
+ - '("a","A","b")|difference(("B","c","C"))|list|sort(case_sensitive=True) == ["A","a","b"]'
+ - '("a","A","b")|difference(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","a"]'
- name: Verify symmetric_difference
tags: symmetric_difference
@@ -87,6 +97,11 @@
- '[1,2,3]|symmetric_difference([3,2,1]) == []'
- '(1,2,3)|symmetric_difference((4,5,6))|list == [1,2,3,4,5,6]'
- '(1,2,3)|symmetric_difference((3,4,5,6))|list == [1,2,4,5,6]'
+ - '["a","A","b"]|symmetric_difference(["B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|symmetric_difference(["b","B","c","C"]) == ["a","A","B","c","C"]'
+ - '["a","A","b"]|symmetric_difference(["b","A","a"]) == []'
+ - '("a","A","b")|symmetric_difference(("B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
+ - '("a","A","b")|symmetric_difference(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","c"]'
- name: Verify union
tags: union
@@ -97,6 +112,11 @@
- '[1,2,3]|union([3,2,1]) == [1,2,3]'
- '(1,2,3)|union((4,5,6))|list == [1,2,3,4,5,6]'
- '(1,2,3)|union((3,4,5,6))|list == [1,2,3,4,5,6]'
+ - '["a","A","b"]|union(["B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|union(["b","B","c","C"]) == ["a","A","b","B","c","C"]'
+ - '["a","A","b"]|union(["b","A","a"]) == ["a","A","b"]'
+ - '("a","A","b")|union(("B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
+ - '("a","A","b")|union(("b","B","c","C"))|list|sort(case_sensitive=True) == ["A","B","C","a","b","c"]'
- name: Verify min
tags: min
@@ -281,6 +301,18 @@
- rekey_on_member_exc5_res is failed
- '"is not unique, cannot correctly turn into dict" in rekey_on_member_exc5_res.msg'
+- name: test that undefined positional args for rekey_on_member are properly handled
+ vars:
+ all_vars: "{{ hostvars[inventory_hostname] }}"
+ test_var: "{{ all_vars.foo }}"
+ block:
+ - include_vars:
+ file: defined_later.yml
+ - assert:
+ that: "test_var == 'test'"
+ - assert:
+ that: "rekeyed == {'value': {'test': 'value'}}"
+
# TODO: For some reason, the coverage tool isn't accounting for the last test
# so add another "last test" to fake it...
- assert:
diff --git a/test/integration/targets/filter_mathstuff/vars/defined_later.yml b/test/integration/targets/filter_mathstuff/vars/defined_later.yml
new file mode 100644
index 00000000..dfb2421b
--- /dev/null
+++ b/test/integration/targets/filter_mathstuff/vars/defined_later.yml
@@ -0,0 +1,3 @@
+do_rekey:
+ - test: value
+rekeyed: "{{ do_rekey | rekey_on_member(defined_later) }}"
diff --git a/test/integration/targets/filter_mathstuff/vars/main.yml b/test/integration/targets/filter_mathstuff/vars/main.yml
new file mode 100644
index 00000000..bb61e12e
--- /dev/null
+++ b/test/integration/targets/filter_mathstuff/vars/main.yml
@@ -0,0 +1 @@
+defined_later: "{{ test_var }}"
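
Those two vars files drive the rekey_on_member task above: the key argument only becomes resolvable once include_vars has run, and the final assertion fixes the expected transform. For reference, a stripped-down sketch of what rekey_on_member computes, with error handling reduced to the uniqueness check the earlier assertions quote:

def rekey_on_member(data, key):
    # Turn a list of mappings into a dict indexed by one member's value.
    out = {}
    for item in data:
        new_key = item[key]
        if new_key in out:
            raise ValueError('Key %s is not unique, cannot correctly turn into dict' % new_key)
        out[new_key] = item
    return out

# Matches the play's assertion: rekeyed == {'value': {'test': 'value'}}
assert rekey_on_member([{'test': 'value'}], 'test') == {'value': {'test': 'value'}}
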
diff --git a/test/integration/targets/filter_urls/aliases b/test/integration/targets/filter_urls/aliases
index 1603f435..765b70da 100644
--- a/test/integration/targets/filter_urls/aliases
+++ b/test/integration/targets/filter_urls/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_urlsplit/aliases b/test/integration/targets/filter_urlsplit/aliases
index 1603f435..765b70da 100644
--- a/test/integration/targets/filter_urlsplit/aliases
+++ b/test/integration/targets/filter_urlsplit/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/find/tasks/main.yml b/test/integration/targets/find/tasks/main.yml
index 91d92471..366ef312 100644
--- a/test/integration/targets/find/tasks/main.yml
+++ b/test/integration/targets/find/tasks/main.yml
@@ -207,7 +207,8 @@
- assert:
that:
- failed_path.files == []
- - failed_path.msg.startswith("Skipped '{{mypath}}' path due to this access issue")
+ - 'failed_path.msg == "Not all paths examined, check warnings for details"'
+ - mypath in failed_path.skipped_paths
- name: test number of examined directories/files
block:
@@ -272,3 +273,116 @@
assert:
that:
- '"{{ output_dir_test }}/e/f/g/h/8.ogg" not in find_test3_list'
+
+- name: create our age/size testing sub-directory
+ file:
+ path: "{{ output_dir_test }}/astest"
+ state: directory
+
+- name: create test file with old timestamps
+ file:
+ path: "{{ output_dir_test }}/astest/old.txt"
+ state: touch
+ modification_time: "202001011200.0"
+
+- name: create test file with current timestamps
+ file:
+ path: "{{ output_dir_test }}/astest/new.txt"
+ state: touch
+
+- name: create hidden test file with current timestamps
+ file:
+ path: "{{ output_dir_test }}/astest/.hidden.txt"
+ state: touch
+
+- name: find files older than 1 week
+ find:
+ path: "{{ output_dir_test }}/astest"
+ age: 1w
+ hidden: true
+ register: result
+
+- set_fact:
+ astest_list: >-
+ [ {% for f in result.files %}
+ {{ f.path }}
+ {% if not loop.last %},{% endif %}
+ {% endfor %}
+ ]
+
+- name: assert we only find the old file
+ assert:
+ that:
+ - result.matched == 1
+ - '"{{ output_dir_test }}/astest/old.txt" in astest_list'
+
+- name: find files newer than 1 week
+ find:
+ path: "{{ output_dir_test }}/astest"
+ age: -1w
+ register: result
+
+- set_fact:
+ astest_list: >-
+ [ {% for f in result.files %}
+ {{ f.path }}
+ {% if not loop.last %},{% endif %}
+ {% endfor %}
+ ]
+
+- name: assert we only find the current file
+ assert:
+ that:
+ - result.matched == 1
+ - '"{{ output_dir_test }}/astest/new.txt" in astest_list'
+
+- name: add some content to the new file
+ shell: "echo hello world > {{ output_dir_test }}/astest/new.txt"
+
+- name: find files with MORE than 5 bytes, also get checksums
+ find:
+ path: "{{ output_dir_test }}/astest"
+ size: 5
+ hidden: true
+ get_checksum: true
+ register: result
+
+- set_fact:
+ astest_list: >-
+ [ {% for f in result.files %}
+ {{ f.path }}
+ {% if not loop.last %},{% endif %}
+ {% endfor %}
+ ]
+
+- name: assert we only find the hello world file
+ assert:
+ that:
+ - result.matched == 1
+ - '"{{ output_dir_test }}/astest/new.txt" in astest_list'
+ - '"checksum" in result.files[0]'
+
+- name: find ANY item with LESS than 5 bytes, also get checksums
+ find:
+ path: "{{ output_dir_test }}/astest"
+ size: -5
+ hidden: true
+ get_checksum: true
+ file_type: any
+ register: result
+
+- set_fact:
+ astest_list: >-
+ [ {% for f in result.files %}
+ {{ f.path }}
+ {% if not loop.last %},{% endif %}
+ {% endfor %}
+ ]
+
+- name: assert we do not find the hello world file and a checksum is present
+ assert:
+ that:
+ - result.matched == 2
+ - '"{{ output_dir_test }}/astest/old.txt" in astest_list'
+ - '"{{ output_dir_test }}/astest/.hidden.txt" in astest_list'
+ - '"checksum" in result.files[0]'
diff --git a/test/integration/targets/gathering/aliases b/test/integration/targets/gathering/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/gathering/aliases
+++ b/test/integration/targets/gathering/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/gathering_facts/aliases b/test/integration/targets/gathering_facts/aliases
index 0ee704e1..027aba88 100644
--- a/test/integration/targets/gathering_facts/aliases
+++ b/test/integration/targets/gathering_facts/aliases
@@ -1,2 +1,3 @@
shippable/posix/group3
needs/root
+context/controller
diff --git a/test/integration/targets/gathering_facts/collections/ansible_collections/cisco/ios/plugins/modules/ios_facts.py b/test/integration/targets/gathering_facts/collections/ansible_collections/cisco/ios/plugins/modules/ios_facts.py
new file mode 100644
index 00000000..b79f7941
--- /dev/null
+++ b/test/integration/targets/gathering_facts/collections/ansible_collections/cisco/ios/plugins/modules/ios_facts.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+DOCUMENTATION = """
+---
+module: ios_facts
+short_description: supporting network facts module
+description:
+ - supporting network facts module for gather_facts + module_defaults tests
+options:
+ gather_subset:
+ description:
+ - When supplied, this argument restricts the facts collected
+ to a given subset.
+ - Possible values for this argument include
+ C(all), C(hardware), C(config), and C(interfaces).
+ - Specify a list of values to include a larger subset.
+ - Use a value with an initial C(!) to collect all facts except that subset.
+ required: false
+ default: '!config'
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ """main entry point for module execution
+ """
+ argument_spec = dict(
+ gather_subset=dict(default='!config')
+ )
+ module = AnsibleModule(argument_spec=argument_spec,
+ supports_check_mode=True)
+
+ module.exit_json(ansible_facts={'gather_subset': module.params['gather_subset'], '_ansible_facts_gathered': True})
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/gathering_facts/inventory b/test/integration/targets/gathering_facts/inventory
index e15ae780..6352a7d7 100644
--- a/test/integration/targets/gathering_facts/inventory
+++ b/test/integration/targets/gathering_facts/inventory
@@ -1,2 +1,2 @@
[local]
-facthost[0:25] ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"
+facthost[0:26] ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"
diff --git a/test/integration/targets/gathering_facts/runme.sh b/test/integration/targets/gathering_facts/runme.sh
index ebb82ab4..c1df560c 100755
--- a/test/integration/targets/gathering_facts/runme.sh
+++ b/test/integration/targets/gathering_facts/runme.sh
@@ -23,3 +23,5 @@ ansible-playbook verify_subset.yml "$@"
# ensure we can set defaults for the action plugin and facts module
ansible-playbook test_module_defaults.yml "$@" --tags default_fact_module
ANSIBLE_FACTS_MODULES='ansible.legacy.setup' ansible-playbook test_module_defaults.yml "$@" --tags custom_fact_module
+
+ansible-playbook test_module_defaults.yml "$@" --tags networking
diff --git a/test/integration/targets/gathering_facts/test_gathering_facts.yml b/test/integration/targets/gathering_facts/test_gathering_facts.yml
index 0939cba7..9174675d 100644
--- a/test/integration/targets/gathering_facts/test_gathering_facts.yml
+++ b/test/integration/targets/gathering_facts/test_gathering_facts.yml
@@ -140,6 +140,34 @@
- 'ansible_virtualization_role|default("UNDEF_VIRT") != "UNDEF_VIRT"'
- 'ansible_env|default("UNDEF_ENV") != "UNDEF_ENV"'
+- hosts: facthost25
+ tags: [ 'fact_min' ]
+ gather_facts: no
+ tasks:
+ - setup:
+ filter:
+ - "date_time"
+
+    - name: Test that retrieving all facts filtered to date_time works even without using the ansible_ prefix
+ assert:
+ that:
+ - 'ansible_facts["date_time"]|default("UNDEF_MOUNT") != "UNDEF_MOUNT"'
+ - 'ansible_date_time|default("UNDEF_MOUNT") != "UNDEF_MOUNT"'
+
+- hosts: facthost26
+ tags: [ 'fact_min' ]
+ gather_facts: no
+ tasks:
+ - setup:
+ filter:
+ - "ansible_date_time"
+
+    - name: Test that retrieving all facts filtered to date_time works even when using the ansible_ prefix
+ assert:
+ that:
+ - 'ansible_facts["date_time"]|default("UNDEF_MOUNT") != "UNDEF_MOUNT"'
+ - 'ansible_date_time|default("UNDEF_MOUNT") != "UNDEF_MOUNT"'
+
- hosts: facthost13
tags: [ 'fact_min' ]
connection: local
diff --git a/test/integration/targets/gathering_facts/test_module_defaults.yml b/test/integration/targets/gathering_facts/test_module_defaults.yml
index 5b0f9dd8..038b8ecf 100644
--- a/test/integration/targets/gathering_facts/test_module_defaults.yml
+++ b/test/integration/targets/gathering_facts/test_module_defaults.yml
@@ -77,3 +77,54 @@
- assert:
that:
- "gather_subset == ['min']"
+
+- hosts: localhost
+ gather_facts: no
+ tags:
+ - networking
+ tasks:
+ - name: test that task args aren't used for fqcn network facts
+ gather_facts:
+ gather_subset: min
+ vars:
+ ansible_network_os: 'cisco.ios.ios'
+ register: result
+
+ - assert:
+ that:
+ - "ansible_facts.gather_subset == '!config'"
+
+ - name: test that module_defaults are used for fqcn network facts
+ gather_facts:
+ vars:
+ ansible_network_os: 'cisco.ios.ios'
+ module_defaults:
+ 'cisco.ios.ios_facts': {'gather_subset': 'min'}
+ register: result
+
+ - assert:
+ that:
+ - "ansible_facts.gather_subset == 'min'"
+
+ - name: test that task args aren't used for legacy network facts
+ gather_facts:
+ gather_subset: min
+ vars:
+ ansible_network_os: 'ios'
+ register: result
+
+ - assert:
+ that:
+ - "ansible_facts.gather_subset == '!config'"
+
+ - name: test that module_defaults are used for legacy network facts
+ gather_facts:
+ vars:
+ ansible_network_os: 'ios'
+ module_defaults:
+ 'ios_facts': {'gather_subset': 'min'}
+ register: result
+
+ - assert:
+ that:
+ - "ansible_facts.gather_subset == 'min'"
diff --git a/test/integration/targets/gathering_facts/test_prevent_injection.yml b/test/integration/targets/gathering_facts/test_prevent_injection.yml
index f304fe88..064b7a90 100644
--- a/test/integration/targets/gathering_facts/test_prevent_injection.yml
+++ b/test/integration/targets/gathering_facts/test_prevent_injection.yml
@@ -5,7 +5,7 @@
- name: gather 'bad' facts
action: bogus_facts
- - name: ensure that the 'bad' facts didn't polute what they are not supposed to
+ - name: ensure that the 'bad' facts didn't pollute what they are not supposed to
assert:
that:
- "'touch' not in discovered_interpreter_python|default('')"
diff --git a/test/integration/targets/get_url/tasks/main.yml b/test/integration/targets/get_url/tasks/main.yml
index 32da1d51..b5a9c7e5 100644
--- a/test/integration/targets/get_url/tasks/main.yml
+++ b/test/integration/targets/get_url/tasks/main.yml
@@ -579,6 +579,35 @@
- '(result.content | b64decode) == "ansible.http.tests:SUCCESS"'
when: has_httptester
+- name: test unredirected_headers
+ get_url:
+ url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
+ username: user
+ password: passwd
+ force_basic_auth: true
+ unredirected_headers:
+ - authorization
+ dest: "{{ remote_tmp_dir }}/doesnt_matter"
+ ignore_errors: true
+ register: unredirected_headers
+
+- name: test unredirected_headers
+ get_url:
+ url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
+ username: user
+ password: passwd
+ force_basic_auth: true
+ dest: "{{ remote_tmp_dir }}/doesnt_matter"
+ register: redirected_headers
+
+- name: ensure unredirected_headers caused auth to fail
+ assert:
+ that:
+ - unredirected_headers is failed
+ - unredirected_headers.status_code == 401
+ - redirected_headers is successful
+ - redirected_headers.status_code == 200
+
- name: Test use_gssapi=True
include_tasks:
file: use_gssapi.yml
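
What the unredirected_headers pair above establishes, in stdlib terms: a header registered as unredirected is sent on the initial request only, and the redirect handler does not copy it onto the request for the new location, so basic auth fails with a 401 after the 301 unless the header is allowed to follow. A small urllib sketch of the same mechanism (the URL is a placeholder, not the httptester endpoint):

import base64
import urllib.request

url = 'https://example.invalid/redirect-to?status_code=301&url=/basic-auth/user/passwd'  # placeholder
token = base64.b64encode(b'user:passwd').decode()

req = urllib.request.Request(url)
# Attached to this request only; urllib's HTTPRedirectHandler rebuilds the
# request for the new location without unredirected headers.
req.add_unredirected_header('Authorization', 'Basic ' + token)
# A regular header, by contrast, would be re-sent after the redirect:
# req.add_header('Authorization', 'Basic ' + token)
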
diff --git a/test/integration/targets/git/tasks/archive.yml b/test/integration/targets/git/tasks/archive.yml
index 574559ef..18b9dff3 100644
--- a/test/integration/targets/git/tasks/archive.yml
+++ b/test/integration/targets/git/tasks/archive.yml
@@ -119,6 +119,7 @@
unarchive:
src: '{{ checkout_dir }}/test_role.{{ item }}'
dest: '{{ checkout_dir }}/{{ git_archive_prefix }}.{{ item }}'
+ remote_src: yes
with_items: "{{ git_archive_extensions[ansible_os_family ~ ansible_distribution_major_version | default('default') ] | default(git_archive_extensions.default) }}"
- name: ARCHIVE | Check if prefix directory exists in what's extracted
diff --git a/test/integration/targets/git/tasks/main.yml b/test/integration/targets/git/tasks/main.yml
index c5aeacbe..ed06eab5 100644
--- a/test/integration/targets/git/tasks/main.yml
+++ b/test/integration/targets/git/tasks/main.yml
@@ -21,6 +21,7 @@
- import_tasks: formats.yml
- import_tasks: missing_hostkey.yml
+- import_tasks: missing_hostkey_acceptnew.yml
- import_tasks: no-destination.yml
- import_tasks: specific-revision.yml
- import_tasks: submodules.yml
diff --git a/test/integration/targets/git/tasks/missing_hostkey.yml b/test/integration/targets/git/tasks/missing_hostkey.yml
index 02d5be35..6e4d53c3 100644
--- a/test/integration/targets/git/tasks/missing_hostkey.yml
+++ b/test/integration/targets/git/tasks/missing_hostkey.yml
@@ -46,3 +46,16 @@
that:
- git_result is changed
when: github_ssh_private_key is defined
+
+- name: MISSING-HOSTKEY | Remove github.com hostkey from known_hosts
+ lineinfile:
+ dest: '{{ output_dir }}/known_hosts'
+ regexp: "github.com"
+ state: absent
+ when: github_ssh_private_key is defined
+
+- name: MISSING-HOSTKEY | clear checkout_dir
+ file:
+ state: absent
+ path: '{{ checkout_dir }}'
+ when: github_ssh_private_key is defined
diff --git a/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml
new file mode 100644
index 00000000..fb8bb063
--- /dev/null
+++ b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml
@@ -0,0 +1,78 @@
+- name: MISSING-HOSTKEY | check accept_newhostkey support
+ shell: ssh -o StrictHostKeyChecking=accept-new -V
+ register: ssh_supports_accept_newhostkey
+ ignore_errors: true
+
+- block:
+ - name: MISSING-HOSTKEY | accept_newhostkey when ssh does not support the option
+ git:
+ repo: '{{ repo_format2 }}'
+ dest: '{{ checkout_dir }}'
+ accept_newhostkey: true
+ ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts'
+ register: git_result
+ ignore_errors: true
+
+ - assert:
+ that:
+ - git_result is failed
+ - git_result.warnings is search("does not support")
+
+ when: ssh_supports_accept_newhostkey.rc != 0
+
+- name: MISSING-HOSTKEY | checkout ssh://git@github.com repo without accept_newhostkey (expected fail)
+ git:
+ repo: '{{ repo_format2 }}'
+ dest: '{{ checkout_dir }}'
+ ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts'
+ register: git_result
+ ignore_errors: true
+
+- assert:
+ that:
+ - git_result is failed
+
+- block:
+ - name: MISSING-HOSTKEY | checkout git@github.com repo with accept_newhostkey (expected pass)
+ git:
+ repo: '{{ repo_format2 }}'
+ dest: '{{ checkout_dir }}'
+ accept_newhostkey: true
+ key_file: '{{ github_ssh_private_key }}'
+ ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts'
+ register: git_result
+
+ - assert:
+ that:
+ - git_result is changed
+
+ - name: MISSING-HOSTKEY | clear checkout_dir
+ file:
+ state: absent
+ path: '{{ checkout_dir }}'
+
+ - name: MISSING-HOSTKEY | checkout ssh://git@github.com repo with accept_newhostkey (expected pass)
+ git:
+ repo: '{{ repo_format3 }}'
+ dest: '{{ checkout_dir }}'
+ version: 'master'
+ accept_newhostkey: false # should already have been accepted
+ key_file: '{{ github_ssh_private_key }}'
+ ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts'
+ register: git_result
+
+ - assert:
+ that:
+ - git_result is changed
+
+  - name: MISSING-HOSTKEY | Remove github.com hostkey from known_hosts
+ lineinfile:
+ dest: '{{ output_dir }}/known_hosts'
+ regexp: "github.com"
+ state: absent
+
+ - name: MISSING-HOSTKEY | clear checkout_dir
+ file:
+ state: absent
+ path: '{{ checkout_dir }}'
+ when: github_ssh_private_key is defined and ssh_supports_accept_newhostkey.rc == 0
diff --git a/test/integration/targets/git/tasks/submodules.yml b/test/integration/targets/git/tasks/submodules.yml
index 647d1e23..0b311e79 100644
--- a/test/integration/targets/git/tasks/submodules.yml
+++ b/test/integration/targets/git/tasks/submodules.yml
@@ -122,3 +122,29 @@
- name: SUBMODULES | Ensure submodule2 is at the appropriate commit
assert:
that: '{{ submodule2.stdout_lines | length }} == 4'
+
+- name: SUBMODULES | clear checkout_dir
+ file:
+ state: absent
+ path: "{{ checkout_dir }}"
+
+
+- name: SUBMODULES | Clone main submodule repository
+ git:
+ repo: "{{ repo_submodules }}"
+ dest: "{{ checkout_dir }}/test.gitdir"
+ version: 45c6c07ef10fd9e453d90207e63da1ce5bd3ae1e
+ recursive: yes
+
+- name: SUBMODULES | Test that cloning submodule with .git in directory name works
+ git:
+ repo: "{{ repo_submodule1 }}"
+ dest: "{{ checkout_dir }}/test.gitdir/submodule1"
+
+- name: SUBMODULES | List submodule1
+ command: 'ls -1a {{ checkout_dir }}/test.gitdir/submodule1'
+ register: submodule1
+
+- name: SUBMODULES | Ensure submodule1 is at the appropriate commit
+ assert:
+ that: '{{ submodule1.stdout_lines | length }} == 4'
diff --git a/test/integration/targets/groupby_filter/aliases b/test/integration/targets/groupby_filter/aliases
index 31094c31..58201272 100644
--- a/test/integration/targets/groupby_filter/aliases
+++ b/test/integration/targets/groupby_filter/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
+context/controller
diff --git a/test/integration/targets/handler_race/aliases b/test/integration/targets/handler_race/aliases
index 68d6d978..1d28bdb2 100644
--- a/test/integration/targets/handler_race/aliases
+++ b/test/integration/targets/handler_race/aliases
@@ -1,3 +1,2 @@
shippable/posix/group5
-handler_race
-skip/aix
+context/controller
diff --git a/test/integration/targets/handlers/58841.yml b/test/integration/targets/handlers/58841.yml
new file mode 100644
index 00000000..eea5c2f3
--- /dev/null
+++ b/test/integration/targets/handlers/58841.yml
@@ -0,0 +1,9 @@
+---
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - include_role:
+ name: import_template_handler_names
+ tags:
+ - lazy_evaluation
+ - evaluation_time
diff --git a/test/integration/targets/handlers/aliases b/test/integration/targets/handlers/aliases
index 30bb677a..1d28bdb2 100644
--- a/test/integration/targets/handlers/aliases
+++ b/test/integration/targets/handlers/aliases
@@ -1,3 +1,2 @@
shippable/posix/group5
-handlers
-skip/aix
+context/controller
diff --git a/test/integration/targets/handlers/roles/import_template_handler_names/tasks/main.yml b/test/integration/targets/handlers/roles/import_template_handler_names/tasks/main.yml
new file mode 100644
index 00000000..3bc285e5
--- /dev/null
+++ b/test/integration/targets/handlers/roles/import_template_handler_names/tasks/main.yml
@@ -0,0 +1,11 @@
+- import_role:
+ name: template_handler_names
+ tasks_from: lazy_evaluation
+ tags:
+ - lazy_evaluation
+
+- import_role:
+ name: template_handler_names
+ tasks_from: evaluation_time
+ tags:
+ - evaluation_time
diff --git a/test/integration/targets/handlers/roles/template_handler_names/handlers/main.yml b/test/integration/targets/handlers/roles/template_handler_names/handlers/main.yml
new file mode 100644
index 00000000..bf8ca851
--- /dev/null
+++ b/test/integration/targets/handlers/roles/template_handler_names/handlers/main.yml
@@ -0,0 +1,5 @@
+- name: handler name with {{ test_var }}
+ debug: msg='handler with var ran'
+
+- name: handler name
+ debug: msg='handler ran'
diff --git a/test/integration/targets/handlers/roles/template_handler_names/tasks/evaluation_time.yml b/test/integration/targets/handlers/roles/template_handler_names/tasks/evaluation_time.yml
new file mode 100644
index 00000000..c0706fc5
--- /dev/null
+++ b/test/integration/targets/handlers/roles/template_handler_names/tasks/evaluation_time.yml
@@ -0,0 +1,5 @@
+- debug: msg='notify handler with variable in name'
+ notify: handler name with myvar
+ changed_when: True
+ tags:
+ - evaluation_time
diff --git a/test/integration/targets/handlers/roles/template_handler_names/tasks/lazy_evaluation.yml b/test/integration/targets/handlers/roles/template_handler_names/tasks/lazy_evaluation.yml
new file mode 100644
index 00000000..e82dca06
--- /dev/null
+++ b/test/integration/targets/handlers/roles/template_handler_names/tasks/lazy_evaluation.yml
@@ -0,0 +1,5 @@
+- debug: msg='notify handler'
+ notify: handler name
+ changed_when: True
+ tags:
+ - lazy_evaluation
diff --git a/test/integration/targets/handlers/roles/test_handlers_include/handlers/main.yml b/test/integration/targets/handlers/roles/test_handlers_include/handlers/main.yml
index abe01be4..6c3b73c6 100644
--- a/test/integration/targets/handlers/roles/test_handlers_include/handlers/main.yml
+++ b/test/integration/targets/handlers/roles/test_handlers_include/handlers/main.yml
@@ -1 +1 @@
-- include: handlers.yml
+- import_tasks: handlers.yml
diff --git a/test/integration/targets/handlers/runme.sh b/test/integration/targets/handlers/runme.sh
index cefa926b..7c403b4e 100755
--- a/test/integration/targets/handlers/runme.sh
+++ b/test/integration/targets/handlers/runme.sh
@@ -15,7 +15,7 @@ ansible-playbook from_handlers.yml -i inventory.handlers -v "$@" --tags scenario
ansible-playbook test_listening_handlers.yml -i inventory.handlers -v "$@"
[ "$(ansible-playbook test_handlers.yml -i inventory.handlers -v "$@" --tags scenario2 -l A \
-| grep -E -o 'RUNNING HANDLER \[test_handlers : .*?]')" = "RUNNING HANDLER [test_handlers : test handler]" ]
+| grep -E -o 'RUNNING HANDLER \[test_handlers : .*]')" = "RUNNING HANDLER [test_handlers : test handler]" ]
# Test forcing handlers using the linear and free strategy
for strategy in linear free; do
@@ -55,13 +55,13 @@ for strategy in linear free; do
done
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags playbook_include_handlers \
-| grep -E -o 'RUNNING HANDLER \[.*?]')" = "RUNNING HANDLER [test handler]" ]
+| grep -E -o 'RUNNING HANDLER \[.*]')" = "RUNNING HANDLER [test handler]" ]
[ "$(ansible-playbook test_handlers_include.yml -i ../../inventory -v "$@" --tags role_include_handlers \
-| grep -E -o 'RUNNING HANDLER \[test_handlers_include : .*?]')" = "RUNNING HANDLER [test_handlers_include : test handler]" ]
+| grep -E -o 'RUNNING HANDLER \[test_handlers_include : .*]')" = "RUNNING HANDLER [test_handlers_include : test handler]" ]
[ "$(ansible-playbook test_handlers_include_role.yml -i ../../inventory -v "$@" \
-| grep -E -o 'RUNNING HANDLER \[test_handlers_include_role : .*?]')" = "RUNNING HANDLER [test_handlers_include_role : test handler]" ]
+| grep -E -o 'RUNNING HANDLER \[test_handlers_include_role : .*]')" = "RUNNING HANDLER [test_handlers_include_role : test handler]" ]
# Notify handler listen
ansible-playbook test_handlers_listen.yml -i inventory.handlers -v "$@"
@@ -96,3 +96,21 @@ result="$(ansible-playbook test_handlers_template_run_once.yml -i inventory.hand
set -e
grep -q "handler A" <<< "$result"
grep -q "handler B" <<< "$result"
+
+# Test that an undefined variable in another handler's name isn't a failure
+ansible-playbook 58841.yml "$@" --tags lazy_evaluation 2>&1 | tee out.txt ; cat out.txt
+grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
+[ "$(grep out.txt -ce 'handler ran')" = "1" ]
+[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
+
+# Test templating a handler name with a defined variable
+ansible-playbook 58841.yml "$@" --tags evaluation_time -e test_var=myvar | tee out.txt ; cat out.txt
+[ "$(grep out.txt -ce 'handler ran')" = "0" ]
+[ "$(grep out.txt -ce 'handler with var ran')" = "1" ]
+
+# Test the handler is not found when the variable is undefined
+ansible-playbook 58841.yml "$@" --tags evaluation_time 2>&1 | tee out.txt ; cat out.txt
+grep out.txt -e "ERROR! The requested handler 'handler name with myvar' was not found"
+grep out.txt -e "\[WARNING\]: Handler 'handler name with {{ test_var }}' is unusable"
+[ "$(grep out.txt -ce 'handler ran')" = "0" ]
+[ "$(grep out.txt -ce 'handler with var ran')" = "0" ]
diff --git a/test/integration/targets/handlers/test_handlers_include.yml b/test/integration/targets/handlers/test_handlers_include.yml
index 5514fc10..158266d2 100644
--- a/test/integration/targets/handlers/test_handlers_include.yml
+++ b/test/integration/targets/handlers/test_handlers_include.yml
@@ -6,7 +6,7 @@
notify: test handler
tags: ['playbook_include_handlers']
handlers:
- - include: handlers.yml
+ - import_tasks: handlers.yml
- name: verify that role can include handler
hosts: testhost
diff --git a/test/integration/targets/hardware_facts/aliases b/test/integration/targets/hardware_facts/aliases
index e00c22c3..3933d2e5 100644
--- a/test/integration/targets/hardware_facts/aliases
+++ b/test/integration/targets/hardware_facts/aliases
@@ -1,3 +1,4 @@
destructive
needs/privileged
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/hash/aliases b/test/integration/targets/hash/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/hash/aliases
+++ b/test/integration/targets/hash/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/hosts_field/aliases b/test/integration/targets/hosts_field/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/hosts_field/aliases
+++ b/test/integration/targets/hosts_field/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/ignore_errors/aliases b/test/integration/targets/ignore_errors/aliases
index 3005e4b2..498fedd5 100644
--- a/test/integration/targets/ignore_errors/aliases
+++ b/test/integration/targets/ignore_errors/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/ignore_unreachable/aliases b/test/integration/targets/ignore_unreachable/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/ignore_unreachable/aliases
+++ b/test/integration/targets/ignore_unreachable/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/import_tasks/aliases b/test/integration/targets/import_tasks/aliases
index fff62d9f..a1b27a83 100644
--- a/test/integration/targets/import_tasks/aliases
+++ b/test/integration/targets/import_tasks/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/incidental_cloud_init_data_facts/aliases b/test/integration/targets/incidental_cloud_init_data_facts/aliases
index 85f7fe0f..544fcacd 100644
--- a/test/integration/targets/incidental_cloud_init_data_facts/aliases
+++ b/test/integration/targets/incidental_cloud_init_data_facts/aliases
@@ -4,3 +4,4 @@ skip/aix
skip/osx
skip/macos
skip/freebsd
+context/target
diff --git a/test/integration/targets/incidental_deploy_helper/aliases b/test/integration/targets/incidental_deploy_helper/aliases
index 31c6a8b4..3b88c806 100644
--- a/test/integration/targets/incidental_deploy_helper/aliases
+++ b/test/integration/targets/incidental_deploy_helper/aliases
@@ -1 +1,2 @@
shippable/posix/incidental
+context/target
diff --git a/test/integration/targets/incidental_inventory_aws_ec2/aliases b/test/integration/targets/incidental_inventory_aws_ec2/aliases
index 29f60feb..41a05d3c 100644
--- a/test/integration/targets/incidental_inventory_aws_ec2/aliases
+++ b/test/integration/targets/incidental_inventory_aws_ec2/aliases
@@ -1,2 +1,3 @@
cloud/aws
shippable/aws/incidental
+context/controller
diff --git a/test/integration/targets/incidental_inventory_aws_ec2/runme.sh b/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
index 916f7e8f..339be5dd 100755
--- a/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
+++ b/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
@@ -2,6 +2,10 @@
set -eux
+source virtualenv.sh
+
+python -m pip install boto3 boto
+
# ensure test config is empty
ansible-playbook playbooks/empty_inventory_config.yml "$@"
diff --git a/test/integration/targets/incidental_inventory_docker_swarm/aliases b/test/integration/targets/incidental_inventory_docker_swarm/aliases
index c3a38c06..74d3befe 100644
--- a/test/integration/targets/incidental_inventory_docker_swarm/aliases
+++ b/test/integration/targets/incidental_inventory_docker_swarm/aliases
@@ -1,6 +1,5 @@
shippable/posix/incidental
-skip/aix
-skip/power/centos
+context/controller
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/incidental_inventory_foreman/aliases b/test/integration/targets/incidental_inventory_foreman/aliases
index c28a056e..7eaacbbc 100644
--- a/test/integration/targets/incidental_inventory_foreman/aliases
+++ b/test/integration/targets/incidental_inventory_foreman/aliases
@@ -1,3 +1,4 @@
shippable/cloud/incidental
cloud/foreman
destructive
+context/controller
diff --git a/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml b/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
index c91f4c38..b9e32f7d 100644
--- a/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
+++ b/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
@@ -6,6 +6,10 @@
foreman_stub_api_path: /api/v2
cached_hosts_key: "http://{{ foreman_stub_host }}:{{ foreman_stub_port }}{{ foreman_stub_api_path }}/hosts"
tasks:
+ - name: make sure jmespath is installed
+ pip:
+ name: jmespath
+
- name: verify a cache file was created
find:
path:
diff --git a/test/integration/targets/incidental_inventory_foreman/runme.sh b/test/integration/targets/incidental_inventory_foreman/runme.sh
index ba94a936..d81fa02f 100755
--- a/test/integration/targets/incidental_inventory_foreman/runme.sh
+++ b/test/integration/targets/incidental_inventory_foreman/runme.sh
@@ -43,8 +43,8 @@ password: secure
validate_certs: False
FOREMAN_YAML
-ansible-playbook test_foreman_inventory.yml --connection=local "$@"
-ansible-playbook inspect_cache.yml --connection=local "$@"
+ansible-playbook test_foreman_inventory.yml --connection=local -e 'ansible_python_interpreter={{ ansible_playbook_python }}' "$@"
+ansible-playbook inspect_cache.yml --connection=local -e 'ansible_python_interpreter={{ ansible_playbook_python }}' "$@"
# remove inventory cache
rm -r ./foreman_cache
diff --git a/test/integration/targets/incidental_ios_file/tasks/cli.yaml b/test/integration/targets/incidental_ios_file/tasks/cli.yaml
index d4f663b3..3eb57691 100644
--- a/test/integration/targets/incidental_ios_file/tasks/cli.yaml
+++ b/test/integration/targets/incidental_ios_file/tasks/cli.yaml
@@ -10,7 +10,7 @@
set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
- name: run test cases (connection=ansible.netcommon.network_cli)
- include: "{{ test_case_to_run }}"
+ include_tasks: "{{ test_case_to_run }}"
with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
diff --git a/test/integration/targets/incidental_ios_file/tasks/main.yaml b/test/integration/targets/incidental_ios_file/tasks/main.yaml
index 415c99d8..24ad94ae 100644
--- a/test/integration/targets/incidental_ios_file/tasks/main.yaml
+++ b/test/integration/targets/incidental_ios_file/tasks/main.yaml
@@ -1,2 +1,2 @@
---
-- { include: cli.yaml, tags: ['cli'] }
+- { import_tasks: cli.yaml, tags: ['cli'] }
diff --git a/test/integration/targets/incidental_mongodb_parameter/aliases b/test/integration/targets/incidental_mongodb_parameter/aliases
index dc285483..72ed62eb 100644
--- a/test/integration/targets/incidental_mongodb_parameter/aliases
+++ b/test/integration/targets/incidental_mongodb_parameter/aliases
@@ -6,3 +6,4 @@ skip/macos
skip/freebsd
skip/rhel
needs/root
+context/target
diff --git a/test/integration/targets/incidental_setup_docker/vars/RedHat-8.yml b/test/integration/targets/incidental_setup_docker/vars/RedHat-8.yml
index ff6dcf7b..82343898 100644
--- a/test/integration/targets/incidental_setup_docker/vars/RedHat-8.yml
+++ b/test/integration/targets/incidental_setup_docker/vars/RedHat-8.yml
@@ -3,6 +3,7 @@ docker_prereq_packages:
- device-mapper-persistent-data
- lvm2
- libseccomp
+ - iptables
docker_packages:
- docker-ce-19.03.13
diff --git a/test/integration/targets/incidental_vyos_config/tasks/cli.yaml b/test/integration/targets/incidental_vyos_config/tasks/cli.yaml
index 22a71d96..d601bb70 100644
--- a/test/integration/targets/incidental_vyos_config/tasks/cli.yaml
+++ b/test/integration/targets/incidental_vyos_config/tasks/cli.yaml
@@ -10,13 +10,17 @@
set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
- name: run test case (connection=ansible.netcommon.network_cli)
- include: "{{ test_case_to_run }} ansible_connection=ansible.netcommon.network_cli"
+ include_tasks: "file={{ test_case_to_run }}"
+ vars:
+ ansible_connection: ansible.netcommon.network_cli
with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
- name: run test case (connection=local)
- include: "{{ test_case_to_run }} ansible_connection=local"
+ include_tasks: "file={{ test_case_to_run }}"
+ vars:
+ ansible_connection: local
with_first_found: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
diff --git a/test/integration/targets/incidental_vyos_config/tasks/cli_config.yaml b/test/integration/targets/incidental_vyos_config/tasks/cli_config.yaml
index 8ed28748..7e673560 100644
--- a/test/integration/targets/incidental_vyos_config/tasks/cli_config.yaml
+++ b/test/integration/targets/incidental_vyos_config/tasks/cli_config.yaml
@@ -10,7 +10,9 @@
set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
- name: run test case (connection=ansible.netcommon.network_cli)
- include: "{{ test_case_to_run }} ansible_connection=ansible.netcommon.network_cli"
+ include_tasks: "file={{ test_case_to_run }}"
+ vars:
+ ansible_connection: ansible.netcommon.network_cli
with_items: "{{ test_items }}"
loop_control:
loop_var: test_case_to_run
diff --git a/test/integration/targets/incidental_vyos_config/tasks/main.yaml b/test/integration/targets/incidental_vyos_config/tasks/main.yaml
index 13977a44..0d4e8fdd 100644
--- a/test/integration/targets/incidental_vyos_config/tasks/main.yaml
+++ b/test/integration/targets/incidental_vyos_config/tasks/main.yaml
@@ -1,3 +1,3 @@
---
-- {include: cli.yaml, tags: ['cli']}
-- {include: cli_config.yaml, tags: ['cli_config']}
+- {import_tasks: cli.yaml, tags: ['cli']}
+- {import_tasks: cli_config.yaml, tags: ['cli_config']}
diff --git a/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/cli.yaml b/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/cli.yaml
index 83496e0e..c6923f3e 100644
--- a/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/cli.yaml
+++ b/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/cli.yaml
@@ -11,7 +11,7 @@
set_fact: test_items="{{ test_cases.files | map(attribute='path') | list }}"
- name: Run test case (connection=ansible.netcommon.network_cli)
- include: "{{ test_case_to_run }}"
+ include_tasks: "{{ test_case_to_run }}"
vars:
ansible_connection: ansible.netcommon.network_cli
with_items: "{{ test_items }}"
diff --git a/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/main.yaml b/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/main.yaml
index d4cf26fc..a6d418bb 100644
--- a/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/main.yaml
+++ b/test/integration/targets/incidental_vyos_lldp_interfaces/tasks/main.yaml
@@ -1,2 +1,2 @@
---
-- {include: cli.yaml, tags: ['cli']}
+- {import_tasks: cli.yaml, tags: ['cli']}
diff --git a/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml b/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml
index ae6be90e..83c7197c 100644
--- a/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml
+++ b/test/integration/targets/incidental_win_data_deduplication/tasks/main.yml
@@ -1,2 +1,2 @@
---
-- include: pre_test.yml
+- import_tasks: pre_test.yml
diff --git a/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml b/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml
index f72955e4..0d1c3d50 100644
--- a/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml
+++ b/test/integration/targets/incidental_win_data_deduplication/tasks/pre_test.yml
@@ -34,7 +34,7 @@
- name: Run tests
block:
- - include: tests.yml
+ - import_tasks: tests.yml
always:
- name: Detach disk
win_command: diskpart.exe /s {{ remote_tmp_dir }}\partition_deletion_script.txt
diff --git a/test/integration/targets/incidental_win_security_policy/aliases b/test/integration/targets/incidental_win_security_policy/aliases
deleted file mode 100644
index a5fc90dc..00000000
--- a/test/integration/targets/incidental_win_security_policy/aliases
+++ /dev/null
@@ -1,2 +0,0 @@
-shippable/windows/incidental
-windows
diff --git a/test/integration/targets/incidental_win_security_policy/library/test_win_security_policy.ps1 b/test/integration/targets/incidental_win_security_policy/library/test_win_security_policy.ps1
deleted file mode 100644
index 5c83c1b5..00000000
--- a/test/integration/targets/incidental_win_security_policy/library/test_win_security_policy.ps1
+++ /dev/null
@@ -1,53 +0,0 @@
-#!powershell
-
-# WANT_JSON
-# POWERSHELL_COMMON
-
-# basic script to get the lsit of users in a particular right
-# this is quite complex to put as a simple script so this is
-# just a simple module
-
-$ErrorActionPreference = 'Stop'
-
-$params = Parse-Args $args -supports_check_mode $false
-$section = Get-AnsibleParam -obj $params -name "section" -type "str" -failifempty $true
-$key = Get-AnsibleParam -obj $params -name "key" -type "str" -failifempty $true
-
-$result = @{
- changed = $false
-}
-
-Function ConvertFrom-Ini($file_path) {
- $ini = @{}
- switch -Regex -File $file_path {
- "^\[(.+)\]" {
- $section = $matches[1]
- $ini.$section = @{}
- }
- "(.+?)\s*=(.*)" {
- $name = $matches[1].Trim()
- $value = $matches[2].Trim()
- if ($value -match "^\d+$") {
- $value = [int]$value
- } elseif ($value.StartsWith('"') -and $value.EndsWith('"')) {
- $value = $value.Substring(1, $value.Length - 2)
- }
-
- $ini.$section.$name = $value
- }
- }
-
- $ini
-}
-
-$secedit_ini_path = [IO.Path]::GetTempFileName()
-&SecEdit.exe /export /cfg $secedit_ini_path /quiet
-$secedit_ini = ConvertFrom-Ini -file_path $secedit_ini_path
-
-if ($secedit_ini.ContainsKey($section)) {
- $result.value = $secedit_ini.$section.$key
-} else {
- $result.value = $null
-}
-
-Exit-Json $result
diff --git a/test/integration/targets/incidental_win_security_policy/tasks/main.yml b/test/integration/targets/incidental_win_security_policy/tasks/main.yml
deleted file mode 100644
index 28fdb5ea..00000000
--- a/test/integration/targets/incidental_win_security_policy/tasks/main.yml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-- name: get current entry for audit
- test_win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- register: before_value_audit
-
-- name: get current entry for guest
- test_win_security_policy:
- section: System Access
- key: NewGuestName
- register: before_value_guest
-
-- block:
- - name: set AuditSystemEvents entry before tests
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: 0
-
- - name: set NewGuestName entry before tests
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: Guest
-
- - name: run tests
- include_tasks: tests.yml
-
- always:
- - name: reset entries for AuditSystemEvents
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: "{{before_value_audit.value}}"
-
- - name: reset entries for NewGuestName
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: "{{before_value_guest.value}}"
diff --git a/test/integration/targets/incidental_win_security_policy/tasks/tests.yml b/test/integration/targets/incidental_win_security_policy/tasks/tests.yml
deleted file mode 100644
index 724b6010..00000000
--- a/test/integration/targets/incidental_win_security_policy/tasks/tests.yml
+++ /dev/null
@@ -1,186 +0,0 @@
----
-- name: fail with invalid section name
- win_security_policy:
- section: This is not a valid section
- key: KeyName
- value: 0
- register: fail_invalid_section
- failed_when: fail_invalid_section.msg != "The section 'This is not a valid section' does not exist in SecEdit.exe output ini"
-
-- name: fail with invalid key name
- win_security_policy:
- section: System Access
- key: InvalidKey
- value: 0
- register: fail_invalid_key
- failed_when: fail_invalid_key.msg != "The key 'InvalidKey' in section 'System Access' is not a valid key, cannot set this value"
-
-- name: change existing key check
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: 1
- register: change_existing_check
- check_mode: yes
-
-- name: get actual change existing key check
- test_win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- register: change_existing_actual_check
-
-- name: assert change existing key check
- assert:
- that:
- - change_existing_check is changed
- - change_existing_actual_check.value == 0
-
-- name: change existing key
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: 1
- register: change_existing
-
-- name: get actual change existing key
- test_win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- register: change_existing_actual
-
-- name: assert change existing key
- assert:
- that:
- - change_existing is changed
- - change_existing_actual.value == 1
-
-- name: change existing key again
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: 1
- register: change_existing_again
-
-- name: assert change existing key again
- assert:
- that:
- - change_existing_again is not changed
- - change_existing_again.value == 1
-
-- name: change existing key with string type
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: "1"
- register: change_existing_key_with_type
-
-- name: assert change existing key with string type
- assert:
- that:
- - change_existing_key_with_type is not changed
- - change_existing_key_with_type.value == "1"
-
-- name: change existing string key check
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: New Guest
- register: change_existing_string_check
- check_mode: yes
-
-- name: get actual change existing string key check
- test_win_security_policy:
- section: System Access
- key: NewGuestName
- register: change_existing_string_actual_check
-
-- name: assert change existing string key check
- assert:
- that:
- - change_existing_string_check is changed
- - change_existing_string_actual_check.value == "Guest"
-
-- name: change existing string key
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: New Guest
- register: change_existing_string
-
-- name: get actual change existing string key
- test_win_security_policy:
- section: System Access
- key: NewGuestName
- register: change_existing_string_actual
-
-- name: assert change existing string key
- assert:
- that:
- - change_existing_string is changed
- - change_existing_string_actual.value == "New Guest"
-
-- name: change existing string key again
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: New Guest
- register: change_existing_string_again
-
-- name: assert change existing string key again
- assert:
- that:
- - change_existing_string_again is not changed
- - change_existing_string_again.value == "New Guest"
-
-- name: add policy setting
- win_security_policy:
- section: Privilege Rights
- # following key is empty by default
- key: SeCreateTokenPrivilege
- # add Guests
- value: '*S-1-5-32-546'
-
-- name: get actual policy setting
- test_win_security_policy:
- section: Privilege Rights
- key: SeCreateTokenPrivilege
- register: add_policy_setting_actual
-
-- name: assert add policy setting
- assert:
- that:
- - add_policy_setting_actual.value == '*S-1-5-32-546'
-
-- name: remove policy setting
- win_security_policy:
- section: Privilege Rights
- key: SeCreateTokenPrivilege
- value: ''
- diff: yes
- register: remove_policy_setting
-
-- name: get actual policy setting
- test_win_security_policy:
- section: Privilege Rights
- key: SeCreateTokenPrivilege
- register: remove_policy_setting_actual
-
-- name: assert remove policy setting
- assert:
- that:
- - remove_policy_setting is changed
- - remove_policy_setting.diff.prepared == "[Privilege Rights]\n-SeCreateTokenPrivilege = *S-1-5-32-546\n+SeCreateTokenPrivilege = "
- - remove_policy_setting_actual.value is none
-
-- name: remove policy setting again
- win_security_policy:
- section: Privilege Rights
- key: SeCreateTokenPrivilege
- value: ''
- register: remove_policy_setting_again
-
-- name: assert remove policy setting again
- assert:
- that:
- - remove_policy_setting_again is not changed
- - remove_policy_setting_again.value == ''
diff --git a/test/integration/targets/include_import/aliases b/test/integration/targets/include_import/aliases
index fff62d9f..1d28bdb2 100644
--- a/test/integration/targets/include_import/aliases
+++ b/test/integration/targets/include_import/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller
diff --git a/test/integration/targets/include_import/include_role_omit/playbook.yml b/test/integration/targets/include_import/include_role_omit/playbook.yml
new file mode 100644
index 00000000..a036906a
--- /dev/null
+++ b/test/integration/targets/include_import/include_role_omit/playbook.yml
@@ -0,0 +1,12 @@
+- hosts: localhost
+ gather_facts: false
+ vars:
+ include_role_omit: false
+ tasks:
+ - include_role:
+ name: foo
+ tasks_from: '{{ omit }}'
+
+ - assert:
+ that:
+ - include_role_omit is sameas(true)
diff --git a/test/integration/targets/include_import/include_role_omit/roles/foo/tasks/main.yml b/test/integration/targets/include_import/include_role_omit/roles/foo/tasks/main.yml
new file mode 100644
index 00000000..e27ca5b0
--- /dev/null
+++ b/test/integration/targets/include_import/include_role_omit/roles/foo/tasks/main.yml
@@ -0,0 +1,2 @@
+- set_fact:
+ include_role_omit: true
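For reference: 'omit' passed to an include_role parameter is treated as if the parameter had not been supplied, so the include falls back to the role's default entry point, tasks/main.yml, which sets the fact the playbook asserts on. A minimal sketch of the two equivalent spellings:

- include_role:
    name: foo
    tasks_from: '{{ omit }}'   # treated as not supplied; runs tasks/main.yml
- include_role:
    name: foo                  # identical effect with the parameter left out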
diff --git a/test/integration/targets/include_import/playbook/test_templated_filenames.yml b/test/integration/targets/include_import/playbook/test_templated_filenames.yml
new file mode 100644
index 00000000..2f78ab09
--- /dev/null
+++ b/test/integration/targets/include_import/playbook/test_templated_filenames.yml
@@ -0,0 +1,47 @@
+- name: test templating import_playbook with extra vars
+ import_playbook: "{{ pb }}"
+
+- name: test templating import_playbook with vars
+ import_playbook: "{{ test_var }}"
+ vars:
+ test_var: validate_templated_playbook.yml
+
+- name: test templating import_tasks
+ hosts: localhost
+ gather_facts: no
+ vars:
+ play_var: validate_templated_tasks.yml
+ tasks:
+ - name: test templating import_tasks with play vars
+ import_tasks: "{{ play_var }}"
+
+ - name: test templating import_tasks with task vars
+ import_tasks: "{{ task_var }}"
+ vars:
+ task_var: validate_templated_tasks.yml
+
+ - name: test templating import_tasks with extra vars
+ import_tasks: "{{ tasks }}"
+
+- name: test templating import_role from_files
+ hosts: localhost
+ gather_facts: no
+ vars:
+ play_var: templated.yml
+ tasks:
+ - name: test templating import_role tasks_from with play vars
+ import_role:
+ name: role1
+ tasks_from: "{{ play_var }}"
+
+ - name: test templating import_role tasks_from with task vars
+ import_role:
+ name: role1
+ tasks_from: "{{ task_var }}"
+ vars:
+ task_var: templated.yml
+
+ - name: test templating import_role tasks_from with extra vars
+ import_role:
+ name: role1
+ tasks_from: "{{ tasks_from }}"
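The import_* family is resolved at parse time, so these templated file names must come from variables already known then: extra vars, play vars, or task-level vars, which is exactly what each stanza above exercises. A minimal sketch of the pattern, using the tasks file shipped with this target:

- name: parse-time templating of a tasks file
  import_tasks: '{{ task_var }}'          # must resolve when the play is loaded
  vars:
    task_var: validate_templated_tasks.yml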
diff --git a/test/integration/targets/include_import/playbook/validate_templated_playbook.yml b/test/integration/targets/include_import/playbook/validate_templated_playbook.yml
new file mode 100644
index 00000000..631ee9b4
--- /dev/null
+++ b/test/integration/targets/include_import/playbook/validate_templated_playbook.yml
@@ -0,0 +1,5 @@
+---
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - debug: msg="In imported playbook"
diff --git a/test/integration/targets/include_import/playbook/validate_templated_tasks.yml b/test/integration/targets/include_import/playbook/validate_templated_tasks.yml
new file mode 100644
index 00000000..16d682d1
--- /dev/null
+++ b/test/integration/targets/include_import/playbook/validate_templated_tasks.yml
@@ -0,0 +1 @@
+- debug: msg="In imported tasks"
diff --git a/test/integration/targets/include_import/roles/role1/tasks/templated.yml b/test/integration/targets/include_import/roles/role1/tasks/templated.yml
new file mode 100644
index 00000000..eb9a9976
--- /dev/null
+++ b/test/integration/targets/include_import/roles/role1/tasks/templated.yml
@@ -0,0 +1 @@
+- debug: msg="In imported role"
diff --git a/test/integration/targets/include_import/runme.sh b/test/integration/targets/include_import/runme.sh
index f2633032..7029ab6d 100755
--- a/test/integration/targets/include_import/runme.sh
+++ b/test/integration/targets/include_import/runme.sh
@@ -126,3 +126,12 @@ ANSIBLE_HOST_PATTERN_MISMATCH=error ansible-playbook empty_group_warning/playboo
ansible-playbook test_include_loop.yml "$@"
ansible-playbook test_include_loop_fqcn.yml "$@"
+
+ansible-playbook include_role_omit/playbook.yml "$@"
+
+# Test templating import_playbook, import_tasks, and import_role files
+ansible-playbook playbook/test_templated_filenames.yml -e "pb=validate_templated_playbook.yml tasks=validate_templated_tasks.yml tasks_from=templated.yml" "$@" | tee out.txt
+cat out.txt
+test "$(grep out.txt -ce 'In imported playbook')" = 2
+test "$(grep out.txt -ce 'In imported tasks')" = 3
+test "$(grep out.txt -ce 'In imported role')" = 3
diff --git a/test/integration/targets/include_import/undefined_var/playbook.yml b/test/integration/targets/include_import/undefined_var/playbook.yml
index 0584fa8a..6576d50a 100644
--- a/test/integration/targets/include_import/undefined_var/playbook.yml
+++ b/test/integration/targets/include_import/undefined_var/playbook.yml
@@ -26,8 +26,7 @@
- "_include_role_result is failed"
msg: "'include_role' did not evaluate it's attached condition and failed"
- - include: include_that_defines_var.yml
- static: yes
+ - import_tasks: include_that_defines_var.yml
when:
- "_undefined == 'yes'"
diff --git a/test/integration/targets/include_vars-ad-hoc/aliases b/test/integration/targets/include_vars-ad-hoc/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/include_vars-ad-hoc/aliases
+++ b/test/integration/targets/include_vars-ad-hoc/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/include_vars/tasks/main.yml b/test/integration/targets/include_vars/tasks/main.yml
index 799d7b26..db15ba3c 100644
--- a/test/integration/targets/include_vars/tasks/main.yml
+++ b/test/integration/targets/include_vars/tasks/main.yml
@@ -57,6 +57,8 @@
include_vars:
dir: vars
extensions: ['', 'yaml', 'yml', 'json']
+ ignore_files:
+ - no_auto_unsafe.yml
register: include_every_dir
 - name: verify that the correct files have been loaded and overwritten based on alphabetical order
@@ -78,6 +80,7 @@
ignore_files:
- webapp.yml
- file_without_extension
+ - no_auto_unsafe.yml
register: include_without_webapp
- name: verify that the webapp.yml file was not included
@@ -162,3 +165,53 @@
that:
- "'my_custom_service' == service_name_fqcn"
- "'my_custom_service' == service_name_tmpl_fqcn"
+
+- name: Include a vars file with a hash variable
+ include_vars:
+ file: vars2/hashes/hash1.yml
+
+- name: Verify the hash variable
+ assert:
+ that:
+ - "{{ config | length }} == 3"
+ - "config.key0 == 0"
+ - "config.key1 == 0"
+ - "{{ config.key2 | length }} == 1"
+ - "config.key2.a == 21"
+
+- name: Include the second file to merge the hash variable
+ include_vars:
+ file: vars2/hashes/hash2.yml
+ hash_behaviour: merge
+
+- name: Verify that the hash is merged
+ assert:
+ that:
+ - "{{ config | length }} == 4"
+ - "config.key0 == 0"
+ - "config.key1 == 1"
+ - "{{ config.key2 | length }} == 2"
+ - "config.key2.a == 21"
+ - "config.key2.b == 22"
+ - "config.key3 == 3"
+
+- name: Include the second file again without hash_behaviour option
+ include_vars:
+ file: vars2/hashes/hash2.yml
+
+- name: Verify that the properties from the first file are cleared
+ assert:
+ that:
+ - "{{ config | length }} == 3"
+ - "config.key1 == 1"
+ - "{{ config.key2 | length }} == 1"
+ - "config.key2.b == 22"
+ - "config.key3 == 3"
+
+- include_vars:
+ file: no_auto_unsafe.yml
+ register: baz
+
+- assert:
+ that:
+ - baz.ansible_facts.foo|type_debug != "AnsibleUnsafeText"
diff --git a/test/integration/targets/include_vars/vars/no_auto_unsafe.yml b/test/integration/targets/include_vars/vars/no_auto_unsafe.yml
new file mode 100644
index 00000000..20e9ff3f
--- /dev/null
+++ b/test/integration/targets/include_vars/vars/no_auto_unsafe.yml
@@ -0,0 +1 @@
+foo: bar
diff --git a/test/integration/targets/include_vars/vars2/hashes/hash1.yml b/test/integration/targets/include_vars/vars2/hashes/hash1.yml
new file mode 100644
index 00000000..b0706f8f
--- /dev/null
+++ b/test/integration/targets/include_vars/vars2/hashes/hash1.yml
@@ -0,0 +1,5 @@
+---
+config:
+ key0: 0
+ key1: 0
+ key2: { a: 21 }
diff --git a/test/integration/targets/include_vars/vars2/hashes/hash2.yml b/test/integration/targets/include_vars/vars2/hashes/hash2.yml
new file mode 100644
index 00000000..1f2a9636
--- /dev/null
+++ b/test/integration/targets/include_vars/vars2/hashes/hash2.yml
@@ -0,0 +1,5 @@
+---
+config:
+ key1: 1
+ key2: { b: 22 }
+ key3: 3
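For reference, the merge the new tasks assert on: with hash_behaviour: merge, hash2.yml is combined recursively with the already-loaded hash1.yml instead of replacing it, giving:

config:
  key0: 0       # kept from hash1.yml
  key1: 1       # overridden by hash2.yml
  key2:
    a: 21       # kept from hash1.yml
    b: 22       # added by hash2.yml
  key3: 3       # added by hash2.yml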
diff --git a/test/integration/targets/include_when_parent_is_dynamic/aliases b/test/integration/targets/include_when_parent_is_dynamic/aliases
index 41c99f51..8278ec8b 100644
--- a/test/integration/targets/include_when_parent_is_dynamic/aliases
+++ b/test/integration/targets/include_when_parent_is_dynamic/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/python2.6 # include is controller only, and we no longer support Python 2.6 on the controller
+context/controller
diff --git a/test/integration/targets/include_when_parent_is_static/aliases b/test/integration/targets/include_when_parent_is_static/aliases
index 41c99f51..8278ec8b 100644
--- a/test/integration/targets/include_when_parent_is_static/aliases
+++ b/test/integration/targets/include_when_parent_is_static/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/python2.6 # include is controller only, and we no longer support Python 2.6 on the controller
+context/controller
diff --git a/test/integration/targets/includes/aliases b/test/integration/targets/includes/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/includes/aliases
+++ b/test/integration/targets/includes/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/includes/include_on_playbook_should_fail.yml b/test/integration/targets/includes/include_on_playbook_should_fail.yml
new file mode 100644
index 00000000..953459dc
--- /dev/null
+++ b/test/integration/targets/includes/include_on_playbook_should_fail.yml
@@ -0,0 +1 @@
+- include: test_includes3.yml
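Play-level include is no longer accepted, which is exactly the error the new runme.sh check greps for; the supported spelling of the same thing is:

- import_playbook: test_includes3.yml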
diff --git a/test/integration/targets/includes/roles/test_includes/tasks/branch_toplevel.yml b/test/integration/targets/includes/roles/test_includes/tasks/branch_toplevel.yml
index 62416705..30cd6f28 100644
--- a/test/integration/targets/includes/roles/test_includes/tasks/branch_toplevel.yml
+++ b/test/integration/targets/includes/roles/test_includes/tasks/branch_toplevel.yml
@@ -1,9 +1,11 @@
# 'canary2' used instead of 'canary', otherwise a "recursive loop detected in
# template string" occurs when both includes use static=yes
-- include: 'leaf_sublevel.yml canary2={{ canary }}'
- static: yes
+- import_tasks: leaf_sublevel.yml
+ vars:
+ canary2: '{{ canary }}'
 when: 'nested_include_static|bool' # value for 'static' cannot be a variable, hence use 'when'
-- include: 'leaf_sublevel.yml canary2={{ canary }}'
- static: no
+- include_tasks: leaf_sublevel.yml
+ vars:
+ canary2: '{{ canary }}'
when: 'not nested_include_static|bool'
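The same rewrite also retires the inline 'key=value' parameter syntax the old include lines used; parameters move into an explicit vars block, as in this sketch taken from the conversion above:

# legacy: - include: 'leaf_sublevel.yml canary2={{ canary }}'
- include_tasks: leaf_sublevel.yml
  vars:
    canary2: '{{ canary }}'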
diff --git a/test/integration/targets/includes/roles/test_includes/tasks/main.yml b/test/integration/targets/includes/roles/test_includes/tasks/main.yml
index 6fcac9eb..83ca468b 100644
--- a/test/integration/targets/includes/roles/test_includes/tasks/main.yml
+++ b/test/integration/targets/includes/roles/test_includes/tasks/main.yml
@@ -81,26 +81,34 @@
- included_handler
- verify_handler
-- include: branch_toplevel.yml canary=value1 nested_include_static=no
- static: no
+- include_tasks: branch_toplevel.yml
+ vars:
+ canary: value1
+ nested_include_static: 'no'
- assert:
that:
- 'canary_fact == "value1"'
-- include: branch_toplevel.yml canary=value2 nested_include_static=yes
- static: no
+- include_tasks: branch_toplevel.yml
+ vars:
+ canary: value2
+ nested_include_static: 'yes'
- assert:
that:
- 'canary_fact == "value2"'
-- include: branch_toplevel.yml canary=value3 nested_include_static=no
- static: yes
+- import_tasks: branch_toplevel.yml
+ vars:
+ canary: value3
+ nested_include_static: 'no'
- assert:
that:
- 'canary_fact == "value3"'
-- include: branch_toplevel.yml canary=value4 nested_include_static=yes
- static: yes
+- import_tasks: branch_toplevel.yml
+ vars:
+ canary: value4
+ nested_include_static: 'yes'
- assert:
that:
- 'canary_fact == "value4"'
diff --git a/test/integration/targets/includes/runme.sh b/test/integration/targets/includes/runme.sh
index 70ff105b..f4f0a016 100755
--- a/test/integration/targets/includes/runme.sh
+++ b/test/integration/targets/includes/runme.sh
@@ -5,3 +5,9 @@ set -eux
ansible-playbook test_includes.yml -i ../../inventory "$@"
ansible-playbook inherit_notify.yml "$@"
+
+echo "EXPECTED ERROR: Ensure we fail if using 'include' to include a playbook."
+set +e
+result="$(ansible-playbook -i ../../inventory include_on_playbook_should_fail.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! 'include' is not a valid attribute for a Play" <<< "$result"
diff --git a/test/integration/targets/includes/test_includes.yml b/test/integration/targets/includes/test_includes.yml
index 0bcebd4f..adeb80d2 100644
--- a/test/integration/targets/includes/test_includes.yml
+++ b/test/integration/targets/includes/test_includes.yml
@@ -1,7 +1,7 @@
-- include: test_includes2.yml parameter1=asdf parameter2=jkl
+- import_playbook: test_includes2.yml parameter1=asdf parameter2=jkl
-- include: test_includes3.yml
+- import_playbook: test_includes3.yml
-- include: test_include_free.yml
+- import_playbook: test_include_free.yml
-- include: test_include_host_pinned.yml
+- import_playbook: test_include_host_pinned.yml
diff --git a/test/integration/targets/includes_race/aliases b/test/integration/targets/includes_race/aliases
index fff62d9f..1d28bdb2 100644
--- a/test/integration/targets/includes_race/aliases
+++ b/test/integration/targets/includes_race/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller
diff --git a/test/integration/targets/infra/aliases b/test/integration/targets/infra/aliases
index 887d7029..71103238 100644
--- a/test/integration/targets/infra/aliases
+++ b/test/integration/targets/infra/aliases
@@ -1,3 +1,4 @@
shippable/posix/group3
needs/file/hacking/test-module.py
needs/file/lib/ansible/modules/ping.py
+context/controller
diff --git a/test/integration/targets/infra/runme.sh b/test/integration/targets/infra/runme.sh
index c4d84572..9e348b8c 100755
--- a/test/integration/targets/infra/runme.sh
+++ b/test/integration/targets/infra/runme.sh
@@ -30,10 +30,10 @@ PING_MODULE_PATH="../../../../lib/ansible/modules/ping.py"
../../../../hacking/test-module.py -m "$PING_MODULE_PATH"
# ensure test-module.py script works well
-../../../../hacking/test-module.py -m "$PING_MODULE_PATH" -I ansible_python_interpreter="$(which python)"
+../../../../hacking/test-module.py -m "$PING_MODULE_PATH" -I ansible_python_interpreter="${ANSIBLE_TEST_PYTHON_INTERPRETER}"
# ensure module.ansible_version is defined when using test-module.py
-../../../../hacking/test-module.py -m library/test.py -I ansible_python_interpreter="$(which python)" <<< '{"ANSIBLE_MODULE_ARGS": {}}'
+../../../../hacking/test-module.py -m library/test.py -I ansible_python_interpreter="${ANSIBLE_TEST_PYTHON_INTERPRETER}" <<< '{"ANSIBLE_MODULE_ARGS": {}}'
# ensure exercising module code locally works
python -m ansible.modules.file <<< '{"ANSIBLE_MODULE_ARGS": {"path": "/path/to/file", "state": "absent"}}'
diff --git a/test/integration/targets/interpreter_discovery_python/aliases b/test/integration/targets/interpreter_discovery_python/aliases
index 740ed1a5..0dfc90e7 100644
--- a/test/integration/targets/interpreter_discovery_python/aliases
+++ b/test/integration/targets/interpreter_discovery_python/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
non_local # workaround to allow override of ansible_python_interpreter; disables coverage on this integration target
+context/target
diff --git a/test/integration/targets/interpreter_discovery_python/tasks/main.yml b/test/integration/targets/interpreter_discovery_python/tasks/main.yml
index b8bafd15..770de0c5 100644
--- a/test/integration/targets/interpreter_discovery_python/tasks/main.yml
+++ b/test/integration/targets/interpreter_discovery_python/tasks/main.yml
@@ -76,10 +76,10 @@
ping:
register: legacy
- - name: check for dep warning (only on platforms where auto result is not /usr/bin/python and legacy is)
+ - name: check for warning (only on platforms where auto result is not /usr/bin/python and legacy is)
assert:
that:
- - legacy.deprecations | default([]) | length > 0
+ - legacy.warnings | default([]) | length > 0
      # only check for a warning if legacy returned /usr/bin/python and auto didn't
when: legacy.ansible_facts.discovered_interpreter_python == '/usr/bin/python' and
auto_out.ansible_facts.discovered_interpreter_python != '/usr/bin/python'
diff --git a/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases b/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
index dc9ac468..b4026b5f 100644
--- a/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
+++ b/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
non_local # this test requires interpreter discovery, which means code coverage must be disabled
+context/controller
diff --git a/test/integration/targets/inventory/aliases b/test/integration/targets/inventory/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/inventory/aliases
+++ b/test/integration/targets/inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/module_utils_respawn/aliases b/test/integration/targets/inventory_advanced_host_list/aliases
index a6dafcf8..a6dafcf8 100644
--- a/test/integration/targets/module_utils_respawn/aliases
+++ b/test/integration/targets/inventory_advanced_host_list/aliases
diff --git a/test/integration/targets/inventory_advanced_host_list/runme.sh b/test/integration/targets/inventory_advanced_host_list/runme.sh
new file mode 100755
index 00000000..41b1f8b9
--- /dev/null
+++ b/test/integration/targets/inventory_advanced_host_list/runme.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+set -eux
+
+export ANSIBLE_INVENTORY_ENABLED=advanced_host_list
+
+# A few settings to make the ad-hoc output easier to grep
+export ANSIBLE_LOAD_CALLBACK_PLUGINS=True
+export ANSIBLE_STDOUT_CALLBACK=oneline
+
+adhoc="$(ansible -i 'local[0:10],' -m ping --connection=local -e ansible_python_interpreter="{{ ansible_playbook_python }}" all -v)"
+
+for i in $(seq 0 10); do
+ grep -qE "local${i} \| SUCCESS.*\"ping\": \"pong\"" <<< "$adhoc"
+done
+
+set +e
+parse_fail="$(ansible -i 'local[1:j],' -m ping --connection=local all -v 2>&1)"
+set -e
+
+grep -q "Failed to parse local\[1:j\], with advanced_host_list" <<< "$parse_fail"
+
+# Intentionally missing comma; ensure we don't fail fatally.
+no_comma="$(ansible -i 'local[1:5]' -m ping --connection=local all -v 2>&1)"
+grep -q "No inventory was parsed" <<< "$no_comma"
+
+# Intentionally botched range (missing end number); ensure we don't fail fatally.
+no_end="$(ansible -i 'local[1:],' -m ping --connection=local -e ansible_python_interpreter="{{ ansible_playbook_python }}" all -vvv 2>&1)"
+grep -q "Unable to parse address from hostname, leaving unchanged:" <<< "$no_end"
+grep -q "host range must specify end value" <<< "$no_end"
+grep -q "local\[3:\] \| SUCCESS" <<< "$no_end"
+
+# Unset adhoc stuff
+unset ANSIBLE_LOAD_CALLBACK_PLUGINS ANSIBLE_STDOUT_CALLBACK
+
+ansible-playbook -i 'local100,local[100:110:2]' test_advanced_host_list.yml -v "$@"
diff --git a/test/integration/targets/inventory_advanced_host_list/test_advanced_host_list.yml b/test/integration/targets/inventory_advanced_host_list/test_advanced_host_list.yml
new file mode 100644
index 00000000..918078ae
--- /dev/null
+++ b/test/integration/targets/inventory_advanced_host_list/test_advanced_host_list.yml
@@ -0,0 +1,9 @@
+- hosts: all
+ connection: local
+ vars:
+ ansible_python_interpreter: "{{ ansible_playbook_python }}"
+ tasks:
+ - assert:
+ that:
+ - inventory_hostname in ["local100", "local102", "local104", "local106", "local108", "local110", "local118"]
+ - inventory_hostname not in ["local101", "local103", "local105", "local107", "local109", "local111"]
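As a reading aid: advanced_host_list expands 'name[start:end:step]' ranges inclusively, so the inventory string 'local100,local[100:110:2]' used in runme.sh should produce these hosts (all within the assert's allow-list):

# hosts generated by 'local100,local[100:110:2]' (inclusive range, step 2)
- local100
- local102
- local104
- local106
- local108
- local110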
diff --git a/test/integration/targets/inventory_cache/aliases b/test/integration/targets/inventory_cache/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/inventory_cache/aliases
+++ b/test/integration/targets/inventory_cache/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/inventory_constructed/keyed_group_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_default_value.yml
new file mode 100644
index 00000000..e4d0a76b
--- /dev/null
+++ b/test/integration/targets/inventory_constructed/keyed_group_default_value.yml
@@ -0,0 +1,5 @@
+plugin: constructed
+keyed_groups:
+ - key: tags
+ prefix: tag
+ default_value: "running"
diff --git a/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml
new file mode 100644
index 00000000..1c2d00e0
--- /dev/null
+++ b/test/integration/targets/inventory_constructed/keyed_group_list_default_value.yml
@@ -0,0 +1,5 @@
+plugin: constructed
+keyed_groups:
+ - key: roles
+ default_value: storage
+ prefix: host
\ No newline at end of file
diff --git a/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml b/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml
new file mode 100644
index 00000000..ae3fd5ae
--- /dev/null
+++ b/test/integration/targets/inventory_constructed/keyed_group_str_default_value.yml
@@ -0,0 +1,5 @@
+plugin: constructed
+keyed_groups:
+ - key: os
+ default_value: "fedora"
+ prefix: host
\ No newline at end of file
diff --git a/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml b/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml
new file mode 100644
index 00000000..cbe57c60
--- /dev/null
+++ b/test/integration/targets/inventory_constructed/keyed_group_trailing_separator.yml
@@ -0,0 +1,5 @@
+plugin: constructed
+keyed_groups:
+ - key: tags
+ prefix: tag
+ trailing_separator: False
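The companion option: with trailing_separator disabled, an empty value simply drops the separator and value instead of substituting a default, so the same fixture yields:

# expected keyed groups for host0 with trailing_separator: False
- tag_name_host0
- tag_environment_test
- tag_status            # no trailing '_' for the empty 'status' value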
diff --git a/test/integration/targets/inventory_constructed/runme.sh b/test/integration/targets/inventory_constructed/runme.sh
index 0cd1a293..91bbd66b 100755
--- a/test/integration/targets/inventory_constructed/runme.sh
+++ b/test/integration/targets/inventory_constructed/runme.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-set -ex
+set -eux
ansible-inventory -i static_inventory.yml -i constructed.yml --graph | tee out.txt
@@ -24,6 +24,33 @@ grep '@prefix_hostvalue1' out.txt
grep '@prefix_item0' out.txt
grep '@prefix_key0_value0' out.txt
+# keyed group with default value for key's value empty (dict)
+ansible-inventory -i tag_inventory.yml -i keyed_group_default_value.yml --graph | tee out.txt
+
+grep '@tag_name_host0' out.txt
+grep '@tag_environment_test' out.txt
+grep '@tag_status_running' out.txt
+
+# keyed group with default value for key's value empty (list)
+ansible-inventory -i tag_inventory.yml -i keyed_group_list_default_value.yml --graph | tee out.txt
+
+grep '@host_db' out.txt
+grep '@host_web' out.txt
+grep '@host_storage' out.txt
+
+# keyed group with default value for key's value empty (str)
+ansible-inventory -i tag_inventory.yml -i keyed_group_str_default_value.yml --graph | tee out.txt
+
+grep '@host_fedora' out.txt
+
+
+# keyed group with 'trailing_separator' set to 'False' for key's value empty
+ansible-inventory -i tag_inventory.yml -i keyed_group_trailing_separator.yml --graph | tee out.txt
+
+grep '@tag_name_host0' out.txt
+grep '@tag_environment_test' out.txt
+grep '@tag_status' out.txt
+
# test using use_vars_plugins
ansible-inventory -i invs/1/one.yml -i invs/2/constructed.yml --graph | tee out.txt
diff --git a/test/integration/targets/inventory_constructed/tag_inventory.yml b/test/integration/targets/inventory_constructed/tag_inventory.yml
new file mode 100644
index 00000000..acf810ea
--- /dev/null
+++ b/test/integration/targets/inventory_constructed/tag_inventory.yml
@@ -0,0 +1,12 @@
+all:
+ hosts:
+ host0:
+ tags:
+ name: "host0"
+ environment: "test"
+ status: ""
+ os: ""
+ roles:
+ - db
+ - web
+ - ""
diff --git a/test/integration/targets/inventory_yaml/aliases b/test/integration/targets/inventory_yaml/aliases
index f8e28c7e..a6dafcf8 100644
--- a/test/integration/targets/inventory_yaml/aliases
+++ b/test/integration/targets/inventory_yaml/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/jinja2_native_types/aliases b/test/integration/targets/jinja2_native_types/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/jinja2_native_types/aliases
+++ b/test/integration/targets/jinja2_native_types/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/jinja_plugins/aliases b/test/integration/targets/jinja_plugins/aliases
new file mode 100644
index 00000000..1d28bdb2
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter.py b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter.py
new file mode 100644
index 00000000..36669532
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class FilterModule:
+ def filters(self):
+ raise TypeError('bad_collection_filter')
diff --git a/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/good_collection_filter.py b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/good_collection_filter.py
new file mode 100644
index 00000000..e2e7ffcd
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/good_collection_filter.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class FilterModule:
+ def filters(self):
+ return {
+ 'hello': lambda x: 'Hello, %s!' % x,
+ }
diff --git a/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/bad_collection_test.py b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/bad_collection_test.py
new file mode 100644
index 00000000..9fce5581
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/bad_collection_test.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class TestModule:
+ def tests(self):
+ raise TypeError('bad_collection_test')
diff --git a/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/good_collection_test.py b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/good_collection_test.py
new file mode 100644
index 00000000..a4ca2ff2
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/test/good_collection_test.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class TestModule:
+ def tests(self):
+ return {
+ 'world': lambda x: x.lower() == 'world',
+ }
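Because these copies live under collections/ansible_collections/foo/bar, they are addressed by their fully qualified names, as the playbook in this target does; a minimal usage sketch:

- assert:
    that:
      - '"World"|foo.bar.hello == "Hello, World!"'
      - '"World" is foo.bar.world'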
diff --git a/test/integration/targets/jinja_plugins/filter_plugins/bad_filter.py b/test/integration/targets/jinja_plugins/filter_plugins/bad_filter.py
new file mode 100644
index 00000000..eebf39c9
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/filter_plugins/bad_filter.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class FilterModule:
+ def filters(self):
+ raise TypeError('bad_filter')
diff --git a/test/integration/targets/jinja_plugins/filter_plugins/good_filter.py b/test/integration/targets/jinja_plugins/filter_plugins/good_filter.py
new file mode 100644
index 00000000..e2e7ffcd
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/filter_plugins/good_filter.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class FilterModule:
+ def filters(self):
+ return {
+ 'hello': lambda x: 'Hello, %s!' % x,
+ }
diff --git a/test/integration/targets/jinja_plugins/playbook.yml b/test/integration/targets/jinja_plugins/playbook.yml
new file mode 100644
index 00000000..789be659
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/playbook.yml
@@ -0,0 +1,10 @@
+- hosts: localhost
+ gather_facts: false
+ tasks:
+ - assert:
+ that:
+ - '"World"|hello == "Hello, World!"'
+ - '"World" is world'
+
+ - '"World"|foo.bar.hello == "Hello, World!"'
+ - '"World" is foo.bar.world'
diff --git a/test/integration/targets/jinja_plugins/tasks/main.yml b/test/integration/targets/jinja_plugins/tasks/main.yml
new file mode 100644
index 00000000..012ec954
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/tasks/main.yml
@@ -0,0 +1,22 @@
+- shell: ansible-playbook {{ verbosity }} playbook.yml
+ args:
+ chdir: '{{ role_path }}'
+ vars:
+ verbosity: "{{ '' if not ansible_verbosity else '-' ~ ('v' * ansible_verbosity) }}"
+ register: result
+
+- debug:
+ var: result
+
+- assert:
+ that:
+ - '"[WARNING]: Skipping filter plugin" in result.stderr'
+ - '"[WARNING]: Skipping test plugin" in result.stderr'
+ - |
+ result.stderr|regex_findall('bad_filter')|length == 2
+ - |
+ result.stderr|regex_findall('bad_test')|length == 2
+ - |
+ result.stderr|regex_findall('bad_collection_filter')|length == 2
+ - |
+ result.stderr|regex_findall('bad_collection_test')|length == 2
diff --git a/test/integration/targets/jinja_plugins/test_plugins/bad_test.py b/test/integration/targets/jinja_plugins/test_plugins/bad_test.py
new file mode 100644
index 00000000..0cc7a5a8
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/test_plugins/bad_test.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class TestModule:
+ def tests(self):
+ raise TypeError('bad_test')
diff --git a/test/integration/targets/jinja_plugins/test_plugins/good_test.py b/test/integration/targets/jinja_plugins/test_plugins/good_test.py
new file mode 100644
index 00000000..a4ca2ff2
--- /dev/null
+++ b/test/integration/targets/jinja_plugins/test_plugins/good_test.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2021 Matt Martz <matt@sivel.net>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class TestModule:
+ def tests(self):
+ return {
+ 'world': lambda x: x.lower() == 'world',
+ }
diff --git a/test/integration/targets/json_cleanup/aliases b/test/integration/targets/json_cleanup/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/json_cleanup/aliases
+++ b/test/integration/targets/json_cleanup/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/limit_inventory/aliases b/test/integration/targets/limit_inventory/aliases
index 3005e4b2..498fedd5 100644
--- a/test/integration/targets/limit_inventory/aliases
+++ b/test/integration/targets/limit_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/lineinfile/meta/main.yml b/test/integration/targets/lineinfile/meta/main.yml
index 98e60f78..a91e6847 100644
--- a/test/integration/targets/lineinfile/meta/main.yml
+++ b/test/integration/targets/lineinfile/meta/main.yml
@@ -18,3 +18,4 @@
dependencies:
- prepare_tests
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/lineinfile/tasks/main.yml b/test/integration/targets/lineinfile/tasks/main.yml
index cad926b3..3d4678c2 100644
--- a/test/integration/targets/lineinfile/tasks/main.yml
+++ b/test/integration/targets/lineinfile/tasks/main.yml
@@ -19,7 +19,7 @@
- name: deploy the test file for lineinfile
copy:
src: test.txt
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert that the test file was deployed
@@ -29,9 +29,62 @@
- "result.checksum == '5feac65e442c91f557fc90069ce6efc4d346ab51'"
- "result.state == 'file'"
+- name: "create a file that does not yet exist with `create: yes` and produce diff"
+ lineinfile:
+ dest: "{{ remote_tmp_dir }}/a/a.txt"
+ state: present
+ line: "First line"
+ create: yes
+ diff: yes
+ register: result1
+
+- name: assert that a diff was returned
+ assert:
+ that:
+ - result1.diff | length > 0
+
+- name: stat the new file
+ stat:
+ path: "{{ remote_tmp_dir }}/a/a.txt"
+ register: result
+
+- name: assert that the file exists
+ assert:
+ that:
+ - result.stat.exists
+
+- block:
+ - name: "EXPECTED FAILURE - test source file does not exist w/o `create: yes`"
+ lineinfile:
+ path: "/some/where/that/doesnotexist.txt"
+ state: present
+ line: "Doesn't matter"
+ - fail:
+ msg: "Should not get here"
+ rescue:
+ - name: Validate failure
+ assert:
+ that:
+ - "'Destination /some/where/that/doesnotexist.txt does not exist !' in ansible_failed_result.msg"
+
+- block:
+ - name: EXPECTED FAILURE - test invalid `validate` value
+ lineinfile:
+ path: "{{ remote_tmp_dir }}/test.txt"
+ state: present
+ line: "Doesn't matter"
+ validate: '/some/path'
+ - fail:
+ msg: "Should not get here"
+ rescue:
+ - name: Validate failure
+ assert:
+ that:
+ - "'validate must contain %s: /some/path' in ansible_failed_result.msg"
+
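For contrast with the failure case above, a validate command must contain '%s', which the module replaces with the path of a temporary copy of the file before the change is committed; the success case later in this file uses the same pattern:

- lineinfile:
    path: '{{ remote_tmp_dir }}/test.txt'
    state: absent
    regexp: '^This is line 5$'
    validate: 'true %s'    # '%s' becomes the temp file path; 'true' always passes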
- name: insert a line at the beginning of the file, and back it up
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
@@ -40,7 +93,7 @@
- name: insert a line at the beginning of the file again
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
@@ -66,7 +119,7 @@
- name: stat the test after the insert at the head
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test hash is what we expect for the file with the insert at the head
@@ -76,7 +129,7 @@
- name: insert a line at the end of the file
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
@@ -90,7 +143,7 @@
- name: stat the test after the insert at the end
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert at the end
@@ -100,7 +153,7 @@
- name: insert a line after the first line
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line after line 1"
insertafter: "^This is line 1$"
@@ -114,7 +167,7 @@
- name: stat the test after insert after the first line
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert after the first line
@@ -124,7 +177,7 @@
- name: insert a line before the last line
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New line before line 5"
insertbefore: "^This is line 5$"
@@ -138,7 +191,7 @@
- name: stat the test after the insert before the last line
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the insert before the last line
@@ -148,7 +201,7 @@
- name: Replace a line with backrefs
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is line 3"
backrefs: yes
@@ -157,13 +210,13 @@
- name: Replace a line with backrefs again
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is line 3"
backrefs: yes
regexp: "^(REF) .* \\1$"
register: backrefs_result2
-- command: cat {{ output_dir }}/test.txt
+- command: cat {{ remote_tmp_dir }}/test.txt
- name: assert that the line with backrefs was changed
assert:
@@ -174,7 +227,7 @@
- name: stat the test after the backref line was replaced
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -184,7 +237,7 @@
- name: remove the middle line
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 3$"
register: result
@@ -197,7 +250,7 @@
- name: stat the test after the middle line was removed
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the middle line was removed
@@ -207,7 +260,7 @@
- name: run a validation script that succeeds
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 5$"
validate: "true %s"
@@ -221,7 +274,7 @@
- name: stat the test after the validation succeeded
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after the validation succeeded
@@ -231,7 +284,7 @@
- name: run a validation script that fails
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: absent
regexp: "^This is line 1$"
validate: "/bin/false %s"
@@ -245,7 +298,7 @@
- name: stat the test after the validation failed
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches the previous after the validation failed
@@ -257,7 +310,7 @@
- name: use create=yes
lineinfile:
- dest: "{{ output_dir }}/new_test.txt"
+ dest: "{{ remote_tmp_dir }}/new_test.txt"
create: yes
insertbefore: BOF
state: present
@@ -272,7 +325,7 @@
- name: validate that the newly created file exists
stat:
- path: "{{ output_dir }}/new_test.txt"
+ path: "{{ remote_tmp_dir }}/new_test.txt"
register: result
ignore_errors: yes
@@ -303,12 +356,12 @@
- name: testnoeof deploy the file for lineinfile
copy:
src: testnoeof.txt
- dest: "{{ output_dir }}/testnoeof.txt"
+ dest: "{{ remote_tmp_dir }}/testnoeof.txt"
register: result
- name: testnoeof insert a line at the end of the file
lineinfile:
- dest: "{{ output_dir }}/testnoeof.txt"
+ dest: "{{ remote_tmp_dir }}/testnoeof.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
@@ -322,7 +375,7 @@
- name: insert a multiple lines at the end of the file
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "This is a line\nwith \\n character"
insertafter: "EOF"
@@ -336,7 +389,7 @@
- name: testnoeof stat the no newline EOF test after the insert at the end
stat:
- path: "{{ output_dir }}/testnoeof.txt"
+ path: "{{ remote_tmp_dir }}/testnoeof.txt"
register: result
- name: testnoeof assert test checksum matches after the insert at the end
@@ -348,12 +401,12 @@
- name: testempty deploy the testempty file for lineinfile
copy:
src: testempty.txt
- dest: "{{ output_dir }}/testempty.txt"
+ dest: "{{ remote_tmp_dir }}/testempty.txt"
register: result
- name: testempty insert a line at the end of the file
lineinfile:
- dest: "{{ output_dir }}/testempty.txt"
+ dest: "{{ remote_tmp_dir }}/testempty.txt"
state: present
line: "New line at the end"
insertafter: "EOF"
@@ -367,7 +420,7 @@
- name: testempty stat the test after the insert at the end
stat:
- path: "{{ output_dir }}/testempty.txt"
+ path: "{{ remote_tmp_dir }}/testempty.txt"
register: result
- name: testempty assert test checksum matches after the insert at the end
@@ -376,7 +429,7 @@
- "result.stat.checksum == 'f440dc65ea9cec3fd496c1479ddf937e1b949412'"
- stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after inserting multiple lines
@@ -386,7 +439,7 @@
- name: replace a line with backrefs included in the line
lineinfile:
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
state: present
line: "New \\1 created with the backref"
backrefs: yes
@@ -401,7 +454,7 @@
- name: stat the test after the backref line was replaced
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -414,7 +467,7 @@
- name: create a new file for testing quoting issues
file:
- dest: "{{ output_dir }}/test_quoting.txt"
+ dest: "{{ remote_tmp_dir }}/test_quoting.txt"
state: touch
register: result
@@ -425,7 +478,7 @@
- name: use with_items to add code-like strings to the quoting txt file
lineinfile:
- dest: "{{ output_dir }}/test_quoting.txt"
+ dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "{{ item }}"
insertbefore: BOF
with_items:
@@ -447,7 +500,7 @@
- name: stat the quote test file
stat:
- path: "{{ output_dir }}/test_quoting.txt"
+ path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -457,7 +510,7 @@
- name: insert a line into the quoted file with a single quote
lineinfile:
- dest: "{{ output_dir }}/test_quoting.txt"
+ dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "import g'"
register: result
@@ -468,7 +521,7 @@
- name: stat the quote test file
stat:
- path: "{{ output_dir }}/test_quoting.txt"
+ path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -478,7 +531,7 @@
- name: insert a line into the quoted file with many double quotation strings
lineinfile:
- dest: "{{ output_dir }}/test_quoting.txt"
+ dest: "{{ remote_tmp_dir }}/test_quoting.txt"
line: "\"quote\" and \"unquote\""
register: result
@@ -489,7 +542,7 @@
- name: stat the quote test file
stat:
- path: "{{ output_dir }}/test_quoting.txt"
+ path: "{{ remote_tmp_dir }}/test_quoting.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -503,7 +556,7 @@
- name: Deploy the testmultiple file
copy:
src: testmultiple.txt
- dest: "{{ output_dir }}/testmultiple.txt"
+ dest: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the testmultiple file was deployed
@@ -516,7 +569,7 @@
# Test insertafter
- name: Write the same line to a file inserted after different lines
lineinfile:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertafter: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_1
@@ -532,7 +585,7 @@
- name: Do the same thing again to check for changes
lineinfile:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertafter: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_2
@@ -548,7 +601,7 @@
- name: Stat the insertafter file
stat:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the insertafter file matches expected checksum
@@ -561,7 +614,7 @@
- name: Deploy the testmultiple file
copy:
src: testmultiple.txt
- dest: "{{ output_dir }}/testmultiple.txt"
+ dest: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the testmultiple file was deployed
@@ -573,7 +626,7 @@
- name: Write the same line to a file inserted before different lines
lineinfile:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertbefore: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_3
@@ -589,7 +642,7 @@
- name: Do the same thing again to check for changes
lineinfile:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
insertbefore: "{{ item.regex }}"
line: "{{ item.replace }}"
register: _multitest_4
@@ -605,7 +658,7 @@
- name: Stat the insertbefore file
stat:
- path: "{{ output_dir }}/testmultiple.txt"
+ path: "{{ remote_tmp_dir }}/testmultiple.txt"
register: result
- name: Assert that the insertbefore file matches expected checksum
@@ -620,7 +673,7 @@
- name: Deploy the test.conf file
copy:
src: test.conf
- dest: "{{ output_dir }}/test.conf"
+ dest: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the test.conf file was deployed
@@ -633,7 +686,7 @@
 # Test insertafter
- name: Insert lines after with regexp
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
@@ -642,7 +695,7 @@
- name: Do the same thing again and check for changes
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
@@ -660,7 +713,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -670,7 +723,7 @@
- name: Do the same thing a third time without regexp and check for changes
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
with_items: "{{ test_befaf_regexp }}"
@@ -678,7 +731,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file was changed when no regexp was provided
@@ -689,7 +742,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -701,7 +754,7 @@
- name: Deploy the test.conf file
copy:
src: test.conf
- dest: "{{ output_dir }}/test.conf"
+ dest: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the test.conf file was deployed
@@ -713,7 +766,7 @@
- name: Insert lines before with regexp
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
@@ -722,7 +775,7 @@
- name: Do the same thing again and check for changes
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
regexp: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
@@ -740,7 +793,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -750,7 +803,7 @@
- name: Do the same thing a third time without regexp and check for changes
lineinfile:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
with_items: "{{ test_befaf_regexp }}"
@@ -758,7 +811,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file was changed when no regexp was provided
@@ -769,7 +822,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.conf"
+ path: "{{ remote_tmp_dir }}/test.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -780,25 +833,25 @@
- name: Copy empty file to test with insertbefore
copy:
src: testempty.txt
- dest: "{{ output_dir }}/testempty.txt"
+ dest: "{{ remote_tmp_dir }}/testempty.txt"
- name: Add a line to empty file with insertbefore
lineinfile:
- path: "{{ output_dir }}/testempty.txt"
+ path: "{{ remote_tmp_dir }}/testempty.txt"
line: top
insertbefore: '^not in the file$'
register: oneline_insbefore_test1
- name: Add a line to file with only one line using insertbefore
lineinfile:
- path: "{{ output_dir }}/testempty.txt"
+ path: "{{ remote_tmp_dir }}/testempty.txt"
line: top
insertbefore: '^not in the file$'
register: oneline_insbefore_test2
- name: Stat the file
stat:
- path: "{{ output_dir }}/testempty.txt"
+ path: "{{ remote_tmp_dir }}/testempty.txt"
register: oneline_insbefore_file
 - name: Assert that insertbefore worked properly with a one-line file
@@ -817,7 +870,7 @@
- name: Deploy the test file for lineinfile
copy:
src: test.txt
- dest: "{{ output_dir }}/test.txt"
+ dest: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: Assert that the test file was deployed
@@ -829,14 +882,14 @@
- name: Insert a line in the file using an empty string as a regular expression
lineinfile:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
regexp: ''
line: This is line 6
register: insert_empty_regexp
- name: Stat the file
stat:
- path: "{{ output_dir }}/test.txt"
+ path: "{{ remote_tmp_dir }}/test.txt"
register: result
- name: Assert that the file contents match what is expected and a warning was displayed
@@ -858,7 +911,7 @@
- name: Deploy the test file for lineinfile
copy:
src: teststring.txt
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: Assert that the test file was deployed
@@ -870,14 +923,14 @@
- name: Insert a line in the file using an empty string as a search string
lineinfile:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
search_string: ''
line: This is line 6
register: insert_empty_literal
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: Assert that the file contents match what is expected and a warning was displayed
@@ -901,7 +954,7 @@
- name: Deploy the firstmatch test file
copy:
src: firstmatch.txt
- dest: "{{ output_dir }}/firstmatch.txt"
+ dest: "{{ remote_tmp_dir }}/firstmatch.txt"
register: result
- name: Assert that the test file was deployed
@@ -913,7 +966,7 @@
- name: Insert a line before an existing line using firstmatch
lineinfile:
- path: "{{ output_dir }}/firstmatch.txt"
+ path: "{{ remote_tmp_dir }}/firstmatch.txt"
line: INSERT
insertafter: line1
firstmatch: yes
@@ -921,7 +974,7 @@
- name: Insert a line before an existing line using firstmatch again
lineinfile:
- path: "{{ output_dir }}/firstmatch.txt"
+ path: "{{ remote_tmp_dir }}/firstmatch.txt"
line: INSERT
insertafter: line1
firstmatch: yes
@@ -929,7 +982,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/firstmatch.txt"
+ path: "{{ remote_tmp_dir }}/firstmatch.txt"
register: result
- name: Assert that the file was modified appropriately
@@ -947,7 +1000,7 @@
- name: Deploy the test file
copy:
src: test_58923.txt
- dest: "{{ output_dir }}/test_58923.txt"
+ dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -968,7 +1021,7 @@
# Regexp is not present in the file, so the line must be inserted after ^#!/bin/sh
- name: Add the line using firstmatch, regexp, and insertafter
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
@@ -977,12 +1030,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test1_file
- name: Add the line using firstmatch, regexp, and insertafter again
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
@@ -994,7 +1047,7 @@
# so nothing has been added:
- name: Stat the file again
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test2_file
- name: Assert insertafter tests gave the expected results
@@ -1009,7 +1062,7 @@
- name: Deploy the test file
copy:
src: test_58923.txt
- dest: "{{ output_dir }}/test_58923.txt"
+ dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -1021,7 +1074,7 @@
- name: Insert the line using firstmatch and insertafter without regexp
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
@@ -1029,12 +1082,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test3_file
- name: Insert the line using firstmatch and insertafter without regexp again
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertafter: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
@@ -1042,7 +1095,7 @@
- name: Stat the file again
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertafter_test4_file
- name: Assert insertafter without regexp tests gave the expected results
@@ -1058,7 +1111,7 @@
- name: Deploy the test file
copy:
src: test_58923.txt
- dest: "{{ output_dir }}/test_58923.txt"
+ dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -1070,7 +1123,7 @@
- name: Add the line using regexp, firstmatch, and insertbefore
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
@@ -1079,12 +1132,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test1_file
- name: Add the line using regexp, firstmatch, and insertbefore again
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
regexp: ^export FISHEYE_OPTS
firstmatch: true
@@ -1093,7 +1146,7 @@
- name: Stat the file again
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test2_file
- name: Assert insertbefore with regexp tests gave the expected results
@@ -1109,7 +1162,7 @@
- name: Deploy the test file
copy:
src: test_58923.txt
- dest: "{{ output_dir }}/test_58923.txt"
+ dest: "{{ remote_tmp_dir }}/test_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -1121,7 +1174,7 @@
- name: Add the line using insertbefore and firstmatch
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
@@ -1129,12 +1182,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test3_file
- name: Add the line using insertbefore and firstmatch again
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: '^#!/bin/sh'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
@@ -1142,7 +1195,7 @@
- name: Stat the file again
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test4_file
# Test when the line is present in the file but
@@ -1151,7 +1204,7 @@
Add the line using insertbefore and firstmatch when the regexp line
is present but not close to the insertbefore spot
lineinfile:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
insertbefore: ' Darwin\*\) if \[ -z \"\$JAVA_HOME\" \] ; then'
firstmatch: true
line: export FISHEYE_OPTS="-Xmx4096m -Xms2048m"
@@ -1159,7 +1212,7 @@
- name: Stat the file again
stat:
- path: "{{ output_dir }}/test_58923.txt"
+ path: "{{ remote_tmp_dir }}/test_58923.txt"
register: insertbefore_test5_file
- name: Assert insertbefore with regexp tests gave the expected results
@@ -1179,7 +1232,7 @@
- name: Deploy the test file
copy:
src: teststring_58923.txt
- dest: "{{ output_dir }}/teststring_58923.txt"
+ dest: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -1200,7 +1253,7 @@
# literal is not present in the file, so the line must be inserted after ^#!/bin/sh
- name: Add the line using firstmatch, regexp, and insertafter
lineinfile:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertafter: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
@@ -1209,12 +1262,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertafter_test1_file
- name: Add the line using firstmatch, literal, and insertafter again
lineinfile:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertafter: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
@@ -1226,7 +1279,7 @@
# so nothing has been added:
- name: Stat the file again
stat:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertafter_test2_file
- name: Assert insertafter tests gave the expected results
@@ -1241,7 +1294,7 @@
- name: Deploy the test file
copy:
src: teststring_58923.txt
- dest: "{{ output_dir }}/teststring_58923.txt"
+ dest: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: initial_file
- name: Assert that the test file was deployed
@@ -1253,7 +1306,7 @@
- name: Add the line using literal, firstmatch, and insertbefore
lineinfile:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertbefore: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
@@ -1262,12 +1315,12 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertbefore_test1_file
- name: Add the line using literal, firstmatch, and insertbefore again
lineinfile:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
insertbefore: '^#!/bin/sh'
search_string: export FISHEYE_OPTS
firstmatch: true
@@ -1276,7 +1329,7 @@
- name: Stat the file again
stat:
- path: "{{ output_dir }}/teststring_58923.txt"
+ path: "{{ remote_tmp_dir }}/teststring_58923.txt"
register: insertbefore_test2_file
- name: Assert insertbefore with literal tests gave the expected results
@@ -1291,14 +1344,14 @@
# https://github.com/ansible/ansible/issues/63684
- name: Create a file by inserting a line
lineinfile:
- path: "{{ output_dir }}/testend.txt"
+ path: "{{ remote_tmp_dir }}/testend.txt"
create: yes
line: testline
register: testend1
- name: Insert a line at the end of the file
lineinfile:
- path: "{{ output_dir }}/testend.txt"
+ path: "{{ remote_tmp_dir }}/testend.txt"
insertafter: testline
regexp: line at the end
line: line at the end
@@ -1306,7 +1359,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/testend.txt"
+ path: "{{ remote_tmp_dir }}/testend.txt"
register: testend_file
- name: Assert inserting at the end gave the expected results.
@@ -1320,14 +1373,14 @@
- name: Create a file by inserting a line
lineinfile:
- path: "{{ output_dir }}/testendliteral.txt"
+ path: "{{ remote_tmp_dir }}/testendliteral.txt"
create: yes
line: testline
register: testend1
- name: Insert a line at the end of the file
lineinfile:
- path: "{{ output_dir }}/testendliteral.txt"
+ path: "{{ remote_tmp_dir }}/testendliteral.txt"
insertafter: testline
search_string: line at the end
line: line at the end
@@ -1335,7 +1388,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/testendliteral.txt"
+ path: "{{ remote_tmp_dir }}/testendliteral.txt"
register: testend_file
- name: Assert inserting at the end gave the expected results.
diff --git a/test/integration/targets/lineinfile/tasks/test_string01.yml b/test/integration/targets/lineinfile/tasks/test_string01.yml
index 6e0c12c3..b86cd09a 100644
--- a/test/integration/targets/lineinfile/tasks/test_string01.yml
+++ b/test/integration/targets/lineinfile/tasks/test_string01.yml
@@ -5,7 +5,7 @@
- name: deploy the test file for lineinfile string
copy:
src: teststring.txt
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: assert that the test file was deployed
@@ -17,7 +17,7 @@
- name: insert a line at the beginning of the file, and back it up
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
@@ -26,7 +26,7 @@
- name: insert a line at the beginning of the file again
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: present
line: "New line at the beginning"
insertbefore: "BOF"
@@ -34,7 +34,7 @@
- name: Replace a line using string
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: present
line: "Thi$ i^ [ine 3"
search_string: (\\w)(\\s+)([\\.,])
@@ -42,13 +42,13 @@
- name: Replace a line again using string
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: present
line: "Thi$ i^ [ine 3"
search_string: (\\w)(\\s+)([\\.,])
register: backrefs_result2
-- command: cat {{ output_dir }}/teststring.txt
+- command: cat {{ remote_tmp_dir }}/teststring.txt
- name: assert that the line with backrefs was changed
assert:
@@ -59,7 +59,7 @@
- name: stat the test after the backref line was replaced
stat:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: assert test checksum matches after backref line was replaced
@@ -69,7 +69,7 @@
- name: remove the middle line using string
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: absent
search_string: "Thi$ i^ [ine 3"
register: result
@@ -82,7 +82,7 @@
- name: stat the test after the middle line was removed
stat:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: assert test checksum matches after the middle line was removed
@@ -92,7 +92,7 @@
- name: run a validation script that succeeds using string
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: absent
search_string: <FilesMatch ".py[45]?$">
validate: "true %s"
@@ -106,7 +106,7 @@
- name: stat the test after the validation succeeded
stat:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: assert test checksum matches after the validation succeeded
@@ -116,7 +116,7 @@
- name: run a validation script that fails using string
lineinfile:
- dest: "{{ output_dir }}/teststring.txt"
+ dest: "{{ remote_tmp_dir }}/teststring.txt"
state: absent
search_string: "This is line 1"
validate: "/bin/false %s"
@@ -130,7 +130,7 @@
- name: stat the test after the validation failed
stat:
- path: "{{ output_dir }}/teststring.txt"
+ path: "{{ remote_tmp_dir }}/teststring.txt"
register: result
- name: assert test checksum matches the previous after the validation failed
diff --git a/test/integration/targets/lineinfile/tasks/test_string02.yml b/test/integration/targets/lineinfile/tasks/test_string02.yml
index 886b290d..1fa48b85 100644
--- a/test/integration/targets/lineinfile/tasks/test_string02.yml
+++ b/test/integration/targets/lineinfile/tasks/test_string02.yml
@@ -5,7 +5,7 @@
- name: Deploy the teststring.conf file
copy:
src: teststring.conf
- dest: "{{ output_dir }}/teststring.conf"
+ dest: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the teststring.conf file was deployed
@@ -18,7 +18,7 @@
# Test insertafter
- name: Insert lines after with string
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
search_string: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
@@ -27,7 +27,7 @@
- name: Do the same thing again and check for changes
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
search_string: "{{ item.regexp }}"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
@@ -45,7 +45,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -55,7 +55,7 @@
- name: Do the same thing a third time without string and check for changes
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
line: "{{ item.line }}"
insertafter: "{{ item.after }}"
with_items: "{{ test_befaf_regexp }}"
@@ -63,7 +63,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file was changed when no string was provided
@@ -74,7 +74,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -86,7 +86,7 @@
- name: Deploy the test.conf file
copy:
src: teststring.conf
- dest: "{{ output_dir }}/teststring.conf"
+ dest: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the teststring.conf file was deployed
@@ -98,7 +98,7 @@
- name: Insert lines before with string
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
search_string: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
@@ -107,7 +107,7 @@
- name: Do the same thing again and check for changes
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
search_string: "{{ item.regexp }}"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
@@ -125,7 +125,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file contents match what is expected
@@ -135,7 +135,7 @@
- name: Do the same thing a third time without string and check for changes
lineinfile:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
line: "{{ item.line }}"
insertbefore: "{{ item.before }}"
with_items: "{{ test_befaf_regexp }}"
@@ -143,7 +143,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file was changed when no string was provided
@@ -154,7 +154,7 @@
- name: Stat the file
stat:
- path: "{{ output_dir }}/teststring.conf"
+ path: "{{ remote_tmp_dir }}/teststring.conf"
register: result
- name: Assert that the file contents match what is expected
diff --git a/test/integration/targets/lookup_config/aliases b/test/integration/targets/lookup_config/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_config/aliases
+++ b/test/integration/targets/lookup_config/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_config/tasks/main.yml b/test/integration/targets/lookup_config/tasks/main.yml
index cda9aedc..356d2f80 100644
--- a/test/integration/targets/lookup_config/tasks/main.yml
+++ b/test/integration/targets/lookup_config/tasks/main.yml
@@ -39,6 +39,19 @@
ignore_errors: yes
register: lookup_config_7
+- name: remote user and port for ssh connection
+ set_fact:
+ ssh_user_and_port: '{{q("config", "remote_user", "port", plugin_type="connection", plugin_name="ssh")}}'
+ vars:
+ ansible_ssh_user: lola
+ ansible_ssh_port: 2022
+
+- name: remote_tmp for sh shell plugin
+ set_fact:
+ yolo_remote: '{{q("config", "remote_tmp", plugin_type="shell", plugin_name="sh")}}'
+ vars:
+ ansible_remote_tmp: yolo
+
- name: Verify lookup_config
assert:
that:
@@ -52,8 +65,10 @@
- lookup_config_4 is success
- 'lookup4|length == 0'
- lookup_config_5 is failed
- - '"must be a string and one of" in lookup_config_5.msg'
+ - '"valid values are" in lookup_config_5.msg'
- lookup_config_6 is failed
- '"Invalid setting identifier" in lookup_config_6.msg'
- lookup_config_7 is failed
- '"Invalid setting" in lookup_config_7.msg'
+ - ssh_user_and_port == ['lola', 2022]
+ - yolo_remote == ["yolo"]
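The two new set_fact tasks above exercise the config lookup's plugin_type/plugin_name parameters. A minimal standalone sketch of the same call (the host and fact names are illustrative):

    - hosts: localhost
      gather_facts: no
      vars:
        ansible_ssh_user: lola
        ansible_ssh_port: 2022
      tasks:
        # q() returns a list with one element per requested setting, in order,
        # so this yields ['lola', 2022]
        - debug:
            msg: "{{ q('config', 'remote_user', 'port', plugin_type='connection', plugin_name='ssh') }}"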
diff --git a/test/integration/targets/lookup_csvfile/aliases b/test/integration/targets/lookup_csvfile/aliases
index 45489be8..765b70da 100644
--- a/test/integration/targets/lookup_csvfile/aliases
+++ b/test/integration/targets/lookup_csvfile/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_csvfile/tasks/main.yml b/test/integration/targets/lookup_csvfile/tasks/main.yml
index 14b79bd6..758da71e 100644
--- a/test/integration/targets/lookup_csvfile/tasks/main.yml
+++ b/test/integration/targets/lookup_csvfile/tasks/main.yml
@@ -1,15 +1,23 @@
-- set_fact:
- this_will_error: "{{ lookup('csvfile', 'file=people.csv delimiter=, col=1') }}"
+- name: using deprecated syntax but missing keyword
+ set_fact:
+ this_will_error: "{{ lookup('csvfile', 'file=people.csv, delimiter=, col=1') }}"
ignore_errors: yes
register: no_keyword
-- set_fact:
+- name: extra arg in k=v syntax (deprecated)
+ set_fact:
this_will_error: "{{ lookup('csvfile', 'foo file=people.csv delimiter=, col=1 thisarg=doesnotexist') }}"
ignore_errors: yes
register: invalid_arg
+- name: extra arg in config syntax
+ set_fact:
+ this_will_error: "{{ lookup('csvfile', 'foo', file='people.csv', delimiter=',' col=1, thisarg='doesnotexist') }}"
+ ignore_errors: yes
+ register: invalid_arg2
+
- set_fact:
- this_will_error: "{{ lookup('csvfile', 'foo file=doesnotexist delimiter=, col=1') }}"
+ this_will_error: "{{ lookup('csvfile', 'foo', file='doesnotexist', delimiter=',', col=1) }}"
ignore_errors: yes
register: missing_file
@@ -19,24 +27,30 @@
- no_keyword is failed
- >
"Search key is required but was not found" in no_keyword.msg
+ - invalid_arg is failed
+ - invalid_arg2 is failed
- >
- "not in paramvals" in invalid_arg.msg
+ "is not a valid option" in invalid_arg.msg
- missing_file is failed
- >
- "need string or buffer" in missing_file.msg or "expected str, bytes or os.PathLike object" in missing_file.msg
+ "need string or buffer" in missing_file.msg or
+ "expected str, bytes or os.PathLike object" in missing_file.msg or
+ "No such file or directory" in missing_file.msg
- name: Check basic comma-separated file
assert:
that:
- - lookup('csvfile', 'Smith file=people.csv delimiter=, col=1') == "Jane"
+ - lookup('csvfile', 'Smith', file='people.csv', delimiter=',', col=1) == "Jane"
- lookup('csvfile', 'German von Lastname file=people.csv delimiter=, col=1') == "Demo"
- name: Check tab-separated file
assert:
that:
- lookup('csvfile', 'electronics file=tabs.csv delimiter=TAB col=1') == "tvs"
- - lookup('csvfile', 'fruit file=tabs.csv delimiter=TAB col=1') == "bananas"
+ - "lookup('csvfile', 'fruit', file='tabs.csv', delimiter='TAB', col=1) == 'bananas'"
- lookup('csvfile', 'fruit file=tabs.csv delimiter="\t" col=1') == "bananas"
+ - lookup('csvfile', 'electronics', 'fruit', file='tabs.csv', delimiter='\t', col=1) == "tvs,bananas"
+ - lookup('csvfile', 'electronics', 'fruit', file='tabs.csv', delimiter='\t', col=1, wantlist=True) == ["tvs", "bananas"]
- name: Check \x1a-separated file
assert:
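The rewritten assertions migrate csvfile calls from the deprecated inline 'key k=v ...' string to keyword arguments. A short sketch of both forms, assuming the people.csv fixture these tests ship with:

    # deprecated single-string syntax (still parsed, with a deprecation warning)
    - debug:
        msg: "{{ lookup('csvfile', 'Smith file=people.csv delimiter=, col=1') }}"

    # keyword syntax; multiple search terms come back comma-joined
    # unless wantlist=True is passed
    - debug:
        msg: "{{ lookup('csvfile', 'Smith', file='people.csv', delimiter=',', col=1) }}"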
diff --git a/test/integration/targets/lookup_dict/aliases b/test/integration/targets/lookup_dict/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_dict/aliases
+++ b/test/integration/targets/lookup_dict/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_env/aliases b/test/integration/targets/lookup_env/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_env/aliases
+++ b/test/integration/targets/lookup_env/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_file/aliases b/test/integration/targets/lookup_file/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_file/aliases
+++ b/test/integration/targets/lookup_file/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_first_found/aliases b/test/integration/targets/lookup_first_found/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_first_found/aliases
+++ b/test/integration/targets/lookup_first_found/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_first_found/tasks/main.yml b/test/integration/targets/lookup_first_found/tasks/main.yml
index 87f2a404..e85f4f27 100644
--- a/test/integration/targets/lookup_first_found/tasks/main.yml
+++ b/test/integration/targets/lookup_first_found/tasks/main.yml
@@ -1,10 +1,9 @@
- name: test with_first_found
- #shell: echo {{ item }}
set_fact: "first_found={{ item }}"
with_first_found:
- - "{{ role_path + '/files/does_not_exist' }}"
- - "{{ role_path + '/files/foo1' }}"
- - "{{ role_path + '/files/bar1' }}"
+ - "does_not_exist"
+ - "foo1"
+ - "{{ role_path + '/files/bar1' }}" # will only hit this if dwim search is broken
- name: set expected
set_fact: first_expected="{{ role_path + '/files/foo1' }}"
@@ -24,6 +23,7 @@
vars:
params:
files: "not_a_file.yaml"
+ skip: True
- name: verify q(first_found) result
assert:
@@ -71,3 +71,16 @@
assert:
that:
- "this_not_set is not defined"
+
+- name: test legacy formats
+ set_fact: hatethisformat={{item}}
+ vars:
+ params:
+ files: not/a/file.yaml;hosts
+ paths: not/a/path:/etc
+ loop: "{{ q('first_found', params) }}"
+
+- name: verify /etc/hosts was found
+ assert:
+ that:
+ - "hatethisformat == '/etc/hosts'"
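The legacy-format task feeds first_found the old semicolon/colon-delimited files and paths strings. A sketch of the equivalent list form, the modern spelling of the same search:

    - name: modern equivalent of the legacy files/paths strings
      debug:
        msg: "{{ q('first_found', {'files': ['not/a/file.yaml', 'hosts'], 'paths': ['not/a/path', '/etc']}) }}"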
diff --git a/test/integration/targets/lookup_indexed_items/aliases b/test/integration/targets/lookup_indexed_items/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_indexed_items/aliases
+++ b/test/integration/targets/lookup_indexed_items/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_indexed_items/tasks/main.yml b/test/integration/targets/lookup_indexed_items/tasks/main.yml
index 84f5fbce..434fe0ff 100644
--- a/test/integration/targets/lookup_indexed_items/tasks/main.yml
+++ b/test/integration/targets/lookup_indexed_items/tasks/main.yml
@@ -14,3 +14,19 @@
- "x2 == 'set'"
- "x3 == 'set'"
- "x4 == 'set'"
+
+- block:
+ - name: "EXPECTED FAILURE - test not a list"
+ debug:
+ msg: "{{ item.0 }} is {{ item.1 }}"
+ with_indexed_items:
+ "a": 1
+
+ - fail:
+ msg: "should not get here"
+
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test not a list"
+ - ansible_failed_result.msg == "with_indexed_items expects a list"
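The new block asserts the error raised when with_indexed_items is handed a mapping instead of a list. For contrast, a minimal valid invocation (item.0 is the 0-based index, item.1 the element):

    - name: valid usage
      debug:
        msg: "{{ item.0 }} is {{ item.1 }}"
      with_indexed_items:
        - a
        - b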
diff --git a/test/integration/targets/lookup_ini/aliases b/test/integration/targets/lookup_ini/aliases
index f9f29ef3..b5983214 100644
--- a/test/integration/targets/lookup_ini/aliases
+++ b/test/integration/targets/lookup_ini/aliases
@@ -1,2 +1 @@
shippable/posix/group3
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_ini/lookup_case_check.properties b/test/integration/targets/lookup_ini/lookup_case_check.properties
new file mode 100644
index 00000000..ed3faaf8
--- /dev/null
+++ b/test/integration/targets/lookup_ini/lookup_case_check.properties
@@ -0,0 +1,2 @@
+name = captain
+NAME = fantastic
diff --git a/test/integration/targets/lookup_ini/mysql.ini b/test/integration/targets/lookup_ini/mysql.ini
new file mode 100644
index 00000000..fa62d87e
--- /dev/null
+++ b/test/integration/targets/lookup_ini/mysql.ini
@@ -0,0 +1,8 @@
+[mysqld]
+user = mysql
+pid-file = /var/run/mysqld/mysqld.pid
+skip-external-locking
+old_passwords = 1
+skip-bdb
+# we don't need ACID today
+skip-innodb
diff --git a/test/integration/targets/lookup_ini/runme.sh b/test/integration/targets/lookup_ini/runme.sh
index 76f836a9..6f44332b 100755
--- a/test/integration/targets/lookup_ini/runme.sh
+++ b/test/integration/targets/lookup_ini/runme.sh
@@ -2,5 +2,4 @@
set -eux
-ansible-playbook test_lookup_properties.yml -i inventory -v "$@"
-ansible-playbook test_errors.yml -i inventory -v "$@"
+ansible-playbook test_ini.yml -i inventory -v "$@"
diff --git a/test/integration/targets/lookup_ini/test_allow_no_value.yml b/test/integration/targets/lookup_ini/test_allow_no_value.yml
new file mode 100644
index 00000000..bfdc3765
--- /dev/null
+++ b/test/integration/targets/lookup_ini/test_allow_no_value.yml
@@ -0,0 +1,23 @@
+- name: Lookup test
+ hosts: testhost
+ tasks:
+ - name: "Read mysql.ini allow_none=False (default)"
+ set_fact:
+ test1: "{{ lookup('ini', 'user', file='mysql.ini', section='mysqld') }}"
+ register: result
+ ignore_errors: true
+
+ - name: "Read mysql.ini allow_no_value=True"
+ set_fact:
+ test2: "{{ lookup('ini', 'user', file='mysql.ini', section='mysqld', allow_no_value=True) }}"
+
+ - name: "Read mysql.ini allow_none=True"
+ set_fact:
+ test3: "{{ lookup('ini', 'skip-innodb', file='mysql.ini', section='mysqld', allow_none=True) }}"
+
+ - assert:
+ that:
+ - result is failed
+ - test2 == 'mysql'
+ - test3 == []
+ - test3|length == 0
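allow_no_value (alias allow_none) lets the ini lookup parse files containing keys such as skip-innodb that carry no value at all; looking such a key up returns an empty result instead of failing. A sketch against the mysql.ini fixture added above:

    # parses despite the value-less keys in the [mysqld] section
    - debug:
        msg: "{{ lookup('ini', 'pid-file', file='mysql.ini', section='mysqld', allow_no_value=True) }}"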
diff --git a/test/integration/targets/lookup_ini/test_case_sensitive.yml b/test/integration/targets/lookup_ini/test_case_sensitive.yml
new file mode 100644
index 00000000..f66674ca
--- /dev/null
+++ b/test/integration/targets/lookup_ini/test_case_sensitive.yml
@@ -0,0 +1,31 @@
+- name: Test case sensitive option
+ hosts: all
+
+ tasks:
+ - name: Lookup a file with keys that differ only in case with case sensitivity enabled
+ debug:
+ msg: "{{ lookup('ini', 'name', file='duplicate_case_check.ini', section='reggae', case_sensitive=True) }}"
+ register: duplicate_case_sensitive_name
+
+ - name: Lookup a file with keys that differ only in case with case sensitivity enabled
+ debug:
+ msg: "{{ lookup('ini', 'NAME', file='duplicate_case_check.ini', section='reggae', case_sensitive=True) }}"
+ register: duplicate_case_sensitive_NAME
+
+ - name: Lookup a properties file with keys that differ only in case with case sensitivity enabled
+ debug:
+ msg: "{{ lookup('ini', 'name', file='lookup_case_check.properties', type='properties', case_sensitive=True) }}"
+ register: duplicate_case_sensitive_properties_name
+
+ - name: Lookup a properties file with keys that differ only in case with case sensitivity enabled
+ debug:
+ msg: "{{ lookup('ini', 'NAME', file='lookup_case_check.properties', type='properties', case_sensitive=True) }}"
+ register: duplicate_case_sensitive_properties_NAME
+
+ - name: Ensure the correct case-sensitive values were retrieved
+ assert:
+ that:
+ - duplicate_case_sensitive_name.msg == 'bob'
+ - duplicate_case_sensitive_NAME.msg == 'marley'
+ - duplicate_case_sensitive_properties_name.msg == 'captain'
+ - duplicate_case_sensitive_properties_NAME.msg == 'fantastic'
diff --git a/test/integration/targets/lookup_ini/test_errors.yml b/test/integration/targets/lookup_ini/test_errors.yml
index b7b04d90..c1832a35 100644
--- a/test/integration/targets/lookup_ini/test_errors.yml
+++ b/test/integration/targets/lookup_ini/test_errors.yml
@@ -7,17 +7,17 @@
block:
- name: Lookup a file with duplicate keys
debug:
- msg: "{{ lookup('ini', 'reggae file=duplicate.ini section=reggae') }}"
+ msg: "{{ lookup('ini', 'name', file='duplicate.ini', section='reggae') }}"
ignore_errors: yes
register: duplicate
- name: Lookup a file with keys that differ only in case
debug:
- msg: "{{ lookup('ini', 'reggae file=duplicate_case_check.ini section=reggae') }}"
+ msg: "{{ lookup('ini', 'name', file='duplicate_case_check.ini', section='reggae') }}"
ignore_errors: yes
register: duplicate_case_sensitive
- - name: Ensure duplicate key errers were handled properly
+ - name: Ensure duplicate key errors were handled properly
assert:
that:
- duplicate is failed
@@ -27,7 +27,7 @@
- name: Lookup a file with a missing section
debug:
- msg: "{{ lookup('ini', 'reggae file=lookup.ini section=missing') }}"
+ msg: "{{ lookup('ini', 'name', file='lookup.ini', section='missing') }}"
ignore_errors: yes
register: missing_section
@@ -36,3 +36,27 @@
that:
- missing_section is failed
- "'No section' in missing_section.msg"
+
+ - name: Mix options type and push key out of order
+ debug:
+ msg: "{{ lookup('ini', 'file=lookup.ini', 'value1', section='value_section') }}"
+ register: bad_mojo
+ ignore_errors: yes
+
+ - name: Verify bad behavior reported an error
+ assert:
+ that:
+ - bad_mojo is failed
+ - '"No key to lookup was provided as first term with in string inline option" in bad_mojo.msg'
+
+ - name: Test invalid option
+ debug:
+ msg: "{{ lookup('ini', 'invalid=option') }}"
+ ignore_errors: yes
+ register: invalid_option
+
+ - name: Ensure invalid option failed
+ assert:
+ that:
+ - invalid_option is failed
+ - "'is not a valid option' in invalid_option.msg"
diff --git a/test/integration/targets/lookup_ini/test_ini.yml b/test/integration/targets/lookup_ini/test_ini.yml
new file mode 100644
index 00000000..11a5e57a
--- /dev/null
+++ b/test/integration/targets/lookup_ini/test_ini.yml
@@ -0,0 +1,4 @@
+- import_playbook: test_lookup_properties.yml
+- import_playbook: test_errors.yml
+- import_playbook: test_case_sensitive.yml
+- import_playbook: test_allow_no_value.yml
diff --git a/test/integration/targets/lookup_ini/test_lookup_properties.yml b/test/integration/targets/lookup_ini/test_lookup_properties.yml
index 3a414bbc..a6fc0f7d 100644
--- a/test/integration/targets/lookup_ini/test_lookup_properties.yml
+++ b/test/integration/targets/lookup_ini/test_lookup_properties.yml
@@ -5,8 +5,8 @@
- name: "read properties value"
set_fact:
test1: "{{lookup('ini', 'value1 type=properties file=lookup.properties')}}"
- test2: "{{lookup('ini', 'value2 type=properties file=lookup.properties')}}"
- test_dot: "{{lookup('ini', 'value.dot type=properties file=lookup.properties')}}"
+ test2: "{{lookup('ini', 'value2', type='properties', file='lookup.properties')}}"
+ test_dot: "{{lookup('ini', 'value.dot', type='properties', file='lookup.properties')}}"
field_with_space: "{{lookup('ini', 'field.with.space type=properties file=lookup.properties')}}"
- assert:
@@ -15,10 +15,10 @@
- name: "read ini value"
set_fact:
- value1_global: "{{lookup('ini', 'value1 section=global file=lookup.ini')}}"
- value2_global: "{{lookup('ini', 'value2 section=global file=lookup.ini')}}"
- value1_section1: "{{lookup('ini', 'value1 section=section1 file=lookup.ini')}}"
- field_with_unicode: "{{lookup('ini', 'unicode section=global file=lookup.ini')}}"
+ value1_global: "{{lookup('ini', 'value1', section='global', file='lookup.ini')}}"
+ value2_global: "{{lookup('ini', 'value2', section='global', file='lookup.ini')}}"
+ value1_section1: "{{lookup('ini', 'value1', section='section1', file='lookup.ini')}}"
+ field_with_unicode: "{{lookup('ini', 'unicode', section='global', file='lookup.ini')}}"
- debug: var={{item}}
with_items: [ 'value1_global', 'value2_global', 'value1_section1', 'field_with_unicode' ]
diff --git a/test/integration/targets/lookup_inventory_hostnames/aliases b/test/integration/targets/lookup_inventory_hostnames/aliases
index 45489be8..765b70da 100644
--- a/test/integration/targets/lookup_inventory_hostnames/aliases
+++ b/test/integration/targets/lookup_inventory_hostnames/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_items/aliases b/test/integration/targets/lookup_items/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_items/aliases
+++ b/test/integration/targets/lookup_items/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_lines/aliases b/test/integration/targets/lookup_lines/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_lines/aliases
+++ b/test/integration/targets/lookup_lines/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_list/aliases b/test/integration/targets/lookup_list/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_list/aliases
+++ b/test/integration/targets/lookup_list/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_nested/aliases b/test/integration/targets/lookup_nested/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_nested/aliases
+++ b/test/integration/targets/lookup_nested/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_password/aliases b/test/integration/targets/lookup_password/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_password/aliases
+++ b/test/integration/targets/lookup_password/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_pipe/aliases b/test/integration/targets/lookup_pipe/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_pipe/aliases
+++ b/test/integration/targets/lookup_pipe/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_random_choice/aliases b/test/integration/targets/lookup_random_choice/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_random_choice/aliases
+++ b/test/integration/targets/lookup_random_choice/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_sequence/aliases b/test/integration/targets/lookup_sequence/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_sequence/aliases
+++ b/test/integration/targets/lookup_sequence/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_sequence/tasks/main.yml b/test/integration/targets/lookup_sequence/tasks/main.yml
index 72181a42..bd0a4d80 100644
--- a/test/integration/targets/lookup_sequence/tasks/main.yml
+++ b/test/integration/targets/lookup_sequence/tasks/main.yml
@@ -61,3 +61,138 @@
- "ws_z_6 == 'stride_6'"
- "ws_z_host07 == 'host07'"
- "ws_z_host08 == 'host08'"
+
+- block:
+ - name: EXPECTED FAILURE - test invalid arg
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=0 junk=3
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test invalid arg"
+ - ansible_failed_result.msg in [expected1, expected2]
+ vars:
+ expected1: "unrecognized arguments to with_sequence: ['junk']"
+ expected2: "unrecognized arguments to with_sequence: [u'junk']"
+
+- block:
+ - name: EXPECTED FAILURE - test bad kv value
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=A end=3
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test bad kv value"
+ - ansible_failed_result.msg == "can't parse start=A as integer"
+
+- block:
+ - name: EXPECTED FAILURE - test bad simple form start value
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: A-4/2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test bad simple form start value"
+ - ansible_failed_result.msg == "can't parse start=A as integer"
+
+- block:
+ - name: EXPECTED FAILURE - test bad simple form end value
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: 1-B/2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test bad simple form end value"
+ - ansible_failed_result.msg == "can't parse end=B as integer"
+
+- block:
+ - name: EXPECTED FAILURE - test bad simple form stride value
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: 1-4/C
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test bad simple form stride value"
+ - ansible_failed_result.msg == "can't parse stride=C as integer"
+
+- block:
+ - name: EXPECTED FAILURE - test no count or end
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=1
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test no count or end"
+ - ansible_failed_result.msg == "must specify count or end in with_sequence"
+
+- block:
+ - name: EXPECTED FAILURE - test both count and end
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=1 end=4 count=2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test both count and end"
+ - ansible_failed_result.msg == "can't specify both count and end in with_sequence"
+
+- block:
+ - name: EXPECTED FAILURE - test count backwards message
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=4 end=1 stride=2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test count backwards message"
+ - ansible_failed_result.msg == "to count backwards make stride negative"
+
+- block:
+ - name: EXPECTED FAILURE - test count forward message
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=1 end=4 stride=-2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test count forward message"
+ - ansible_failed_result.msg == "to count forward don't make stride negative"
+
+- block:
+ - name: EXPECTED FAILURE - test bad format string message
+ set_fact: "{{ 'x' + item }}={{ item }}"
+ with_sequence: start=1 end=4 format=d
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test bad format string message"
+ - ansible_failed_result.msg == expected
+ vars:
+ expected: "bad formatting string: d"
\ No newline at end of file
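The error cases above imply the accepted with_sequence forms; a sketch of two valid invocations (the compact form is start-end/stride, format is printf-style):

    - name: k=v form with zero-padded formatting, yields host01 and host03
      debug:
        msg: "{{ item }}"
      with_sequence: start=1 end=4 stride=2 format=host%02d

    - name: compact form, equivalent to start=0 end=4 stride=2
      debug:
        msg: "{{ item }}"
      with_sequence: 0-4/2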
diff --git a/test/integration/targets/lookup_subelements/aliases b/test/integration/targets/lookup_subelements/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_subelements/aliases
+++ b/test/integration/targets/lookup_subelements/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_template/aliases b/test/integration/targets/lookup_template/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_template/aliases
+++ b/test/integration/targets/lookup_template/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_template/tasks/main.yml b/test/integration/targets/lookup_template/tasks/main.yml
index df115766..36a8ee31 100644
--- a/test/integration/targets/lookup_template/tasks/main.yml
+++ b/test/integration/targets/lookup_template/tasks/main.yml
@@ -17,3 +17,11 @@
- assert:
that:
- "hello_world_string|trim == 'Hello world!'"
+
+- name: Test that we have a proper jinja search path in template lookup with different comment start and end string
+ set_fact:
+ hello_world_comment: "{{ lookup('template', 'hello_comment.txt', comment_start_string='[#', comment_end_string='#]') }}"
+
+- assert:
+ that:
+ - "hello_world_comment|trim == 'Hello world!'"
diff --git a/test/integration/targets/lookup_template/templates/hello_comment.txt b/test/integration/targets/lookup_template/templates/hello_comment.txt
new file mode 100644
index 00000000..92af4b37
--- /dev/null
+++ b/test/integration/targets/lookup_template/templates/hello_comment.txt
@@ -0,0 +1,2 @@
+[# Comment #]
+Hello world!
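With Jinja2's default comment delimiters ({# ... #}) the [# Comment #] line above would survive rendering; the overridden delimiters strip it, leaving only 'Hello world!'. A sketch of the failure mode the new test guards against:

    # without the overrides the comment is emitted literally:
    # "[# Comment #]\nHello world!"
    - debug:
        msg: "{{ lookup('template', 'hello_comment.txt') }}"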
diff --git a/test/integration/targets/lookup_together/aliases b/test/integration/targets/lookup_together/aliases
index bc987654..765b70da 100644
--- a/test/integration/targets/lookup_together/aliases
+++ b/test/integration/targets/lookup_together/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_together/tasks/main.yml b/test/integration/targets/lookup_together/tasks/main.yml
index ee59a2ae..71365a15 100644
--- a/test/integration/targets/lookup_together/tasks/main.yml
+++ b/test/integration/targets/lookup_together/tasks/main.yml
@@ -12,3 +12,18 @@
- "b == '2'"
- "c == '3'"
- "d == '4'"
+
+- block:
+ - name: "EXPECTED FAILURE - test empty list"
+ debug:
+ msg: "{{ item.0 }} and {{ item.1 }}"
+ with_together: []
+
+ - fail:
+ msg: "should not get here"
+
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test empty list"
+ - ansible_failed_result.msg == "with_together requires at least one element in each list"
\ No newline at end of file
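with_together zips its lists, padding the shorter one with None; only a fully empty input is an error, which is what the new block asserts. A valid-usage sketch:

    - name: pairs are zipped; the missing third element becomes None
      debug:
        msg: "{{ item.0 }} and {{ item.1 }}"
      with_together:
        - [1, 2, 3]
        - [a, b]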
diff --git a/test/integration/targets/lookup_unvault/aliases b/test/integration/targets/lookup_unvault/aliases
index 4a2ce27c..6bd893d4 100644
--- a/test/integration/targets/lookup_unvault/aliases
+++ b/test/integration/targets/lookup_unvault/aliases
@@ -1,3 +1,2 @@
shippable/posix/group2
needs/root
-skip/aix
diff --git a/test/integration/targets/lookup_url/aliases b/test/integration/targets/lookup_url/aliases
index 28990148..90ef161f 100644
--- a/test/integration/targets/lookup_url/aliases
+++ b/test/integration/targets/lookup_url/aliases
@@ -1,5 +1,3 @@
destructive
shippable/posix/group1
needs/httptester
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_varnames/aliases b/test/integration/targets/lookup_varnames/aliases
index 45489be8..765b70da 100644
--- a/test/integration/targets/lookup_varnames/aliases
+++ b/test/integration/targets/lookup_varnames/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_vars/aliases b/test/integration/targets/lookup_vars/aliases
index 07b87020..a6dafcf8 100644
--- a/test/integration/targets/lookup_vars/aliases
+++ b/test/integration/targets/lookup_vars/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_vars/tasks/main.yml b/test/integration/targets/lookup_vars/tasks/main.yml
index f24d8657..57b05b8f 100644
--- a/test/integration/targets/lookup_vars/tasks/main.yml
+++ b/test/integration/targets/lookup_vars/tasks/main.yml
@@ -14,3 +14,43 @@
that:
- 'var_host_info[0] == ansible_host'
- 'var_host_info[1] == ansible_connection'
+
+- block:
+ - name: EXPECTED FAILURE - test invalid var
+ debug:
+ var: '{{ lookup("vars", "doesnotexist") }}'
+
+ - fail:
+ msg: "should not get here"
+
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test invalid var"
+ - expected in ansible_failed_result.msg
+ vars:
+ expected: "No variable found with this name: doesnotexist"
+
+- block:
+ - name: EXPECTED FAILURE - test invalid var type
+ debug:
+ var: '{{ lookup("vars", 42) }}'
+
+ - fail:
+ msg: "should not get here"
+
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test invalid var type"
+ - expected in ansible_failed_result.msg
+ vars:
+ expected: "Invalid setting identifier, \"42\" is not a string"
+
+- name: test default
+ set_fact:
+ expected_default_var: '{{ lookup("vars", "doesnotexist", default="some text") }}'
+
+- assert:
+ that:
+ - expected_default_var == "some text"
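The final tasks cover the vars lookup's default parameter, which short-circuits the undefined-variable error. A minimal sketch:

    # returns 'fallback' instead of raising when the variable is undefined
    - debug:
        msg: "{{ lookup('vars', 'doesnotexist', default='fallback') }}"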
diff --git a/test/integration/targets/loop_control/aliases b/test/integration/targets/loop_control/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/loop_control/aliases
+++ b/test/integration/targets/loop_control/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/loops/aliases b/test/integration/targets/loops/aliases
index ed821c27..90ea9e12 100644
--- a/test/integration/targets/loops/aliases
+++ b/test/integration/targets/loops/aliases
@@ -1,2 +1,2 @@
shippable/posix/group2
-skip/aix
+context/controller
diff --git a/test/integration/targets/meta_tasks/aliases b/test/integration/targets/meta_tasks/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/meta_tasks/aliases
+++ b/test/integration/targets/meta_tasks/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/meta_tasks/runme.sh b/test/integration/targets/meta_tasks/runme.sh
index 3ee419cb..f5916ec7 100755
--- a/test/integration/targets/meta_tasks/runme.sh
+++ b/test/integration/targets/meta_tasks/runme.sh
@@ -49,4 +49,19 @@ for test_strategy in linear free; do
grep -q "META: ending play" <<< "$out"
grep -qv 'Failed to end using end_play' <<< "$out"
+
+ out="$(ansible-playbook test_end_play_serial_one.yml -i inventory.yml -e test_strategy=$test_strategy -vv "$@")"
+
+ [ "$(grep -c "Testing end_play on host" <<< "$out" )" -eq 1 ]
+ grep -q "META: ending play" <<< "$out"
+ grep -qv 'Failed to end using end_play' <<< "$out"
+done
+
+# test end_batch meta task
+for test_strategy in linear free; do
+ out="$(ansible-playbook test_end_batch.yml -i inventory.yml -e test_strategy=$test_strategy -vv "$@")"
+
+ [ "$(grep -c "Using end_batch" <<< "$out" )" -eq 2 ]
+ [ "$(grep -c "META: ending batch" <<< "$out" )" -eq 2 ]
+ grep -qv 'Failed to end_batch' <<< "$out"
done
diff --git a/test/integration/targets/meta_tasks/test_end_batch.yml b/test/integration/targets/meta_tasks/test_end_batch.yml
new file mode 100644
index 00000000..4af020af
--- /dev/null
+++ b/test/integration/targets/meta_tasks/test_end_batch.yml
@@ -0,0 +1,13 @@
+- name: Testing end_batch with strategy {{ test_strategy | default('linear') }}
+ hosts: testhost:testhost2
+ gather_facts: no
+ serial: 1
+ strategy: "{{ test_strategy | default('linear') }}"
+ tasks:
+ - debug:
+ msg: "Using end_batch, current host: {{ inventory_hostname }}, current batch: {{ ansible_play_batch }}"
+
+ - meta: end_batch
+
+ - fail:
+ msg: "Failed to end_batch, current host: {{ inventory_hostname }}, current batch: {{ ansible_play_batch }}"
diff --git a/test/integration/targets/meta_tasks/test_end_play_serial_one.yml b/test/integration/targets/meta_tasks/test_end_play_serial_one.yml
new file mode 100644
index 00000000..f838d4a6
--- /dev/null
+++ b/test/integration/targets/meta_tasks/test_end_play_serial_one.yml
@@ -0,0 +1,13 @@
+- name: Testing end_play with serial 1 and strategy {{ test_strategy | default('linear') }}
+ hosts: testhost:testhost2
+ gather_facts: no
+ serial: 1
+ strategy: "{{ test_strategy | default('linear') }}"
+ tasks:
+ - debug:
+ msg: "Testing end_play on host {{ inventory_hostname }}"
+
+ - meta: end_play
+
+ - fail:
+ msg: 'Failed to end using end_play'
diff --git a/test/integration/targets/missing_required_lib/aliases b/test/integration/targets/missing_required_lib/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/missing_required_lib/aliases
+++ b/test/integration/targets/missing_required_lib/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/module_defaults/action_plugins/debug.py b/test/integration/targets/module_defaults/action_plugins/debug.py
new file mode 100644
index 00000000..2584fd3d
--- /dev/null
+++ b/test/integration/targets/module_defaults/action_plugins/debug.py
@@ -0,0 +1,80 @@
+# Copyright 2012, Dag Wieers <dag@wieers.com>
+# Copyright 2016, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.errors import AnsibleUndefinedVariable
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_text
+from ansible.plugins.action import ActionBase
+
+
+class ActionModule(ActionBase):
+ ''' Print statements during execution '''
+
+ TRANSFERS_FILES = False
+ _VALID_ARGS = frozenset(('msg', 'var', 'verbosity'))
+
+ def run(self, tmp=None, task_vars=None):
+ if task_vars is None:
+ task_vars = dict()
+
+ if 'msg' in self._task.args and 'var' in self._task.args:
+ return {"failed": True, "msg": "'msg' and 'var' are incompatible options"}
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # get task verbosity
+ verbosity = int(self._task.args.get('verbosity', 0))
+
+ if verbosity <= self._display.verbosity:
+ if 'msg' in self._task.args:
+ result['msg'] = self._task.args['msg']
+
+ elif 'var' in self._task.args:
+ try:
+ results = self._templar.template(self._task.args['var'], convert_bare=True, fail_on_undefined=True)
+ if results == self._task.args['var']:
+ # if results is not str/unicode type, raise an exception
+ if not isinstance(results, string_types):
+ raise AnsibleUndefinedVariable
+ # If var name is same as result, try to template it
+ results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True)
+ except AnsibleUndefinedVariable as e:
+ results = u"VARIABLE IS NOT DEFINED!"
+ if self._display.verbosity > 0:
+ results += u": %s" % to_text(e)
+
+ if isinstance(self._task.args['var'], (list, dict)):
+ # If var is a list or dict, use the type as key to display
+ result[to_text(type(self._task.args['var']))] = results
+ else:
+ result[self._task.args['var']] = results
+ else:
+ result['msg'] = 'Hello world!'
+
+ # force flag to make debug output module always verbose
+ result['_ansible_verbose_always'] = True
+ else:
+ result['skipped_reason'] = "Verbosity threshold not met."
+ result['skipped'] = True
+
+ result['failed'] = False
+
+ return result
diff --git a/test/integration/targets/module_defaults/aliases b/test/integration/targets/module_defaults/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/module_defaults/aliases
+++ b/test/integration/targets/module_defaults/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/meta/runtime.yml b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/meta/runtime.yml
index 62695fbc..081ee8c2 100644
--- a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/meta/runtime.yml
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/meta/runtime.yml
@@ -1,5 +1,10 @@
action_groups:
testgroup:
+ # Test metadata 'extend_group' feature does not get stuck in a recursive loop
+ - metadata:
+ extend_group: othergroup
+ - metadata
+ - ping
- testns.testcoll.echo1
- testns.testcoll.echo2
# note we can define defaults for an action
@@ -7,3 +12,28 @@ action_groups:
# note we can define defaults in this group for actions/modules in another collection
- testns.othercoll.other_echoaction
- testns.othercoll.other_echo1
+ othergroup:
+ - metadata:
+ extend_group:
+ - testgroup
+ empty_metadata:
+ - metadata: {}
+ bad_metadata_format:
+ - unexpected_key:
+ key: value
+ metadata:
+ extend_group: testgroup
+ multiple_metadata:
+ - metadata:
+ extend_group: testgroup
+ - metadata:
+ extend_group: othergroup
+ bad_metadata_options:
+ - metadata:
+ unexpected_key: testgroup
+ bad_metadata_type:
+ - metadata: [testgroup]
+ bad_metadata_option_type:
+ - metadata:
+ extend_group:
+ name: testgroup
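A metadata entry's extend_group pulls another group's members into this one (the mutual testgroup/othergroup references above verify the resolver does not loop forever). A sketch of how a playbook consumes such a group via module_defaults, mirroring the template added later in this patch:

    - hosts: localhost
      gather_facts: no
      module_defaults:
        # applies to every action in the group, including members
        # reached through extend_group
        group/testns.testcoll.testgroup:
          data: value
      tasks:
        - testns.testcoll.ping: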
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/metadata.py b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/metadata.py
new file mode 100644
index 00000000..6a818fd8
--- /dev/null
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/metadata.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: metadata
+version_added: 2.12
+short_description: Test module with a specific name
+description: Test module with a specific name
+options:
+ data:
+ description: Required option to test module_defaults work
+ required: True
+ type: str
+author:
+ - Ansible Core Team
+'''
+
+EXAMPLES = '''
+'''
+
+RETURN = '''
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ data=dict(type='str', required=True),
+ ),
+ )
+
+ module.exit_json()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/ping.py b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/ping.py
new file mode 100644
index 00000000..2cb1fb23
--- /dev/null
+++ b/test/integration/targets/module_defaults/collections/ansible_collections/testns/testcoll/plugins/modules/ping.py
@@ -0,0 +1,83 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: ping
+version_added: historical
+short_description: Try to connect to host, verify a usable python and return C(pong) on success
+description:
+ - A trivial test module, this module always returns C(pong) on successful
+ contact. It does not make sense in playbooks, but it is useful from
+ C(/usr/bin/ansible) to verify the ability to login and that a usable Python is configured.
+ - This is NOT ICMP ping, this is just a trivial test module that requires Python on the remote-node.
+ - For Windows targets, use the M(ansible.windows.win_ping) module instead.
+ - For Network targets, use the M(ansible.netcommon.net_ping) module instead.
+options:
+ data:
+ description:
+ - Data to return for the C(ping) return value.
+ - If this parameter is set to C(crash), the module will cause an exception.
+ type: str
+ default: pong
+seealso:
+ - module: ansible.netcommon.net_ping
+ - module: ansible.windows.win_ping
+author:
+ - Ansible Core Team
+ - Michael DeHaan
+notes:
+ - Supports C(check_mode).
+'''
+
+EXAMPLES = '''
+# Test we can logon to 'webservers' and execute python with json lib.
+# ansible webservers -m ping
+
+- name: Example from an Ansible Playbook
+ ansible.builtin.ping:
+
+- name: Induce an exception to see what happens
+ ansible.builtin.ping:
+ data: crash
+'''
+
+RETURN = '''
+ping:
+ description: Value provided with the data parameter.
+ returned: success
+ type: str
+ sample: pong
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ data=dict(type='str', default='pong'),
+ ),
+ supports_check_mode=True
+ )
+
+ if module.params['data'] == 'crash':
+ raise Exception("boom")
+
+ result = dict(
+ ping=module.params['data'],
+ )
+
+ module.exit_json(**result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/module_defaults/library/legacy_ping.py b/test/integration/targets/module_defaults/library/legacy_ping.py
new file mode 100644
index 00000000..2cb1fb23
--- /dev/null
+++ b/test/integration/targets/module_defaults/library/legacy_ping.py
@@ -0,0 +1,83 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: ping
+version_added: historical
+short_description: Try to connect to host, verify a usable python and return C(pong) on success
+description:
+ - A trivial test module, this module always returns C(pong) on successful
+ contact. It does not make sense in playbooks, but it is useful from
+ C(/usr/bin/ansible) to verify the ability to login and that a usable Python is configured.
+ - This is NOT ICMP ping, this is just a trivial test module that requires Python on the remote-node.
+ - For Windows targets, use the M(ansible.windows.win_ping) module instead.
+ - For Network targets, use the M(ansible.netcommon.net_ping) module instead.
+options:
+ data:
+ description:
+ - Data to return for the C(ping) return value.
+ - If this parameter is set to C(crash), the module will cause an exception.
+ type: str
+ default: pong
+seealso:
+ - module: ansible.netcommon.net_ping
+ - module: ansible.windows.win_ping
+author:
+ - Ansible Core Team
+ - Michael DeHaan
+notes:
+ - Supports C(check_mode).
+'''
+
+EXAMPLES = '''
+# Test we can logon to 'webservers' and execute python with json lib.
+# ansible webservers -m ping
+
+- name: Example from an Ansible Playbook
+ ansible.builtin.ping:
+
+- name: Induce an exception to see what happens
+ ansible.builtin.ping:
+ data: crash
+'''
+
+RETURN = '''
+ping:
+ description: Value provided with the data parameter.
+ returned: success
+ type: str
+ sample: pong
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ data=dict(type='str', default='pong'),
+ ),
+ supports_check_mode=True
+ )
+
+ if module.params['data'] == 'crash':
+ raise Exception("boom")
+
+ result = dict(
+ ping=module.params['data'],
+ )
+
+ module.exit_json(**result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/module_defaults/runme.sh b/test/integration/targets/module_defaults/runme.sh
index c19e607b..082f4e5b 100755
--- a/test/integration/targets/module_defaults/runme.sh
+++ b/test/integration/targets/module_defaults/runme.sh
@@ -3,3 +3,7 @@
set -eux
ansible-playbook test_defaults.yml "$@"
+
+ansible-playbook test_action_groups.yml "$@"
+
+ansible-playbook test_action_group_metadata.yml "$@"
diff --git a/test/integration/targets/module_defaults/tasks/main.yml b/test/integration/targets/module_defaults/tasks/main.yml
index 3ed960d3..747c2f92 100644
--- a/test/integration/targets/module_defaults/tasks/main.yml
+++ b/test/integration/targets/module_defaults/tasks/main.yml
@@ -39,7 +39,7 @@
module_defaults:
# Meaningless values to make sure that 'module_defaults' gets
# evaluated for this block
- foo:
+ ping:
bar: baz
block:
- debug:
diff --git a/test/integration/targets/module_defaults/templates/test_metadata_warning.yml.j2 b/test/integration/targets/module_defaults/templates/test_metadata_warning.yml.j2
new file mode 100644
index 00000000..b45aaba2
--- /dev/null
+++ b/test/integration/targets/module_defaults/templates/test_metadata_warning.yml.j2
@@ -0,0 +1,8 @@
+---
+- hosts: localhost
+ gather_facts: no
+ module_defaults:
+ group/{{ group_name }}:
+ data: value
+ tasks:
+ - ping:
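
Rendered with group_name=testns.testcoll.empty_metadata, the template above produces the following playbook (shown for illustration; the rendering is performed by the template tasks in the next file):

    ---
    - hosts: localhost
      gather_facts: no
      module_defaults:
        group/testns.testcoll.empty_metadata:
          data: value
      tasks:
        - ping:
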
diff --git a/test/integration/targets/module_defaults/test_action_group_metadata.yml b/test/integration/targets/module_defaults/test_action_group_metadata.yml
new file mode 100644
index 00000000..d2ba8dc2
--- /dev/null
+++ b/test/integration/targets/module_defaults/test_action_group_metadata.yml
@@ -0,0 +1,123 @@
+---
+- hosts: localhost
+ gather_facts: no
+ vars:
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+ tasks:
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.empty_metadata
+
+ - command: ansible-playbook test_metadata_warning.yml
+ register: result
+
+ - assert:
+ that: metadata_warning not in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: "Invalid metadata was found"
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.bad_metadata_format
+
+ - command: ansible-playbook test_metadata_warning.yml
+ register: result
+
+ - assert:
+ that: metadata_warning in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: >-
+ Invalid metadata was found for action_group testns.testcoll.bad_metadata_format while loading module_defaults.
+ The only expected key is metadata, but got keys: metadata, unexpected_key
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.multiple_metadata
+
+ - command: ansible-playbook test_metadata_warning.yml
+ register: result
+
+ - assert:
+ that: metadata_warning in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: >-
+ Invalid metadata was found for action_group testns.testcoll.multiple_metadata while loading module_defaults.
+ The group contains multiple metadata entries.
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.bad_metadata_options
+
+ - command: 'ansible-playbook test_metadata_warning.yml'
+ register: result
+
+ - assert:
+ that: metadata_warning in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: >-
+ Invalid metadata was found for action_group testns.testcoll.bad_metadata_options while loading module_defaults.
+ The metadata contains unexpected keys: unexpected_key
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.bad_metadata_type
+
+ - command: ansible-playbook test_metadata_warning.yml
+ register: result
+
+ - assert:
+ that: metadata_warning in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: >-
+ Invalid metadata was found for action_group testns.testcoll.bad_metadata_type while loading module_defaults.
+ The metadata is not a dictionary. Got ['testgroup']
+
+ - template:
+ src: test_metadata_warning.yml.j2
+ dest: test_metadata_warning.yml
+ vars:
+ group_name: testns.testcoll.bad_metadata_option_type
+
+ - command: ansible-playbook test_metadata_warning.yml
+ register: result
+
+ - assert:
+ that: metadata_warning in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: >-
+ Invalid metadata was found for action_group testns.testcoll.bad_metadata_option_type while loading module_defaults.
+ The metadata contains unexpected key types: extend_group is {'name': 'testgroup'} (expected type list)
+
+ - name: test disabling action_group metadata validation
+ command: ansible-playbook test_metadata_warning.yml
+ environment:
+ ANSIBLE_VALIDATE_ACTION_GROUP_METADATA: False
+ register: result
+
+ - assert:
+ that: metadata_warning not in warnings
+ vars:
+ warnings: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ metadata_warning: "Invalid metadata was found for action_group"
+
+ - file:
+ path: test_metadata_warning.yml
+ state: absent
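
The warnings asserted above imply the expected shape of action-group metadata. A sketch of a well-formed action_groups entry in a collection's meta/runtime.yml (assumed layout; the fixture collection itself is not part of this diff): the optional metadata dict must be the only key of its list entry, may appear at most once, and extend_group must be a list.

    action_groups:
      testgroup:
        - metadata:
            extend_group:
              - othergroup
        - ping
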
diff --git a/test/integration/targets/module_defaults/test_action_groups.yml b/test/integration/targets/module_defaults/test_action_groups.yml
new file mode 100644
index 00000000..33a3c9c5
--- /dev/null
+++ b/test/integration/targets/module_defaults/test_action_groups.yml
@@ -0,0 +1,132 @@
+---
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - name: test ansible.legacy short group name
+ module_defaults:
+ group/testgroup:
+ data: test
+ block:
+ - legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'pong'"
+
+ - ansible.legacy.legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'pong'"
+
+ - ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.legacy.ping: # resolves to ansible.builtin.ping
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.builtin.ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.builtin.formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - name: test group that includes a legacy action
+ module_defaults:
+ # As of 2.12, legacy actions must be included in the action group definition
+ group/testlegacy:
+ data: test
+ block:
+ - legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.legacy.legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - name: test ansible.builtin fully qualified group name
+ module_defaults:
+ group/ansible.builtin.testgroup:
+ data: test
+ block:
+ # ansible.builtin does not contain ansible.legacy
+ - legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping != 'test'"
+
+ # ansible.builtin does not contain ansible.legacy
+ - ansible.legacy.legacy_ping:
+ register: result
+ - assert:
+ that: "result.ping != 'test'"
+
+ - ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ # Resolves to ansible.builtin.ping
+ - ansible.legacy.ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.builtin.ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.builtin.formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - name: test collection group name
+ module_defaults:
+ group/testns.testcoll.testgroup:
+ data: test
+ block:
+ # Plugin resolving to a different collection does not get the default
+ - ping:
+ register: result
+ - assert:
+ that: "result.ping != 'test'"
+
+ - formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - ansible.builtin.formerly_core_ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - testns.testcoll.ping:
+ register: result
+ - assert:
+ that: "result.ping == 'test'"
+
+ - metadata:
+ collections:
+ - testns.testcoll
diff --git a/test/integration/targets/module_defaults/test_defaults.yml b/test/integration/targets/module_defaults/test_defaults.yml
index 15b66362..70377f12 100644
--- a/test/integration/targets/module_defaults/test_defaults.yml
+++ b/test/integration/targets/module_defaults/test_defaults.yml
@@ -44,6 +44,51 @@
- debug: var=echo1_fq
+ - legacy_ping:
+ register: legacy_ping_1
+ module_defaults:
+ legacy_ping:
+ data: from task
+
+ - legacy_ping:
+ register: legacy_ping_2
+ module_defaults:
+ ansible.legacy.legacy_ping:
+ data: from task
+
+ - ansible.legacy.legacy_ping:
+ register: legacy_ping_3
+ module_defaults:
+ legacy_ping:
+ data: from task
+
+ - ansible.legacy.legacy_ping:
+ register: legacy_ping_4
+ module_defaults:
+ ansible.legacy.legacy_ping:
+ data: from task
+
+ - name: builtin uses legacy defaults
+ ansible.builtin.debug:
+ module_defaults:
+ debug:
+ msg: legacy default
+ register: builtin_legacy_defaults_1
+
+ - name: builtin uses legacy defaults
+ ansible.builtin.debug:
+ module_defaults:
+ ansible.legacy.debug:
+ msg: legacy default
+ register: builtin_legacy_defaults_2
+
+ - name: legacy does not use builtin defaults
+ ansible.legacy.debug:
+ register: legacy_builtin_defaults
+ module_defaults:
+ ansible.builtin.debug:
+ msg: legacy default
+
- assert:
that:
- "echoaction_fq.args_in == {'task_arg': 'from task', 'explicit_module_default': 'from playbook', 'group_module_default': 'from playbook' }"
@@ -56,5 +101,12 @@
- "other_echoaction_unq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
- "other_echo1_fq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
- "other_echo1_unq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
+ - "legacy_ping_1.ping == 'from task'"
+ - "legacy_ping_2.ping == 'from task'"
+ - "legacy_ping_3.ping == 'from task'"
+ - "legacy_ping_4.ping == 'from task'"
+ - "legacy_builtin_defaults.msg == 'Hello world!'"
+ - "builtin_legacy_defaults_1.msg == 'legacy default'"
+ - "builtin_legacy_defaults_2.msg == 'legacy default'"
- include_tasks: tasks/main.yml
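
The new assertions encode an asymmetric resolution rule: defaults keyed on a legacy name also apply when the task uses the ansible.builtin FQCN, but builtin-keyed defaults do not reach ansible.legacy tasks (hence legacy_builtin_defaults.msg stays 'Hello world!'). A minimal sketch of the asymmetry, reusing the debug module from the tests above:

    - ansible.builtin.debug:        # picks up the legacy-keyed default below
      module_defaults:
        ansible.legacy.debug:
          msg: legacy default

    - ansible.legacy.debug:         # ignores builtin-keyed defaults
      module_defaults:
        ansible.builtin.debug:
          msg: legacy default
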
diff --git a/test/integration/targets/module_no_log/aliases b/test/integration/targets/module_no_log/aliases
index cbbb8804..2e263309 100644
--- a/test/integration/targets/module_no_log/aliases
+++ b/test/integration/targets/module_no_log/aliases
@@ -1,5 +1,5 @@
shippable/posix/group1
-skip/aix # not configured to log user.info to /var/log/syslog
+context/controller
skip/freebsd # not configured to log user.info to /var/log/syslog
skip/osx # not configured to log user.info to /var/log/syslog
skip/macos # not configured to log user.info to /var/log/syslog
diff --git a/test/integration/targets/module_precedence/aliases b/test/integration/targets/module_precedence/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/module_precedence/aliases
+++ b/test/integration/targets/module_precedence/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/module_tracebacks/aliases b/test/integration/targets/module_tracebacks/aliases
index 804f0460..757f4fb8 100644
--- a/test/integration/targets/module_tracebacks/aliases
+++ b/test/integration/targets/module_tracebacks/aliases
@@ -1,3 +1,3 @@
shippable/posix/group4
needs/ssh
-skip/aix
+context/controller
diff --git a/test/integration/targets/module_utils/aliases b/test/integration/targets/module_utils/aliases
index 2f5770ff..769d265d 100644
--- a/test/integration/targets/module_utils/aliases
+++ b/test/integration/targets/module_utils/aliases
@@ -1,3 +1,4 @@
shippable/posix/group3
needs/root
needs/target/setup_nobody
+context/target
diff --git a/test/integration/targets/module_utils/module_utils_basic_setcwd.yml b/test/integration/targets/module_utils/module_utils_basic_setcwd.yml
index 97dbf873..2b2b6dbd 100644
--- a/test/integration/targets/module_utils/module_utils_basic_setcwd.yml
+++ b/test/integration/targets/module_utils/module_utils_basic_setcwd.yml
@@ -15,8 +15,14 @@
become: yes
become_user: nobody # root can read cwd regardless of permissions, so a non-root user is required here
+ - name: get real path of home directory of the unprivileged user
+ raw: "{{ ansible_python_interpreter }} -c 'import os.path; print(os.path.realpath(os.path.expanduser(\"~\")))'"
+ register: home
+ become: yes
+ become_user: nobody
+
- name: verify AnsibleModule was able to adjust cwd as expected
assert:
that:
- missing.before != missing.after
- - unreadable.before != unreadable.after or unreadable.before == '/' # allow / fallback on macOS when using an unprivileged user
+ - unreadable.before != unreadable.after or unreadable.before == '/' or unreadable.before == home.stdout.strip() # allow / and $HOME fallback on macOS when using an unprivileged user
diff --git a/test/integration/targets/module_utils/module_utils_test.yml b/test/integration/targets/module_utils/module_utils_test.yml
index 96b2a9e0..a6019cda 100644
--- a/test/integration/targets/module_utils/module_utils_test.yml
+++ b/test/integration/targets/module_utils/module_utils_test.yml
@@ -57,8 +57,8 @@
- name: Assert that the deprecation message is given correctly
assert:
that:
- - result.deprecations[0].msg == "Alias 'baz' is deprecated. See the module docs for more information"
- - result.deprecations[0].version == '9.99'
+ - result.deprecations[-1].msg == "Alias 'baz' is deprecated. See the module docs for more information"
+ - result.deprecations[-1].version == '9.99'
- block:
- name: Get a string with a \0 in it
diff --git a/test/integration/targets/module_utils/module_utils_test_no_log.yml b/test/integration/targets/module_utils/module_utils_test_no_log.yml
index bad2efd4..2fa3e101 100644
--- a/test/integration/targets/module_utils/module_utils_test_no_log.yml
+++ b/test/integration/targets/module_utils/module_utils_test_no_log.yml
@@ -7,3 +7,6 @@
explicit_pass: abc
suboption:
explicit_sub_pass: def
+ environment:
+ SECRET_ENV: ghi
+ SECRET_SUB_ENV: jkl
diff --git a/test/integration/targets/module_utils/module_utils_vvvvv.yml b/test/integration/targets/module_utils/module_utils_vvvvv.yml
index 6a9f9201..fc2b0c1c 100644
--- a/test/integration/targets/module_utils/module_utils_vvvvv.yml
+++ b/test/integration/targets/module_utils/module_utils_vvvvv.yml
@@ -7,11 +7,10 @@
# Invocation usually is output with 3vs or more, our callback plugin displays it anyway
- name: Check no_log invocation results
command: ansible-playbook -i {{ inventory_file }} module_utils_test_no_log.yml
+ delegate_to: localhost
environment:
ANSIBLE_CALLBACK_PLUGINS: callback
ANSIBLE_STDOUT_CALLBACK: pure_json
- SECRET_ENV: ghi
- SECRET_SUB_ENV: jkl
register: no_log_invocation
- set_fact:
diff --git a/test/integration/targets/module_utils_common.respawn/aliases b/test/integration/targets/module_utils_common.respawn/aliases
new file mode 100644
index 00000000..a6dafcf8
--- /dev/null
+++ b/test/integration/targets/module_utils_common.respawn/aliases
@@ -0,0 +1 @@
+shippable/posix/group1
diff --git a/test/integration/targets/module_utils_respawn/library/respawnme.py b/test/integration/targets/module_utils_common.respawn/library/respawnme.py
index 6471dba4..6471dba4 100644
--- a/test/integration/targets/module_utils_respawn/library/respawnme.py
+++ b/test/integration/targets/module_utils_common.respawn/library/respawnme.py
diff --git a/test/integration/targets/module_utils_respawn/tasks/main.yml b/test/integration/targets/module_utils_common.respawn/tasks/main.yml
index 246c8f74..246c8f74 100644
--- a/test/integration/targets/module_utils_respawn/tasks/main.yml
+++ b/test/integration/targets/module_utils_common.respawn/tasks/main.yml
diff --git a/test/integration/targets/module_utils_distro/aliases b/test/integration/targets/module_utils_distro/aliases
new file mode 100644
index 00000000..8278ec8b
--- /dev/null
+++ b/test/integration/targets/module_utils_distro/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/module_utils_distro/meta/main.yml b/test/integration/targets/module_utils_distro/meta/main.yml
new file mode 100644
index 00000000..1810d4be
--- /dev/null
+++ b/test/integration/targets/module_utils_distro/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/module_utils_distro/runme.sh b/test/integration/targets/module_utils_distro/runme.sh
new file mode 100755
index 00000000..e5d3d053
--- /dev/null
+++ b/test/integration/targets/module_utils_distro/runme.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -eux
+
+# Ensure that when a non-distro 'distro' package is in PYTHONPATH, we fall back
+# to our bundled one.
+new_pythonpath="$OUTPUT_DIR/pythonpath"
+mkdir -p "$new_pythonpath/distro"
+touch "$new_pythonpath/distro/__init__.py"
+
+export PYTHONPATH="$new_pythonpath:$PYTHONPATH"
+
+# Sanity test to make sure the above worked
+set +e
+distro_id_fail="$(python -c 'import distro; distro.id' 2>&1)"
+set -e
+grep -q "AttributeError:.*has no attribute 'id'" <<< "$distro_id_fail"
+
+# ansible.module_utils.common.sys_info imports distro, and itself gets imported
+# in DataLoader, so all we have to do to test the fallback is run `ansible`.
+ansirun="$(ansible -i ../../inventory -a "echo \$PYTHONPATH" localhost)"
+grep -q "$new_pythonpath" <<< "$ansirun"
+
+rm -rf "$new_pythonpath"
diff --git a/test/integration/targets/module_utils_selinux/aliases b/test/integration/targets/module_utils_facts.system.selinux/aliases
index aab3ff52..ee281d27 100644
--- a/test/integration/targets/module_utils_selinux/aliases
+++ b/test/integration/targets/module_utils_facts.system.selinux/aliases
@@ -1,5 +1,4 @@
shippable/posix/group1
-skip/aix
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/module_utils_selinux/tasks/main.yml b/test/integration/targets/module_utils_facts.system.selinux/tasks/main.yml
index c599377b..c599377b 100644
--- a/test/integration/targets/module_utils_selinux/tasks/main.yml
+++ b/test/integration/targets/module_utils_facts.system.selinux/tasks/main.yml
diff --git a/test/integration/targets/module_utils_selinux/tasks/selinux.yml b/test/integration/targets/module_utils_facts.system.selinux/tasks/selinux.yml
index 6a2b159c..6a2b159c 100644
--- a/test/integration/targets/module_utils_selinux/tasks/selinux.yml
+++ b/test/integration/targets/module_utils_facts.system.selinux/tasks/selinux.yml
diff --git a/test/integration/targets/no_log/aliases b/test/integration/targets/no_log/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/no_log/aliases
+++ b/test/integration/targets/no_log/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/noexec/aliases b/test/integration/targets/noexec/aliases
index 66a77c7b..edabc85a 100644
--- a/test/integration/targets/noexec/aliases
+++ b/test/integration/targets/noexec/aliases
@@ -1,3 +1,4 @@
shippable/posix/group2
+context/controller
skip/docker
skip/macos
diff --git a/test/integration/targets/old_style_cache_plugins/aliases b/test/integration/targets/old_style_cache_plugins/aliases
index 05f65b71..c7c77ce6 100644
--- a/test/integration/targets/old_style_cache_plugins/aliases
+++ b/test/integration/targets/old_style_cache_plugins/aliases
@@ -1,4 +1,6 @@
+destructive
+needs/root
shippable/posix/group3
+context/controller
skip/osx
skip/macos
-disabled
diff --git a/test/integration/targets/old_style_cache_plugins/cleanup.yml b/test/integration/targets/old_style_cache_plugins/cleanup.yml
new file mode 100644
index 00000000..93f5cc58
--- /dev/null
+++ b/test/integration/targets/old_style_cache_plugins/cleanup.yml
@@ -0,0 +1,41 @@
+---
+- hosts: localhost
+ gather_facts: no
+ ignore_errors: yes
+ tasks:
+ - command: redis-cli keys '*'
+
+ - name: delete cache keys
+ command: redis-cli del {{ item }}
+ loop:
+ - ansible_facts_localhost
+ - ansible_inventory_localhost
+ - ansible_cache_keys
+
+ - name: shutdown the server
+ command: redis-cli shutdown
+
+ - name: clean up setup files
+ file:
+ path: "{{ item }}"
+ state: absent
+ loop:
+ - redis-stable.tar.gz
+
+ - name: remove executables
+ file:
+ state: absent
+ path: "/usr/local/bin/{{ item }}"
+ follow: no
+ become: yes
+ loop:
+ - redis-server
+ - redis-cli
+
+ - name: clean the rest of the files
+ file:
+ path: "{{ item }}"
+ state: absent
+ loop:
+ - ./redis-stable.tar.gz
+ - ./redis-stable
diff --git a/test/integration/targets/old_style_cache_plugins/inspect_cache.yml b/test/integration/targets/old_style_cache_plugins/inspect_cache.yml
new file mode 100644
index 00000000..72810e19
--- /dev/null
+++ b/test/integration/targets/old_style_cache_plugins/inspect_cache.yml
@@ -0,0 +1,36 @@
+---
+- hosts: localhost
+ gather_facts: no
+ vars:
+ json_cache: "{{ cache.stdout | from_json }}"
+ tasks:
+ - command: redis-cli get ansible_facts_localhost
+ register: cache
+ tags:
+ - always
+
+ - name: test that the cache only contains the set_fact var
+ assert:
+ that:
+ - "json_cache | length == 1"
+ - "json_cache.foo == ansible_facts.foo"
+ tags:
+ - set_fact
+
+ - name: test that the cache contains gathered facts and the var
+ assert:
+ that:
+ - "json_cache | length > 1"
+ - "json_cache.foo == 'bar'"
+ - "json_cache.ansible_distribution is defined"
+ tags:
+ - additive_gather_facts
+
+ - name: test that the cache contains only gathered facts
+ assert:
+ that:
+ - "json_cache | length > 1"
+ - "json_cache.foo is undefined"
+ - "json_cache.ansible_distribution is defined"
+ tags:
+ - gather_facts
diff --git a/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py b/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py
new file mode 100644
index 00000000..44b6cf93
--- /dev/null
+++ b/test/integration/targets/old_style_cache_plugins/plugins/cache/configurable_redis.py
@@ -0,0 +1,147 @@
+# (c) 2014, Brian Coca, Josh Drake, et al
+# (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ cache: configurable_redis
+ short_description: Use Redis DB for cache
+ description:
+ - This cache uses JSON formatted, per host records saved in Redis.
+ version_added: "1.9"
+ requirements:
+ - redis>=2.4.5 (python lib)
+ options:
+ _uri:
+ description:
+ - A colon separated string of connection information for Redis.
+ required: True
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
+ ini:
+ - key: fact_caching_connection
+ section: defaults
+ _prefix:
+ description: User defined prefix to use when creating the DB entries
+ default: ansible_facts
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_PREFIX
+ ini:
+ - key: fact_caching_prefix
+ section: defaults
+ _timeout:
+ default: 86400
+ description: Expiration timeout for the cache plugin data
+ env:
+ - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
+ ini:
+ - key: fact_caching_timeout
+ section: defaults
+ type: integer
+'''
+
+import time
+import json
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
+from ansible.plugins.cache import BaseCacheModule
+from ansible.utils.display import Display
+
+try:
+ from redis import StrictRedis, VERSION
+except ImportError:
+ raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
+
+display = Display()
+
+
+class CacheModule(BaseCacheModule):
+ """
+ A caching module backed by redis.
+ Keys are maintained in a zset with their score being the timestamp
+ when they are inserted. This allows for the usage of 'zremrangebyscore'
+ to expire keys. This mechanism is used rather than a pattern-matched
+ 'scan' for performance.
+ """
+ def __init__(self, *args, **kwargs):
+ connection = []
+
+ super(CacheModule, self).__init__(*args, **kwargs)
+ if self.get_option('_uri'):
+ connection = self.get_option('_uri').split(':')
+ self._timeout = float(self.get_option('_timeout'))
+ self._prefix = self.get_option('_prefix')
+
+ self._cache = {}
+ self._db = StrictRedis(*connection)
+ self._keys_set = 'ansible_cache_keys'
+
+ def _make_key(self, key):
+ return self._prefix + key
+
+ def get(self, key):
+
+ if key not in self._cache:
+ value = self._db.get(self._make_key(key))
+ # guard against the key not being removed from the zset;
+ # this could happen in cases where the timeout value is changed
+ # between invocations
+ if value is None:
+ self.delete(key)
+ raise KeyError
+ self._cache[key] = json.loads(value, cls=AnsibleJSONDecoder)
+
+ return self._cache.get(key)
+
+ def set(self, key, value):
+
+ value2 = json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
+ if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
+ self._db.setex(self._make_key(key), int(self._timeout), value2)
+ else:
+ self._db.set(self._make_key(key), value2)
+
+ if VERSION[0] == 2:
+ self._db.zadd(self._keys_set, time.time(), key)
+ else:
+ self._db.zadd(self._keys_set, {key: time.time()})
+ self._cache[key] = value
+
+ def _expire_keys(self):
+ if self._timeout > 0:
+ expiry_age = time.time() - self._timeout
+ self._db.zremrangebyscore(self._keys_set, 0, expiry_age)
+
+ def keys(self):
+ self._expire_keys()
+ return self._db.zrange(self._keys_set, 0, -1)
+
+ def contains(self, key):
+ self._expire_keys()
+ return (self._db.zrank(self._keys_set, key) is not None)
+
+ def delete(self, key):
+ if key in self._cache:
+ del self._cache[key]
+ self._db.delete(self._make_key(key))
+ self._db.zrem(self._keys_set, key)
+
+ def flush(self):
+ for key in self.keys():
+ self.delete(key)
+
+ def copy(self):
+ # TODO: there is probably a better way to do this in redis
+ ret = dict()
+ for key in self.keys():
+ ret[key] = self.get(key)
+ return ret
+
+ def __getstate__(self):
+ return dict()
+
+ def __setstate__(self, data):
+ self.__init__()
diff --git a/test/integration/targets/old_style_cache_plugins/plugins/cache/redis.py b/test/integration/targets/old_style_cache_plugins/plugins/cache/legacy_redis.py
index 9879dec9..9879dec9 100644
--- a/test/integration/targets/old_style_cache_plugins/plugins/cache/redis.py
+++ b/test/integration/targets/old_style_cache_plugins/plugins/cache/legacy_redis.py
diff --git a/test/integration/targets/old_style_cache_plugins/runme.sh b/test/integration/targets/old_style_cache_plugins/runme.sh
index 13911bd5..ffa6723b 100755
--- a/test/integration/targets/old_style_cache_plugins/runme.sh
+++ b/test/integration/targets/old_style_cache_plugins/runme.sh
@@ -4,77 +4,44 @@ set -eux
source virtualenv.sh
-# Run test if dependencies are installed
-failed_dep_1=$(ansible localhost -m pip -a "name=redis>=2.4.5 state=present" "$@" | tee out.txt | grep -c 'FAILED!' || true)
-cat out.txt
+trap 'ansible-playbook cleanup.yml' EXIT
-installed_redis=$(ansible localhost -m package -a "name=redis-server state=present" --become "$@" | tee out.txt | grep -c '"changed": true' || true)
-failed_dep_2=$(grep out.txt -ce 'FAILED!' || true)
-cat out.txt
+export PATH="$PATH:/usr/local/bin"
-started_redis=$(ansible localhost -m service -a "name=redis-server state=started" --become "$@" | tee out.txt | grep -c '"changed": true' || true)
-failed_dep_3=$(grep out.txt -ce 'FAILED!' || true)
-cat out.txt
+ansible-playbook setup_redis_cache.yml "$@"
-CLEANUP_REDIS () { if [ "${installed_redis}" -eq 1 ] ; then ansible localhost -m package -a "name=redis-server state=absent" --become ; fi }
-STOP_REDIS () { if [ "${installed_redis}" -ne 1 ] && [ "${started_redis}" -eq 1 ] ; then ansible localhost -m service -a "name=redis-server state=stopped" --become ; fi }
+# Cache should start empty
+redis-cli keys 'ansible_*'
+[ "$(redis-cli keys 'ansible_*')" = "" ]
-if [ "${failed_dep_1}" -eq 1 ] || [ "${failed_dep_2}" -eq 1 ] || [ "${failed_dep_3}" -eq 1 ] ; then
- STOP_REDIS
- CLEANUP_REDIS
- exit 0
-fi
-
-export ANSIBLE_CACHE_PLUGIN=redis
-export ANSIBLE_CACHE_PLUGIN_CONNECTION=localhost:6379:0
export ANSIBLE_CACHE_PLUGINS=./plugins/cache
+export ANSIBLE_CACHE_PLUGIN_CONNECTION=localhost:6379:0
+export ANSIBLE_CACHE_PLUGIN_PREFIX='ansible_facts_'
+
+# Test legacy cache plugins (that use ansible.constants) and
+# new cache plugins that use config manager both work for facts.
+for fact_cache in legacy_redis configurable_redis; do
-# Use old redis for fact caching
-count=$(ansible-playbook test_fact_gathering.yml -vvv 2>&1 "$@" | tee out.txt | grep -c 'Gathering Facts' || true)
-failed_dep_version=$(grep out.txt -ce "'redis' python module (version 2.4.5 or newer) is required" || true)
-cat out.txt
-if [ "${failed_dep_version}" -eq 1 ] ; then
- STOP_REDIS
- CLEANUP_REDIS
- exit 0
-fi
-if [ "${count}" -ne 1 ] ; then
- STOP_REDIS
- CLEANUP_REDIS
- exit 1
-fi
+ export ANSIBLE_CACHE_PLUGIN="$fact_cache"
-# Attempt to use old redis for inventory caching; should not work
-export ANSIBLE_INVENTORY_CACHE=True
-export ANSIBLE_INVENTORY_CACHE_PLUGIN=redis
-export ANSIBLE_INVENTORY_ENABLED=test
-export ANSIBLE_INVENTORY_PLUGINS=./plugins/inventory
+ # test set_fact with cacheable: true
+ ansible-playbook test_fact_gathering.yml --tags set_fact "$@"
+ [ "$(redis-cli keys ansible_facts_localhost | wc -l)" -eq 1 ]
+ ansible-playbook inspect_cache.yml --tags set_fact "$@"
-ansible-inventory -i inventory_config --graph 2>&1 "$@" | tee out.txt | grep 'Cache options were provided but may not reconcile correctly unless set via set_options'
-res=$?
-cat out.txt
-if [ "${res}" -eq 1 ] ; then
- STOP_REDIS
- CLEANUP_REDIS
- exit 1
-fi
+ # cache gathered facts in addition
+ ansible-playbook test_fact_gathering.yml --tags gather_facts "$@"
+ ansible-playbook inspect_cache.yml --tags additive_gather_facts "$@"
-# Use new style redis for fact caching
-unset ANSIBLE_CACHE_PLUGINS
-count=$(ansible-playbook test_fact_gathering.yml -vvv "$@" | tee out.txt | grep -c 'Gathering Facts' || true)
-cat out.txt
-if [ "${count}" -ne 1 ] ; then
- STOP_REDIS
- CLEANUP_REDIS
- exit 1
-fi
+ # flush cache and only cache gathered facts
+ ansible-playbook test_fact_gathering.yml --flush-cache --tags gather_facts --tags flush "$@"
+ ansible-playbook inspect_cache.yml --tags gather_facts "$@"
-# Use new redis for inventory caching
-ansible-inventory -i inventory_config --graph "$@" 2>&1 | tee out.txt | grep 'host2'
-res=$?
-cat out.txt
+ redis-cli del ansible_facts_localhost
+ unset ANSIBLE_CACHE_PLUGIN
-STOP_REDIS
-CLEANUP_REDIS
+done
-exit $res
+# Legacy cache plugins need to be updated to use set_options/get_option to be compatible with inventory plugins.
+# Inventory plugins load cache options with the config manager.
+ansible-playbook test_inventory_cache.yml "$@"
diff --git a/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml b/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml
new file mode 100644
index 00000000..8aad37a3
--- /dev/null
+++ b/test/integration/targets/old_style_cache_plugins/setup_redis_cache.yml
@@ -0,0 +1,51 @@
+---
+- hosts: localhost
+ vars:
+ make: "{{ ( ansible_distribution != 'FreeBSD' ) | ternary('make', 'gmake') }}"
+ tasks:
+ - name: ensure make is available
+ command: "which {{ make }}"
+ register: has_make
+ ignore_errors: yes
+
+ - command: apk add --no-cache make
+ when: "has_make is failed and ansible_distribution == 'Alpine'"
+ become: yes
+
+ - package:
+ name: "{{ make }}"
+ state: present
+ become: yes
+ when: "has_make is failed and ansible_distribution != 'Alpine'"
+
+ - name: get the latest stable redis server release
+ get_url:
+ url: http://download.redis.io/redis-stable.tar.gz
+ dest: ./
+
+ - name: unpack the download
+ unarchive:
+ src: redis-stable.tar.gz
+ dest: ./
+
+ - command: "{{ make }}"
+ args:
+ chdir: redis-stable
+
+ - name: copy the executable into the path
+ copy:
+ src: "redis-stable/src/{{ item }}"
+ dest: /usr/local/bin/
+ mode: '0755'
+ become: yes
+ loop:
+ - redis-server
+ - redis-cli
+
+ - name: start the redis server in the background
+ command: redis-server --daemonize yes
+
+ - name: install dependency for the cache plugin
+ pip:
+ name: redis>2.4.5
+ state: present
diff --git a/test/integration/targets/old_style_cache_plugins/test_fact_gathering.yml b/test/integration/targets/old_style_cache_plugins/test_fact_gathering.yml
index 5c720b4e..2c77f0dd 100644
--- a/test/integration/targets/old_style_cache_plugins/test_fact_gathering.yml
+++ b/test/integration/targets/old_style_cache_plugins/test_fact_gathering.yml
@@ -1,6 +1,22 @@
---
- hosts: localhost
gather_facts: no
+ tags:
+ - flush
+ tasks:
+ - meta: clear_facts
- hosts: localhost
gather_facts: yes
+ gather_subset: min
+ tags:
+ - gather_facts
+
+- hosts: localhost
+ gather_facts: no
+ tags:
+ - set_fact
+ tasks:
+ - set_fact:
+ foo: bar
+ cacheable: true
diff --git a/test/integration/targets/old_style_cache_plugins/test_inventory_cache.yml b/test/integration/targets/old_style_cache_plugins/test_inventory_cache.yml
new file mode 100644
index 00000000..83b79831
--- /dev/null
+++ b/test/integration/targets/old_style_cache_plugins/test_inventory_cache.yml
@@ -0,0 +1,45 @@
+- hosts: localhost
+ gather_facts: no
+ vars:
+ reset_color: '\x1b\[0m'
+ color: '\x1b\[[0-9];[0-9]{2}m'
+ base_environment:
+ ANSIBLE_INVENTORY_PLUGINS: ./plugins/inventory
+ ANSIBLE_INVENTORY_ENABLED: test
+ ANSIBLE_INVENTORY_CACHE: true
+ ANSIBLE_CACHE_PLUGINS: ./plugins/cache
+ ANSIBLE_CACHE_PLUGIN_CONNECTION: localhost:6379:0
+ ANSIBLE_CACHE_PLUGIN_PREFIX: 'ansible_inventory_'
+ legacy_cache:
+ ANSIBLE_INVENTORY_CACHE_PLUGIN: legacy_redis
+ updated_cache:
+ ANSIBLE_INVENTORY_CACHE_PLUGIN: configurable_redis
+ tasks:
+ - name: legacy-style cache plugin should cause a warning
+ command: ansible-inventory -i inventory_config --graph
+ register: result
+ environment: "{{ base_environment | combine(legacy_cache) }}"
+
+ - name: test warning message
+ assert:
+ that:
+ - expected_warning in warning
+ - "'No inventory was parsed, only implicit localhost is available' in warning"
+ vars:
+ warning: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ expected_warning: "Cache options were provided but may not reconcile correctly unless set via set_options"
+
+ - name: cache plugin updated to use config manager should work
+ command: ansible-inventory -i inventory_config --graph
+ register: result
+ environment: "{{ base_environment | combine(updated_cache) }}"
+
+ - name: test warning message
+ assert:
+ that:
+ - unexpected_warning not in warning
+ - "'No inventory was parsed, only implicit localhost is available' not in warning"
+ - '"host1" in result.stdout'
+ vars:
+ warning: "{{ result.stderr | regex_replace(reset_color) | regex_replace(color) | regex_replace('\\n', ' ') }}"
+ unexpected_warning: "Cache options were provided but may not reconcile correctly unless set via set_options"
diff --git a/test/integration/targets/old_style_modules_posix/aliases b/test/integration/targets/old_style_modules_posix/aliases
index b5983214..a3ada117 100644
--- a/test/integration/targets/old_style_modules_posix/aliases
+++ b/test/integration/targets/old_style_modules_posix/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/target
diff --git a/test/integration/targets/omit/aliases b/test/integration/targets/omit/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/omit/aliases
+++ b/test/integration/targets/omit/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/order/aliases b/test/integration/targets/order/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/order/aliases
+++ b/test/integration/targets/order/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/package/tasks/main.yml b/test/integration/targets/package/tasks/main.yml
index 853b4711..c8b75da4 100644
--- a/test/integration/targets/package/tasks/main.yml
+++ b/test/integration/targets/package/tasks/main.yml
@@ -143,4 +143,108 @@
- name: verify at command is installed
shell: which at
+ - name: remove at package
+ package:
+ name: at
+ state: absent
+ register: at_install0
+
+ - name: validate package removal
+ assert:
+ that:
+ - "at_install0 is changed"
+
when: ansible_distribution in package_distros
+
+##
+## yum
+##
+# Validation for the new 'use' parameter in the yum action plugin, which aliases to 'use_backend'
+# Issue: https://github.com/ansible/ansible/issues/70774
+- block:
+ - name: verify that using both the 'use' and 'use_backend' parameters throws an error
+ yum:
+ name: at
+ state: present
+ use_backend: yum
+ use: yum
+ ignore_errors: yes
+ register: result
+
+ - name: verify error
+ assert:
+ that:
+ - "'parameters are mutually exclusive' in result.msg"
+ - "not result is changed"
+
+ - name: verify that package installation succeeds using the 'use' parameter
+ yum:
+ name: at
+ state: present
+ use: dnf
+ register: result
+
+ - name: verify the result
+ assert:
+ that:
+ - "result is changed"
+
+ - name: remove at package
+ yum:
+ name: at
+ state: absent
+ use: dnf
+ register: result
+
+ - name: verify package removal
+ assert:
+ that:
+ - "result is changed"
+
+ - name: verify that package installation succeeds using the 'use_backend' parameter
+ yum:
+ name: at
+ state: present
+ use_backend: dnf
+ register: result
+
+ - name: verify the result
+ assert:
+ that:
+ - "result is changed"
+
+ - name: remove at package
+ yum:
+ name: at
+ state: absent
+ use_backend: dnf
+ register: result
+
+ - name: verify package removal
+ assert:
+ that:
+ - "result is changed"
+
+ - name: verify that package installation succeeds without the 'use' or 'use_backend' parameters
+ yum:
+ name: at
+ state: present
+ register: result
+
+ - name: verify the result
+ assert:
+ that:
+ - "result is changed"
+
+ - name: remove at package
+ yum:
+ name: at
+ state: absent
+ register: result
+
+ - name: verify package removal
+ assert:
+ that:
+ - "result is changed"
+
+ when: ansible_distribution == "Fedora"
\ No newline at end of file
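
A condensed sketch of the aliasing exercised above (per issue 70774): 'use' is an alias for 'use_backend', so supplying both is rejected as mutually exclusive, and either one alone selects the backend.

    - yum:
        name: at
        state: present
        use: dnf          # equivalent to use_backend: dnf; passing both fails
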
diff --git a/test/integration/targets/parsing/aliases b/test/integration/targets/parsing/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/parsing/aliases
+++ b/test/integration/targets/parsing/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/path_lookups/aliases b/test/integration/targets/path_lookups/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/path_lookups/aliases
+++ b/test/integration/targets/path_lookups/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/path_lookups/play.yml b/test/integration/targets/path_lookups/play.yml
index 7321589b..233f972d 100644
--- a/test/integration/targets/path_lookups/play.yml
+++ b/test/integration/targets/path_lookups/play.yml
@@ -10,31 +10,31 @@
- copy: dest={{playbook_dir}}/files/testfile content='in files'
- copy: dest={{playbook_dir}}/testfile content='in local'
-- include: testplay.yml
+- import_playbook: testplay.yml
vars:
remove: nothing
role_out: in role files
play_out: in files
-- include: testplay.yml
+- import_playbook: testplay.yml
vars:
remove: roles/showfile/files/testfile
role_out: in role
play_out: in files
-- include: testplay.yml
+- import_playbook: testplay.yml
vars:
remove: roles/showfile/testfile
role_out: in role tasks
play_out: in files
-- include: testplay.yml
+- import_playbook: testplay.yml
vars:
remove: roles/showfile/tasks/testfile
role_out: in files
play_out: in files
-- include: testplay.yml
+- import_playbook: testplay.yml
vars:
remove: files/testfile
role_out: in local
diff --git a/test/integration/targets/path_with_comma_in_inventory/aliases b/test/integration/targets/path_with_comma_in_inventory/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/path_with_comma_in_inventory/aliases
+++ b/test/integration/targets/path_with_comma_in_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/pause/aliases b/test/integration/targets/pause/aliases
index 810f1ab6..b07d71c7 100644
--- a/test/integration/targets/pause/aliases
+++ b/test/integration/targets/pause/aliases
@@ -1,3 +1,3 @@
needs/target/setup_pexpect
shippable/posix/group1
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/pip/tasks/pip.yml b/test/integration/targets/pip/tasks/pip.yml
index 572c7b6f..2b9ad561 100644
--- a/test/integration/targets/pip/tasks/pip.yml
+++ b/test/integration/targets/pip/tasks/pip.yml
@@ -310,18 +310,51 @@
- "not (pip_install_empty is changed)"
# https://github.com/ansible/ansible/issues/41043
-- name: do not consider an empty string as a version
- pip:
- name: q
- state: present
- version: ""
- virtualenv: "{{ output_dir }}/pipenv"
- register: pip_install_empty_version_string
+- block:
+ - name: Ensure previous virtualenv no longer exists
+ file:
+ state: absent
+ name: "{{ output_dir }}/pipenv"
-- name: ensure that task installation did not fail
- assert:
- that:
- - pip_install_empty_version_string is successful
+ - name: do not consider an empty string as a version
+ pip:
+ name: q
+ state: present
+ version: ""
+ virtualenv: "{{ output_dir }}/pipenv"
+ register: pip_empty_version_string
+
+ - name: test idempotency with empty string
+ pip:
+ name: q
+ state: present
+ version: ""
+ virtualenv: "{{ output_dir }}/pipenv"
+ register: pip_empty_version_string_idempotency
+
+ - name: test idempotency without empty string
+ pip:
+ name: q
+ state: present
+ virtualenv: "{{ output_dir }}/pipenv"
+ register: pip_no_empty_version_string_idempotency
+
+ # 'state: present' with 'version: ""' behaves like 'state: latest' on first install
+ - name: ensure we installed the latest version
+ pip:
+ name: q
+ state: latest
+ virtualenv: "{{ output_dir }}/pipenv"
+ register: pip_empty_version_idempotency
+
+ - name: ensure that installation worked and is idempotent
+ assert:
+ that:
+ - pip_empty_version_string is changed
+ - pip_empty_version_string is successful
+ - pip_empty_version_idempotency is not changed
+ - pip_no_empty_version_string_idempotency is not changed
+ - pip_empty_version_string_idempotency is not changed
# test version specifiers
- name: make sure no test_package installed now
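
A condensed sketch of the behavior asserted in the block above: an empty version string is ignored rather than treated as a version, so the task installs the latest release on first run and is a no-op on reruns with or without the empty string (the virtualenv path here is illustrative only).

    - pip:
        name: q
        state: present
        version: ""
        virtualenv: /tmp/pipenv   # hypothetical path
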
diff --git a/test/integration/targets/pkg_resources/aliases b/test/integration/targets/pkg_resources/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/pkg_resources/aliases
+++ b/test/integration/targets/pkg_resources/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/play_iterator/aliases b/test/integration/targets/play_iterator/aliases
index 3005e4b2..498fedd5 100644
--- a/test/integration/targets/play_iterator/aliases
+++ b/test/integration/targets/play_iterator/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/playbook/aliases b/test/integration/targets/playbook/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/playbook/aliases
+++ b/test/integration/targets/playbook/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/playbook/empty.yml b/test/integration/targets/playbook/empty.yml
new file mode 100644
index 00000000..fe51488c
--- /dev/null
+++ b/test/integration/targets/playbook/empty.yml
@@ -0,0 +1 @@
+[]
diff --git a/test/integration/targets/playbook/empty_hosts.yml b/test/integration/targets/playbook/empty_hosts.yml
new file mode 100644
index 00000000..c9493c09
--- /dev/null
+++ b/test/integration/targets/playbook/empty_hosts.yml
@@ -0,0 +1,4 @@
+- hosts: []
+ tasks:
+ - debug:
+ msg: does not run
diff --git a/test/integration/targets/playbook/malformed_post_tasks.yml b/test/integration/targets/playbook/malformed_post_tasks.yml
new file mode 100644
index 00000000..4c134115
--- /dev/null
+++ b/test/integration/targets/playbook/malformed_post_tasks.yml
@@ -0,0 +1,2 @@
+- hosts: localhost
+ post_tasks: 123
diff --git a/test/integration/targets/playbook/malformed_pre_tasks.yml b/test/integration/targets/playbook/malformed_pre_tasks.yml
new file mode 100644
index 00000000..6c58477f
--- /dev/null
+++ b/test/integration/targets/playbook/malformed_pre_tasks.yml
@@ -0,0 +1,2 @@
+- hosts: localhost
+ pre_tasks: 123
diff --git a/test/integration/targets/playbook/malformed_roles.yml b/test/integration/targets/playbook/malformed_roles.yml
new file mode 100644
index 00000000..35db56e7
--- /dev/null
+++ b/test/integration/targets/playbook/malformed_roles.yml
@@ -0,0 +1,2 @@
+- hosts: localhost
+ roles: 123
diff --git a/test/integration/targets/playbook/malformed_tasks.yml b/test/integration/targets/playbook/malformed_tasks.yml
new file mode 100644
index 00000000..123c059f
--- /dev/null
+++ b/test/integration/targets/playbook/malformed_tasks.yml
@@ -0,0 +1,2 @@
+- hosts: localhost
+ tasks: 123
diff --git a/test/integration/targets/playbook/malformed_vars_prompt.yml b/test/integration/targets/playbook/malformed_vars_prompt.yml
new file mode 100644
index 00000000..5447197d
--- /dev/null
+++ b/test/integration/targets/playbook/malformed_vars_prompt.yml
@@ -0,0 +1,3 @@
+- hosts: localhost
+ vars_prompt:
+ - foo: bar
diff --git a/test/integration/targets/playbook/old_style_role.yml b/test/integration/targets/playbook/old_style_role.yml
new file mode 100644
index 00000000..015f263a
--- /dev/null
+++ b/test/integration/targets/playbook/old_style_role.yml
@@ -0,0 +1,3 @@
+- hosts: localhost
+ roles:
+ - foo,bar
diff --git a/test/integration/targets/playbook/remote_user_and_user.yml b/test/integration/targets/playbook/remote_user_and_user.yml
new file mode 100644
index 00000000..c9e2389d
--- /dev/null
+++ b/test/integration/targets/playbook/remote_user_and_user.yml
@@ -0,0 +1,6 @@
+- hosts: localhost
+ remote_user: a
+ user: b
+ tasks:
+ - debug:
+ msg: did not run
diff --git a/test/integration/targets/playbook/roles_null.yml b/test/integration/targets/playbook/roles_null.yml
new file mode 100644
index 00000000..d06bcd15
--- /dev/null
+++ b/test/integration/targets/playbook/roles_null.yml
@@ -0,0 +1,3 @@
+- name: null roles is okay
+ hosts: localhost
+ roles: null
diff --git a/test/integration/targets/playbook/runme.sh b/test/integration/targets/playbook/runme.sh
index 25e2e5a6..cc8d4957 100755
--- a/test/integration/targets/playbook/runme.sh
+++ b/test/integration/targets/playbook/runme.sh
@@ -7,3 +7,86 @@ ansible-playbook -i ../../inventory types.yml -v "$@"
# test timeout
ansible-playbook -i ../../inventory timeout.yml -v "$@"
+
+# our Play class allows for 'user' or 'remote_user', but not both.
+# first test that both user and remote_user work individually
+set +e
+result="$(ansible-playbook -i ../../inventory user.yml -v "$@" 2>&1)"
+set -e
+grep -q "worked with user" <<< "$result"
+grep -q "worked with remote_user" <<< "$result"
+
+# then test that the play errors if user and remote_user both exist
+echo "EXPECTED ERROR: Ensure we fail properly if a play has both user and remote_user."
+set +e
+result="$(ansible-playbook -i ../../inventory remote_user_and_user.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! both 'user' and 'remote_user' are set for this play." <<< "$result"
+
+# test that playbook errors if len(plays) == 0
+echo "EXPECTED ERROR: Ensure we fail properly if a playbook is an empty list."
+set +e
+result="$(ansible-playbook -i ../../inventory empty.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! A playbook must contain at least one play" <<< "$result"
+
+# test that play errors if len(hosts) == 0
+echo "EXPECTED ERROR: Ensure we fail properly if a play has 0 hosts."
+set +e
+result="$(ansible-playbook -i ../../inventory empty_hosts.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! Hosts list cannot be empty. Please check your playbook" <<< "$result"
+
+# test that play errors if tasks is malformed
+echo "EXPECTED ERROR: Ensure we fail properly if tasks is malformed."
+set +e
+result="$(ansible-playbook -i ../../inventory malformed_tasks.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! A malformed block was encountered while loading tasks: 123 should be a list or None" <<< "$result"
+
+# test that play errors if pre_tasks is malformed
+echo "EXPECTED ERROR: Ensure we fail properly if pre_tasks is malformed."
+set +e
+result="$(ansible-playbook -i ../../inventory malformed_pre_tasks.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! A malformed block was encountered while loading pre_tasks" <<< "$result"
+
+# test that play errors if post_tasks is malformed
+echo "EXPECTED ERROR: Ensure we fail properly if post_tasks is malformed."
+set +e
+result="$(ansible-playbook -i ../../inventory malformed_post_tasks.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! A malformed block was encountered while loading post_tasks" <<< "$result"
+
+# test roles: null -- it gets converted to [] internally
+ansible-playbook -i ../../inventory roles_null.yml -v "$@"
+
+# test roles: 123 -- errors
+echo "EXPECTED ERROR: Ensure we fail properly if roles is malformed."
+set +e
+result="$(ansible-playbook -i ../../inventory malformed_roles.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! A malformed role declaration was encountered." <<< "$result"
+
+# test roles: ["foo,bar"] -- errors about old style
+echo "EXPECTED ERROR: Ensure we fail properly if old style role is given."
+set +e
+result="$(ansible-playbook -i ../../inventory old_style_role.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! Invalid old style role requirement: foo,bar" <<< "$result"
+
+# test a vars_prompt entry that has no name
+echo "EXPECTED ERROR: Ensure we fail properly if vars_prompt has no name."
+set +e
+result="$(ansible-playbook -i ../../inventory malformed_vars_prompt.yml -v "$@" 2>&1)"
+set -e
+grep -q "ERROR! Invalid vars_prompt data structure, missing 'name' key" <<< "$result"
+
+# test vars_prompt: null
+ansible-playbook -i ../../inventory vars_prompt_null.yml -v "$@"
+
+# test vars_files: null
+ansible-playbook -i ../../inventory vars_files_null.yml -v "$@"
+
+# test vars_files: filename.yml
+ansible-playbook -i ../../inventory vars_files_string.yml -v "$@"
diff --git a/test/integration/targets/playbook/some_vars.yml b/test/integration/targets/playbook/some_vars.yml
new file mode 100644
index 00000000..78353654
--- /dev/null
+++ b/test/integration/targets/playbook/some_vars.yml
@@ -0,0 +1,2 @@
+a_variable: yep
+another: hi
diff --git a/test/integration/targets/playbook/user.yml b/test/integration/targets/playbook/user.yml
new file mode 100644
index 00000000..8b4029b8
--- /dev/null
+++ b/test/integration/targets/playbook/user.yml
@@ -0,0 +1,23 @@
+- hosts: localhost
+ tasks:
+ - command: whoami
+ register: whoami
+
+ - assert:
+ that:
+ - whoami is successful
+
+ - set_fact:
+ me: "{{ whoami.stdout }}"
+
+- hosts: localhost
+ user: "{{ me }}"
+ tasks:
+ - debug:
+ msg: worked with user ({{ me }})
+
+- hosts: localhost
+ remote_user: "{{ me }}"
+ tasks:
+ - debug:
+ msg: worked with remote_user ({{ me }})
diff --git a/test/integration/targets/playbook/vars_files_null.yml b/test/integration/targets/playbook/vars_files_null.yml
new file mode 100644
index 00000000..64c21c66
--- /dev/null
+++ b/test/integration/targets/playbook/vars_files_null.yml
@@ -0,0 +1,3 @@
+- name: null vars_files is okay
+ hosts: localhost
+ vars_files: null
diff --git a/test/integration/targets/playbook/vars_files_string.yml b/test/integration/targets/playbook/vars_files_string.yml
new file mode 100644
index 00000000..9191d3c1
--- /dev/null
+++ b/test/integration/targets/playbook/vars_files_string.yml
@@ -0,0 +1,6 @@
+- hosts: localhost
+ vars_files: some_vars.yml
+ tasks:
+ - assert:
+ that:
+ - 'a_variable == "yep"'
diff --git a/test/integration/targets/playbook/vars_prompt_null.yml b/test/integration/targets/playbook/vars_prompt_null.yml
new file mode 100644
index 00000000..4fdfa7c1
--- /dev/null
+++ b/test/integration/targets/playbook/vars_prompt_null.yml
@@ -0,0 +1,3 @@
+- name: null vars prompt is okay
+ hosts: localhost
+ vars_prompt: null
diff --git a/test/integration/targets/plugin_config_for_inventory/aliases b/test/integration/targets/plugin_config_for_inventory/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/plugin_config_for_inventory/aliases
+++ b/test/integration/targets/plugin_config_for_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/plugin_filtering/aliases b/test/integration/targets/plugin_filtering/aliases
index 3005e4b2..498fedd5 100644
--- a/test/integration/targets/plugin_filtering/aliases
+++ b/test/integration/targets/plugin_filtering/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/plugin_loader/aliases b/test/integration/targets/plugin_loader/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/plugin_loader/aliases
+++ b/test/integration/targets/plugin_loader/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/plugin_namespace/aliases b/test/integration/targets/plugin_namespace/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/plugin_namespace/aliases
+++ b/test/integration/targets/plugin_namespace/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/prepare_http_tests/tasks/kerberos.yml b/test/integration/targets/prepare_http_tests/tasks/kerberos.yml
index 06feea1c..2678b468 100644
--- a/test/integration/targets/prepare_http_tests/tasks/kerberos.yml
+++ b/test/integration/targets/prepare_http_tests/tasks/kerberos.yml
@@ -38,7 +38,9 @@
- name: Install python gssapi
pip:
name:
- - gssapi
+ - decorator < 5.0.0 ; python_version < '3.5' # decorator 5.0.5 and later require python 3.5 or later
+ - gssapi < 1.6.0 ; python_version <= '2.7' # gssapi 1.6.0 and later require python 3 or later
+ - gssapi ; python_version > '2.7'
- importlib ; python_version < '2.7'
state: present
extra_args: '-c {{ remote_constraints }}'
diff --git a/test/integration/targets/rel_plugin_loading/aliases b/test/integration/targets/rel_plugin_loading/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/rel_plugin_loading/aliases
+++ b/test/integration/targets/rel_plugin_loading/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/remote_tmp/aliases b/test/integration/targets/remote_tmp/aliases
index 757c9966..4b8559d9 100644
--- a/test/integration/targets/remote_tmp/aliases
+++ b/test/integration/targets/remote_tmp/aliases
@@ -1,2 +1,4 @@
shippable/posix/group3
skip/aix
+context/target
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/remote_tmp/playbook.yml b/test/integration/targets/remote_tmp/playbook.yml
index 43f99ca5..5adef626 100644
--- a/test/integration/targets/remote_tmp/playbook.yml
+++ b/test/integration/targets/remote_tmp/playbook.yml
@@ -31,13 +31,16 @@
hosts: testhost
gather_facts: false
tasks:
+ - import_role:
+ name: ../setup_remote_tmp_dir
+
- file:
state: touch
- path: "{{ output_dir }}/65393"
+ path: "{{ remote_tmp_dir }}/65393"
- copy:
- src: "{{ output_dir }}/65393"
- dest: "{{ output_dir }}/65393.2"
+ src: "{{ remote_tmp_dir }}/65393"
+ dest: "{{ remote_tmp_dir }}/65393.2"
remote_src: true
- find:
@@ -52,6 +55,5 @@
- assert:
that:
- # Should only be AnsiballZ_find.py because find is actively running
- - result.files|length == 1
- - result.files[0].path.endswith('/AnsiballZ_find.py')
+ # Should find nothing since pipelining is used
+ - result.files|length == 0
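Note: the assertion changes because with pipelining enabled the module payload
is fed to the remote interpreter over stdin, so no AnsiballZ_*.py file is ever
written to the remote temporary directory. A minimal sketch of forcing
pipelining for a play, assuming an SSH-like connection that honors it:

    - hosts: testhost
      gather_facts: false
      vars:
        ansible_pipelining: true
      tasks:
        - ping: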
diff --git a/test/integration/targets/remote_tmp/runme.sh b/test/integration/targets/remote_tmp/runme.sh
index 8d1eebd6..69efd6e0 100755
--- a/test/integration/targets/remote_tmp/runme.sh
+++ b/test/integration/targets/remote_tmp/runme.sh
@@ -2,4 +2,4 @@
set -ux
-ansible-playbook -i ../../inventory playbook.yml -e "output_dir=${OUTPUT_DIR}" -v "$@"
+ansible-playbook -i ../../inventory playbook.yml -v "$@"
diff --git a/test/integration/targets/retry_task_name_in_callback/aliases b/test/integration/targets/retry_task_name_in_callback/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/retry_task_name_in_callback/aliases
+++ b/test/integration/targets/retry_task_name_in_callback/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/roles/aliases b/test/integration/targets/roles/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/roles/aliases
+++ b/test/integration/targets/roles/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/roles/no_dupes.yml b/test/integration/targets/roles/no_dupes.yml
index 0ac9ff94..7e1ecb15 100644
--- a/test/integration/targets/roles/no_dupes.yml
+++ b/test/integration/targets/roles/no_dupes.yml
@@ -17,3 +17,13 @@
tasks:
- name: execute role c which depends on a
import_role: name=c
+
+- name: play should only show 1 invocation of a, as dependencies in this play are deduped by include_role
+ hosts: testhost
+ gather_facts: false
+ tags: [ 'intasks' ]
+ tasks:
+ - name: execute role b which depends on a
+ include_role: name=b
+ - name: execute role c which also depends on a
+ include_role: name=c
diff --git a/test/integration/targets/roles/runme.sh b/test/integration/targets/roles/runme.sh
index f2058ff1..5f11c1fc 100755
--- a/test/integration/targets/roles/runme.sh
+++ b/test/integration/targets/roles/runme.sh
@@ -5,9 +5,10 @@ set -eux
# test no dupes when dependencies in b and c point to a in roles:
[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags inroles "$@" | grep -c '"msg": "A"')" = "1" ]
[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags acrossroles "$@" | grep -c '"msg": "A"')" = "1" ]
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory --tags intasks "$@" | grep -c '"msg": "A"')" = "1" ]
# but still dupe across plays
-[ "$(ansible-playbook no_dupes.yml -i ../../inventory "$@" | grep -c '"msg": "A"')" = "2" ]
+[ "$(ansible-playbook no_dupes.yml -i ../../inventory "$@" | grep -c '"msg": "A"')" = "3" ]
# include/import can execute another instance of role
[ "$(ansible-playbook allowed_dupes.yml -i ../../inventory --tags importrole "$@" | grep -c '"msg": "A"')" = "2" ]
diff --git a/test/integration/targets/roles_arg_spec/aliases b/test/integration/targets/roles_arg_spec/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/roles_arg_spec/aliases
+++ b/test/integration/targets/roles_arg_spec/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/roles_var_inheritance/aliases b/test/integration/targets/roles_var_inheritance/aliases
new file mode 100644
index 00000000..1d28bdb2
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/roles_var_inheritance/play.yml b/test/integration/targets/roles_var_inheritance/play.yml
new file mode 100644
index 00000000..170eef57
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/play.yml
@@ -0,0 +1,4 @@
+- hosts: localhost
+ roles:
+ - A
+ - B
diff --git a/test/integration/targets/roles_var_inheritance/roles/A/meta/main.yml b/test/integration/targets/roles_var_inheritance/roles/A/meta/main.yml
new file mode 100644
index 00000000..0e99e981
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/A/meta/main.yml
@@ -0,0 +1,4 @@
+dependencies:
+ - role: common_dep
+ vars:
+ test_var: A
diff --git a/test/integration/targets/roles_var_inheritance/roles/B/meta/main.yml b/test/integration/targets/roles_var_inheritance/roles/B/meta/main.yml
new file mode 100644
index 00000000..4da17403
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/B/meta/main.yml
@@ -0,0 +1,4 @@
+dependencies:
+ - role: common_dep
+ vars:
+ test_var: B
diff --git a/test/integration/targets/roles_var_inheritance/roles/child_nested_dep/vars/main.yml b/test/integration/targets/roles_var_inheritance/roles/child_nested_dep/vars/main.yml
new file mode 100644
index 00000000..6723fa07
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/child_nested_dep/vars/main.yml
@@ -0,0 +1 @@
+var_precedence: dependency
diff --git a/test/integration/targets/roles_var_inheritance/roles/common_dep/meta/main.yml b/test/integration/targets/roles_var_inheritance/roles/common_dep/meta/main.yml
new file mode 100644
index 00000000..1ede7be8
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/common_dep/meta/main.yml
@@ -0,0 +1,4 @@
+dependencies:
+ - role: nested_dep
+ vars:
+ nested_var: "{{ test_var }}"
diff --git a/test/integration/targets/roles_var_inheritance/roles/common_dep/vars/main.yml b/test/integration/targets/roles_var_inheritance/roles/common_dep/vars/main.yml
new file mode 100644
index 00000000..87b6b580
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/common_dep/vars/main.yml
@@ -0,0 +1 @@
+var_precedence: parent
diff --git a/test/integration/targets/roles_var_inheritance/roles/nested_dep/meta/main.yml b/test/integration/targets/roles_var_inheritance/roles/nested_dep/meta/main.yml
new file mode 100644
index 00000000..231c6c14
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/nested_dep/meta/main.yml
@@ -0,0 +1,3 @@
+allow_duplicates: yes
+dependencies:
+ - child_nested_dep
diff --git a/test/integration/targets/roles_var_inheritance/roles/nested_dep/tasks/main.yml b/test/integration/targets/roles_var_inheritance/roles/nested_dep/tasks/main.yml
new file mode 100644
index 00000000..c69070ca
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/roles/nested_dep/tasks/main.yml
@@ -0,0 +1,5 @@
+- debug:
+ var: nested_var
+
+- debug:
+ var: var_precedence
diff --git a/test/integration/targets/roles_var_inheritance/runme.sh b/test/integration/targets/roles_var_inheritance/runme.sh
new file mode 100755
index 00000000..791155a8
--- /dev/null
+++ b/test/integration/targets/roles_var_inheritance/runme.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+set -eux
+
+ansible-playbook -i ../../inventory play.yml "$@" | tee out.txt
+
+test "$(grep out.txt -ce '"nested_var": "A"')" == 1
+test "$(grep out.txt -ce '"nested_var": "B"')" == 1
+test "$(grep out.txt -ce '"var_precedence": "dependency"')" == 2
diff --git a/test/integration/targets/rpm_key/tasks/main.yaml b/test/integration/targets/rpm_key/tasks/main.yaml
index 9f6fd4ec..6f71ca6e 100644
--- a/test/integration/targets/rpm_key/tasks/main.yaml
+++ b/test/integration/targets/rpm_key/tasks/main.yaml
@@ -1,2 +1,2 @@
- - include: 'rpm_key.yaml'
+ - include_tasks: 'rpm_key.yaml'
when: ansible_os_family == "RedHat"
diff --git a/test/integration/targets/run_modules/aliases b/test/integration/targets/run_modules/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/run_modules/aliases
+++ b/test/integration/targets/run_modules/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/set_fact/aliases b/test/integration/targets/set_fact/aliases
index 757c9966..10179323 100644
--- a/test/integration/targets/set_fact/aliases
+++ b/test/integration/targets/set_fact/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/set_fact/runme.sh b/test/integration/targets/set_fact/runme.sh
index 781894a0..93093599 100755
--- a/test/integration/targets/set_fact/runme.sh
+++ b/test/integration/targets/set_fact/runme.sh
@@ -31,3 +31,6 @@ ANSIBLE_JINJA2_NATIVE=1 ansible-playbook -v set_fact_bool_conv_jinja2_native.yml
# Test parsing of values when using an empty string as a key
ansible-playbook -i inventory set_fact_empty_str_key.yml
+
+# https://github.com/ansible/ansible/issues/21088
+ansible-playbook -i inventory "$@" set_fact_auto_unsafe.yml
diff --git a/test/integration/targets/set_fact/set_fact_auto_unsafe.yml b/test/integration/targets/set_fact/set_fact_auto_unsafe.yml
new file mode 100644
index 00000000..b0fb4dcf
--- /dev/null
+++ b/test/integration/targets/set_fact/set_fact_auto_unsafe.yml
@@ -0,0 +1,10 @@
+- hosts: localhost
+ gather_facts: false
+ tasks:
+ - set_fact:
+ foo: bar
+ register: baz
+
+ - assert:
+ that:
+ - baz.ansible_facts.foo|type_debug != "AnsibleUnsafeText"
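Note: the guarded regression (ansible/ansible#21088) is set_fact results coming
back wrapped as AnsibleUnsafeText, which blocks later templating of the value.
The type_debug filter exposes the wrapper type; a minimal sketch:

    - set_fact:
        foo: bar
      register: baz
    - debug:
        msg: "{{ baz.ansible_facts.foo | type_debug }}"   # expected: not AnsibleUnsafeText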
diff --git a/test/integration/targets/set_stats/aliases b/test/integration/targets/set_stats/aliases
new file mode 100644
index 00000000..a1b27a83
--- /dev/null
+++ b/test/integration/targets/set_stats/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/set_stats/runme.sh b/test/integration/targets/set_stats/runme.sh
new file mode 100755
index 00000000..27193dc8
--- /dev/null
+++ b/test/integration/targets/set_stats/runme.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+set -eux
+
+export ANSIBLE_SHOW_CUSTOM_STATS=yes
+
+# Simple tests
+ansible-playbook test_simple.yml -i "${INVENTORY_PATH}"
+
+# This playbook does two set_stats calls setting my_int to 10 and 15.
+# The aggregated output should add to 25.
+output=$(ansible-playbook test_aggregate.yml -i "${INVENTORY_PATH}" | grep -c '"my_int": 25')
+test "$output" -eq 1
diff --git a/test/integration/targets/set_stats/test_aggregate.yml b/test/integration/targets/set_stats/test_aggregate.yml
new file mode 100644
index 00000000..7f12895d
--- /dev/null
+++ b/test/integration/targets/set_stats/test_aggregate.yml
@@ -0,0 +1,13 @@
+---
+- hosts: testhost
+ gather_facts: false
+ tasks:
+ - name: First set_stats
+ set_stats:
+ data:
+ my_int: 10
+
+ - name: Second set_stats
+ set_stats:
+ data:
+ my_int: 15
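Note: with the default aggregate: yes, repeated set_stats calls for the same
key are summed, so 10 + 15 produces the 25 that runme.sh greps for. A hedged
sketch of the opposite behavior, where the second call replaces the first:

    - set_stats:
        data:
          my_int: 10
    - set_stats:
        data:
          my_int: 15
        aggregate: no    # final my_int is 15, not 25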
diff --git a/test/integration/targets/set_stats/test_simple.yml b/test/integration/targets/set_stats/test_simple.yml
new file mode 100644
index 00000000..0f62120d
--- /dev/null
+++ b/test/integration/targets/set_stats/test_simple.yml
@@ -0,0 +1,79 @@
+---
+- hosts: testhost
+ gather_facts: false
+ tasks:
+ - name: test simple data with defaults
+ set_stats:
+ data:
+ my_int: 42
+ my_string: "foo"
+ register: result
+
+ - name: assert simple data return
+ assert:
+ that:
+ - result is succeeded
+ - not result.changed
+ - '"ansible_stats" in result'
+ - '"data" in result.ansible_stats'
+ - result.ansible_stats.data.my_int == 42
+ - result.ansible_stats.data.my_string == "foo"
+ - '"per_host" in result.ansible_stats'
+ - not result.ansible_stats.per_host
+ - '"aggregate" in result.ansible_stats'
+ - result.ansible_stats.aggregate
+
+ - name: test per_host and aggregate settings
+ set_stats:
+ data:
+ my_int: 42
+ per_host: yes
+ aggregate: no
+ register: result
+
+ - name: assert per_host and aggregate changes
+ assert:
+ that:
+ - result is succeeded
+ - not result.changed
+ - '"ansible_stats" in result'
+ - '"per_host" in result.ansible_stats'
+ - result.ansible_stats.per_host
+ - '"aggregate" in result.ansible_stats'
+ - not result.ansible_stats.aggregate
+
+ - name: Test bad call
+ block:
+ - name: "EXPECTED FAILURE - test invalid data type"
+ set_stats:
+ data:
+ - 1
+ - 2
+
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - test invalid data type"
+ - ansible_failed_result.msg == "The 'data' option needs to be a dictionary/hash"
+
+ - name: Test options from template
+ set_stats:
+ data:
+ my_string: "foo"
+ aggregate: "x"
+
+ - name: Test bad data
+ block:
+ - name: "EXPECTED FAILURE - bad data"
+ set_stats:
+ data:
+ .bad: 1
+ - fail:
+ msg: "should not get here"
+ rescue:
+ - assert:
+ that:
+ - ansible_failed_task.name == "EXPECTED FAILURE - bad data"
+ - ansible_failed_result.msg == "The variable name '.bad' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores."
diff --git a/test/integration/targets/setup_cron/defaults/main.yml b/test/integration/targets/setup_cron/defaults/main.yml
index e4b0123d..a6d1965f 100644
--- a/test/integration/targets/setup_cron/defaults/main.yml
+++ b/test/integration/targets/setup_cron/defaults/main.yml
@@ -1 +1 @@
-remote_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
+remote_dir: "{{ remote_tmp_dir }}"
diff --git a/test/integration/targets/setup_cron/meta/main.yml b/test/integration/targets/setup_cron/meta/main.yml
new file mode 100644
index 00000000..1810d4be
--- /dev/null
+++ b/test/integration/targets/setup_cron/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/setup_cron/tasks/main.yml b/test/integration/targets/setup_cron/tasks/main.yml
index c5a988e0..d7ce3303 100644
--- a/test/integration/targets/setup_cron/tasks/main.yml
+++ b/test/integration/targets/setup_cron/tasks/main.yml
@@ -82,3 +82,15 @@
FAKETIME: "+0y x10"
LD_PRELOAD: "/usr/lib/faketime/libfaketime.so.1"
when: ansible_distribution == 'Alpine'
+
+- name: See if /etc/pam.d/crond exists
+ stat:
+ path: /etc/pam.d/crond
+ register: pamd
+
+# https://github.com/lxc/lxc/issues/661#issuecomment-222444916
+# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=726661
+- name: Work around containers not being able to write to /proc/self/loginuid
+ command: sed -i '/pam_loginuid\.so$/ s/required/optional/' /etc/pam.d/crond
+ when:
+ - pamd.stat.exists
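Note: the sed downgrades pam_loginuid.so from required to optional so crond can
open PAM sessions inside containers, where /proc/self/loginuid is not writable.
An equivalent lineinfile sketch, assuming the stock "session required
pam_loginuid.so" line format:

    - name: relax pam_loginuid for containers (sketch, assumes stock line format)
      lineinfile:
        path: /etc/pam.d/crond
        regexp: '^session\s+required\s+pam_loginuid\.so$'
        line: 'session    optional   pam_loginuid.so'
      when: pamd.stat.exists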
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml
deleted file mode 100644
index dec5b548..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 2 on FreeBSD 11
- pkgng:
- name: py27-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml
deleted file mode 100644
index eb01d00f..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-- name: Downgrade to pip version 18.1 to work around a PEP 517 virtualenv bug
- # pip 19.0.0 added support for PEP 517
- # versions as recent as 19.0.3 fail to install paramiko in a virtualenv due to a BackendUnavailable exception
- # installation without a virtualenv succeeds
- pip:
- name: pip==18.1
-- name: Setup remote constraints
- include_tasks: setup-remote-constraints.yml
-- name: Install Paramiko for Python 3 on FreeBSD 11
- pip: # no py36-paramiko package exists for FreeBSD 11
- name: paramiko
- extra_args: "-c {{ remote_constraints }}"
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml
deleted file mode 100644
index 9a7bfb67..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 11.4
- pkgng:
- name: py37-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml
deleted file mode 100644
index 29e78969..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 2 on FreeBSD 12
- pkgng:
- name: py27-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml
deleted file mode 100644
index 2aa7b500..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 12
- pkgng:
- name: py36-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml
deleted file mode 100644
index 4fe6011b..00000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 12.2
- pkgng:
- name: py37-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml
new file mode 100644
index 00000000..27daf3cf
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml
@@ -0,0 +1,6 @@
+- name: Setup remote constraints
+ include_tasks: setup-remote-constraints.yml
+- name: Install Paramiko for Python 3 on FreeBSD
+ pip: # no package in pkg, just use pip
+ name: paramiko
+ extra_args: "-c {{ remote_constraints }}"
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml
deleted file mode 100644
index d27f831c..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 2 on FreeBSD 11
- pkgng:
- name: py27-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml
deleted file mode 100644
index 33f292e8..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 11
- pip: # no py36-paramiko package exists for FreeBSD 11
- name: paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml
deleted file mode 100644
index 86956fd9..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 11.4
- pkgng:
- name: py37-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml
deleted file mode 100644
index 79352487..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 2 on FreeBSD 12
- pkgng:
- name: py27-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml
deleted file mode 100644
index 46d26ca3..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 12
- pkgng:
- name: py36-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml
deleted file mode 100644
index 0359bf4c..00000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 12.2
- pkgng:
- name: py37-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml
new file mode 100644
index 00000000..d3d3739b
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml
@@ -0,0 +1,4 @@
+- name: Uninstall Paramiko for Python 3 on FreeBSD
+ pip:
+ name: paramiko
+ state: absent
diff --git a/test/integration/targets/setup_remote_tmp_dir/defaults/main.yml b/test/integration/targets/setup_remote_tmp_dir/defaults/main.yml
new file mode 100644
index 00000000..3375fdf9
--- /dev/null
+++ b/test/integration/targets/setup_remote_tmp_dir/defaults/main.yml
@@ -0,0 +1,2 @@
+setup_remote_tmp_dir_skip_cleanup: no
+setup_remote_tmp_dir_cache_path: no
diff --git a/test/integration/targets/setup_remote_tmp_dir/handlers/main.yml b/test/integration/targets/setup_remote_tmp_dir/handlers/main.yml
index 229037c8..3c5b14f2 100644
--- a/test/integration/targets/setup_remote_tmp_dir/handlers/main.yml
+++ b/test/integration/targets/setup_remote_tmp_dir/handlers/main.yml
@@ -1,5 +1,7 @@
- name: delete temporary directory
include_tasks: default-cleanup.yml
+ when: not setup_remote_tmp_dir_skip_cleanup | bool
- name: delete temporary directory (windows)
include_tasks: windows-cleanup.yml
+ when: not setup_remote_tmp_dir_skip_cleanup | bool
diff --git a/test/integration/targets/setup_remote_tmp_dir/tasks/default.yml b/test/integration/targets/setup_remote_tmp_dir/tasks/default.yml
index 1e0f51b8..3be42eff 100644
--- a/test/integration/targets/setup_remote_tmp_dir/tasks/default.yml
+++ b/test/integration/targets/setup_remote_tmp_dir/tasks/default.yml
@@ -9,3 +9,4 @@
- name: record temporary directory
set_fact:
remote_tmp_dir: "{{ remote_tmp_dir.path }}"
+ cacheable: "{{ setup_remote_tmp_dir_cache_path | bool }}"
diff --git a/test/integration/targets/setup_rpm_repo/files/create-repo.py b/test/integration/targets/setup_rpm_repo/files/create-repo.py
deleted file mode 100644
index a4d10140..00000000
--- a/test/integration/targets/setup_rpm_repo/files/create-repo.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import sys
-from collections import namedtuple
-
-try:
- from rpmfluff import SimpleRpmBuild
- from rpmfluff import YumRepoBuild
-except ImportError:
- from rpmfluff.rpmbuild import SimpleRpmBuild
- from rpmfluff.yumrepobuild import YumRepoBuild
-
-try:
- from rpmfluff import can_use_rpm_weak_deps
-except ImportError:
- try:
- from rpmfluff.utils import can_use_rpm_weak_deps
- except ImportError:
- can_use_rpm_weak_deps = None
-
-RPM = namedtuple('RPM', ['name', 'version', 'release', 'epoch', 'recommends'])
-
-
-SPECS = [
- RPM('dinginessentail', '1.0', '1', None, None),
- RPM('dinginessentail', '1.0', '2', '1', None),
- RPM('dinginessentail', '1.1', '1', '1', None),
- RPM('dinginessentail-olive', '1.0', '1', None, None),
- RPM('dinginessentail-olive', '1.1', '1', None, None),
- RPM('landsidescalping', '1.0', '1', None, None),
- RPM('landsidescalping', '1.1', '1', None, None),
- RPM('dinginessentail-with-weak-dep', '1.0', '1', None, ['dinginessentail-weak-dep']),
- RPM('dinginessentail-weak-dep', '1.0', '1', None, None),
-]
-
-
-def main():
- try:
- arch = sys.argv[1]
- except IndexError:
- arch = 'x86_64'
-
- pkgs = []
- for spec in SPECS:
- pkg = SimpleRpmBuild(spec.name, spec.version, spec.release, [arch])
- pkg.epoch = spec.epoch
-
- if spec.recommends:
- # Skip packages that require weak deps but an older version of RPM is being used
- if not can_use_rpm_weak_deps or not can_use_rpm_weak_deps():
- continue
-
- for recommend in spec.recommends:
- pkg.add_recommends(recommend)
-
- pkgs.append(pkg)
-
- repo = YumRepoBuild(pkgs)
- repo.make(arch)
-
- for pkg in pkgs:
- pkg.clean()
-
- print(repo.repoDir)
-
-
-if __name__ == "__main__":
- main()
diff --git a/test/integration/targets/special_vars/aliases b/test/integration/targets/special_vars/aliases
index 2d9e6788..55b8ec06 100644
--- a/test/integration/targets/special_vars/aliases
+++ b/test/integration/targets/special_vars/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/target/include_parent_role_vars
+context/controller
diff --git a/test/integration/targets/special_vars_hosts/aliases b/test/integration/targets/special_vars_hosts/aliases
new file mode 100644
index 00000000..1d28bdb2
--- /dev/null
+++ b/test/integration/targets/special_vars_hosts/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/special_vars_hosts/inventory b/test/integration/targets/special_vars_hosts/inventory
new file mode 100644
index 00000000..8d69e574
--- /dev/null
+++ b/test/integration/targets/special_vars_hosts/inventory
@@ -0,0 +1,3 @@
+successful ansible_connection=local ansible_host=127.0.0.1 ansible_python_interpreter="{{ ansible_playbook_python }}"
+failed ansible_connection=local ansible_host=127.0.0.1 ansible_python_interpreter="{{ ansible_playbook_python }}"
+unreachable ansible_connection=ssh ansible_host=127.0.0.1 ansible_port=1011 # IANA Reserved port
diff --git a/test/integration/targets/special_vars_hosts/playbook.yml b/test/integration/targets/special_vars_hosts/playbook.yml
new file mode 100644
index 00000000..e3d9e435
--- /dev/null
+++ b/test/integration/targets/special_vars_hosts/playbook.yml
@@ -0,0 +1,53 @@
+---
+- hosts: all
+ gather_facts: no
+ tasks:
+ - name: test magic vars for hosts without any failed/unreachable (no serial)
+ assert:
+ that:
+ - ansible_play_batch | length == 3
+ - ansible_play_hosts | length == 3
+ - ansible_play_hosts_all | length == 3
+ run_once: True
+
+ - ping:
+ failed_when: "inventory_hostname == 'failed'"
+
+ - meta: clear_host_errors
+
+- hosts: all
+ gather_facts: no
+ tasks:
+ - name: test host errors were cleared
+ assert:
+ that:
+ - ansible_play_batch | length == 3
+ - ansible_play_hosts | length == 3
+ - ansible_play_hosts_all | length == 3
+ run_once: True
+
+ - ping:
+ failed_when: "inventory_hostname == 'failed'"
+
+ - name: test magic vars exclude failed/unreachable hosts
+ assert:
+ that:
+ - ansible_play_batch | length == 1
+ - ansible_play_hosts | length == 1
+ - "ansible_play_batch == ['successful']"
+ - "ansible_play_hosts == ['successful']"
+ - ansible_play_hosts_all | length == 3
+ run_once: True
+
+- hosts: all
+ gather_facts: no
+ tasks:
+ - name: test failed/unreachable persists between plays
+ assert:
+ that:
+ - ansible_play_batch | length == 1
+ - ansible_play_hosts | length == 1
+ - "ansible_play_batch == ['successful']"
+ - "ansible_play_hosts == ['successful']"
+ - ansible_play_hosts_all | length == 3
+ run_once: True
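Note: meta: clear_host_errors is what lets the second play start with all three
hosts again; it returns failed and unreachable hosts to the active batch. The
third play then shows that, without clearing, the error state persists across
play boundaries. The primitive in isolation:

    - meta: clear_host_errors   # restore failed/unreachable hosts to the play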
diff --git a/test/integration/targets/special_vars_hosts/runme.sh b/test/integration/targets/special_vars_hosts/runme.sh
new file mode 100755
index 00000000..81c1d9be
--- /dev/null
+++ b/test/integration/targets/special_vars_hosts/runme.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+set -eux
+
+ansible-playbook -i ./inventory playbook.yml "$@" | tee out.txt
+grep 'unreachable=2' out.txt
+grep 'failed=2' out.txt
diff --git a/test/integration/targets/split/aliases b/test/integration/targets/split/aliases
new file mode 100644
index 00000000..87958830
--- /dev/null
+++ b/test/integration/targets/split/aliases
@@ -0,0 +1,2 @@
+context/target
+shippable/posix/group1
diff --git a/test/integration/targets/split/tasks/main.yml b/test/integration/targets/split/tasks/main.yml
new file mode 100644
index 00000000..ead1c536
--- /dev/null
+++ b/test/integration/targets/split/tasks/main.yml
@@ -0,0 +1,30 @@
+- name: Get control host details
+ setup:
+ delegate_to: localhost
+ register: control_host
+- name: Get managed host details
+ setup:
+ register: managed_host
+- name: Check split state
+ stat:
+ path: "{{ output_dir }}"
+ register: split
+ ignore_errors: yes
+- name: Build non-split status message
+ set_fact:
+ message: "
+ {{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}
+ {{ control_host.ansible_facts.ansible_python.executable }} ({{ control_host.ansible_facts.ansible_python_version }}) ->
+ {{ managed_host.ansible_facts.ansible_python.executable }} ({{ managed_host.ansible_facts.ansible_python_version }})"
+ when: split is success and split.stat.exists
+- name: Build split status message
+ set_fact:
+ message: "
+ {{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}
+ {{ control_host.ansible_facts.ansible_python.executable }} ({{ control_host.ansible_facts.ansible_python_version }}) ->
+ {{ managed_host.ansible_facts.ansible_distribution }} {{ managed_host.ansible_facts.ansible_distribution_version }}
+ {{ managed_host.ansible_facts.ansible_python.executable }} ({{ managed_host.ansible_facts.ansible_python_version }})"
+ when: split is not success or not split.stat.exists
+- name: Show host details
+ debug:
+ msg: "{{ message }}"
diff --git a/test/integration/targets/subversion/roles/subversion/defaults/main.yml b/test/integration/targets/subversion/roles/subversion/defaults/main.yml
index f989345a..e647d598 100644
--- a/test/integration/targets/subversion/roles/subversion/defaults/main.yml
+++ b/test/integration/targets/subversion/roles/subversion/defaults/main.yml
@@ -1,11 +1,10 @@
---
apache_port: 11386 # cannot use 80 as httptester overrides this
-output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
-subversion_test_dir: '{{ output_dir }}/svn-test'
+subversion_test_dir: /tmp/ansible-svn-test-dir
subversion_server_dir: /tmp/ansible-svn # cannot use a path in the home dir without userdir or granting exec permission to the apache user
subversion_repo_name: ansible-test-repo
subversion_repo_url: http://127.0.0.1:{{ apache_port }}/svn/{{ subversion_repo_name }}
subversion_repo_auth_url: http://127.0.0.1:{{ apache_port }}/svnauth/{{ subversion_repo_name }}
subversion_username: subsvn_user'''
subversion_password: Password123!
-subversion_external_repo_url: https://github.com/ansible/ansible-base-test-container # GitHub serves SVN
+subversion_external_repo_url: https://github.com/ansible/ansible.github.com # GitHub serves SVN
diff --git a/test/integration/targets/subversion/roles/subversion/tasks/setup.yml b/test/integration/targets/subversion/roles/subversion/tasks/setup.yml
index cab9151a..3cf5af56 100644
--- a/test/integration/targets/subversion/roles/subversion/tasks/setup.yml
+++ b/test/integration/targets/subversion/roles/subversion/tasks/setup.yml
@@ -44,13 +44,6 @@
chdir: '{{ subversion_server_dir }}'
creates: '{{ subversion_server_dir }}/{{ subversion_repo_name }}'
-- name: apply ownership for all SVN directories
- file:
- path: '{{ subversion_server_dir }}'
- owner: '{{ apache_user }}'
- group: '{{ apache_group }}'
- recurse: True
-
- name: add test user to htpasswd for Subversion site
htpasswd:
path: '{{ subversion_server_dir }}/svn-auth-users'
@@ -58,11 +51,22 @@
password: '{{ subversion_password }}'
state: present
+- name: apply ownership for all SVN directories
+ file:
+ path: '{{ subversion_server_dir }}'
+ owner: '{{ apache_user }}'
+ group: '{{ apache_group }}'
+ recurse: True
+
- name: start test Apache SVN site - non Red Hat
command: apachectl -k start -f {{ subversion_server_dir }}/subversion.conf
+  async: 3600 # We kill apache manually in the cleanup phase
+ poll: 0
when: ansible_os_family not in ['RedHat', 'Alpine']
# On Red Hat based OS', we can't use apachectl to start up own instance, just use the raw httpd
- name: start test Apache SVN site - Red Hat
command: httpd -k start -f {{ subversion_server_dir }}/subversion.conf
+  async: 3600 # We kill apache manually in the cleanup phase
+ poll: 0
when: ansible_os_family in ['RedHat', 'Alpine']
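Note: async: 3600 with poll: 0 is the fire-and-forget pattern; the task returns
immediately and the process (here the test Apache instance) keeps running in
the background until the cleanup phase kills it. The pattern in a generic
sketch, with a hypothetical command:

    - name: start a long-running process without waiting (command hypothetical)
      command: /usr/local/bin/some-daemon --foreground
      async: 3600   # allow up to an hour before the async job is reaped
      poll: 0       # do not poll; move on immediately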
diff --git a/test/integration/targets/subversion/runme.sh b/test/integration/targets/subversion/runme.sh
index f505e581..c39bdc00 100755
--- a/test/integration/targets/subversion/runme.sh
+++ b/test/integration/targets/subversion/runme.sh
@@ -4,7 +4,7 @@ set -eu
cleanup() {
echo "Cleanup"
- ansible-playbook runme.yml -e "output_dir=${OUTPUT_DIR}" "$@" --tags cleanup
+ ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" --tags cleanup
echo "Done"
}
@@ -13,16 +13,19 @@ trap cleanup INT TERM EXIT
export ANSIBLE_ROLES_PATH=roles/
# Ensure subversion is set up
-ansible-playbook runme.yml "$@" -v --tags setup
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" -v --tags setup
# Test functionality
-ansible-playbook runme.yml "$@" -v --tags tests
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" -v --tags tests
# Test a warning is displayed for versions < 1.10.0 when a password is provided
-ansible-playbook runme.yml "$@" --tags warnings 2>&1 | tee out.txt
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" --tags warnings 2>&1 | tee out.txt
-version="$(svn --version -q)"
-secure=$(python -c "from distutils.version import LooseVersion; print(LooseVersion('$version') >= LooseVersion('1.10.0'))")
+version=$(ANSIBLE_FORCE_COLOR=0 ansible -i "${INVENTORY_PATH}" -m shell -a 'svn --version -q' testhost 2>/dev/null | tail -n 1)
+
+echo "svn --version is '${version}'"
+
+secure=$(python -c "from ansible.module_utils.compat.version import LooseVersion; print(LooseVersion('$version') >= LooseVersion('1.10.0'))")
if [[ "${secure}" = "False" ]] && [[ "$(grep -c 'To securely pass credentials, upgrade svn to version 1.10.0' out.txt)" -eq 1 ]]; then
echo "Found the expected warning"
diff --git a/test/integration/targets/subversion/runme.yml b/test/integration/targets/subversion/runme.yml
index c67d7b89..71c5e4b8 100644
--- a/test/integration/targets/subversion/runme.yml
+++ b/test/integration/targets/subversion/runme.yml
@@ -1,5 +1,5 @@
---
-- hosts: localhost
+- hosts: testhost
tasks:
- name: load OS specific vars
include_vars: '{{ item }}'
diff --git a/test/integration/targets/tags/aliases b/test/integration/targets/tags/aliases
index 757c9966..8278ec8b 100644
--- a/test/integration/targets/tags/aliases
+++ b/test/integration/targets/tags/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/task_ordering/aliases b/test/integration/targets/task_ordering/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/task_ordering/aliases
+++ b/test/integration/targets/task_ordering/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/task_ordering/tasks/main.yml b/test/integration/targets/task_ordering/tasks/main.yml
index 4a7828bf..a666006b 100644
--- a/test/integration/targets/task_ordering/tasks/main.yml
+++ b/test/integration/targets/task_ordering/tasks/main.yml
@@ -1,7 +1,7 @@
- set_fact:
temppath: "{{ remote_tmp_dir }}/output.txt"
-- include: taskorder-include.yml
+- include_tasks: taskorder-include.yml
with_items:
- 1
- 2
diff --git a/test/integration/targets/tasks/aliases b/test/integration/targets/tasks/aliases
index a6dafcf8..13e01f0c 100644
--- a/test/integration/targets/tasks/aliases
+++ b/test/integration/targets/tasks/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/tempfile/aliases b/test/integration/targets/tempfile/aliases
new file mode 100644
index 00000000..a6dafcf8
--- /dev/null
+++ b/test/integration/targets/tempfile/aliases
@@ -0,0 +1 @@
+shippable/posix/group1
diff --git a/test/integration/targets/tempfile/meta/main.yml b/test/integration/targets/tempfile/meta/main.yml
new file mode 100644
index 00000000..1810d4be
--- /dev/null
+++ b/test/integration/targets/tempfile/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_remote_tmp_dir
diff --git a/test/integration/targets/tempfile/tasks/main.yml b/test/integration/targets/tempfile/tasks/main.yml
new file mode 100644
index 00000000..2d783e6f
--- /dev/null
+++ b/test/integration/targets/tempfile/tasks/main.yml
@@ -0,0 +1,63 @@
+- name: Create a temporary file with defaults
+ tempfile:
+ register: temp_file_default
+
+- name: Create a temporary directory with defaults
+ tempfile:
+ state: directory
+ register: temp_dir_default
+
+- name: Create a temporary file with optional parameters
+ tempfile:
+ path: "{{ remote_tmp_dir }}"
+ prefix: hello.
+ suffix: .goodbye
+ register: temp_file_options
+
+- name: Create a temporary directory with optional parameters
+ tempfile:
+ state: directory
+ path: "{{ remote_tmp_dir }}"
+ prefix: hello.
+ suffix: .goodbye
+ register: temp_dir_options
+
+- name: Create a temporary file in a non-existent directory
+ tempfile:
+ path: "{{ remote_tmp_dir }}/does_not_exist"
+ register: temp_file_non_existent_path
+ ignore_errors: yes
+
+- name: Create a temporary directory in a non-existent directory
+ tempfile:
+ state: directory
+ path: "{{ remote_tmp_dir }}/does_not_exist"
+ register: temp_dir_non_existent_path
+ ignore_errors: yes
+
+- name: Check results
+ assert:
+ that:
+ - temp_file_default is changed
+ - temp_file_default.state == 'file'
+ - temp_file_default.path | basename | split('.') | first == 'ansible'
+
+ - temp_dir_default is changed
+ - temp_dir_default.state == 'directory'
+ - temp_dir_default.path | basename | split('.') | first == 'ansible'
+
+ - temp_file_options is changed
+ - temp_file_options.state == 'file'
+ - temp_file_options.path.startswith(remote_tmp_dir)
+ - temp_file_options.path | basename | split('.') | first == 'hello'
+ - temp_file_options.path | basename | split('.') | last == 'goodbye'
+
+ - temp_dir_options is changed
+ - temp_dir_options.state == 'directory'
+ - temp_dir_options.path.startswith(remote_tmp_dir)
+ - temp_dir_options.path | basename | split('.') | first == 'hello'
+ - temp_dir_options.path | basename | split('.') | last == 'goodbye'
+
+ - temp_file_non_existent_path is failed
+
+ - temp_dir_non_existent_path is failed
diff --git a/test/integration/targets/template/aliases b/test/integration/targets/template/aliases
index f0c24d20..327f088b 100644
--- a/test/integration/targets/template/aliases
+++ b/test/integration/targets/template/aliases
@@ -1,3 +1,3 @@
needs/root
shippable/posix/group5
-skip/aix
+context/controller # this "module" is actually an action that runs on the controller
diff --git a/test/integration/targets/template/files/custom_comment_string.expected b/test/integration/targets/template/files/custom_comment_string.expected
new file mode 100644
index 00000000..f3a08f7d
--- /dev/null
+++ b/test/integration/targets/template/files/custom_comment_string.expected
@@ -0,0 +1,2 @@
+Before
+After
diff --git a/test/integration/targets/template/runme.sh b/test/integration/targets/template/runme.sh
index a4f0bbe5..78f8d7b5 100755
--- a/test/integration/targets/template/runme.sh
+++ b/test/integration/targets/template/runme.sh
@@ -4,8 +4,8 @@ set -eux
ANSIBLE_ROLES_PATH=../ ansible-playbook template.yml -i ../../inventory -v "$@"
-# Test for #35571
-ansible testhost -i testhost, -m debug -a 'msg={{ hostvars["localhost"] }}' -e "vars1={{ undef }}" -e "vars2={{ vars1 }}"
+# Test for https://github.com/ansible/ansible/pull/35571
+ansible testhost -i testhost, -m debug -a 'msg={{ hostvars["localhost"] }}' -e "vars1={{ undef() }}" -e "vars2={{ vars1 }}"
# Test for https://github.com/ansible/ansible/issues/27262
ansible-playbook ansible_managed.yml -c ansible_managed.cfg -i ../../inventory -v "$@"
diff --git a/test/integration/targets/template/tasks/main.yml b/test/integration/targets/template/tasks/main.yml
index c5744d0d..f8848ef5 100644
--- a/test/integration/targets/template/tasks/main.yml
+++ b/test/integration/targets/template/tasks/main.yml
@@ -141,6 +141,26 @@
- 'import_as_with_context_diff_result.stdout == ""'
- "import_as_with_context_diff_result.rc == 0"
+# VERIFY comment_start_string and comment_end_string
+
+- name: Render a template with "comment_start_string" set to [#
+ template:
+ src: custom_comment_string.j2
+ dest: "{{output_dir}}/custom_comment_string.templated"
+ comment_start_string: "[#"
+ comment_end_string: "#]"
+ register: custom_comment_string_result
+
+- name: Get checksum of known good custom_comment_string.expected
+ stat:
+ path: "{{role_path}}/files/custom_comment_string.expected"
+ register: custom_comment_string_good
+
+- name: Verify templated custom_comment_string matches known good using checksum
+ assert:
+ that:
+ - "custom_comment_string_result.checksum == custom_comment_string_good.stat.checksum"
+
# VERIFY trim_blocks
- name: Render a template with "trim_blocks" set to False
@@ -727,4 +747,4 @@
- out.stdout == "bar=lookedup_bar"
# aliases file requires root for template tests so this should be safe
-- include: backup_test.yml
+- import_tasks: backup_test.yml
diff --git a/test/integration/targets/template/templates/custom_comment_string.j2 b/test/integration/targets/template/templates/custom_comment_string.j2
new file mode 100644
index 00000000..db0af48a
--- /dev/null
+++ b/test/integration/targets/template/templates/custom_comment_string.j2
@@ -0,0 +1,3 @@
+Before
+[# Test comment_start_string #]
+After
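Note: Jinja2's default comment delimiters are {# and #}; the new
comment_start_string/comment_end_string options swap them out, which matters
when the rendered output needs those characters literally. A hedged sketch
with a hypothetical source template:

    - template:
        src: literal_hash_brace.j2       # hypothetical file containing a literal "{#"
        dest: /tmp/out.txt
        comment_start_string: "[#"
        comment_end_string: "#]"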
diff --git a/test/integration/targets/template_jinja2_latest/aliases b/test/integration/targets/template_jinja2_latest/aliases
index 2a89ae7e..b9c19e3d 100644
--- a/test/integration/targets/template_jinja2_latest/aliases
+++ b/test/integration/targets/template_jinja2_latest/aliases
@@ -1,5 +1,5 @@
needs/root
shippable/posix/group2
needs/target/template
-skip/aix
+context/controller
needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
diff --git a/test/integration/targets/template_jinja2_non_native/aliases b/test/integration/targets/template_jinja2_non_native/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/template_jinja2_non_native/aliases
+++ b/test/integration/targets/template_jinja2_non_native/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/templating_lookups/aliases b/test/integration/targets/templating_lookups/aliases
index f8e28c7e..13e01f0c 100644
--- a/test/integration/targets/templating_lookups/aliases
+++ b/test/integration/targets/templating_lookups/aliases
@@ -1,2 +1,2 @@
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/templating_settings/aliases b/test/integration/targets/templating_settings/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/templating_settings/aliases
+++ b/test/integration/targets/templating_settings/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/test_core/aliases b/test/integration/targets/test_core/aliases
index 041b0cc7..70a7b7a9 100644
--- a/test/integration/targets/test_core/aliases
+++ b/test/integration/targets/test_core/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/test_files/aliases b/test/integration/targets/test_files/aliases
index 041b0cc7..70a7b7a9 100644
--- a/test/integration/targets/test_files/aliases
+++ b/test/integration/targets/test_files/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/test_mathstuff/aliases b/test/integration/targets/test_mathstuff/aliases
index 041b0cc7..70a7b7a9 100644
--- a/test/integration/targets/test_mathstuff/aliases
+++ b/test/integration/targets/test_mathstuff/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/throttle/aliases b/test/integration/targets/throttle/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/throttle/aliases
+++ b/test/integration/targets/throttle/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/unarchive/tasks/test_download.yml b/test/integration/targets/unarchive/tasks/test_download.yml
index 6b17449b..241f11b6 100644
--- a/test/integration/targets/unarchive/tasks/test_download.yml
+++ b/test/integration/targets/unarchive/tasks/test_download.yml
@@ -4,31 +4,41 @@
path: '{{remote_tmp_dir}}/test-unarchive-tar-gz'
state: directory
-- name: Install packages to make TLS connections work on CentOS 6
- pip:
- name:
- - urllib3==1.10.2
- - ndg_httpsclient==0.4.4
- - pyOpenSSL==16.2.0
- state: present
- when:
- - ansible_facts.distribution == 'CentOS'
- - not ansible_facts.python.has_sslcontext
-
-- name: unarchive a tar from an URL
- unarchive:
- src: "https://releases.ansible.com/ansible/ansible-latest.tar.gz"
- dest: "{{ remote_tmp_dir }}/test-unarchive-tar-gz"
- mode: "0700"
- remote_src: yes
- register: unarchive13
-
-- name: Test that unarchive succeeded
- assert:
- that:
- - "unarchive13.changed == true"
-
-- name: remove our tar.gz unarchive destination
- file:
- path: '{{ remote_tmp_dir }}/test-unarchive-tar-gz'
- state: absent
+- name: Test TLS download
+ block:
+ - name: Install packages to make TLS connections work on CentOS 6
+ pip:
+ name:
+ - urllib3==1.10.2
+ - ndg_httpsclient==0.4.4
+ - pyOpenSSL==16.2.0
+ state: present
+ when:
+ - ansible_facts.distribution == 'CentOS'
+ - not ansible_facts.python.has_sslcontext
+ - name: unarchive a tar from an URL
+ unarchive:
+ src: "https://releases.ansible.com/ansible/ansible-latest.tar.gz"
+ dest: "{{ remote_tmp_dir }}/test-unarchive-tar-gz"
+ mode: "0700"
+ remote_src: yes
+ register: unarchive13
+ - name: Test that unarchive succeeded
+ assert:
+ that:
+ - "unarchive13.changed == true"
+ always:
+ - name: Uninstall CentOS 6 TLS connections packages
+ pip:
+ name:
+ - urllib3
+ - ndg_httpsclient
+ - pyOpenSSL
+ state: absent
+ when:
+ - ansible_facts.distribution == 'CentOS'
+ - not ansible_facts.python.has_sslcontext
+ - name: remove our tar.gz unarchive destination
+ file:
+ path: '{{ remote_tmp_dir }}/test-unarchive-tar-gz'
+ state: absent
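Note: moving the CentOS 6 TLS shims and the download test into block/always
guarantees the pip packages and the extraction directory are cleaned up even
when the unarchive or assert step fails. The pattern in isolation:

    - block:
        - name: work that may fail
          command: /bin/true
      always:
        - name: cleanup that runs regardless of the block's outcome
          debug:
            msg: cleaning up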
diff --git a/test/integration/targets/unarchive/tasks/test_include.yml b/test/integration/targets/unarchive/tasks/test_include.yml
index 3ed30fa3..04842e0e 100644
--- a/test/integration/targets/unarchive/tasks/test_include.yml
+++ b/test/integration/targets/unarchive/tasks/test_include.yml
@@ -15,6 +15,7 @@
unarchive:
src: "{{ remote_tmp_dir }}/test-unarchive.zip"
dest: "{{ remote_tmp_dir }}/include-zip"
+ remote_src: yes
include:
- FOO-UNAR.TXT
@@ -39,6 +40,7 @@
unarchive:
src: "{{ remote_tmp_dir }}/test-unarchive-multi.tar"
dest: "{{ remote_tmp_dir }}/include-tar"
+ remote_src: yes
include:
- foo-unarchive-777.txt
@@ -61,6 +63,7 @@
unarchive:
src: "{{ remote_tmp_dir }}/test-unarchive-multi.tar"
dest: "{{ remote_tmp_dir }}/include-tar"
+ remote_src: yes
include:
- foo-unarchive-777.txt
exclude:
diff --git a/test/integration/targets/unarchive/tasks/test_owner_group.yml b/test/integration/targets/unarchive/tasks/test_owner_group.yml
index 95f1457e..227ad9ce 100644
--- a/test/integration/targets/unarchive/tasks/test_owner_group.yml
+++ b/test/integration/targets/unarchive/tasks/test_owner_group.yml
@@ -155,6 +155,8 @@
user:
name: testuser
state: absent
+ remove: yes
+ force: yes
- name: Remove testgroup
group:
diff --git a/test/integration/targets/unarchive/tasks/test_tar_gz_owner_group.yml b/test/integration/targets/unarchive/tasks/test_tar_gz_owner_group.yml
index 257692e1..e99f0380 100644
--- a/test/integration/targets/unarchive/tasks/test_tar_gz_owner_group.yml
+++ b/test/integration/targets/unarchive/tasks/test_tar_gz_owner_group.yml
@@ -41,6 +41,8 @@
user:
name: testuser
state: absent
+ remove: yes
+ force: yes
- name: Remove testgroup
group:
diff --git a/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml b/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
index e4c2bec5..7022bba1 100644
--- a/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
+++ b/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
@@ -75,7 +75,9 @@
name: unarchivetest1
state: absent
remove: yes
- become: no
+ force: yes
+ become: yes
+ become_user: root
- name: Remove user home directory on macOS
file:
diff --git a/test/integration/targets/undefined/aliases b/test/integration/targets/undefined/aliases
index 70a7b7a9..1d28bdb2 100644
--- a/test/integration/targets/undefined/aliases
+++ b/test/integration/targets/undefined/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/undefined/tasks/main.yml b/test/integration/targets/undefined/tasks/main.yml
index de6681a0..bbd82845 100644
--- a/test/integration/targets/undefined/tasks/main.yml
+++ b/test/integration/targets/undefined/tasks/main.yml
@@ -11,7 +11,8 @@
- assert:
that:
- - '"%r"|format(undef) == "AnsibleUndefined"'
+ - '"%r"|format(an_undefined_var) == "AnsibleUndefined"'
+ - '"%r"|format(undef()) == "AnsibleUndefined"'
# The existence of AnsibleUndefined in a templating result
# prevents safe_eval from turning the value into a python object
- names is string
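Note: ansible-core 2.12 replaces the bare undef variable with the undef()
global, which returns a fresh AnsibleUndefined and can carry a hint shown when
the value is used. A minimal sketch (variable name illustrative):

    - debug:
        msg: "{{ my_required_var | default(undef(hint='set my_required_var via -e')) }}"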
diff --git a/test/integration/targets/unicode/aliases b/test/integration/targets/unicode/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/unicode/aliases
+++ b/test/integration/targets/unicode/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/unsafe_writes/aliases b/test/integration/targets/unsafe_writes/aliases
index 4fb7a116..cf954afc 100644
--- a/test/integration/targets/unsafe_writes/aliases
+++ b/test/integration/targets/unsafe_writes/aliases
@@ -1,6 +1,8 @@
+context/target
needs/root
skip/freebsd
skip/osx
skip/macos
skip/aix
shippable/posix/group3
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/unsafe_writes/basic.yml b/test/integration/targets/unsafe_writes/basic.yml
index 410726ad..99a3195f 100644
--- a/test/integration/targets/unsafe_writes/basic.yml
+++ b/test/integration/targets/unsafe_writes/basic.yml
@@ -1,9 +1,23 @@
- hosts: testhost
gather_facts: false
- vars:
- testudir: '{{output_dir}}/unsafe_writes_test'
- testufile: '{{testudir}}/unreplacablefile.txt'
tasks:
+ - import_role:
+ name: ../setup_remote_tmp_dir
+ - name: define test directory
+ set_fact:
+ testudir: '{{remote_tmp_dir}}/unsafe_writes_test'
+ - name: define test file
+ set_fact:
+ testufile: '{{testudir}}/unreplacablefile.txt'
+ - name: define test environment with unsafe writes set
+ set_fact:
+ test_env:
+ ANSIBLE_UNSAFE_WRITES: "{{ lookup('env', 'ANSIBLE_UNSAFE_WRITES') }}"
+ when: lookup('env', 'ANSIBLE_UNSAFE_WRITES')
+ - name: define test environment without unsafe writes set
+ set_fact:
+ test_env: {}
+ when: not lookup('env', 'ANSIBLE_UNSAFE_WRITES')
- name: test unsafe_writes on immutable dir (file cannot be atomically replaced)
block:
- name: create target dir
@@ -61,6 +75,7 @@
msg: "Failed with envvar: {{env_enabled}}, due AUW: to {{q('env', 'ANSIBLE_UNSAFE_WRITES')}}"
that:
- env_enabled and copy_with_env is changed or not env_enabled and copy_with_env is failed
+ environment: "{{ test_env }}"
always:
- name: remove immutable flag from dir to prevent issues with cleanup
file: path={{testudir}} state=directory attributes="-i"
diff --git a/test/integration/targets/unsafe_writes/runme.sh b/test/integration/targets/unsafe_writes/runme.sh
index 791a5676..619ce025 100755
--- a/test/integration/targets/unsafe_writes/runme.sh
+++ b/test/integration/targets/unsafe_writes/runme.sh
@@ -3,10 +3,10 @@
set -eux
# test w/o fallback env var
-ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ansible-playbook basic.yml -i ../../inventory "$@"
# test enabled fallback env var
-ANSIBLE_UNSAFE_WRITES=1 ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ANSIBLE_UNSAFE_WRITES=1 ansible-playbook basic.yml -i ../../inventory "$@"
# test disabled fallback env var
-ANSIBLE_UNSAFE_WRITES=0 ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ANSIBLE_UNSAFE_WRITES=0 ansible-playbook basic.yml -i ../../inventory "$@"
diff --git a/test/integration/targets/until/aliases b/test/integration/targets/until/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/until/aliases
+++ b/test/integration/targets/until/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/unvault/aliases b/test/integration/targets/unvault/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/unvault/aliases
+++ b/test/integration/targets/unvault/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/uri/meta/main.yml b/test/integration/targets/uri/meta/main.yml
index 39b94950..2c2155ab 100644
--- a/test/integration/targets/uri/meta/main.yml
+++ b/test/integration/targets/uri/meta/main.yml
@@ -2,4 +2,3 @@ dependencies:
- prepare_tests
- prepare_http_tests
- setup_remote_tmp_dir
- - setup_remote_constraints
diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml
index 4cefc6b3..700e7f10 100644
--- a/test/integration/targets/uri/tasks/main.yml
+++ b/test/integration/targets/uri/tasks/main.yml
@@ -228,6 +228,33 @@
headers:
Cookie: "fake=fake_value"
+- name: test unredirected_headers
+ uri:
+ url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
+ user: user
+ password: passwd
+ force_basic_auth: true
+ unredirected_headers:
+ - authorization
+ ignore_errors: true
+ register: unredirected_headers
+
+- name: test omitting unredirected headers
+ uri:
+ url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
+ user: user
+ password: passwd
+ force_basic_auth: true
+ register: redirected_headers
+
+- name: ensure unredirected_headers caused auth to fail
+ assert:
+ that:
+ - unredirected_headers is failed
+ - unredirected_headers.status == 401
+ - redirected_headers is successful
+ - redirected_headers.status == 200
+
- name: test PUT
uri:
url: 'https://{{ httpbin_host }}/put'
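Note: unredirected_headers names headers sent on the initial request but
dropped when following a redirect; listing authorization keeps the Basic
credentials from reaching the post-redirect endpoint, hence the expected 401
above versus the 200 when the option is omitted. By default the header is
resent, as in this sketch:

    - uri:
        url: 'https://{{ httpbin_host }}/redirect-to?status_code=301&url=/basic-auth/user/passwd'
        user: user
        password: passwd
        force_basic_auth: true
        # no unredirected_headers, so the redirect target still sees the auth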
@@ -339,10 +366,25 @@
with_items: "{{ uri_os_packages[ansible_os_family].step2 | default([]) }}"
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+- name: create constraints path
+ set_fact:
+ remote_constraints: "{{ remote_tmp_dir }}/constraints.txt"
+ when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+
+- name: create constraints file
+ copy:
+ content: |
+ cryptography == 2.1.4
+ idna == 2.5
+ pyopenssl == 17.5.0
+ six == 1.13.0
+ urllib3 == 1.23
+ dest: "{{ remote_constraints }}"
+ when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+
- name: install urllib3 and pyopenssl via pip
pip:
name: "{{ item }}"
- state: latest
extra_args: "-c {{ remote_constraints }}"
with_items:
- urllib3
@@ -474,6 +516,16 @@
- multipart.json.form.text_form_field1 == 'value1'
- multipart.json.form.text_form_field2 == 'value2'
+# https://github.com/ansible/ansible/issues/74276 - verifies we don't have a traceback
+- name: multipart/form-data with invalid value
+ uri:
+ url: https://{{ httpbin_host }}/post
+ method: POST
+ body_format: form-multipart
+ body:
+ integer_value: 1
+ register: multipart_invalid
+ failed_when: 'multipart_invalid.msg != "failed to parse body as form-multipart: value must be a string, or mapping, cannot be type int"'
- name: Validate invalid method
uri:
diff --git a/test/integration/targets/user/tasks/main.yml b/test/integration/targets/user/tasks/main.yml
index d3bae056..5e1d2d22 100644
--- a/test/integration/targets/user/tasks/main.yml
+++ b/test/integration/targets/user/tasks/main.yml
@@ -37,3 +37,5 @@
- import_tasks: test_password_lock.yml
- import_tasks: test_password_lock_new_user.yml
- import_tasks: test_local.yml
+- import_tasks: test_umask.yml
+ when: ansible_facts.system == 'Linux'
diff --git a/test/integration/targets/user/tasks/test_umask.yml b/test/integration/targets/user/tasks/test_umask.yml
new file mode 100644
index 00000000..9e162976
--- /dev/null
+++ b/test/integration/targets/user/tasks/test_umask.yml
@@ -0,0 +1,57 @@
+---
+- name: remove comments of /etc/login.defs
+ command: sed -e '/^[ \t]*#/d' /etc/login.defs
+ register: logindefs
+
+- block:
+ - name: Create user with 000 umask
+ user:
+ name: umaskuser_test_1
+ umask: "000"
+ register: umaskuser_test_1
+
+ - name: Create user with 077 umask
+ user:
+ name: umaskuser_test_2
+ umask: "077"
+ register: umaskuser_test_2
+
+ - name: check permissions on created home folder
+ stat:
+ path: "{{ user_home_prefix[ansible_facts.system] }}/umaskuser_test_1"
+ register: umaskuser_test_1_path
+
+ - name: check permissions on created home folder
+ stat:
+ path: "{{ user_home_prefix[ansible_facts.system] }}/umaskuser_test_2"
+ register: umaskuser_test_2_path
+
+ - name: remove created users
+ user:
+ name: "{{ item }}"
+ state: absent
+ register: umaskuser_test_remove
+ loop:
+ - umaskuser_test_1
+ - umaskuser_test_2
+
+ - name: Ensure correct umask has been set on created users
+ assert:
+ that:
+ - umaskuser_test_1_path.stat.mode == "0777"
+ - umaskuser_test_2_path.stat.mode == "0700"
+ - umaskuser_test_remove is changed
+ when: logindefs.stdout_lines is not search("HOME_MODE")
+
+- name: Create user with both umask and local set
+ user:
+ name: umaskuser_test_3
+ umask: "077"
+ local: true
+ register: umaskuser_test_3
+ ignore_errors: true
+
+- name: Ensure the task failed
+ assert:
+ that:
+ - umaskuser_test_3 is failed
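The expected modes follow from how useradd applies the umask when it creates the home directory (assuming /etc/login.defs does not force HOME_MODE, which is exactly what the block's when condition guards against): the directory mode is 0777 masked by the umask. The arithmetic behind the assertions:

def home_mode(umask):
    # useradd creates the home directory with mode 0777 & ~umask.
    return 0o777 & ~umask

assert format(home_mode(0o000), '04o') == '0777'
assert format(home_mode(0o077), '04o') == '0700'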
diff --git a/test/integration/targets/var_blending/aliases b/test/integration/targets/var_blending/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/var_blending/aliases
+++ b/test/integration/targets/var_blending/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/var_precedence/aliases b/test/integration/targets/var_precedence/aliases
index 3005e4b2..498fedd5 100644
--- a/test/integration/targets/var_precedence/aliases
+++ b/test/integration/targets/var_precedence/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/var_reserved/aliases b/test/integration/targets/var_reserved/aliases
index 765b70da..90ea9e12 100644
--- a/test/integration/targets/var_reserved/aliases
+++ b/test/integration/targets/var_reserved/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/var_templating/aliases b/test/integration/targets/var_templating/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/var_templating/aliases
+++ b/test/integration/targets/var_templating/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/vault/runme_change_pip_installed.sh b/test/integration/targets/vault/runme_change_pip_installed.sh
deleted file mode 100755
index 5ab2a8ec..00000000
--- a/test/integration/targets/vault/runme_change_pip_installed.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-
-# start by removing pycrypto and cryptography
-
-pip uninstall -y cryptography
-pip uninstall -y pycrypto
-
-./runme.sh
-
-# now just pycrypto
-pip install --user pycrypto
-
-./runme.sh
-
-
-# now just cryptography
-
-pip uninstall -y pycrypto
-pip install --user cryptography
-
-./runme.sh
-
-# now both
-
-pip install --user pycrypto
-
-./runme.sh
diff --git a/test/integration/targets/want_json_modules_posix/aliases b/test/integration/targets/want_json_modules_posix/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/want_json_modules_posix/aliases
+++ b/test/integration/targets/want_json_modules_posix/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/yaml_parsing/aliases b/test/integration/targets/yaml_parsing/aliases
index b5983214..8278ec8b 100644
--- a/test/integration/targets/yaml_parsing/aliases
+++ b/test/integration/targets/yaml_parsing/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py b/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
new file mode 100644
index 00000000..27f38ce5
--- /dev/null
+++ b/test/integration/targets/yum/filter_plugins/filter_list_of_tuples_by_first_param.py
@@ -0,0 +1,25 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.errors import AnsibleError, AnsibleFilterError
+
+
+def filter_list_of_tuples_by_first_param(lst, search, startswith=False):
+ out = []
+ for element in lst:
+ if startswith:
+ if element[0].startswith(search):
+ out.append(element)
+ else:
+ if search in element[0]:
+ out.append(element)
+ return out
+
+
+class FilterModule(object):
+ ''' filter '''
+
+ def filters(self):
+ return {
+ 'filter_list_of_tuples_by_first_param': filter_list_of_tuples_by_first_param,
+ }
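Called directly, the filter keeps only tuples whose first element contains the search string, or begins with it when startswith=True; the data below is illustrative:

updates = [('multiarch-a.x86_64', '2.0-1'), ('multiarch-a.i686', '2.0-1'), ('sos.noarch', '4.0-1')]

filter_list_of_tuples_by_first_param(updates, 'multiarch', startswith=True)
# -> [('multiarch-a.x86_64', '2.0-1'), ('multiarch-a.i686', '2.0-1')]

filter_list_of_tuples_by_first_param(updates, 'sos')
# -> [('sos.noarch', '4.0-1')]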
diff --git a/test/integration/targets/yum/tasks/cacheonly.yml b/test/integration/targets/yum/tasks/cacheonly.yml
new file mode 100644
index 00000000..03cbd0e9
--- /dev/null
+++ b/test/integration/targets/yum/tasks/cacheonly.yml
@@ -0,0 +1,16 @@
+---
+- name: Test cacheonly (clean before testing)
+ command: yum clean all
+
+- name: Try installing from the cache after it has been cleaned
+ yum:
+ name: sos
+ state: latest
+ cacheonly: true
+ register: yum_result
+ ignore_errors: true
+
+- name: Verify yum failure
+ assert:
+ that:
+ - "yum_result is failed"
diff --git a/test/integration/targets/yum/tasks/main.yml b/test/integration/targets/yum/tasks/main.yml
index 3a7f4cf5..157124a9 100644
--- a/test/integration/targets/yum/tasks/main.yml
+++ b/test/integration/targets/yum/tasks/main.yml
@@ -10,7 +10,7 @@
name:
- sos
state: absent
-
+
- import_tasks: yum.yml
always:
- name: remove installed packages
@@ -69,3 +69,14 @@
- import_tasks: lock.yml
when:
- ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
+
+- import_tasks: multiarch.yml
+ when:
+ - ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux']
+ - ansible_architecture == 'x86_64'
+ # Our output parsing expects us to be on yum, not dnf
+ - ansible_distribution_major_version is version('7', '<=')
+
+- import_tasks: cacheonly.yml
+ when:
+ - ansible_distribution in ['RedHat', 'CentOS', 'ScientificLinux', 'Fedora']
diff --git a/test/integration/targets/yum/tasks/multiarch.yml b/test/integration/targets/yum/tasks/multiarch.yml
new file mode 100644
index 00000000..bced634c
--- /dev/null
+++ b/test/integration/targets/yum/tasks/multiarch.yml
@@ -0,0 +1,154 @@
+- block:
+ - name: Set up test yum repo
+ yum_repository:
+ name: multiarch-test-repo
+ description: ansible-test multiarch test repo
+ baseurl: "{{ multiarch_repo_baseurl }}"
+ gpgcheck: no
+ repo_gpgcheck: no
+
+ - name: Install two out of date packages from the repo
+ yum:
+ name:
+ - multiarch-a-1.0
+ - multiarch-b-1.0
+ register: outdated
+
+ - name: See what we installed
+ command: rpm -q multiarch-a multiarch-b
+ register: rpm_q
+
+ # Here we assume we're running on x86_64 (we limit to this in main.yml).
+ # We avoid comparing against ansible_architecture because we only have test
+ # RPMs for i686 and x86_64, and ansible_architecture could be other things.
+ - name: Assert that we got the right architecture
+ assert:
+ that:
+ - outdated is changed
+ - outdated.changes.installed | length == 2
+ - rpm_q.stdout_lines | length == 2
+ - rpm_q.stdout_lines[0].endswith('x86_64')
+ - rpm_q.stdout_lines[1].endswith('x86_64')
+
+ - name: Install the same versions, but i686 instead
+ yum:
+ name:
+ - multiarch-a-1.0*.i686
+ - multiarch-b-1.0*.i686
+ register: outdated_i686
+
+ - name: See what we installed
+ command: rpm -q multiarch-a multiarch-b
+ register: rpm_q
+
+ - name: Assert that all four are installed
+ assert:
+ that:
+ - outdated_i686 is changed
+ - outdated.changes.installed | length == 2
+ - rpm_q.stdout_lines | length == 4
+
+ - name: Update them all to 2.0
+ yum:
+ name: multiarch-*
+ state: latest
+ update_only: true
+ register: yum_latest
+
+ - name: Assert that all were updated and shown in results
+ assert:
+ that:
+ - yum_latest is changed
+ # This is just testing UI stability. The behavior is arguably not
+ # correct, because multiple packages are being updated. But the
+ # "because of (at least)..." wording kinda locks us in to only
+ # showing one update in this case. :(
+ - yum_latest.changes.updated | length == 1
+
+ - name: Downgrade them so we can upgrade them a different way
+ yum:
+ name:
+ - multiarch-a-1.0*
+ - multiarch-b-1.0*
+ allow_downgrade: true
+ register: downgrade
+
+ - name: See what we installed
+ command: rpm -q multiarch-a multiarch-b --queryformat '%{name}-%{version}.%{arch}\n'
+ register: rpm_q
+
+ - name: Ensure downgrade worked
+ assert:
+ that:
+ - downgrade is changed
+ - rpm_q.stdout_lines | sort == ['multiarch-a-1.0.i686', 'multiarch-a-1.0.x86_64', 'multiarch-b-1.0.i686', 'multiarch-b-1.0.x86_64']
+
+ # This triggers a different branch of logic than the partial wildcard
+ # above, but we're limited to check_mode here since it's '*'.
+ - name: Upgrade with full wildcard
+ yum:
+ name: '*'
+ state: latest
+ update_only: true
+ update_cache: true
+ check_mode: true
+ register: full_wildcard
+
+ # https://github.com/ansible/ansible/issues/73284
+ - name: Ensure we report things correctly (both arches)
+ assert:
+ that:
+ - full_wildcard is changed
+ - full_wildcard.changes.updated | filter_list_of_tuples_by_first_param('multiarch', startswith=True) | length == 4
+
+ - name: Downgrade them so we can upgrade them a different way
+ yum:
+ name:
+ - multiarch-a-1.0*
+ - multiarch-b-1.0*
+ allow_downgrade: true
+ register: downgrade
+
+ - name: Try to install again via virtual provides, should be unchanged
+ yum:
+ name:
+ - virtual-provides-multiarch-a
+ - virtual-provides-multiarch-b
+ state: present
+ register: install_vp
+
+ - name: Ensure the above did not change
+ assert:
+ that:
+ - install_vp is not changed
+
+ - name: Try to upgrade via virtual provides
+ yum:
+ name:
+ - virtual-provides-multiarch-a
+ - virtual-provides-multiarch-b
+ state: latest
+ update_only: true
+ register: upgrade_vp
+
+ - name: Ensure we report things correctly (both arches)
+ assert:
+ that:
+ - upgrade_vp is changed
+ # This is just testing UI stability, like above.
+ # We'll only have one entry in "updated" per name spec, even though
+ # (in this case) two packages (one per arch) are updated for each spec.
+ - upgrade_vp.changes.updated | length == 2
+
+ always:
+ - name: Remove test yum repo
+ yum_repository:
+ name: multiarch-test-repo
+ state: absent
+
+ - name: Remove all test packages installed
+ yum:
+ name:
+ - multiarch-*
+ - virtual-provides-multiarch-*
+ state: absent
diff --git a/test/integration/targets/yum/tasks/repo.yml b/test/integration/targets/yum/tasks/repo.yml
index c1a7a016..f312b1ca 100644
--- a/test/integration/targets/yum/tasks/repo.yml
+++ b/test/integration/targets/yum/tasks/repo.yml
@@ -703,3 +703,27 @@
yum:
name: dinginessentail,dinginessentail-olive,landsidescalping
state: absent
+
+- block:
+ - yum:
+ name: dinginessentail
+ state: present
+
+ - yum:
+ list: dinginessentail*
+ register: list_out
+
+ - set_fact:
+ passed: true
+ loop: "{{ list_out.results }}"
+ when: item.yumstate == 'installed'
+
+ - name: Test that there is yumstate=installed in the result
+ assert:
+ that:
+ - passed is defined
+ always:
+ - name: Clean up
+ yum:
+ name: dinginessentail
+ state: absent
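The set_fact loop above is a membership test in disguise: passed only ever gets defined when at least one entry in the list results reports yumstate == 'installed'. The same check in plain Python (sample data is illustrative):

list_out = {"results": [{"name": "dinginessentail", "yumstate": "available"},
                        {"name": "dinginessentail", "yumstate": "installed"}]}

# Equivalent of the set_fact/when loop: true iff any entry is installed.
passed = any(item.get("yumstate") == "installed" for item in list_out["results"])
assert passed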
diff --git a/test/integration/targets/yum/tasks/yum.yml b/test/integration/targets/yum/tasks/yum.yml
index 7abfea17..e1caa852 100644
--- a/test/integration/targets/yum/tasks/yum.yml
+++ b/test/integration/targets/yum/tasks/yum.yml
@@ -867,3 +867,18 @@
- test-package-that-provides-toaster
- toaster
state: absent
+
+- yum:
+ list: "{{ package1 }}"
+ register: list_out
+
+- name: check that both yum and dnf return envra
+ assert:
+ that:
+ - '"envra" in list_out["results"][0]'
+
+- name: check that dnf returns nevra for backwards compat
+ assert:
+ that:
+ - '"nevra" in list_out["results"][0]'
+ when: ansible_pkg_mgr == 'dnf'
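envra is the epoch-first rendering of a package, "<epoch>:<name>-<version>-<release>.<arch>"; dnf additionally reports the name-first nevra spelling for backwards compatibility. Because rpm names may themselves contain hyphens, such strings are parsed from the right; a sketch with an illustrative value:

def parse_envra(envra):
    # "<epoch>:<name>-<version>-<release>.<arch>"; split from the right
    # since the name portion may contain hyphens.
    epoch, rest = envra.split(":", 1)
    rest, arch = rest.rsplit(".", 1)
    rest, release = rest.rsplit("-", 1)
    name, version = rest.rsplit("-", 1)
    return epoch, name, version, release, arch

assert parse_envra("0:sos-4.1-5.el8.noarch") == ("0", "sos", "4.1", "5.el8", "noarch")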
diff --git a/test/integration/targets/yum/vars/main.yml b/test/integration/targets/yum/vars/main.yml
new file mode 100644
index 00000000..2be15132
--- /dev/null
+++ b/test/integration/targets/yum/vars/main.yml
@@ -0,0 +1 @@
+multiarch_repo_baseurl: https://ansible-ci-files.s3.amazonaws.com/test/integration/targets/yum/multiarch-test-repo/RPMS/
diff --git a/test/integration/targets/yum_repository/tasks/main.yml b/test/integration/targets/yum_repository/tasks/main.yml
index d8195775..90866523 100644
--- a/test/integration/targets/yum_repository/tasks/main.yml
+++ b/test/integration/targets/yum_repository/tasks/main.yml
@@ -106,6 +106,11 @@
module_hotfixes: no
register: test_repo_add1
+ - name: Get repo file contents
+ slurp:
+ path: "{{ '/etc/yum.repos.d/' ~ yum_repository_test_repo.name ~ '2.repo' }}"
+ register: slurp
+
- name: check that options are correctly getting written to the repo file
assert:
that:
@@ -116,8 +121,7 @@
- "'keepalive = 0' in repo_file_contents"
- "'module_hotfixes = 0' in repo_file_contents"
vars:
- repo_file: "{{ '/etc/yum.repos.d/' ~ yum_repository_test_repo.name ~ '2.repo' }}"
- repo_file_contents: "{{ lookup('file', repo_file) }}"
+ repo_file_contents: "{{ slurp.content | b64decode }}"
- name: check new config doesn't change (Idempotent)
yum_repository:
@@ -163,7 +167,7 @@
description: Testing list feature
baseurl:
- "{{ yum_repository_test_repo.baseurl }}"
- - "{{ yum_repository_test_repo.baseurl | replace('download[0-9]?\\.', 'download2\\.', 1) }}"
+ - "{{ yum_repository_test_repo.baseurl ~ 'another_baseurl' }}"
gpgkey:
- gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-{{ ansible_facts.distribution_major_version }}
- gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG2-KEY-EPEL-{{ ansible_facts.distribution_major_version }}
@@ -175,16 +179,20 @@
- ddd
notify: remove listtest repo
+ - name: Get repo file
+ slurp:
+ path: /etc/yum.repos.d/listtest.repo
+ register: slurp
+
- name: Assert that lists were properly inserted
assert:
that:
- - url_hostname in repofile
- - url_hostname2 in repofile
+ - yum_repository_test_repo.baseurl in repofile
+ - another_baseurl in repofile
- "'RPM-GPG-KEY-EPEL' in repofile"
- "'RPM-GPG2-KEY-EPEL' in repofile"
- "'aaa bbb' in repofile"
- "'ccc ddd' in repofile"
vars:
- repofile: "{{ lookup('file', '/etc/yum.repos.d/listtest.repo') }}"
- url_hostname: "{{ yum_repository_test_repo.baseurl | urlsplit('hostname') }}"
- url_hostname2: "{{ url_hostname | replace('download[0-9]?\\.', 'download2\\.', 1) }}"
+ repofile: "{{ slurp.content | b64decode }}"
+ another_baseurl: "{{ yum_repository_test_repo.baseurl ~ 'another_baseurl' }}"
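Both assertion blocks were switched from lookup('file', ...), which reads on the controller, to slurp, which reads the file on the managed host and returns its bytes base64-encoded; the b64decode filter simply reverses that encoding. In Python terms:

import base64

# slurp returns {"content": "<base64 of the remote file>"}; the value is
# constructed here only to make the sketch self-contained.
slurp = {"content": base64.b64encode(b"[listtest]\nbaseurl = https://example.invalid/\n").decode()}
repofile = base64.b64decode(slurp["content"]).decode("utf-8")
assert repofile.startswith("[listtest]")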
diff --git a/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py
deleted file mode 100755
index d12b6334..00000000
--- a/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-# PYTHON_ARGCOMPLETE_OK
-"""Command line entry point for ansible-test."""
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-
-
-def main():
- """Main program entry point."""
- ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- source_root = os.path.join(ansible_root, 'test', 'lib')
-
- if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', 'cli.py')):
- # running from source, use that version of ansible-test instead of any version that may already be installed
- sys.path.insert(0, source_root)
-
- # noinspection PyProtectedMember
- from ansible_test._internal.cli import main as cli_main
-
- cli_main()
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt
index 2fba1a5d..cbb4c362 100644
--- a/test/lib/ansible_test/_data/completion/docker.txt
+++ b/test/lib/ansible_test/_data/completion/docker.txt
@@ -1,12 +1,13 @@
-default name=quay.io/ansible/default-test-container:3.2.2 python=3.6,2.6,2.7,3.5,3.7,3.8,3.9 seccomp=unconfined context=collection
-default name=quay.io/ansible/ansible-core-test-container:3.2.0 python=3.6,2.6,2.7,3.5,3.7,3.8,3.9 seccomp=unconfined context=ansible-core
-alpine3 name=quay.io/ansible/alpine3-test-container:2.0.2 python=3.8
-centos6 name=quay.io/ansible/centos6-test-container:2.0.2 python=2.6 seccomp=unconfined
-centos7 name=quay.io/ansible/centos7-test-container:2.0.2 python=2.7 seccomp=unconfined
-centos8 name=quay.io/ansible/centos8-test-container:2.0.2 python=3.6 seccomp=unconfined
-fedora32 name=quay.io/ansible/fedora32-test-container:2.0.2 python=3.8
-fedora33 name=quay.io/ansible/fedora33-test-container:2.0.2 python=3.9
-opensuse15py2 name=quay.io/ansible/opensuse15py2-test-container:2.0.2 python=2.7
-opensuse15 name=quay.io/ansible/opensuse15-test-container:2.0.2 python=3.6
-ubuntu1804 name=quay.io/ansible/ubuntu1804-test-container:2.0.2 python=3.6 seccomp=unconfined
-ubuntu2004 name=quay.io/ansible/ubuntu2004-test-container:2.0.2 python=3.8 seccomp=unconfined
+base image=quay.io/ansible/base-test-container:1.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined
+default image=quay.io/ansible/default-test-container:4.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection
+default image=quay.io/ansible/ansible-core-test-container:4.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core
+alpine3 image=quay.io/ansible/alpine3-test-container:3.1.0 python=3.9
+centos6 image=quay.io/ansible/centos6-test-container:3.1.0 python=2.6 seccomp=unconfined
+centos7 image=quay.io/ansible/centos7-test-container:3.1.0 python=2.7 seccomp=unconfined
+centos8 image=quay.io/ansible/centos8-test-container:3.1.0 python=3.6 seccomp=unconfined
+fedora33 image=quay.io/ansible/fedora33-test-container:3.1.0 python=3.9
+fedora34 image=quay.io/ansible/fedora34-test-container:3.1.0 python=3.9 seccomp=unconfined
+opensuse15py2 image=quay.io/ansible/opensuse15py2-test-container:3.1.0 python=2.7
+opensuse15 image=quay.io/ansible/opensuse15-test-container:3.1.0 python=3.6
+ubuntu1804 image=quay.io/ansible/ubuntu1804-test-container:3.1.0 python=3.6 seccomp=unconfined
+ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:3.1.0 python=3.8 seccomp=unconfined
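Each line in these completion files is a platform name followed by whitespace-separated key=value pairs; this change renames the name key to image and adds the base container. A parsing sketch (not ansible-test's actual loader):

line = "ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:3.1.0 python=3.8 seccomp=unconfined"
name, *pairs = line.split()
config = dict(pair.split("=", 1) for pair in pairs)
assert name == "ubuntu2004"
assert config["python"] == "3.8"
assert config["image"].startswith("quay.io/ansible/")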
diff --git a/test/lib/ansible_test/_data/completion/network.txt b/test/lib/ansible_test/_data/completion/network.txt
index dca911f8..8c6243e9 100644
--- a/test/lib/ansible_test/_data/completion/network.txt
+++ b/test/lib/ansible_test/_data/completion/network.txt
@@ -1,2 +1,2 @@
-ios/csr1000v collection=cisco.ios connection=ansible.netcommon.network_cli
-vyos/1.1.8 collection=vyos.vyos connection=ansible.netcommon.network_cli
+ios/csr1000v collection=cisco.ios connection=ansible.netcommon.network_cli provider=aws
+vyos/1.1.8 collection=vyos.vyos connection=ansible.netcommon.network_cli provider=aws
diff --git a/test/lib/ansible_test/_data/completion/remote.txt b/test/lib/ansible_test/_data/completion/remote.txt
index a15b0afa..97c9a677 100644
--- a/test/lib/ansible_test/_data/completion/remote.txt
+++ b/test/lib/ansible_test/_data/completion/remote.txt
@@ -1,6 +1,10 @@
-freebsd/11.4 python=2.7,3.7,3.8 python_dir=/usr/local/bin
-freebsd/12.2 python=3.7,2.7,3.8 python_dir=/usr/local/bin
-macos/11.1 python=3.9 python_dir=/usr/local/bin
-rhel/7.9 python=2.7
-rhel/8.3 python=3.6,3.8
-aix/7.2 python=2.7 httptester=disabled temp-unicode=disabled pip-check=disabled
+freebsd/12.2 python=3.7,2.7,3.8 python_dir=/usr/local/bin provider=aws
+freebsd/13.0 python=3.7,2.7,3.8,3.9 python_dir=/usr/local/bin provider=aws
+freebsd python_dir=/usr/local/bin provider=aws
+macos/11.1 python=3.9 python_dir=/usr/local/bin provider=parallels
+macos python_dir=/usr/local/bin provider=parallels
+rhel/7.9 python=2.7 provider=aws
+rhel/8.4 python=3.6,3.8 provider=aws
+rhel provider=aws
+aix/7.2 python=2.7,3.7 python_dir=/opt/freeware/bin provider=ibmps
+aix python_dir=/opt/freeware/bin provider=ibmps
diff --git a/test/lib/ansible_test/_data/completion/windows.txt b/test/lib/ansible_test/_data/completion/windows.txt
index 5704fb94..94868f06 100644
--- a/test/lib/ansible_test/_data/completion/windows.txt
+++ b/test/lib/ansible_test/_data/completion/windows.txt
@@ -1,4 +1,5 @@
-2012
-2012-R2
-2016
-2019 \ No newline at end of file
+windows/2012 provider=aws
+windows/2012-R2 provider=aws
+windows/2016 provider=aws
+windows/2019 provider=aws
+windows/2022 provider=aws
diff --git a/test/lib/ansible_test/_data/cryptography-constraints.txt b/test/lib/ansible_test/_data/cryptography-constraints.txt
deleted file mode 100644
index 8e3e99b4..00000000
--- a/test/lib/ansible_test/_data/cryptography-constraints.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not add a cryptography constraint here, see the get_cryptography_requirement function in executor.py for details
-idna < 2.8 ; python_version < '2.7' # idna 2.8+ requires python 2.7+
-cffi != 1.14.4 # Fails on systems with older gcc. Should be fixed in the next release. https://foss.heptapod.net/pypy/cffi/-/issues/480
diff --git a/test/lib/ansible_test/_data/injector/ansible b/test/lib/ansible_test/_data/injector/ansible
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-config b/test/lib/ansible_test/_data/injector/ansible-config
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-config
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-connection b/test/lib/ansible_test/_data/injector/ansible-connection
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-connection
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-console b/test/lib/ansible_test/_data/injector/ansible-console
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-console
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-doc b/test/lib/ansible_test/_data/injector/ansible-doc
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-doc
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-galaxy b/test/lib/ansible_test/_data/injector/ansible-galaxy
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-galaxy
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-inventory b/test/lib/ansible_test/_data/injector/ansible-inventory
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-inventory
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-playbook b/test/lib/ansible_test/_data/injector/ansible-playbook
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-playbook
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-pull b/test/lib/ansible_test/_data/injector/ansible-pull
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-pull
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-test b/test/lib/ansible_test/_data/injector/ansible-test
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-test
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-vault b/test/lib/ansible_test/_data/injector/ansible-vault
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/ansible-vault
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/importer.py b/test/lib/ansible_test/_data/injector/importer.py
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/importer.py
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/pytest b/test/lib/ansible_test/_data/injector/pytest
deleted file mode 120000
index 6bbbfe4d..00000000
--- a/test/lib/ansible_test/_data/injector/pytest
+++ /dev/null
@@ -1 +0,0 @@
-python.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/virtualenv-isolated.sh b/test/lib/ansible_test/_data/injector/virtualenv-isolated.sh
deleted file mode 100644
index 316d094b..00000000
--- a/test/lib/ansible_test/_data/injector/virtualenv-isolated.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-# Create and activate a fresh virtual environment with `source virtualenv-isolated.sh`.
-
-rm -rf "${OUTPUT_DIR}/venv"
-
-# Try to use 'venv' if it is available, then fallback to 'virtualenv' since some systems provide 'venv' although it is non-functional.
-if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || ! "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m venv "${OUTPUT_DIR}/venv" > /dev/null 2>&1; then
- rm -rf "${OUTPUT_DIR}/venv"
- "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m virtualenv --python "${ANSIBLE_TEST_PYTHON_INTERPRETER}" "${OUTPUT_DIR}/venv"
-fi
-
-set +ux
-source "${OUTPUT_DIR}/venv/bin/activate"
-set -ux
-
-if [[ "${ANSIBLE_TEST_COVERAGE}" ]]; then
- pip install coverage -c ../../../runner/requirements/constraints.txt --disable-pip-version-check
-fi
diff --git a/test/lib/ansible_test/_data/inventory b/test/lib/ansible_test/_data/inventory
deleted file mode 100644
index 1b77a7ea..00000000
--- a/test/lib/ansible_test/_data/inventory
+++ /dev/null
@@ -1,6 +0,0 @@
-# Do not put test specific entries in this inventory file.
-# For script based test targets (using runme.sh) put the inventory file in the test's directory instead.
-
-[testgroup]
-# ansible_python_interpreter must be set to avoid interpreter discovery
-testhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"
diff --git a/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml b/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml
new file mode 100644
index 00000000..6ed86827
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml
@@ -0,0 +1,21 @@
+- name: Setup POSIX code coverage configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Create coverage temporary directory
+ file:
+ path: "{{ common_temp_dir }}"
+ mode: "{{ mode_directory }}"
+ state: directory
+
+ - name: Create coverage configuration file
+ copy:
+ dest: "{{ coverage_config_path }}"
+ content: "{{ coverage_config }}"
+ mode: "{{ mode_file }}"
+
+ - name: Create coverage output directory
+ file:
+ path: "{{ coverage_output_path }}"
+ mode: "{{ mode_directory_write }}"
+ state: directory
diff --git a/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml b/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml
new file mode 100644
index 00000000..290411b6
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml
@@ -0,0 +1,8 @@
+- name: Teardown POSIX code coverage configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove coverage temporary directory
+ file:
+ path: "{{ common_temp_dir }}"
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml b/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml
new file mode 100644
index 00000000..69a07131
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml
@@ -0,0 +1,9 @@
+- name: Prepare POSIX hosts file
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Add container hostname(s) to hosts file
+ blockinfile:
+ path: /etc/hosts
+ block: "{{ '\n'.join(hosts_entries) }}"
+ unsafe_writes: yes
diff --git a/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml b/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml
new file mode 100644
index 00000000..1549ed6b
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml
@@ -0,0 +1,10 @@
+- name: Restore POSIX hosts file
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove container hostname(s) from hosts file
+ blockinfile:
+ path: /etc/hosts
+ block: "{{ '\n'.join(hosts_entries) }}"
+ unsafe_writes: yes
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml b/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml
new file mode 100644
index 00000000..0f9948c7
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml
@@ -0,0 +1,23 @@
+- name: Prepare PyPI proxy configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Make sure the ~/.pip directory exists
+ file:
+ path: ~/.pip
+ state: directory
+ - name: Configure a custom index for pip based installs
+ copy:
+ content: |
+ [global]
+ index-url = {{ pypi_endpoint }}
+ trusted-host = {{ pypi_hostname }}
+ dest: ~/.pip/pip.conf
+ force: "{{ force }}"
+ - name: Configure a custom index for easy_install based installs
+ copy:
+ content: |
+ [easy_install]
+ index_url = {{ pypi_endpoint }}
+ dest: ~/.pydistutils.cfg
+ force: "{{ force }}"
diff --git a/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml b/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml
new file mode 100644
index 00000000..5410fb26
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml
@@ -0,0 +1,12 @@
+- name: Restore PyPI proxy configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove custom index for pip based installs
+ file:
+ path: ~/.pip/pip.conf
+ state: absent
+ - name: Remove custom index for easy_install based installs
+ file:
+ path: ~/.pydistutils.cfg
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml b/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
index 2e5ff9c6..db7976e4 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
@@ -1,14 +1,13 @@
----
-- name: setup global coverage directory for Windows test targets
- hosts: windows
+- name: Setup Windows code coverage configuration
+ hosts: all
gather_facts: no
tasks:
- - name: create temp directory
+ - name: Create coverage temporary directory
ansible.windows.win_file:
path: '{{ remote_temp_path }}'
state: directory
- - name: allow everyone to write to coverage test dir
+ - name: Allow everyone to write to the temporary coverage directory
ansible.windows.win_acl:
path: '{{ remote_temp_path }}'
user: Everyone
@@ -16,4 +15,4 @@
inherit: ContainerInherit, ObjectInherit
propagation: 'None'
type: allow
- state: present \ No newline at end of file
+ state: present
diff --git a/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml b/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
index ab34dc27..f1fa4332 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
@@ -1,15 +1,8 @@
----
-- name: collect the coverage files from the Windows host
- hosts: windows
+- name: Teardown Windows code coverage configuration
+ hosts: all
gather_facts: no
tasks:
- - name: make sure all vars have been set
- assert:
- that:
- - local_temp_path is defined
- - remote_temp_path is defined
-
- - name: zip up all coverage files in the
+ - name: Zip up all coverage files
ansible.windows.win_shell: |
$coverage_dir = '{{ remote_temp_path }}'
$zip_file = Join-Path -Path $coverage_dir -ChildPath 'coverage.zip'
@@ -65,13 +58,13 @@
}
}
- - name: fetch coverage zip file to localhost
+ - name: Fetch coverage zip
fetch:
src: '{{ remote_temp_path }}\coverage.zip'
- dest: '{{ local_temp_path }}/coverage-{{ inventory_hostname }}.zip'
+ dest: '{{ local_temp_path }}/{{ inventory_hostname }}.zip'
flat: yes
- - name: remove the temporary coverage directory
+ - name: Remove temporary coverage directory
ansible.windows.win_file:
path: '{{ remote_temp_path }}'
- state: absent \ No newline at end of file
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1 b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1
new file mode 100644
index 00000000..012af83b
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.ps1
@@ -0,0 +1,34 @@
+<#
+.SYNOPSIS
+Add one or more hosts entries to the Windows hosts file.
+
+.PARAMETER Hosts
+A list of hosts entries, delimited by '|'.
+#>
+
+[CmdletBinding()]
+param(
+ [Parameter(Mandatory=$true, Position=0)][String]$Hosts
+)
+
+$ProgressPreference = "SilentlyContinue"
+$ErrorActionPreference = "Stop"
+
+Write-Verbose -Message "Adding host file entries"
+
+$hosts_entries = $Hosts.Split('|')
+$hosts_file = "$env:SystemRoot\System32\drivers\etc\hosts"
+$hosts_file_lines = [System.IO.File]::ReadAllLines($hosts_file)
+$changed = $false
+
+foreach ($entry in $hosts_entries) {
+ if ($entry -notin $hosts_file_lines) {
+ $hosts_file_lines += $entry
+ $changed = $true
+ }
+}
+
+if ($changed) {
+ Write-Verbose -Message "Host file is missing entries, adding missing entries"
+ [System.IO.File]::WriteAllLines($hosts_file, $hosts_file_lines)
+}
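The script only rewrites the hosts file when at least one entry is missing, keeping the play idempotent. The same add-if-absent logic in outline (a hypothetical helper, not part of ansible-test):

def add_hosts_entries(lines, entries):
    # Append only the entries that are not already present and report
    # whether the file needs to be written back.
    missing = [entry for entry in entries if entry not in lines]
    return lines + missing, bool(missing)

lines, changed = add_hosts_entries(["127.0.0.1 localhost"], ["10.0.0.5 ansible-http"])
assert changed and lines[-1] == "10.0.0.5 ansible-http"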
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml
new file mode 100644
index 00000000..0a23086b
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml
@@ -0,0 +1,7 @@
+- name: Prepare Windows hosts file
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Add container hostname(s) to hosts file
+ script:
+ cmd: "\"{{ playbook_dir }}/windows_hosts_prepare.ps1\" -Hosts \"{{ '|'.join(hosts_entries) }}\""
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1 b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1
new file mode 100644
index 00000000..fdfb9616
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.ps1
@@ -0,0 +1,37 @@
+<#
+.SYNOPSIS
+Remove one or more hosts entries from the Windows hosts file.
+
+.PARAMETER Hosts
+A list of hosts entries, delimited by '|'.
+#>
+
+[CmdletBinding()]
+param(
+ [Parameter(Mandatory=$true, Position=0)][String]$Hosts
+)
+
+$ProgressPreference = "SilentlyContinue"
+$ErrorActionPreference = "Stop"
+
+Write-Verbose -Message "Removing host file entries"
+
+$hosts_entries = $Hosts.Split('|')
+$hosts_file = "$env:SystemRoot\System32\drivers\etc\hosts"
+$hosts_file_lines = [System.IO.File]::ReadAllLines($hosts_file)
+$changed = $false
+
+$new_lines = [System.Collections.ArrayList]@()
+
+foreach ($host_line in $hosts_file_lines) {
+ if ($host_line -in $hosts_entries) {
+ $changed = $true
+ } else {
+ $new_lines += $host_line
+ }
+}
+
+if ($changed) {
+ Write-Verbose -Message "Host file has extra entries, removing extra entries"
+ [System.IO.File]::WriteAllLines($hosts_file, $new_lines)
+}
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml
new file mode 100644
index 00000000..c595d5fb
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml
@@ -0,0 +1,7 @@
+- name: Restore Windows hosts file
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove container hostname(s) from hosts file
+ script:
+ cmd: "\"{{ playbook_dir }}/windows_hosts_restore.ps1\" -Hosts \"{{ '|'.join(hosts_entries) }}\""
diff --git a/test/lib/ansible_test/_data/requirements/ansible-test.txt b/test/lib/ansible_test/_data/requirements/ansible-test.txt
deleted file mode 100644
index 7b596e1b..00000000
--- a/test/lib/ansible_test/_data/requirements/ansible-test.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-argparse ; python_version < '2.7'
-
-# pip 7.1 added support for constraints, which are required by ansible-test to install most python requirements
-# see https://github.com/pypa/pip/blame/e648e00dc0226ade30ade99591b245b0c98e86c9/NEWS.rst#L1258
-pip >= 7.1, < 10 ; python_version < '2.7' # pip 10+ drops support for python 2.6 (sanity_ok)
-pip >= 7.1 ; python_version >= '2.7' # sanity_ok
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt b/test/lib/ansible_test/_data/requirements/ansible.txt
index 40cf83a6..40cf83a6 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible.txt
diff --git a/test/lib/ansible_test/_data/requirements/constraints.txt b/test/lib/ansible_test/_data/requirements/constraints.txt
index a8ae3a93..6b5b064a 100644
--- a/test/lib/ansible_test/_data/requirements/constraints.txt
+++ b/test/lib/ansible_test/_data/requirements/constraints.txt
@@ -1,50 +1,30 @@
+# do not add a cryptography or pyopenssl constraint to this file, they require special handling, see get_cryptography_requirements in python_requirements.py
+# do not add a coverage constraint to this file, it is handled internally by ansible-test
packaging < 21.0 ; python_version < '3.6' # packaging 21.0 requires Python 3.6 or newer
-resolvelib >= 0.5.3, < 0.6.0 # keep in sync with `requirements.txt`
-coverage >= 4.5.1, < 5.0.0 ; python_version < '3.7' # coverage 4.4 required for "disable_warnings" support but 4.5.1 needed for bug fixes, coverage 5.0+ incompatible
-coverage >= 4.5.2, < 5.0.0 ; python_version == '3.7' # coverage 4.5.2 fixes bugs in support for python 3.7, coverage 5.0+ incompatible
-coverage >= 4.5.4, < 5.0.0 ; python_version > '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible
-decorator < 5.0.0 ; python_version < '3.5' # decorator 5.0.5 and later require python 3.5 or later
six < 1.14.0 ; python_version < '2.7' # six 1.14.0 drops support for python 2.6
-cryptography < 2.2 ; python_version < '2.7' # cryptography 2.2 drops support for python 2.6
-# do not add a cryptography constraint here unless it is for python version incompatibility, see the get_cryptography_requirement function in executor.py for details
-deepdiff < 4.0.0 ; python_version < '3' # deepdiff 4.0.0 and later require python 3
jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later
urllib3 < 1.24 ; python_version < '2.7' # urllib3 1.24 and later require python 2.7 or later
pywinrm >= 0.3.0 # message encryption support
-sphinx < 1.6 ; python_version < '2.7' # sphinx 1.6 and later require python 2.7 or later
-sphinx <= 2.1.2 ; python_version >= '2.7' # docs team hasn't tested beyond 2.1.2 yet
-rstcheck >=3.3.1 # required for sphinx version >= 1.8
-pygments >= 2.4.0 # Pygments 2.4.0 includes bugfixes for YAML and YAML+Jinja lexers
wheel < 0.30.0 ; python_version < '2.7' # wheel 0.30.0 and later require python 2.7 or later
-pycrypto >= 2.6 # Need features found in 2.6 and greater
-ncclient >= 0.5.2 # Need features added in 0.5.2 and greater
idna < 2.6, >= 2.5 # linode requires idna < 2.9, >= 2.5, requests requires idna < 2.6, but cryptography will cause the latest version to be installed instead
paramiko < 2.4.0 ; python_version < '2.7' # paramiko 2.4.0 drops support for python 2.6
-pytest < 3.3.0 ; python_version < '2.7' # pytest 3.3.0 drops support for python 2.6
-pytest < 5.0.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
+pytest < 3.3.0, >= 3.1.0 ; python_version < '2.7' # pytest 3.3.0 drops support for python 2.6
+pytest < 5.0.0, >= 4.5.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
+pytest >= 4.5.0 ; python_version > '2.7' # pytest 4.5.0 added support for --strict-markers
pytest-forked < 1.0.2 ; python_version < '2.7' # pytest-forked 1.0.2 and later require python 2.7 or later
pytest-forked >= 1.0.2 ; python_version >= '2.7' # pytest-forked before 1.0.2 does not work with pytest 4.2.0+ (which requires python 2.7+)
ntlm-auth >= 1.3.0 # message encryption support using cryptography
requests < 2.20.0 ; python_version < '2.7' # requests 2.20.0 drops support for python 2.6
requests-ntlm >= 1.1.0 # message encryption support
requests-credssp >= 0.1.0 # message encryption support
-openshift >= 0.6.2, < 0.9.0 # merge_type support
virtualenv < 16.0.0 ; python_version < '2.7' # virtualenv 16.0.0 and later require python 2.7 or later
-pathspec < 0.6.0 ; python_version < '2.7' # pathspec 0.6.0 and later require python 2.7 or later
-pyopenssl < 18.0.0 ; python_version < '2.7' # pyOpenSSL 18.0.0 and later require python 2.7 or later
pyparsing < 3.0.0 ; python_version < '3.5' # pyparsing 3 and later require python 3.5 or later
pyyaml < 5.1 ; python_version < '2.7' # pyyaml 5.1 and later require python 2.7 or later
pycparser < 2.19 ; python_version < '2.7' # pycparser 2.19 and later require python 2.7 or later
mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...)
pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option
xmltodict < 0.12.0 ; python_version < '2.7' # xmltodict 0.12.0 and later require python 2.7 or later
-lxml < 4.3.0 ; python_version < '2.7' # lxml 4.3.0 and later require python 2.7 or later
-pyvmomi < 6.0.0 ; python_version < '2.7' # pyvmomi 6.0.0 and later require python 2.7 or later
-pyone == 1.1.9 # newer versions do not pass current integration tests
-boto3 < 1.11 ; python_version < '2.7' # boto3 1.11 drops Python 2.6 support
-botocore >= 1.10.0, < 1.14 ; python_version < '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca; botocore 1.14 drops Python 2.6 support
-botocore >= 1.10.0 ; python_version >= '2.7' # adds support for the following AWS services: secretsmanager, fms, and acm-pca
setuptools < 37 ; python_version == '2.6' # setuptools 37 and later require python 2.7 or later
setuptools < 45 ; python_version == '2.7' # setuptools 45 and later require python 3.5 or later
-gssapi < 1.6.0 ; python_version <= '2.7' # gssapi 1.6.0 and later require python 3 or later
+pyspnego >= 0.1.6 ; python_version >= '3.10' # bug in older releases breaks on Python 3.10
MarkupSafe < 2.0.0 ; python_version < '3.6' # MarkupSafe >= 2.0.0. requires Python >= 3.6
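The ; python_version < '2.7' suffixes used throughout this file are standard PEP 508 environment markers, evaluated against the interpreter performing the install. They can be checked with the packaging library (assuming it is available):

from packaging.markers import Marker

marker = Marker("python_version < '2.7'")
# Evaluate against an explicit environment rather than the running interpreter.
assert marker.evaluate({"python_version": "2.6"}) is True
assert marker.evaluate({"python_version": "3.8"}) is False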
diff --git a/test/lib/ansible_test/_data/requirements/coverage.txt b/test/lib/ansible_test/_data/requirements/coverage.txt
deleted file mode 100644
index 4ebc8aea..00000000
--- a/test/lib/ansible_test/_data/requirements/coverage.txt
+++ /dev/null
@@ -1 +0,0 @@
-coverage
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt
deleted file mode 100644
index aa2f71cc..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-boto
-boto3
-botocore
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt
deleted file mode 100644
index 6df1a4e8..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt
+++ /dev/null
@@ -1,39 +0,0 @@
-packaging
-requests[security]
-xmltodict
-azure-cli-core==2.0.35
-azure-cli-nspkg==3.0.2
-azure-common==1.1.11
-azure-mgmt-authorization==0.51.1
-azure-mgmt-batch==5.0.1
-azure-mgmt-cdn==3.0.0
-azure-mgmt-compute==10.0.0
-azure-mgmt-containerinstance==1.4.0
-azure-mgmt-containerregistry==2.0.0
-azure-mgmt-containerservice==4.4.0
-azure-mgmt-dns==2.1.0
-azure-mgmt-keyvault==1.1.0
-azure-mgmt-marketplaceordering==0.1.0
-azure-mgmt-monitor==0.5.2
-azure-mgmt-network==4.0.0
-azure-mgmt-nspkg==2.0.0
-azure-mgmt-redis==5.0.0
-azure-mgmt-resource==2.1.0
-azure-mgmt-rdbms==1.4.1
-azure-mgmt-servicebus==0.5.3
-azure-mgmt-sql==0.10.0
-azure-mgmt-storage==3.1.0
-azure-mgmt-trafficmanager==0.50.0
-azure-mgmt-web==0.41.0
-azure-nspkg==2.0.0
-azure-storage==0.35.1
-msrest==0.6.10
-msrestazure==0.6.2
-azure-keyvault==1.0.0a1
-azure-graphrbac==0.40.0
-azure-mgmt-cosmosdb==0.5.2
-azure-mgmt-hdinsight==0.1.0
-azure-mgmt-devtestlabs==3.0.0
-azure-mgmt-loganalytics==0.2.0
-azure-mgmt-automation==0.1.1
-azure-mgmt-iothub==0.7.0
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt
deleted file mode 100644
index f0a89b91..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-cs
-sshpubkeys
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt
deleted file mode 100644
index a6580e69..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt
+++ /dev/null
@@ -1 +0,0 @@
-hcloud>=1.6.0 ; python_version >= '2.7' and python_version < '3.9' # Python 2.6 is not supported (sanity_ok); Only hcloud >= 1.6.0 supports Floating IPs with names; Python 3.9 and later are not supported
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt
deleted file mode 100644
index be611454..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt
+++ /dev/null
@@ -1 +0,0 @@
-infoblox-client
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt
deleted file mode 100644
index acd34668..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyone \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt
deleted file mode 100644
index 269bf090..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt
+++ /dev/null
@@ -1 +0,0 @@
-openshift
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt
deleted file mode 100644
index fd8f1398..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pyvmomi
-git+https://github.com/vmware/vsphere-automation-sdk-python.git ; python_version >= '2.7' # Python 2.6 is not supported
diff --git a/test/lib/ansible_test/_data/requirements/integration.txt b/test/lib/ansible_test/_data/requirements/integration.txt
deleted file mode 100644
index 71d78dbc..00000000
--- a/test/lib/ansible_test/_data/requirements/integration.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cryptography
-jinja2
-junit-xml
-ordereddict ; python_version < '2.7'
-packaging
-pyyaml
-resolvelib
diff --git a/test/lib/ansible_test/_data/requirements/network-integration.txt b/test/lib/ansible_test/_data/requirements/network-integration.txt
deleted file mode 100644
index 726d2943..00000000
--- a/test/lib/ansible_test/_data/requirements/network-integration.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cryptography
-jinja2
-junit-xml
-ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
-packaging
-paramiko
-pyyaml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index c910f106..660620dc 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,3 +1,7 @@
-jinja2 # ansible-core requirement
-pyyaml # ansible-core requirement
-packaging # ansible-doc requirement
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1 # ansible-core requirement
+packaging == 21.0 # ansible-doc requirement
+
+# dependencies
+MarkupSafe == 2.0.1
+pyparsing == 2.4.7
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index a346a8d9..cb9f02f8 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1 +1,9 @@
antsibull-changelog == 0.9.0
+
+# dependencies
+pyyaml == 5.4.1
+docutils == 0.17.1
+packaging == 21.0
+pyparsing == 2.4.7
+rstcheck == 3.3.1
+semantic-version == 2.8.5
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
deleted file mode 100644
index 17e375ce..00000000
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pyyaml # required for the collection loader to parse yaml for plugin routing
-virtualenv ; python_version <= '2.7' # virtualenv required on Python 2.x, but on Python 3.x we can use the built-in venv instead
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index 7044777a..cc530e42 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1 +1 @@
-pyyaml # not frozen due to usage outside sanity tests
+pyyaml == 5.4.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ps1 b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
index 1ea1f8e5..79ee8152 100755..100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ps1
+++ b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
param (
[Switch]
$IsContainer
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index ecdc2197..7332d162 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,12 +1,10 @@
-pylint == 2.6.0
-pyyaml # needed for collection_detail.py
+pylint == 2.9.3
+pyyaml == 5.4.1 # needed for collection_detail.py
# dependencies
-astroid == 2.4.2
-isort == 5.7.0
-lazy-object-proxy == 1.4.3
+astroid == 2.6.6
+isort == 5.9.3
+lazy-object-proxy == 1.6.0
mccabe == 0.6.1
-six # not frozen due to usage outside sanity tests
toml == 0.10.2
-typed-ast == 1.4.2
wrapt == 1.12.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index 1eaef006..1281a045 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,2 +1,2 @@
-pyyaml # not frozen due to usage outside sanity tests
+pyyaml == 5.4.1
voluptuous == 0.12.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index 8288b14b..4b1d5f05 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,3 +1,6 @@
-jinja2 # ansible-core requirement
-pyyaml # needed for collection_detail.py
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1 # needed for collection_detail.py
voluptuous == 0.12.1
+
+# dependencies
+MarkupSafe == 2.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index e0eac4e7..67384863 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,5 +1,5 @@
yamllint == 1.26.0
# dependencies
-pathspec # not frozen since it should not impact test results
-pyyaml # not frozen due to usage outside sanity tests
+pathspec == 0.9.0
+pyyaml == 5.4.1
diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt
index b237a5c3..d723a65f 100644
--- a/test/lib/ansible_test/_data/requirements/units.txt
+++ b/test/lib/ansible_test/_data/requirements/units.txt
@@ -1,8 +1,5 @@
-cryptography
-jinja2
mock
pytest
pytest-mock
pytest-xdist
-pyyaml
-resolvelib
+pyyaml # required by the collection loader (only needed for collections)
diff --git a/test/lib/ansible_test/_data/requirements/windows-integration.txt b/test/lib/ansible_test/_data/requirements/windows-integration.txt
index 86de35ee..b3554dea 100644
--- a/test/lib/ansible_test/_data/requirements/windows-integration.txt
+++ b/test/lib/ansible_test/_data/requirements/windows-integration.txt
@@ -1,11 +1,5 @@
-cryptography
-jinja2
-junit-xml
ntlm-auth
-ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
requests-ntlm
requests-credssp
-packaging
pypsrp
pywinrm[credssp]
-pyyaml
diff --git a/test/lib/ansible_test/_data/sanity/compile/compile.py b/test/lib/ansible_test/_data/sanity/compile/compile.py
deleted file mode 100755
index 61910eee..00000000
--- a/test/lib/ansible_test/_data/sanity/compile/compile.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""Python syntax checker with lint friendly output."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import warnings
-
-with warnings.catch_warnings():
- # The parser module is deprecated as of Python 3.9.
- # This implementation will need to be updated to use another solution.
- # Until then, disable the deprecation warnings to prevent test failures.
- warnings.simplefilter('ignore', DeprecationWarning)
- import parser
-
-import sys
-
-
-def main():
- status = 0
-
- for path in sys.argv[1:] or sys.stdin.read().splitlines():
- with open(path, 'rb') as source_fd:
- if sys.version_info[0] == 3:
- source = source_fd.read().decode('utf-8')
- else:
- source = source_fd.read()
-
- try:
- parser.suite(source)
- except SyntaxError:
- ex = sys.exc_info()[1]
- status = 1
- message = ex.text.splitlines()[0].strip()
- sys.stdout.write("%s:%d:%d: SyntaxError: %s\n" % (path, ex.lineno, ex.offset, message))
- sys.stdout.flush()
-
- sys.exit(status)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/lib/ansible_test/_data/setup/docker.sh b/test/lib/ansible_test/_data/setup/docker.sh
deleted file mode 100644
index ea60e1a6..00000000
--- a/test/lib/ansible_test/_data/setup/docker.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-
-set -eu
-
-# Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04.
-rm -f /usr/sbin/policy-rc.d
-
-# Improve prompts on remote host for interactive use.
-# `cat << EOF > ~/.bashrc` flakes sometimes since /tmp may not be ready yet in
-# the container. So don't do that
-echo "alias ls='ls --color=auto'" > ~/.bashrc
-echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
-echo "cd ~/ansible/" >> ~/.bashrc
diff --git a/test/lib/ansible_test/_data/setup/remote.sh b/test/lib/ansible_test/_data/setup/remote.sh
deleted file mode 100644
index 9348ac6f..00000000
--- a/test/lib/ansible_test/_data/setup/remote.sh
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/bin/sh
-
-set -eu
-
-platform=#{platform}
-platform_version=#{platform_version}
-python_version=#{python_version}
-
-python_interpreter="python${python_version}"
-
-cd ~/
-
-install_pip () {
- if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then
- case "${python_version}" in
- *)
- pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py"
- ;;
- esac
- curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py
- "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet
- rm /tmp/get-pip.py
- fi
-}
-
-if [ "${platform}" = "freebsd" ]; then
- py_version="$(echo "${python_version}" | tr -d '.')"
-
- if [ "${py_version}" = "27" ]; then
- # on Python 2.7 our only option is to use virtualenv
- virtualenv_pkg="py27-virtualenv"
- else
- # on Python 3.x we'll use the built-in venv instead
- virtualenv_pkg=""
- fi
-
- # Declare platform/python version combinations which do not have supporting OS packages available.
- # For these combinations ansible-test will use pip to install the requirements instead.
- case "${platform_version}/${python_version}" in
- "11.4/3.8")
- have_os_packages=""
- ;;
- "12.2/3.8")
- have_os_packages=""
- ;;
- *)
- have_os_packages="yes"
- ;;
- esac
-
- # PyYAML is never installed with an OS package since it does not include libyaml support.
- # Instead, ansible-test will always install it using pip.
- if [ "${have_os_packages}" ]; then
- jinja2_pkg="py${py_version}-Jinja2"
- cryptography_pkg="py${py_version}-cryptography"
- else
- jinja2_pkg=""
- cryptography_pkg=""
- fi
-
- while true; do
- # shellcheck disable=SC2086
- env ASSUME_ALWAYS_YES=YES pkg bootstrap && \
- pkg install -q -y \
- bash \
- curl \
- gtar \
- libyaml \
- "python${py_version}" \
- ${jinja2_pkg} \
- ${cryptography_pkg} \
- ${virtualenv_pkg} \
- sudo \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
-
- if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then
- sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config
- service sshd restart
- fi
-elif [ "${platform}" = "rhel" ]; then
- if grep '8\.' /etc/redhat-release; then
- py_version="$(echo "${python_version}" | tr -d '.')"
-
- if [ "${py_version}" = "36" ]; then
- py_pkg_prefix="python3"
- else
- py_pkg_prefix="python${py_version}"
- fi
-
- while true; do
- yum module install -q -y "python${py_version}" && \
- yum install -q -y \
- gcc \
- "${py_pkg_prefix}-devel" \
- "${py_pkg_prefix}-jinja2" \
- "${py_pkg_prefix}-cryptography" \
- iptables \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
- else
- while true; do
- yum install -q -y \
- gcc \
- python-devel \
- python-virtualenv \
- python2-cryptography \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
- fi
-
- # pin packaging and pyparsing to match the downstream vendored versions
- "${python_interpreter}" -m pip install packaging==20.4 pyparsing==2.4.7 --disable-pip-version-check
-elif [ "${platform}" = "centos" ]; then
- while true; do
- yum install -q -y \
- gcc \
- python-devel \
- python-virtualenv \
- python2-cryptography \
- libffi-devel \
- openssl-devel \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
-elif [ "${platform}" = "osx" ]; then
- while true; do
- pip install --disable-pip-version-check --quiet \
- 'virtualenv==16.7.10' \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-elif [ "${platform}" = "aix" ]; then
- chfs -a size=1G /
- chfs -a size=4G /usr
- chfs -a size=1G /var
- chfs -a size=1G /tmp
- chfs -a size=2G /opt
- while true; do
- yum install -q -y \
- gcc \
- libffi-devel \
- python-jinja2 \
- python-cryptography \
- python-pip && \
- pip install --disable-pip-version-check --quiet \
- 'virtualenv==16.7.10' \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-fi
-
-# Improve prompts on remote host for interactive use.
-# shellcheck disable=SC1117
-cat << EOF > ~/.bashrc
-if ls --color > /dev/null 2>&1; then
- alias ls='ls --color'
-elif ls -G > /dev/null 2>&1; then
- alias ls='ls -G'
-fi
-export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
-EOF
-
-# Make sure ~/ansible/ is the starting directory for interactive shells.
-if [ "${platform}" = "osx" ]; then
- echo "cd ~/ansible/" >> ~/.bashrc
-elif [ "${platform}" = "macos" ] ; then
- echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc
- echo "cd ~/ansible/" >> ~/.bashrc
-fi
diff --git a/test/lib/ansible_test/_data/setup/ssh-keys.sh b/test/lib/ansible_test/_data/setup/ssh-keys.sh
deleted file mode 100644
index 7846f3fe..00000000
--- a/test/lib/ansible_test/_data/setup/ssh-keys.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-# Configure SSH keys.
-
-ssh_public_key=#{ssh_public_key}
-ssh_private_key=#{ssh_private_key}
-ssh_key_type=#{ssh_key_type}
-
-ssh_path="${HOME}/.ssh"
-private_key_path="${ssh_path}/id_${ssh_key_type}"
-
-if [ ! -f "${private_key_path}" ]; then
- # write public/private ssh key pair
- public_key_path="${private_key_path}.pub"
-
- # shellcheck disable=SC2174
- mkdir -m 0700 -p "${ssh_path}"
- touch "${public_key_path}" "${private_key_path}"
- chmod 0600 "${public_key_path}" "${private_key_path}"
- echo "${ssh_public_key}" > "${public_key_path}"
- echo "${ssh_private_key}" > "${private_key_path}"
-
- # add public key to authorized_keys
-    authorized_keys_path="${HOME}/.ssh/authorized_keys"
-
-    # the existing file is overwritten to avoid conflicts (e.g. RHEL on EC2 blocks root login)
-    cat "${public_key_path}" > "${authorized_keys_path}"
-    chmod 0600 "${authorized_keys_path}"
-
- # add localhost's server keys to known_hosts
- known_hosts_path="${HOME}/.ssh/known_hosts"
-
- for key in /etc/ssh/ssh_host_*_key.pub; do
- echo "localhost $(cat "${key}")" >> "${known_hosts_path}"
- done
-fi
diff --git a/test/lib/ansible_test/_data/setup/windows-httptester.ps1 b/test/lib/ansible_test/_data/setup/windows-httptester.ps1
deleted file mode 100644
index 46b2f129..00000000
--- a/test/lib/ansible_test/_data/setup/windows-httptester.ps1
+++ /dev/null
@@ -1,229 +0,0 @@
-<#
-.SYNOPSIS
-Designed to set a Windows host to connect to the httptester container running
-on the Ansible host. This will set up the Windows hosts file and forward the
-local ports to use this connection. This will continue to run in the background
-until the script is deleted.
-
-Run this over SSH with -R arguments to forward ports 8080, 8443 and 8444 to the
-httptester container.
-
-.PARAMETER Hosts
-A list of hostnames, delimited by '|', to add to the Windows hosts file for the
-httptester container, e.g. 'ansible.host.com|secondary.host.test'.
-#>
-[CmdletBinding()]
-param(
- [Parameter(Mandatory=$true, Position=0)][String]$Hosts
-)
-$Hosts = $Hosts.Split('|')
-
-$ProgressPreference = "SilentlyContinue"
-$ErrorActionPreference = "Stop"
-$os_version = [Version](Get-Item -Path "$env:SystemRoot\System32\kernel32.dll").VersionInfo.ProductVersion
-Write-Verbose -Message "Configuring HTTP Tester on Windows $os_version for '$($Hosts -join "', '")'"
-
-Function Get-PmapperRuleBytes {
- <#
- .SYNOPSIS
- Create the byte values that configures a rule in the PMapper configuration
-    file. This isn't really documented, but because PMapper is only used for
-    Server 2008 R2 we will stick to one version and just live with the legacy
-    work for now.
-
- .PARAMETER ListenPort
-    The port to listen on localhost; traffic will be forwarded to the host defined
- by ConnectAddress and ConnectPort.
-
- .PARAMETER ConnectAddress
- The hostname or IP to map the traffic to.
-
- .PARAMETER ConnectPort
-    The port on ConnectAddress to map the traffic to.
- #>
- param(
- [Parameter(Mandatory=$true)][UInt16]$ListenPort,
- [Parameter(Mandatory=$true)][String]$ConnectAddress,
- [Parameter(Mandatory=$true)][Int]$ConnectPort
- )
-
- $connect_field = "$($ConnectAddress):$ConnectPort"
- $connect_bytes = [System.Text.Encoding]::ASCII.GetBytes($connect_field)
- $data_length = [byte]($connect_bytes.Length + 6) # size of payload minus header, length, and footer
- $port_bytes = [System.BitConverter]::GetBytes($ListenPort)
-
- $payload = [System.Collections.Generic.List`1[Byte]]@()
- $payload.Add([byte]16) > $null # header is \x10, means Configure Mapping rule
- $payload.Add($data_length) > $null
- $payload.AddRange($connect_bytes)
- $payload.AddRange($port_bytes)
- $payload.AddRange([byte[]]@(0, 0)) # 2 extra bytes of padding
- $payload.Add([byte]0) > $null # 0 is TCP, 1 is UDP
- $payload.Add([byte]0) > $null # 0 is Any, 1 is Internet
- $payload.Add([byte]31) > $null # footer is \x1f, means end of Configure Mapping rule
-
- return ,$payload.ToArray()
-}
-
-Write-Verbose -Message "Adding host file entries"
-$hosts_file = "$env:SystemRoot\System32\drivers\etc\hosts"
-$hosts_file_lines = [System.IO.File]::ReadAllLines($hosts_file)
-$changed = $false
-foreach ($httptester_host in $Hosts) {
- $host_line = "127.0.0.1 $httptester_host # ansible-test httptester"
- if ($host_line -notin $hosts_file_lines) {
- $hosts_file_lines += $host_line
- $changed = $true
- }
-}
-if ($changed) {
- Write-Verbose -Message "Host file is missing entries, adding missing entries"
- [System.IO.File]::WriteAllLines($hosts_file, $hosts_file_lines)
-}
-
-# forward ports
-$forwarded_ports = @{
- 80 = 8080
- 443 = 8443
- 444 = 8444
-}
-if ($os_version -ge [Version]"6.2") {
- Write-Verbose -Message "Using netsh to configure forwarded ports"
- foreach ($forwarded_port in $forwarded_ports.GetEnumerator()) {
- $port_set = netsh interface portproxy show v4tov4 | `
- Where-Object { $_ -match "127.0.0.1\s*$($forwarded_port.Key)\s*127.0.0.1\s*$($forwarded_port.Value)" }
-
- if (-not $port_set) {
- Write-Verbose -Message "Adding netsh portproxy rule for $($forwarded_port.Key) -> $($forwarded_port.Value)"
- $add_args = @(
- "interface",
- "portproxy",
- "add",
- "v4tov4",
- "listenaddress=127.0.0.1",
- "listenport=$($forwarded_port.Key)",
- "connectaddress=127.0.0.1",
- "connectport=$($forwarded_port.Value)"
- )
- $null = netsh $add_args 2>&1
- }
- }
-} else {
- Write-Verbose -Message "Using Port Mapper to configure forwarded ports"
- # netsh interface portproxy doesn't work on local addresses in older
-    # versions of Windows. Use the custom application Port Mapper to achieve the
-    # same outcome.
- # http://www.analogx.com/contents/download/Network/pmapper/Freeware.htm
- $s3_url = "https://ansible-ci-files.s3.amazonaws.com/ansible-test/pmapper-1.04.exe"
-
- # download the Port Mapper executable to a temporary directory
- $pmapper_folder = Join-Path -Path ([System.IO.Path]::GetTempPath()) -ChildPath ([System.IO.Path]::GetRandomFileName())
- $pmapper_exe = Join-Path -Path $pmapper_folder -ChildPath pmapper.exe
- $pmapper_config = Join-Path -Path $pmapper_folder -ChildPath pmapper.dat
- New-Item -Path $pmapper_folder -ItemType Directory > $null
-
- $stop = $false
- do {
- try {
- Write-Verbose -Message "Attempting download of '$s3_url'"
- (New-Object -TypeName System.Net.WebClient).DownloadFile($s3_url, $pmapper_exe)
- $stop = $true
- } catch { Start-Sleep -Second 5 }
- } until ($stop)
-
- # create the Port Mapper rule file that contains our forwarded ports
- $fs = [System.IO.File]::Create($pmapper_config)
- try {
- foreach ($forwarded_port in $forwarded_ports.GetEnumerator()) {
- Write-Verbose -Message "Creating forwarded port rule for $($forwarded_port.Key) -> $($forwarded_port.Value)"
- $pmapper_rule = Get-PmapperRuleBytes -ListenPort $forwarded_port.Key -ConnectAddress 127.0.0.1 -ConnectPort $forwarded_port.Value
- $fs.Write($pmapper_rule, 0, $pmapper_rule.Length)
- }
- } finally {
- $fs.Close()
- }
-
- Write-Verbose -Message "Starting Port Mapper '$pmapper_exe' in the background"
- $start_args = @{
- CommandLine = $pmapper_exe
- CurrentDirectory = $pmapper_folder
- }
- $res = Invoke-CimMethod -ClassName Win32_Process -MethodName Create -Arguments $start_args
- if ($res.ReturnValue -ne 0) {
- $error_msg = switch($res.ReturnValue) {
- 2 { "Access denied" }
- 3 { "Insufficient privilege" }
- 8 { "Unknown failure" }
- 9 { "Path not found" }
- 21 { "Invalid parameter" }
- default { "Undefined Error: $($res.ReturnValue)" }
- }
- Write-Error -Message "Failed to start pmapper: $error_msg"
- }
- $pmapper_pid = $res.ProcessId
- Write-Verbose -Message "Port Mapper PID: $pmapper_pid"
-}
-
-Write-Verbose -Message "Wait for current script at '$PSCommandPath' to be deleted before running cleanup"
-$fsw = New-Object -TypeName System.IO.FileSystemWatcher
-$fsw.Path = Split-Path -Path $PSCommandPath -Parent
-$fsw.Filter = Split-Path -Path $PSCommandPath -Leaf
-$fsw.WaitForChanged([System.IO.WatcherChangeTypes]::Deleted, 3600000) > $null
-Write-Verbose -Message "Script delete or timeout reached, cleaning up Windows httptester artifacts"
-
-Write-Verbose -Message "Cleanup host file entries"
-$hosts_file_lines = [System.IO.File]::ReadAllLines($hosts_file)
-$new_lines = [System.Collections.ArrayList]@()
-$changed = $false
-foreach ($host_line in $hosts_file_lines) {
- if ($host_line.EndsWith("# ansible-test httptester")) {
- $changed = $true
- continue
- }
- $new_lines.Add($host_line) > $null
-}
-if ($changed) {
- Write-Verbose -Message "Host file has extra entries, removing extra entries"
- [System.IO.File]::WriteAllLines($hosts_file, $new_lines)
-}
-
-if ($os_version -ge [Version]"6.2") {
- Write-Verbose -Message "Cleanup of forwarded port configured in netsh"
- foreach ($forwarded_port in $forwarded_ports.GetEnumerator()) {
- $port_set = netsh interface portproxy show v4tov4 | `
- Where-Object { $_ -match "127.0.0.1\s*$($forwarded_port.Key)\s*127.0.0.1\s*$($forwarded_port.Value)" }
-
- if ($port_set) {
- Write-Verbose -Message "Removing netsh portproxy rule for $($forwarded_port.Key) -> $($forwarded_port.Value)"
- $delete_args = @(
- "interface",
- "portproxy",
- "delete",
- "v4tov4",
- "listenaddress=127.0.0.1",
- "listenport=$($forwarded_port.Key)"
- )
- $null = netsh $delete_args 2>&1
- }
- }
-} else {
- Write-Verbose -Message "Stopping Port Mapper executable based on pid $pmapper_pid"
- Stop-Process -Id $pmapper_pid -Force
-
- # the process may not stop straight away, try multiple times to delete the Port Mapper folder
- $attempts = 1
- do {
- try {
- Write-Verbose -Message "Cleanup temporary files for Port Mapper at '$pmapper_folder' - Attempt: $attempts"
- Remove-Item -Path $pmapper_folder -Force -Recurse
- break
- } catch {
- Write-Verbose -Message "Cleanup temporary files for Port Mapper failed, waiting 5 seconds before trying again:$($_ | Out-String)"
- if ($attempts -ge 5) {
- break
- }
- $attempts += 1
- Start-Sleep -Second 5
- }
- } until ($true)
-}
diff --git a/test/lib/ansible_test/_data/versions.py b/test/lib/ansible_test/_data/versions.py
deleted file mode 100755
index 4babef01..00000000
--- a/test/lib/ansible_test/_data/versions.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-"""Show python and pip versions."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-import warnings
-
-warnings.simplefilter('ignore') # avoid python version deprecation warnings when using newer pip dependencies
-
-try:
- import pip
-except ImportError:
- pip = None
-
-print(sys.version)
-
-if pip:
- print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
diff --git a/test/lib/ansible_test/_internal/__init__.py b/test/lib/ansible_test/_internal/__init__.py
index 35f04422..e604a2b3 100644
--- a/test/lib/ansible_test/_internal/__init__.py
+++ b/test/lib/ansible_test/_internal/__init__.py
@@ -1,3 +1,99 @@
-"""Support code for Ansible testing infrastructure."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+"""Test runner for all Ansible tests."""
+from __future__ import annotations
+
+import os
+import sys
+
+# This import should occur as early as possible.
+# It must occur before subprocess has been imported anywhere in the current process.
+from .init import (
+ CURRENT_RLIMIT_NOFILE,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+ MAXFD,
+)
+
+from .delegation import (
+ delegate,
+)
+
+from .executor import (
+ ApplicationWarning,
+ Delegate,
+ ListTargets,
+)
+
+from .timeout import (
+ configure_timeout,
+)
+
+from .data import (
+ data_context,
+)
+
+from .util_common import (
+ CommonConfig,
+)
+
+from .cli import (
+ parse_args,
+)
+
+from .provisioning import (
+ PrimeContainers,
+)
+
+
+def main():
+ """Main program function."""
+ try:
+ os.chdir(data_context().content.root)
+ args = parse_args()
+ config = args.config(args) # type: CommonConfig
+ display.verbosity = config.verbosity
+ display.truncate = config.truncate
+ display.redact = config.redact
+ display.color = config.color
+ display.info_stderr = config.info_stderr
+ configure_timeout(config)
+
+ display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
+ display.info('MAXFD: %d' % MAXFD, verbosity=2)
+
+ delegate_args = None
+ target_names = None
+
+ try:
+ args.func(config)
+ except PrimeContainers:
+ pass
+ except ListTargets as ex:
+ # save target_names for use once we exit the exception handler
+ target_names = ex.target_names
+ except Delegate as ex:
+ # save delegation args for use once we exit the exception handler
+ delegate_args = (ex.host_state, ex.exclude, ex.require)
+
+ if delegate_args:
+ # noinspection PyTypeChecker
+ delegate(config, *delegate_args)
+
+ if target_names:
+ for target_name in target_names:
+ print(target_name) # info goes to stderr, this should be on stdout
+
+ display.review_warnings()
+ config.success = True
+ except ApplicationWarning as ex:
+ display.warning(u'%s' % ex)
+ sys.exit(0)
+ except ApplicationError as ex:
+ display.error(u'%s' % ex)
+ sys.exit(1)
+ except KeyboardInterrupt:
+ sys.exit(2)
+ except BrokenPipeError:
+ sys.exit(3)
diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py
index 339eff69..5c689bed 100644
--- a/test/lib/ansible_test/_internal/ansible_util.py
+++ b/test/lib/ansible_test/_internal/ansible_util.py
@@ -1,11 +1,9 @@
"""Miscellaneous utility functions and classes specific to ansible cli tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
-
-from . import types as t
+import typing as t
from .constants import (
SOFT_RLIMIT_NOFILE,
@@ -17,13 +15,12 @@ from .io import (
from .util import (
common_environment,
- display,
- find_python,
ApplicationError,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_BIN_PATH,
ANSIBLE_SOURCE_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
get_ansible_version,
)
@@ -31,6 +28,8 @@ from .util_common import (
create_temp_dir,
run_command,
ResultType,
+ intercept_python,
+ get_injector_path,
)
from .config import (
@@ -44,16 +43,34 @@ from .data import (
data_context,
)
-CHECK_YAML_VERSIONS = {}
+from .python_requirements import (
+ install_requirements,
+)
+from .host_configs import (
+ PythonConfig,
+)
-def ansible_environment(args, color=True, ansible_config=None):
- """
- :type args: CommonConfig
- :type color: bool
- :type ansible_config: str | None
- :rtype: dict[str, str]
- """
+
+def parse_inventory(args, inventory_path): # type: (EnvironmentConfig, str) -> t.Dict[str, t.Any]
+ """Return a dict parsed from the given inventory file."""
+ cmd = ['ansible-inventory', '-i', inventory_path, '--list']
+ env = ansible_environment(args)
+ inventory = json.loads(intercept_python(args, args.controller_python, cmd, env, capture=True, always=True)[0])
+ return inventory
+
+
+def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
+ """Return a dict of hosts from the specified group in the given inventory."""
+ hostvars = inventory.get('_meta', {}).get('hostvars', {})
+ group = inventory.get(group_name, {})
+ host_names = group.get('hosts', [])
+ hosts = dict((name, hostvars.get(name, {})) for name in host_names)
+ return hosts
+
+
+def ansible_environment(args, color=True, ansible_config=None): # type: (CommonConfig, bool, t.Optional[str]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables to use when running Ansible commands."""
env = common_environment()
path = env['PATH']
@@ -79,7 +96,7 @@ def ansible_environment(args, color=True, ansible_config=None):
ANSIBLE_CONFIG=ansible_config,
ANSIBLE_LIBRARY='/dev/null',
ANSIBLE_DEVEL_WARNING='false', # Don't show warnings that CI is running devel
- ANSIBLE_CONTROLLER_PYTHON_WARNING='false', # Don't show warnings in CI for old controller Python
+ ANSIBLE_JINJA2_NATIVE_WARNING='false', # Don't show warnings in CI for old Jinja for native
PYTHONPATH=get_ansible_python_path(args),
PAGER='/bin/cat',
PATH=path,
@@ -95,7 +112,7 @@ def ansible_environment(args, color=True, ansible_config=None):
# ansible-connection only requires the injector for code coverage
# the correct python interpreter is already selected using the sys.executable used to invoke ansible
ansible.update(dict(
- ANSIBLE_CONNECTION_PATH=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector', 'ansible-connection'),
+ ANSIBLE_CONNECTION_PATH=os.path.join(get_injector_path(), 'ansible-connection'),
))
if isinstance(args, PosixIntegrationConfig):
@@ -229,41 +246,6 @@ License: GPLv3+
write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True)
-def check_pyyaml(args, version, required=True, quiet=False):
- """
- :type args: EnvironmentConfig
- :type version: str
- :type required: bool
- :type quiet: bool
- """
- try:
- return CHECK_YAML_VERSIONS[version]
- except KeyError:
- pass
-
- python = find_python(version)
- stdout, _dummy = run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'yamlcheck.py')],
- capture=True, always=True)
-
- result = json.loads(stdout)
-
- yaml = result['yaml']
- cloader = result['cloader']
-
- if yaml or required:
- # results are cached only if pyyaml is required or present
- # it is assumed that tests will not uninstall/re-install pyyaml -- if they do, those changes will go undetected
- CHECK_YAML_VERSIONS[version] = result
-
- if not quiet:
- if not yaml and required:
- display.warning('PyYAML is not installed for interpreter: %s' % python)
- elif not cloader:
- display.warning('PyYAML will be slow due to installation without libyaml support for interpreter: %s' % python)
-
- return result
-
-
class CollectionDetail:
"""Collection detail."""
def __init__(self): # type: () -> None
@@ -273,16 +255,16 @@ class CollectionDetail:
class CollectionDetailError(ApplicationError):
"""An error occurred retrieving collection detail."""
def __init__(self, reason): # type: (str) -> None
- super(CollectionDetailError, self).__init__('Error collecting collection detail: %s' % reason)
+ super().__init__('Error collecting collection detail: %s' % reason)
self.reason = reason
-def get_collection_detail(args, python): # type: (EnvironmentConfig, str) -> CollectionDetail
+def get_collection_detail(args, python): # type: (EnvironmentConfig, PythonConfig) -> CollectionDetail
"""Return collection detail."""
collection = data_context().content.collection
directory = os.path.join(collection.root, collection.directory)
- stdout = run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0]
+ stdout = run_command(args, [python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0]
result = json.loads(stdout)
error = result.get('error')
@@ -295,3 +277,25 @@ def get_collection_detail(args, python): # type: (EnvironmentConfig, str) -> Co
detail.version = str(version) if version is not None else None
return detail
+
+
+def run_playbook(
+ args, # type: EnvironmentConfig
+ inventory_path, # type: str
+ playbook, # type: str
+ run_playbook_vars=None, # type: t.Optional[t.Dict[str, t.Any]]
+ capture=False, # type: bool
+): # type: (...) -> None
+ """Run the specified playbook using the given inventory file and playbook variables."""
+ playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
+ cmd = ['ansible-playbook', '-i', inventory_path, playbook_path]
+
+ if run_playbook_vars:
+ cmd.extend(['-e', json.dumps(run_playbook_vars)])
+
+ if args.verbosity:
+ cmd.append('-%s' % ('v' * args.verbosity))
+
+ install_requirements(args, args.controller_python, ansible=True) # run_playbook()
+ env = ansible_environment(args)
+ intercept_python(args, args.controller_python, cmd, env, capture=capture)
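
A note on the inventory helpers added above: get_hosts() assumes the JSON layout emitted by `ansible-inventory --list`, where host variables live under `_meta.hostvars` and each group lists member names under `hosts`. A minimal standalone sketch of that lookup, using a made-up inventory (the real helper receives the dict produced by parse_inventory()):

    import typing as t

    def get_hosts(inventory, group_name):  # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
        """Mirror of get_hosts() above: map each host in the group to its hostvars."""
        hostvars = inventory.get('_meta', {}).get('hostvars', {})
        host_names = inventory.get(group_name, {}).get('hosts', [])
        return dict((name, hostvars.get(name, {})) for name in host_names)

    # Hypothetical inventory in the `ansible-inventory --list` layout.
    inventory = {
        '_meta': {'hostvars': {'web1': {'ansible_host': '10.0.0.5'}}},
        'webservers': {'hosts': ['web1']},
    }

    assert get_hosts(inventory, 'webservers') == {'web1': {'ansible_host': '10.0.0.5'}}
    assert get_hosts(inventory, 'dbservers') == {}  # missing groups resolve to no hosts
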
diff --git a/test/lib/ansible_test/_internal/become.py b/test/lib/ansible_test/_internal/become.py
new file mode 100644
index 00000000..dc0a208a
--- /dev/null
+++ b/test/lib/ansible_test/_internal/become.py
@@ -0,0 +1,52 @@
+"""Become abstraction for interacting with test hosts."""
+from __future__ import annotations
+
+import abc
+import shlex
+import typing as t
+
+
+class Become(metaclass=abc.ABCMeta):
+ """Base class for become implementations."""
+ @property
+ @abc.abstractmethod
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+
+ @abc.abstractmethod
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+
+
+class Su(Become):
+ """Become using 'su'."""
+ @property
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+ return 'su'
+
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+ become = ['su', '-l', 'root']
+
+ if command:
+ become.extend(['-c', ' '.join(shlex.quote(c) for c in command)])
+
+ return become
+
+
+class Sudo(Become):
+ """Become using 'sudo'."""
+ @property
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+ return 'sudo'
+
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+ become = ['sudo', '-in']
+
+ if command:
+ become.extend(['sh', '-c', ' '.join(shlex.quote(c) for c in command)])
+
+ return become
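
To make the quoting behavior above concrete, Sudo.prepare_command() can be exercised in isolation. This is a standalone copy of the method body, not an import of ansible-test internals:

    import shlex
    import typing as t

    def sudo_prepare_command(command):  # type: (t.List[str]) -> t.List[str]
        """Mirror of Sudo.prepare_command() above."""
        become = ['sudo', '-in']
        if command:
            become.extend(['sh', '-c', ' '.join(shlex.quote(c) for c in command)])
        return become

    # An argument containing whitespace survives as a single shell word.
    print(sudo_prepare_command(['echo', 'hello world']))
    # ['sudo', '-in', 'sh', '-c', "echo 'hello world'"]

    # With no command, only the escalation prefix remains, giving a root login shell.
    print(sudo_prepare_command([]))
    # ['sudo', '-in']
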
diff --git a/test/lib/ansible_test/_internal/bootstrap.py b/test/lib/ansible_test/_internal/bootstrap.py
new file mode 100644
index 00000000..9eb26de7
--- /dev/null
+++ b/test/lib/ansible_test/_internal/bootstrap.py
@@ -0,0 +1,95 @@
+"""Bootstrapping for test hosts."""
+from __future__ import annotations
+
+import dataclasses
+import os
+import typing as t
+
+from .io import (
+ read_text_file,
+)
+
+from .util import (
+ ANSIBLE_TEST_TARGET_ROOT,
+)
+
+from .util_common import (
+ ShellScriptTemplate,
+ set_shebang,
+)
+
+from .core_ci import (
+ SshKey,
+)
+
+
+@dataclasses.dataclass
+class Bootstrap:
+ """Base class for bootstrapping systems."""
+ controller: bool
+ python_versions: t.List[str]
+ ssh_key: SshKey
+
+ @property
+ def bootstrap_type(self): # type: () -> str
+ """The bootstrap type to pass to the bootstrapping script."""
+ return self.__class__.__name__.replace('Bootstrap', '').lower()
+
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ return dict(
+ bootstrap_type=self.bootstrap_type,
+ controller='yes' if self.controller else '',
+ python_versions=self.python_versions,
+ ssh_key_type=self.ssh_key.KEY_TYPE,
+ ssh_private_key=self.ssh_key.key_contents,
+ ssh_public_key=self.ssh_key.pub_contents,
+ )
+
+ def get_script(self): # type: () -> str
+ """Return a shell script to bootstrap the specified host."""
+ path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'bootstrap.sh')
+
+ content = read_text_file(path)
+ content = set_shebang(content, '/bin/sh')
+
+ template = ShellScriptTemplate(content)
+
+ variables = self.get_variables()
+
+ script = template.substitute(**variables)
+
+ return script
+
+
+@dataclasses.dataclass
+class BootstrapDocker(Bootstrap):
+ """Bootstrap docker instances."""
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ variables = super().get_variables()
+
+ variables.update(
+ platform='',
+ platform_version='',
+ )
+
+ return variables
+
+
+@dataclasses.dataclass
+class BootstrapRemote(Bootstrap):
+ """Bootstrap remote instances."""
+ platform: str
+ platform_version: str
+
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ variables = super().get_variables()
+
+ variables.update(
+ platform=self.platform,
+ platform_version=self.platform_version,
+ )
+
+ return variables
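
The variable plumbing above is plain dataclass inheritance: each subclass adds fields and extends get_variables() through super(). A reduced standalone sketch of the pattern with a simplified field set (the real classes also carry python_versions and an SshKey):

    import dataclasses
    import typing as t

    @dataclasses.dataclass
    class Bootstrap:
        controller: bool

        @property
        def bootstrap_type(self):  # type: () -> str
            # 'BootstrapRemote' -> 'remote', matching the naming convention above
            return self.__class__.__name__.replace('Bootstrap', '').lower()

        def get_variables(self):  # type: () -> t.Dict[str, t.Any]
            return dict(bootstrap_type=self.bootstrap_type, controller='yes' if self.controller else '')

    @dataclasses.dataclass
    class BootstrapRemote(Bootstrap):
        platform: str
        platform_version: str

        def get_variables(self):  # type: () -> t.Dict[str, t.Any]
            variables = super().get_variables()
            variables.update(platform=self.platform, platform_version=self.platform_version)
            return variables

    print(BootstrapRemote(controller=True, platform='freebsd', platform_version='13.0').get_variables())
    # {'bootstrap_type': 'remote', 'controller': 'yes', 'platform': 'freebsd', 'platform_version': '13.0'}
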
diff --git a/test/lib/ansible_test/_internal/cache.py b/test/lib/ansible_test/_internal/cache.py
index 85fdbb1f..50a6f5e5 100644
--- a/test/lib/ansible_test/_internal/cache.py
+++ b/test/lib/ansible_test/_internal/cache.py
@@ -1,34 +1,29 @@
"""Cache for commonly shared data that is intended to be immutable."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import typing as t
+
+from .config import (
+ CommonConfig,
+)
+
+TValue = t.TypeVar('TValue')
class CommonCache:
"""Common cache."""
- def __init__(self, args):
- """
- :param args: CommonConfig
- """
+ def __init__(self, args): # type: (CommonConfig) -> None
self.args = args
- def get(self, key, factory):
- """
- :param key: str
- :param factory: () -> any
- :rtype: any
- """
+ def get(self, key, factory): # type: (str, t.Callable[[], TValue]) -> TValue
+ """Return the value from the cache identified by the given key, using the specified factory method if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory()
return self.args.cache[key]
- def get_with_args(self, key, factory):
- """
- :param key: str
- :param factory: (CommonConfig) -> any
- :rtype: any
- """
-
+ def get_with_args(self, key, factory): # type: (str, t.Callable[[CommonConfig], TValue]) -> TValue
+ """Return the value from the cache identified by the given key, using the specified factory method (which accepts args) if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory(self.args)
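
In use, CommonCache memoizes factory results on the shared config object, so any component holding the same args sees the same values. A self-contained sketch with a stand-in for CommonConfig (the only contract relied on here is a `cache` dict attribute):

    import types

    # Stand-in for CommonConfig: CommonCache only needs args.cache to be a dict.
    args = types.SimpleNamespace(cache={})

    class CommonCache:
        def __init__(self, args):
            self.args = args

        def get(self, key, factory):
            if key not in self.args.cache:
                self.args.cache[key] = factory()
            return self.args.cache[key]

    calls = []

    def expensive():
        calls.append(1)
        return 'value'

    cache = CommonCache(args)
    assert cache.get('key', expensive) == 'value'
    assert cache.get('key', expensive) == 'value'
    assert len(calls) == 1  # the factory ran once; the second lookup hit args.cache
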
diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py
index 18a09793..db5ca501 100644
--- a/test/lib/ansible_test/_internal/ci/__init__.py
+++ b/test/lib/ansible_test/_internal/ci/__init__.py
@@ -1,15 +1,12 @@
"""Support code for CI environments."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import base64
import json
import os
import tempfile
-
-
-from .. import types as t
+import typing as t
from ..encoding import (
to_bytes,
@@ -27,12 +24,12 @@ from ..config import (
)
from ..util import (
- ABC,
ApplicationError,
display,
get_subclasses,
import_plugins,
raw_command,
+ cache,
)
@@ -40,13 +37,7 @@ class ChangeDetectionNotSupported(ApplicationError):
"""Exception for cases where change detection is not supported."""
-class AuthContext:
- """Context information required for Ansible Core CI authentication."""
- def __init__(self): # type: () -> None
- pass
-
-
-class CIProvider(ABC):
+class CIProvider(metaclass=abc.ABCMeta):
"""Base class for CI provider plugins."""
priority = 500
@@ -78,11 +69,11 @@ class CIProvider(ABC):
"""Initialize change detection."""
@abc.abstractmethod
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
@abc.abstractmethod
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
@abc.abstractmethod
@@ -90,13 +81,9 @@ class CIProvider(ABC):
"""Return details about git in the current environment."""
+@cache
def get_ci_provider(): # type: () -> CIProvider
"""Return a CI provider instance for the current environment."""
- try:
- return get_ci_provider.provider
- except AttributeError:
- pass
-
provider = None
import_plugins('ci')
@@ -111,12 +98,10 @@ def get_ci_provider(): # type: () -> CIProvider
if provider.code:
display.info('Detected CI provider: %s' % provider.name)
- get_ci_provider.provider = provider
-
return provider
-class AuthHelper(ABC):
+class AuthHelper(metaclass=abc.ABCMeta):
"""Public key based authentication helper for Ansible Core CI."""
def sign_request(self, request): # type: (t.Dict[str, t.Any]) -> None
"""Sign the given auth request and make the public key available."""
@@ -154,7 +139,7 @@ class AuthHelper(ABC):
"""Generate a new key pair, publishing the public key and returning the private key."""
-class CryptographyAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-method
+class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""Cryptography based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
"""Sign the given payload and return the signature, initializing a new key pair if required."""
@@ -199,7 +184,7 @@ class CryptographyAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-metho
return private_key_pem
-class OpenSSLAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-method
+class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""OpenSSL based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
"""Sign the given payload and return the signature, initializing a new key pair if required."""
diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py
index e981e832..d5b3999a 100644
--- a/test/lib/ansible_test/_internal/ci/azp.py
+++ b/test/lib/ansible_test/_internal/ci/azp.py
@@ -1,13 +1,11 @@
"""Support code for working with Azure Pipelines."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-import re
import tempfile
import uuid
-
-from .. import types as t
+import typing as t
+import urllib.parse
from ..encoding import (
to_bytes,
@@ -24,7 +22,6 @@ from ..git import (
from ..http import (
HttpClient,
- urlencode,
)
from ..util import (
@@ -33,7 +30,6 @@ from ..util import (
)
from . import (
- AuthContext,
ChangeDetectionNotSupported,
CIProvider,
CryptographyAuthHelper,
@@ -106,11 +102,11 @@ class AzurePipelines(CIProvider):
return result.paths
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
return True
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
try:
request = dict(
@@ -228,7 +224,7 @@ class AzurePipelinesChanges:
repositoryId='%s/%s' % (self.org, self.project),
)
- url = '%s%s/_apis/build/builds?api-version=6.0&%s' % (self.org_uri, self.project, urlencode(parameters))
+ url = '%s%s/_apis/build/builds?api-version=6.0&%s' % (self.org_uri, self.project, urllib.parse.urlencode(parameters))
http = HttpClient(self.args, always=True)
response = http.get(url)
diff --git a/test/lib/ansible_test/_internal/ci/local.py b/test/lib/ansible_test/_internal/ci/local.py
index 5886601b..e1277533 100644
--- a/test/lib/ansible_test/_internal/ci/local.py
+++ b/test/lib/ansible_test/_internal/ci/local.py
@@ -1,13 +1,11 @@
"""Support code for working without a supported CI provider."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import platform
import random
import re
-
-from .. import types as t
+import typing as t
from ..config import (
CommonConfig,
@@ -30,7 +28,6 @@ from ..util import (
)
from . import (
- AuthContext,
CIProvider,
)
@@ -119,12 +116,12 @@ class Local(CIProvider):
return sorted(names)
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
path = self._get_aci_key_path()
return os.path.exists(path)
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
path = self._get_aci_key_path()
auth_key = read_text_file(path).strip()
@@ -144,7 +141,8 @@ class Local(CIProvider):
"""Return details about git in the current environment."""
return None # not yet implemented for local
- def _get_aci_key_path(self): # type: () -> str
+ @staticmethod
+ def _get_aci_key_path(): # type: () -> str
path = os.path.expanduser('~/.ansible-core-ci.key')
return path
@@ -154,7 +152,7 @@ class InvalidBranch(ApplicationError):
def __init__(self, branch, reason): # type: (str, str) -> None
message = 'Invalid branch: %s\n%s' % (branch, reason)
- super(InvalidBranch, self).__init__(message)
+ super().__init__(message)
self.branch = branch
diff --git a/test/lib/ansible_test/_internal/ci/shippable.py b/test/lib/ansible_test/_internal/ci/shippable.py
deleted file mode 100644
index f9f0a192..00000000
--- a/test/lib/ansible_test/_internal/ci/shippable.py
+++ /dev/null
@@ -1,269 +0,0 @@
-"""Support code for working with Shippable."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import re
-import time
-
-from .. import types as t
-
-from ..config import (
- CommonConfig,
- TestConfig,
-)
-
-from ..git import (
- Git,
-)
-
-from ..http import (
- HttpClient,
- urlencode,
-)
-
-from ..util import (
- ApplicationError,
- display,
- MissingEnvironmentVariable,
- SubprocessError,
-)
-
-from . import (
- AuthContext,
- ChangeDetectionNotSupported,
- CIProvider,
- OpenSSLAuthHelper,
-)
-
-
-CODE = 'shippable'
-
-
-class Shippable(CIProvider):
- """CI provider implementation for Shippable."""
- def __init__(self):
- self.auth = ShippableAuthHelper()
-
- @staticmethod
- def is_supported(): # type: () -> bool
- """Return True if this provider is supported in the current running environment."""
- return os.environ.get('SHIPPABLE') == 'true'
-
- @property
- def code(self): # type: () -> str
- """Return a unique code representing this provider."""
- return CODE
-
- @property
- def name(self): # type: () -> str
- """Return descriptive name for this provider."""
- return 'Shippable'
-
- def generate_resource_prefix(self): # type: () -> str
- """Return a resource prefix specific to this CI provider."""
- try:
- prefix = 'shippable-%s-%s' % (
- os.environ['SHIPPABLE_BUILD_NUMBER'],
- os.environ['SHIPPABLE_JOB_NUMBER'],
- )
- except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0])
-
- return prefix
-
- def get_base_branch(self): # type: () -> str
- """Return the base branch or an empty string."""
- base_branch = os.environ.get('BASE_BRANCH')
-
- if base_branch:
- base_branch = 'origin/%s' % base_branch
-
- return base_branch or ''
-
- def detect_changes(self, args): # type: (TestConfig) -> t.Optional[t.List[str]]
- """Initialize change detection."""
- result = ShippableChanges(args)
-
- if result.is_pr:
- job_type = 'pull request'
- elif result.is_tag:
- job_type = 'tag'
- else:
- job_type = 'merge commit'
-
- display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))
-
- if not args.metadata.changes:
- args.metadata.populate_changes(result.diff)
-
- if result.paths is None:
- # There are several likely causes of this:
- # - First run on a new branch.
- # - Too many pull requests passed since the last merge run passed.
- display.warning('No successful commit found. All tests will be executed.')
-
- return result.paths
-
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
- """Return True if Ansible Core CI is supported."""
- return True
-
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
- """Return authentication details for Ansible Core CI."""
- try:
- request = dict(
- run_id=os.environ['SHIPPABLE_BUILD_ID'],
- job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
- )
- except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0])
-
- self.auth.sign_request(request)
-
- auth = dict(
- shippable=request,
- )
-
- return auth
-
- def get_git_details(self, args): # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
- """Return details about git in the current environment."""
- commit = os.environ.get('COMMIT')
- base_commit = os.environ.get('BASE_COMMIT')
-
- details = dict(
- base_commit=base_commit,
- commit=commit,
- merged_commit=self._get_merged_commit(args, commit),
- )
-
- return details
-
- # noinspection PyUnusedLocal
- def _get_merged_commit(self, args, commit): # type: (CommonConfig, str) -> t.Optional[str] # pylint: disable=unused-argument
- """Find the merged commit that should be present."""
- if not commit:
- return None
-
- git = Git()
-
- try:
- show_commit = git.run_git(['show', '--no-patch', '--no-abbrev', commit])
- except SubprocessError as ex:
- # This should only fail for pull requests where the commit does not exist.
- # Merge runs would fail much earlier when attempting to checkout the commit.
- raise ApplicationError('Commit %s was not found:\n\n%s\n\n'
- 'GitHub may not have fully replicated the commit across their infrastructure.\n'
- 'It is also possible the commit was removed by a force push between job creation and execution.\n'
- 'Find the latest run for the pull request and restart failed jobs as needed.'
- % (commit, ex.stderr.strip()))
-
- head_commit = git.run_git(['show', '--no-patch', '--no-abbrev', 'HEAD'])
-
- if show_commit == head_commit:
- # Commit is HEAD, so this is not a pull request or the base branch for the pull request is up-to-date.
- return None
-
- match_merge = re.search(r'^Merge: (?P<parents>[0-9a-f]{40} [0-9a-f]{40})$', head_commit, flags=re.MULTILINE)
-
- if not match_merge:
- # The most likely scenarios resulting in a failure here are:
- # A new run should or does supersede this job, but it wasn't cancelled in time.
- # A job was superseded and then later restarted.
- raise ApplicationError('HEAD is not commit %s or a merge commit:\n\n%s\n\n'
- 'This job has likely been superseded by another run due to additional commits being pushed.\n'
- 'Find the latest run for the pull request and restart failed jobs as needed.'
- % (commit, head_commit.strip()))
-
- parents = set(match_merge.group('parents').split(' '))
-
- if len(parents) != 2:
- raise ApplicationError('HEAD is a %d-way octopus merge.' % len(parents))
-
- if commit not in parents:
- raise ApplicationError('Commit %s is not a parent of HEAD.' % commit)
-
- parents.remove(commit)
-
- last_commit = parents.pop()
-
- return last_commit
-
-
-class ShippableAuthHelper(OpenSSLAuthHelper):
- """
- Authentication helper for Shippable.
- Based on OpenSSL since cryptography is not provided by the default Shippable environment.
- """
- def publish_public_key(self, public_key_pem): # type: (str) -> None
- """Publish the given public key."""
- # display the public key as a single line to avoid mangling such as when prefixing each line with a timestamp
- display.info(public_key_pem.replace('\n', ' '))
- # allow time for logs to become available to reduce repeated API calls
- time.sleep(3)
-
-
-class ShippableChanges:
- """Change information for Shippable build."""
- def __init__(self, args): # type: (TestConfig) -> None
- self.args = args
- self.git = Git()
-
- try:
- self.branch = os.environ['BRANCH']
- self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
- self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
- self.commit = os.environ['COMMIT']
- self.project_id = os.environ['PROJECT_ID']
- self.commit_range = os.environ['SHIPPABLE_COMMIT_RANGE']
- except KeyError as ex:
- raise MissingEnvironmentVariable(name=ex.args[0])
-
- if self.is_tag:
- raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
-
- if self.is_pr:
- self.paths = sorted(self.git.get_diff_names([self.commit_range]))
- self.diff = self.git.get_diff([self.commit_range])
- else:
- commits = self.get_successful_merge_run_commits(self.project_id, self.branch)
- last_successful_commit = self.get_last_successful_commit(commits)
-
- if last_successful_commit:
- self.paths = sorted(self.git.get_diff_names([last_successful_commit, self.commit]))
- self.diff = self.git.get_diff([last_successful_commit, self.commit])
- else:
- # first run for branch
-                self.paths = None  # act as though change detection is not enabled; do not filter targets
- self.diff = []
-
- def get_successful_merge_run_commits(self, project_id, branch): # type: (str, str) -> t.Set[str]
- """Return a set of recent successsful merge commits from Shippable for the given project and branch."""
- parameters = dict(
- isPullRequest='false',
- projectIds=project_id,
- branch=branch,
- )
-
- url = 'https://api.shippable.com/runs?%s' % urlencode(parameters)
-
- http = HttpClient(self.args, always=True)
- response = http.get(url)
- result = response.json()
-
- if 'id' in result and result['id'] == 4004:
- # most likely due to a private project, which returns an HTTP 200 response with JSON
- display.warning('Unable to find project. Cannot determine changes. All tests will be executed.')
- return set()
-
- commits = set(run['commitSha'] for run in result if run['statusCode'] == 30)
-
- return commits
-
- def get_last_successful_commit(self, successful_commits): # type: (t.Set[str]) -> t.Optional[str]
- """Return the last successful commit from git history that is found in the given commit list, or None."""
- commit_history = self.git.get_rev_list(max_count=100)
- ordered_successful_commits = [commit for commit in commit_history if commit in successful_commits]
- last_successful_commit = ordered_successful_commits[0] if ordered_successful_commits else None
- return last_successful_commit
diff --git a/test/lib/ansible_test/_internal/classification.py b/test/lib/ansible_test/_internal/classification/__init__.py
index ff44b804..532fa680 100644
--- a/test/lib/ansible_test/_internal/classification.py
+++ b/test/lib/ansible_test/_internal/classification/__init__.py
@@ -1,15 +1,13 @@
"""Classify changes in Ansible code."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
import os
import re
import time
+import typing as t
-from . import types as t
-
-from .target import (
+from ..target import (
walk_module_targets,
walk_integration_targets,
walk_units_targets,
@@ -19,49 +17,44 @@ from .target import (
analyze_integration_target_dependencies,
)
-from .util import (
+from ..util import (
display,
is_subdir,
)
-from .import_analysis import (
+from .python import (
get_python_module_utils_imports,
get_python_module_utils_name,
)
-from .csharp_import_analysis import (
+from .csharp import (
get_csharp_module_utils_imports,
get_csharp_module_utils_name,
)
-from .powershell_import_analysis import (
+from .powershell import (
get_powershell_module_utils_imports,
get_powershell_module_utils_name,
)
-from .config import (
+from ..config import (
TestConfig,
IntegrationConfig,
)
-from .metadata import (
+from ..metadata import (
ChangeDescription,
)
-from .data import (
+from ..data import (
data_context,
)
FOCUSED_TARGET = '__focused__'
-def categorize_changes(args, paths, verbose_command=None):
- """
- :type args: TestConfig
- :type paths: list[str]
- :type verbose_command: str
- :rtype: ChangeDescription
- """
+def categorize_changes(args, paths, verbose_command=None): # type: (TestConfig, t.List[str], t.Optional[str]) -> ChangeDescription
+ """Categorize the given list of changed paths and return a description of the changes."""
mapper = PathMapper(args)
commands = {
@@ -156,17 +149,17 @@ def categorize_changes(args, paths, verbose_command=None):
if none_count > 0 and args.verbosity < 2:
display.notice('Omitted %d file(s) that triggered no tests.' % none_count)
- for command in commands:
- commands[command].discard('none')
+ for command, targets in commands.items():
+ targets.discard('none')
- if any(target == 'all' for target in commands[command]):
- commands[command] = set(['all'])
+ if any(target == 'all' for target in targets):
+ commands[command] = {'all'}
- commands = dict((c, sorted(commands[c])) for c in commands if commands[c])
- focused_commands = dict((c, sorted(focused_commands[c])) for c in focused_commands)
+ commands = dict((c, sorted(targets)) for c, targets in commands.items() if targets)
+ focused_commands = dict((c, sorted(targets)) for c, targets in focused_commands.items())
- for command in commands:
- if commands[command] == ['all']:
+ for command, targets in commands.items():
+ if targets == ['all']:
commands[command] = [] # changes require testing all targets, do not filter targets
changes = ChangeDescription()
@@ -182,10 +175,7 @@ def categorize_changes(args, paths, verbose_command=None):
class PathMapper:
"""Map file paths to test commands and targets."""
- def __init__(self, args):
- """
- :type args: TestConfig
- """
+ def __init__(self, args): # type: (TestConfig) -> None
self.args = args
self.integration_all_target = get_integration_all_target(self.args)
@@ -228,11 +218,8 @@ class PathMapper:
self.paths_to_dependent_targets[path].add(target)
- def get_dependent_paths(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_dependent_paths(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path, recursively expanding dependent paths as well."""
unprocessed_paths = set(self.get_dependent_paths_non_recursive(path))
paths = set()
@@ -250,22 +237,16 @@ class PathMapper:
return sorted(paths)
- def get_dependent_paths_non_recursive(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_dependent_paths_non_recursive(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path, including dependent integration test target paths."""
paths = self.get_dependent_paths_internal(path)
paths += [target.path + '/' for target in self.paths_to_dependent_targets.get(path, set())]
paths = sorted(set(paths))
return paths
- def get_dependent_paths_internal(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_dependent_paths_internal(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path."""
ext = os.path.splitext(os.path.split(path)[1])[1]
if is_subdir(path, data_context().content.module_utils_path):
@@ -283,11 +264,8 @@ class PathMapper:
return []
- def get_python_module_utils_usage(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_python_module_utils_usage(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path which is a Python module_utils file."""
if not self.python_module_utils_imports:
display.info('Analyzing python module_utils imports...')
before = time.time()
@@ -299,11 +277,8 @@ class PathMapper:
return sorted(self.python_module_utils_imports[name])
- def get_powershell_module_utils_usage(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_powershell_module_utils_usage(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path which is a PowerShell module_utils file."""
if not self.powershell_module_utils_imports:
display.info('Analyzing powershell module_utils imports...')
before = time.time()
@@ -315,11 +290,8 @@ class PathMapper:
return sorted(self.powershell_module_utils_imports[name])
- def get_csharp_module_utils_usage(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_csharp_module_utils_usage(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path which is a C# module_utils file."""
if not self.csharp_module_utils_imports:
display.info('Analyzing C# module_utils imports...')
before = time.time()
@@ -331,22 +303,16 @@ class PathMapper:
return sorted(self.csharp_module_utils_imports[name])
- def get_integration_target_usage(self, path):
- """
- :type path: str
- :rtype: list[str]
- """
+ def get_integration_target_usage(self, path): # type: (str) -> t.List[str]
+ """Return a list of paths which depend on the given path which is an integration target file."""
target_name = path.split('/')[3]
dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep
for target in sorted(self.integration_dependencies.get(target_name, set()))]
return dependents
- def classify(self, path):
- """
- :type path: str
- :rtype: dict[str, str] | None
- """
+ def classify(self, path): # type: (str) -> t.Optional[t.Dict[str, str]]
+ """Classify the given path and return an optional dictionary of the results."""
result = self._classify(path)
# run all tests when no result given
@@ -380,7 +346,6 @@ class PathMapper:
if os.path.sep not in path:
if filename in (
'azure-pipelines.yml',
- 'shippable.yml',
):
return all_tests(self.args) # test infrastructure, run all tests
@@ -717,11 +682,6 @@ class PathMapper:
if path.startswith('changelogs/'):
return minimal
- if path.startswith('contrib/'):
- return {
- 'units': 'test/units/contrib/'
- }
-
if path.startswith('docs/'):
return minimal
@@ -754,22 +714,6 @@ class PathMapper:
return minimal
if path.startswith('packaging/'):
- if path.startswith('packaging/requirements/'):
- if name.startswith('requirements-') and ext == '.txt':
- component = name.split('-', 1)[1]
-
- candidates = (
- 'cloud/%s/' % component,
- )
-
- for candidate in candidates:
- if candidate in self.integration_targets_by_alias:
- return {
- 'integration': candidate,
- }
-
- return all_tests(self.args) # broad impact, run all tests
-
return minimal
if path.startswith('test/ansible_test/'):
@@ -789,7 +733,7 @@ class PathMapper:
if path == 'test/lib/ansible_test/_data/completion/docker.txt':
return all_tests(self.args, force=True) # force all tests due to risk of breaking changes in new test environment
- if path.startswith('test/lib/ansible_test/_internal/cloud/'):
+ if path.startswith('test/lib/ansible_test/_internal/commands/integration/cloud/'):
cloud_target = 'cloud/%s/' % name
if cloud_target in self.integration_targets_by_alias:
@@ -799,31 +743,13 @@ class PathMapper:
return all_tests(self.args) # test infrastructure, run all tests
- if path.startswith('test/lib/ansible_test/_internal/sanity/'):
+ if path.startswith('test/lib/ansible_test/_internal/commands/sanity/'):
return {
'sanity': 'all', # test infrastructure, run all sanity checks
'integration': 'ansible-test', # run ansible-test self tests
}
- if path.startswith('test/lib/ansible_test/_data/sanity/'):
- return {
- 'sanity': 'all', # test infrastructure, run all sanity checks
- 'integration': 'ansible-test', # run ansible-test self tests
- }
-
- if path.startswith('test/lib/ansible_test/_internal/units/'):
- return {
- 'units': 'all', # test infrastructure, run all unit tests
- 'integration': 'ansible-test', # run ansible-test self tests
- }
-
- if path.startswith('test/lib/ansible_test/_data/units/'):
- return {
- 'units': 'all', # test infrastructure, run all unit tests
- 'integration': 'ansible-test', # run ansible-test self tests
- }
-
- if path.startswith('test/lib/ansible_test/_data/pytest/'):
+ if path.startswith('test/lib/ansible_test/_internal/commands/units/'):
return {
'units': 'all', # test infrastructure, run all unit tests
'integration': 'ansible-test', # run ansible-test self tests
@@ -847,13 +773,17 @@ class PathMapper:
name: 'all',
}
- if name.startswith('integration.cloud.'):
- cloud_target = 'cloud/%s/' % name.split('.')[2]
+ if path.startswith('test/lib/ansible_test/_util/controller/sanity/') or path.startswith('test/lib/ansible_test/_util/target/sanity/'):
+ return {
+ 'sanity': 'all', # test infrastructure, run all sanity checks
+ 'integration': 'ansible-test', # run ansible-test self tests
+ }
- if cloud_target in self.integration_targets_by_alias:
- return {
- 'integration': cloud_target,
- }
+ if path.startswith('test/lib/ansible_test/_util/target/pytest/'):
+ return {
+ 'units': 'all', # test infrastructure, run all unit tests
+ 'integration': 'ansible-test', # run ansible-test self tests
+ }
if path.startswith('test/lib/'):
return all_tests(self.args) # test infrastructure, run all tests
@@ -946,12 +876,8 @@ class PathMapper:
)
-def all_tests(args, force=False):
- """
- :type args: TestConfig
- :type force: bool
- :rtype: dict[str, str]
- """
+def all_tests(args, force=False): # type: (TestConfig, bool) -> t.Dict[str, str]
+ """Return the targets for each test command when all tests should be run."""
if force:
integration_all_target = 'all'
else:
@@ -966,11 +892,8 @@ def all_tests(args, force=False):
}
-def get_integration_all_target(args):
- """
- :type args: TestConfig
- :rtype: str
- """
+def get_integration_all_target(args): # type: (TestConfig) -> str
+ """Return the target to use when all tests should be run."""
if isinstance(args, IntegrationConfig):
return args.changed_all_target
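
PathMapper.get_dependent_paths() (partially shown above) expands dependents transitively: a worklist keeps resolving newly discovered paths until no new ones appear. A generic standalone sketch of that shape, with a hypothetical dependency map (the real method also folds in integration-target directories):

    import typing as t

    def expand_dependents(path, direct):  # type: (str, t.Dict[str, t.List[str]]) -> t.List[str]
        """Follow the direct-dependents map until a fixed point is reached."""
        unprocessed = set(direct.get(path, []))
        seen = set()  # type: t.Set[str]

        while unprocessed:
            current = unprocessed.pop()
            if current in seen:
                continue
            seen.add(current)
            unprocessed.update(direct.get(current, []))

        return sorted(seen)

    # Hypothetical map: a module_utils file used by a module, which an action plugin wraps.
    direct = {
        'lib/ansible/module_utils/basic.py': ['lib/ansible/modules/ping.py'],
        'lib/ansible/modules/ping.py': ['lib/ansible/plugins/action/ping.py'],
    }

    print(expand_dependents('lib/ansible/module_utils/basic.py', direct))
    # ['lib/ansible/modules/ping.py', 'lib/ansible/plugins/action/ping.py']
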
diff --git a/test/lib/ansible_test/_internal/classification/common.py b/test/lib/ansible_test/_internal/classification/common.py
new file mode 100644
index 00000000..40683827
--- /dev/null
+++ b/test/lib/ansible_test/_internal/classification/common.py
@@ -0,0 +1,26 @@
+"""Common classification code used by multiple languages."""
+from __future__ import annotations
+
+import os
+
+from ..data import (
+ data_context,
+)
+
+
+def resolve_csharp_ps_util(import_name, path): # type: (str, str) -> str
+ """Return the fully qualified name of the given import if possible, otherwise return the original import name."""
+ if data_context().content.is_ansible or not import_name.startswith('.'):
+        # We don't support relative paths for builtin utils; there's no point.
+ return import_name
+
+ packages = import_name.split('.')
+ module_packages = path.split(os.path.sep)
+
+ for package in packages:
+ if not module_packages or package:
+ break
+ del module_packages[-1]
+
+ return 'ansible_collections.%s%s' % (data_context().content.prefix,
+ '.'.join(module_packages + [p for p in packages if p]))
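
For the relative-import resolution above: the first leading dot stands for the importing file itself, and each additional dot climbs one directory before the collection prefix is prepended. A worked standalone sketch, with the prefix passed explicitly (assumed to end with a dot, e.g. 'ns.col.', rather than taken from data_context()):

    import os
    import typing as t

    def resolve_relative_util(import_name, path, prefix):  # type: (str, str, str) -> str
        """Mirror of resolve_csharp_ps_util() above, minus the data_context() lookup."""
        if not import_name.startswith('.'):
            return import_name

        packages = import_name.split('.')
        module_packages = path.split(os.path.sep)

        for package in packages:
            if not module_packages or package:
                break
            del module_packages[-1]

        return 'ansible_collections.%s%s' % (prefix, '.'.join(module_packages + [p for p in packages if p]))

    # One leading dot resolves relative to the importing file's directory.
    print(resolve_relative_util('.sub.Util', 'plugins/modules/win_test.ps1', 'ns.col.'))
    # ansible_collections.ns.col.plugins.modules.sub.Util

    # A second dot climbs one directory higher.
    print(resolve_relative_util('..Util', 'plugins/modules/win_test.ps1', 'ns.col.'))
    # ansible_collections.ns.col.plugins.Util
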
diff --git a/test/lib/ansible_test/_internal/csharp_import_analysis.py b/test/lib/ansible_test/_internal/classification/csharp.py
index daa8892c..af7f9c7f 100644
--- a/test/lib/ansible_test/_internal/csharp_import_analysis.py
+++ b/test/lib/ansible_test/_internal/classification/csharp.py
@@ -1,34 +1,33 @@
"""Analyze C# import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
+import typing as t
-from .io import (
+from ..io import (
open_text_file,
)
-from .util import (
+from ..util import (
display,
)
-from .util_common import (
+from .common import (
resolve_csharp_ps_util,
)
-from .data import (
+from ..data import (
data_context,
)
+from ..target import (
+ TestTarget,
+)
-def get_csharp_module_utils_imports(powershell_targets, csharp_targets):
- """Return a dictionary of module_utils names mapped to sets of powershell file paths.
- :type powershell_targets: list[TestTarget] - PS files
- :type csharp_targets: list[TestTarget] - C# files
- :rtype: dict[str, set[str]]
- """
+def get_csharp_module_utils_imports(powershell_targets, csharp_targets): # type: (t.List[TestTarget], t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
+ """Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
imports_by_target_path = {}
@@ -39,10 +38,10 @@ def get_csharp_module_utils_imports(powershell_targets, csharp_targets):
for target in csharp_targets:
imports_by_target_path[target.path] = extract_csharp_module_utils_imports(target.path, module_utils, True)
- imports = dict([(module_util, set()) for module_util in module_utils])
+ imports = {module_util: set() for module_util in module_utils} # type: t.Dict[str, t.Set[str]]
- for target_path in imports_by_target_path:
- for module_util in imports_by_target_path[target_path]:
+ for target_path, modules in imports_by_target_path.items():
+ for module_util in modules:
imports[module_util].add(target_path)
for module_util in sorted(imports):
@@ -66,22 +65,15 @@ def get_csharp_module_utils_name(path): # type: (str) -> str
return name
-def enumerate_module_utils():
- """Return a list of available module_utils imports.
- :rtype: set[str]
- """
+def enumerate_module_utils(): # type: () -> t.Set[str]
+ """Return a set of available module_utils imports."""
return set(get_csharp_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_csharp_path)
if os.path.splitext(p)[1] == '.cs')
-def extract_csharp_module_utils_imports(path, module_utils, is_pure_csharp):
- """Return a list of module_utils imports found in the specified source file.
- :type path: str
- :type module_utils: set[str]
- :type is_pure_csharp: bool
- :rtype: set[str]
- """
+def extract_csharp_module_utils_imports(path, module_utils, is_pure_csharp): # type: (str, t.Set[str], bool) -> t.Set[str]
+ """Return a set of module_utils imports found in the specified source file."""
imports = set()
if is_pure_csharp:
pattern = re.compile(r'(?i)^using\s((?:Ansible|AnsibleCollections)\..+);$')
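
For context, a minimal sketch of what the pure-C# pattern above accepts; the handling of non-pure files and the later filtering against known module_utils names are omitted:

    import re

    pattern = re.compile(r'(?i)^using\s((?:Ansible|AnsibleCollections)\..+);$')

    print(pattern.match('using Ansible.Basic;').group(1))  # -> Ansible.Basic
    print(pattern.match('using System.Collections.Generic;'))  # -> None (not a framework namespace)
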
diff --git a/test/lib/ansible_test/_internal/powershell_import_analysis.py b/test/lib/ansible_test/_internal/classification/powershell.py
index cfc61859..72715de0 100644
--- a/test/lib/ansible_test/_internal/powershell_import_analysis.py
+++ b/test/lib/ansible_test/_internal/classification/powershell.py
@@ -1,33 +1,33 @@
"""Analyze powershell import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
+import typing as t
-from .io import (
+from ..io import (
read_text_file,
)
-from .util import (
+from ..util import (
display,
)
-from .util_common import (
+from .common import (
resolve_csharp_ps_util,
)
-from .data import (
+from ..data import (
data_context,
)
+from ..target import (
+ TestTarget,
+)
-def get_powershell_module_utils_imports(powershell_targets):
- """Return a dictionary of module_utils names mapped to sets of powershell file paths.
- :type powershell_targets: list[TestTarget]
- :rtype: dict[str, set[str]]
- """
+def get_powershell_module_utils_imports(powershell_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
+ """Return a dictionary of module_utils names mapped to sets of powershell file paths."""
module_utils = enumerate_module_utils()
imports_by_target_path = {}
@@ -35,10 +35,10 @@ def get_powershell_module_utils_imports(powershell_targets):
for target in powershell_targets:
imports_by_target_path[target.path] = extract_powershell_module_utils_imports(target.path, module_utils)
- imports = dict([(module_util, set()) for module_util in module_utils])
+ imports = {module_util: set() for module_util in module_utils} # type: t.Dict[str, t.Set[str]]
- for target_path in imports_by_target_path:
- for module_util in imports_by_target_path[target_path]:
+ for target_path, modules in imports_by_target_path.items():
+ for module_util in modules:
imports[module_util].add(target_path)
for module_util in sorted(imports):
@@ -62,21 +62,15 @@ def get_powershell_module_utils_name(path): # type: (str) -> str
return name
-def enumerate_module_utils():
- """Return a list of available module_utils imports.
- :rtype: set[str]
- """
+def enumerate_module_utils(): # type: () -> t.Set[str]
+ """Return a set of available module_utils imports."""
return set(get_powershell_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_powershell_path)
if os.path.splitext(p)[1] == '.psm1')
-def extract_powershell_module_utils_imports(path, module_utils):
- """Return a list of module_utils imports found in the specified source file.
- :type path: str
- :type module_utils: set[str]
- :rtype: set[str]
- """
+def extract_powershell_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str]
+ """Return a set of module_utils imports found in the specified source file."""
imports = set()
code = read_text_file(path)
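
The rewritten loops in this file and its C# counterpart perform the same inversion; a short sketch with hypothetical paths shows the shape of the data before and after:

    # per-file import sets, as produced by extract_*_module_utils_imports
    imports_by_target_path = {
        'plugins/modules/a.ps1': {'Ansible.ModuleUtils.Legacy'},
        'plugins/modules/b.ps1': {'Ansible.ModuleUtils.Legacy', 'Ansible.ModuleUtils.ArgvParser'},
    }

    module_utils = {'Ansible.ModuleUtils.Legacy', 'Ansible.ModuleUtils.ArgvParser'}
    imports = {module_util: set() for module_util in module_utils}  # flipped: module_util -> file paths

    for target_path, modules in imports_by_target_path.items():
        for module_util in modules:
            imports[module_util].add(target_path)

    assert imports['Ansible.ModuleUtils.Legacy'] == {'plugins/modules/a.ps1', 'plugins/modules/b.ps1'}
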
diff --git a/test/lib/ansible_test/_internal/import_analysis.py b/test/lib/ansible_test/_internal/classification/python.py
index 9cc5376f..ac2d99a7 100644
--- a/test/lib/ansible_test/_internal/import_analysis.py
+++ b/test/lib/ansible_test/_internal/classification/python.py
@@ -1,38 +1,36 @@
"""Analyze python import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import os
import re
+import typing as t
-from . import types as t
-
-from .io import (
+from ..io import (
read_binary_file,
)
-from .util import (
+from ..util import (
display,
ApplicationError,
is_subdir,
)
-from .data import (
+from ..data import (
data_context,
)
-VIRTUAL_PACKAGES = set([
+from ..target import (
+ TestTarget,
+)
+
+VIRTUAL_PACKAGES = {
'ansible.module_utils.six',
-])
+}
-def get_python_module_utils_imports(compile_targets):
- """Return a dictionary of module_utils names mapped to sets of python file paths.
- :type compile_targets: list[TestTarget]
- :rtype: dict[str, set[str]]
- """
-
+def get_python_module_utils_imports(compile_targets): # type: (t.List[TestTarget]) -> t.Dict[str, t.Set[str]]
+ """Return a dictionary of module_utils names mapped to sets of python file paths."""
module_utils = enumerate_module_utils()
virtual_utils = set(m for m in module_utils if any(m.startswith('%s.' % v) for v in VIRTUAL_PACKAGES))
@@ -48,9 +46,9 @@ def get_python_module_utils_imports(compile_targets):
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
if seen is None:
- seen = set([import_name])
+ seen = {import_name}
- results = set([import_name])
+ results = {import_name}
# virtual packages depend on the modules they contain instead of the reverse
if import_name in VIRTUAL_PACKAGES:
@@ -97,17 +95,17 @@ def get_python_module_utils_imports(compile_targets):
module_util_imports.remove(module_util)
# add recursive imports to all path entries which import this module_util
- for target_path in imports_by_target_path:
- if module_util in imports_by_target_path[target_path]:
+ for target_path, modules in imports_by_target_path.items():
+ if module_util in modules:
for module_util_import in sorted(module_util_imports):
- if module_util_import not in imports_by_target_path[target_path]:
+ if module_util_import not in modules:
display.info('%s inherits import %s via %s' % (target_path, module_util_import, module_util), verbosity=6)
- imports_by_target_path[target_path].add(module_util_import)
+ modules.add(module_util_import)
- imports = dict([(module_util, set()) for module_util in module_utils | virtual_utils])
+ imports = {module_util: set() for module_util in module_utils | virtual_utils} # type: t.Dict[str, t.Set[str]]
- for target_path in imports_by_target_path:
- for module_util in imports_by_target_path[target_path]:
+ for target_path, modules in imports_by_target_path.items():
+ for module_util in modules:
imports[module_util].add(target_path)
# for purposes of mapping module_utils to paths, treat imports of virtual utils the same as the parent package
@@ -165,12 +163,8 @@ def enumerate_module_utils():
return set(module_utils)
-def extract_python_module_utils_imports(path, module_utils):
- """Return a list of module_utils imports found in the specified source file.
- :type path: str
- :type module_utils: set[str]
- :rtype: set[str]
- """
+def extract_python_module_utils_imports(path, module_utils): # type: (str, t.Set[str]) -> t.Set[str]
+ """Return a list of module_utils imports found in the specified source file."""
# Python code must be read as bytes to avoid a SyntaxError when the source uses comments to declare the file encoding.
# See: https://www.python.org/dev/peps/pep-0263
# Specifically: If a Unicode string with a coding declaration is passed to compile(), a SyntaxError will be raised.
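
A small sketch of why the bytes-based read matters: with bytes input, the tokenizer honors the PEP 263 coding cookie when decoding string literals (hypothetical source declaring latin-1):

    import ast

    source = b'# -*- coding: latin-1 -*-\nNAME = "caf\xe9"\n'

    tree = ast.parse(source)  # bytes: the coding cookie selects latin-1
    print(ast.literal_eval(tree.body[0].value))  # -> 'café'
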
@@ -239,11 +233,7 @@ def relative_to_absolute(name, level, module, path, lineno): # type: (str, int,
class ModuleUtilFinder(ast.NodeVisitor):
"""AST visitor to find valid module_utils imports."""
- def __init__(self, path, module_utils):
- """Return a list of module_utils imports found in the specified source file.
- :type path: str
- :type module_utils: set[str]
- """
+ def __init__(self, path, module_utils): # type: (str, t.Set[str]) -> None
self.path = path
self.module_utils = module_utils
self.imports = set()
@@ -269,7 +259,8 @@ class ModuleUtilFinder(ast.NodeVisitor):
path_map = (
('^hacking/build_library/build_ansible/', 'build_ansible/'),
('^lib/ansible/', 'ansible/'),
- ('^test/lib/ansible_test/_data/sanity/validate-modules/', 'validate_modules/'),
+ ('^test/lib/ansible_test/_util/controller/sanity/validate-modules/', 'validate_modules/'),
+ ('^test/lib/ansible_test/_util/target/legacy_collection_loader/', 'legacy_collection_loader/'),
('^test/units/', 'test/units/'),
('^test/lib/ansible_test/_internal/', 'ansible_test/_internal/'),
('^test/integration/targets/.*/ansible_collections/(?P<ns>[^/]*)/(?P<col>[^/]*)/', r'ansible_collections/\g<ns>/\g<col>/'),
@@ -288,10 +279,8 @@ class ModuleUtilFinder(ast.NodeVisitor):
# noinspection PyPep8Naming
# pylint: disable=locally-disabled, invalid-name
- def visit_Import(self, node):
- """
- :type node: ast.Import
- """
+ def visit_Import(self, node): # type: (ast.Import) -> None
+ """Visit an import node."""
self.generic_visit(node)
# import ansible.module_utils.MODULE[.MODULE]
@@ -300,10 +289,8 @@ class ModuleUtilFinder(ast.NodeVisitor):
# noinspection PyPep8Naming
# pylint: disable=locally-disabled, invalid-name
- def visit_ImportFrom(self, node):
- """
- :type node: ast.ImportFrom
- """
+ def visit_ImportFrom(self, node): # type: (ast.ImportFrom) -> None
+ """Visit an import from node."""
self.generic_visit(node)
if not node.module:
@@ -320,11 +307,8 @@ class ModuleUtilFinder(ast.NodeVisitor):
# from ansible_collections.{ns}.{col}.plugins.module_utils.MODULE[.MODULE] import MODULE[, MODULE]
self.add_imports(['%s.%s' % (module, alias.name) for alias in node.names], node.lineno)
- def add_import(self, name, line_number):
- """
- :type name: str
- :type line_number: int
- """
+ def add_import(self, name, line_number): # type: (str, int) -> None
+ """Record the specified import."""
import_name = name
while self.is_module_util_name(name):
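
The visitor pattern used by ModuleUtilFinder is easy to demonstrate in isolation; the sketch below is a stripped-down stand-in (no relative-import handling, no path mapping) that collects the same style of import names:

    import ast

    class ImportCollector(ast.NodeVisitor):  # illustration only, far simpler than ModuleUtilFinder
        def __init__(self):
            self.imports = set()

        def visit_Import(self, node):  # import ansible.module_utils.MODULE[.MODULE]
            self.generic_visit(node)
            self.imports.update(alias.name for alias in node.names
                                if alias.name.startswith('ansible.module_utils.'))

        def visit_ImportFrom(self, node):  # from ansible.module_utils[.MODULE] import MODULE[, MODULE]
            self.generic_visit(node)
            if node.module and node.module.startswith('ansible.module_utils'):
                self.imports.update('%s.%s' % (node.module, alias.name) for alias in node.names)

    collector = ImportCollector()
    collector.visit(ast.parse('from ansible.module_utils.basic import AnsibleModule\n'))
    print(collector.imports)  # -> {'ansible.module_utils.basic.AnsibleModule'}
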
diff --git a/test/lib/ansible_test/_internal/cli.py b/test/lib/ansible_test/_internal/cli.py
deleted file mode 100644
index 15a23518..00000000
--- a/test/lib/ansible_test/_internal/cli.py
+++ /dev/null
@@ -1,1224 +0,0 @@
-"""Test runner for all Ansible tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import errno
-import os
-import sys
-
-# This import should occur as early as possible.
-# It must occur before subprocess has been imported anywhere in the current process.
-from .init import (
- CURRENT_RLIMIT_NOFILE,
-)
-
-from . import types as t
-
-from .util import (
- ApplicationError,
- display,
- raw_command,
- generate_pip_command,
- read_lines_without_comments,
- MAXFD,
- ANSIBLE_TEST_DATA_ROOT,
-)
-
-from .delegation import (
- check_delegation_args,
- delegate,
-)
-
-from .executor import (
- command_posix_integration,
- command_network_integration,
- command_windows_integration,
- command_shell,
- SUPPORTED_PYTHON_VERSIONS,
- ApplicationWarning,
- Delegate,
- generate_pip_install,
- check_startup,
- configure_pypi_proxy,
-)
-
-from .config import (
- PosixIntegrationConfig,
- WindowsIntegrationConfig,
- NetworkIntegrationConfig,
- SanityConfig,
- UnitsConfig,
- ShellConfig,
-)
-
-from .env import (
- EnvConfig,
- command_env,
- configure_timeout,
-)
-
-from .sanity import (
- command_sanity,
- sanity_init,
- sanity_get_tests,
-)
-
-from .units import (
- command_units,
-)
-
-from .target import (
- find_target_completion,
- walk_posix_integration_targets,
- walk_network_integration_targets,
- walk_windows_integration_targets,
- walk_units_targets,
- walk_sanity_targets,
-)
-
-from .cloud import (
- initialize_cloud_plugins,
-)
-
-from .core_ci import (
- AnsibleCoreCI,
-)
-
-from .data import (
- data_context,
-)
-
-from .util_common import (
- get_docker_completion,
- get_network_completion,
- get_remote_completion,
- CommonConfig,
-)
-
-from .coverage.combine import (
- command_coverage_combine,
-)
-
-from .coverage.erase import (
- command_coverage_erase,
-)
-
-from .coverage.html import (
- command_coverage_html,
-)
-
-from .coverage.report import (
- command_coverage_report,
- CoverageReportConfig,
-)
-
-from .coverage.xml import (
- command_coverage_xml,
-)
-
-from .coverage.analyze.targets.generate import (
- command_coverage_analyze_targets_generate,
- CoverageAnalyzeTargetsGenerateConfig,
-)
-
-from .coverage.analyze.targets.expand import (
- command_coverage_analyze_targets_expand,
- CoverageAnalyzeTargetsExpandConfig,
-)
-
-from .coverage.analyze.targets.filter import (
- command_coverage_analyze_targets_filter,
- CoverageAnalyzeTargetsFilterConfig,
-)
-
-from .coverage.analyze.targets.combine import (
- command_coverage_analyze_targets_combine,
- CoverageAnalyzeTargetsCombineConfig,
-)
-
-from .coverage.analyze.targets.missing import (
- command_coverage_analyze_targets_missing,
- CoverageAnalyzeTargetsMissingConfig,
-)
-
-from .coverage import (
- COVERAGE_GROUPS,
- CoverageConfig,
-)
-
-if t.TYPE_CHECKING:
- import argparse as argparse_module
-
-
-def main():
- """Main program function."""
- try:
- os.chdir(data_context().content.root)
- initialize_cloud_plugins()
- sanity_init()
- args = parse_args()
- config = args.config(args) # type: CommonConfig
- display.verbosity = config.verbosity
- display.truncate = config.truncate
- display.redact = config.redact
- display.color = config.color
- display.info_stderr = config.info_stderr
- check_startup()
- check_delegation_args(config)
- configure_timeout(config)
-
- display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
- display.info('MAXFD: %d' % MAXFD, verbosity=2)
-
- try:
- configure_pypi_proxy(config)
- args.func(config)
- delegate_args = None
- except Delegate as ex:
- # save delegation args for use once we exit the exception handler
- delegate_args = (ex.exclude, ex.require, ex.integration_targets)
-
- if delegate_args:
- # noinspection PyTypeChecker
- delegate(config, *delegate_args)
-
- display.review_warnings()
- except ApplicationWarning as ex:
- display.warning(u'%s' % ex)
- sys.exit(0)
- except ApplicationError as ex:
- display.error(u'%s' % ex)
- sys.exit(1)
- except KeyboardInterrupt:
- sys.exit(2)
- except IOError as ex:
- if ex.errno == errno.EPIPE:
- sys.exit(3)
- raise
-
-
-def parse_args():
- """Parse command line arguments."""
- try:
- import argparse
- except ImportError:
- if '--requirements' not in sys.argv:
- raise
- # install argparse without using constraints since pip may be too old to support them
- # not using the ansible-test requirements file since this install is for sys.executable rather than the delegated python (which may be different)
- # argparse has no special requirements, so upgrading pip is not required here
- raw_command(generate_pip_install(generate_pip_command(sys.executable), '', packages=['argparse'], use_constraints=False))
- import argparse
-
- try:
- import argcomplete
- except ImportError:
- argcomplete = None
-
- if argcomplete:
- epilog = 'Tab completion available using the "argcomplete" python package.'
- else:
- epilog = 'Install the "argcomplete" python package to enable tab completion.'
-
- def key_value_type(value): # type: (str) -> t.Tuple[str, str]
- """Wrapper around key_value."""
- return key_value(argparse, value)
-
- parser = argparse.ArgumentParser(epilog=epilog)
-
- common = argparse.ArgumentParser(add_help=False)
-
- common.add_argument('-e', '--explain',
- action='store_true',
- help='explain commands that would be executed')
-
- common.add_argument('-v', '--verbose',
- dest='verbosity',
- action='count',
- default=0,
- help='display more output')
-
- common.add_argument('--pypi-proxy',
- action='store_true',
- help=argparse.SUPPRESS) # internal use only
-
- common.add_argument('--pypi-endpoint',
- metavar='URI',
- default=None,
- help=argparse.SUPPRESS) # internal use only
-
- common.add_argument('--color',
- metavar='COLOR',
- nargs='?',
- help='generate color output: %(choices)s',
- choices=('yes', 'no', 'auto'),
- const='yes',
- default='auto')
-
- common.add_argument('--debug',
- action='store_true',
- help='run ansible commands in debug mode')
-
- # noinspection PyTypeChecker
- common.add_argument('--truncate',
- dest='truncate',
- metavar='COLUMNS',
- type=int,
- default=display.columns,
- help='truncate some long output (0=disabled) (default: auto)')
-
- common.add_argument('--redact',
- dest='redact',
- action='store_true',
- default=True,
- help='redact sensitive values in output')
-
- common.add_argument('--no-redact',
- dest='redact',
- action='store_false',
- default=False,
- help='show sensitive values in output')
-
- common.add_argument('--check-python',
- choices=SUPPORTED_PYTHON_VERSIONS,
- help=argparse.SUPPRESS)
-
- test = argparse.ArgumentParser(add_help=False, parents=[common])
-
- test.add_argument('include',
- metavar='TARGET',
- nargs='*',
- help='test the specified target').completer = complete_target
-
- test.add_argument('--include',
- metavar='TARGET',
- action='append',
- help='include the specified target').completer = complete_target
-
- test.add_argument('--exclude',
- metavar='TARGET',
- action='append',
- help='exclude the specified target').completer = complete_target
-
- test.add_argument('--require',
- metavar='TARGET',
- action='append',
- help='require the specified target').completer = complete_target
-
- test.add_argument('--coverage',
- action='store_true',
- help='analyze code coverage when running tests')
-
- test.add_argument('--coverage-label',
- default='',
- help='label to include in coverage output file names')
-
- test.add_argument('--coverage-check',
- action='store_true',
- help='only verify code coverage can be enabled')
-
- test.add_argument('--metadata',
- help=argparse.SUPPRESS)
-
- test.add_argument('--base-branch',
- help='base branch used for change detection')
-
- add_changes(test, argparse)
- add_environments(test)
-
- integration = argparse.ArgumentParser(add_help=False, parents=[test])
-
- integration.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- integration.add_argument('--start-at',
- metavar='TARGET',
- help='start at the specified target').completer = complete_target
-
- integration.add_argument('--start-at-task',
- metavar='TASK',
- help='start at the specified task')
-
- integration.add_argument('--tags',
- metavar='TAGS',
- help='only run plays and tasks tagged with these values')
-
- integration.add_argument('--skip-tags',
- metavar='TAGS',
- help='only run plays and tasks whose tags do not match these values')
-
- integration.add_argument('--diff',
- action='store_true',
- help='show diff output')
-
- integration.add_argument('--allow-destructive',
- action='store_true',
- help='allow destructive tests')
-
- integration.add_argument('--allow-root',
- action='store_true',
- help='allow tests requiring root when not root')
-
- integration.add_argument('--allow-disabled',
- action='store_true',
- help='allow tests which have been marked as disabled')
-
- integration.add_argument('--allow-unstable',
- action='store_true',
- help='allow tests which have been marked as unstable')
-
- integration.add_argument('--allow-unstable-changed',
- action='store_true',
- help='allow tests which have been marked as unstable when focused changes are detected')
-
- integration.add_argument('--allow-unsupported',
- action='store_true',
- help='allow tests which have been marked as unsupported')
-
- integration.add_argument('--retry-on-error',
- action='store_true',
- help='retry failed test with increased verbosity')
-
- integration.add_argument('--continue-on-error',
- action='store_true',
- help='continue after failed test')
-
- integration.add_argument('--debug-strategy',
- action='store_true',
- help='run test playbooks using the debug strategy')
-
- integration.add_argument('--changed-all-target',
- metavar='TARGET',
- default='all',
- help='target to run when all tests are needed')
-
- integration.add_argument('--changed-all-mode',
- metavar='MODE',
- choices=('default', 'include', 'exclude'),
- help='include/exclude behavior with --changed-all-target: %(choices)s')
-
- integration.add_argument('--list-targets',
- action='store_true',
- help='list matching targets instead of running tests')
-
- integration.add_argument('--no-temp-workdir',
- action='store_true',
- help='do not run tests from a temporary directory (use only for verifying broken tests)')
-
- integration.add_argument('--no-temp-unicode',
- action='store_true',
- help='avoid unicode characters in temporary directory (use only for verifying broken tests)')
-
- subparsers = parser.add_subparsers(metavar='COMMAND')
- subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- posix_integration = subparsers.add_parser('integration',
- parents=[integration],
- help='posix integration tests')
-
- posix_integration.set_defaults(func=command_posix_integration,
- targets=walk_posix_integration_targets,
- config=PosixIntegrationConfig)
-
- add_extra_docker_options(posix_integration)
- add_httptester_options(posix_integration, argparse)
-
- network_integration = subparsers.add_parser('network-integration',
- parents=[integration],
- help='network integration tests')
-
- network_integration.set_defaults(func=command_network_integration,
- targets=walk_network_integration_targets,
- config=NetworkIntegrationConfig)
-
- add_extra_docker_options(network_integration, integration=False)
-
- network_integration.add_argument('--platform',
- metavar='PLATFORM',
- action='append',
- help='network platform/version').completer = complete_network_platform
-
- network_integration.add_argument('--platform-collection',
- type=key_value_type,
- metavar='PLATFORM=COLLECTION',
- action='append',
- help='collection used to test platform').completer = complete_network_platform_collection
-
- network_integration.add_argument('--platform-connection',
- type=key_value_type,
- metavar='PLATFORM=CONNECTION',
- action='append',
- help='connection used to test platform').completer = complete_network_platform_connection
-
- network_integration.add_argument('--inventory',
- metavar='PATH',
- help='path to inventory used for tests')
-
- network_integration.add_argument('--testcase',
- metavar='TESTCASE',
- help='limit a test to a specified testcase').completer = complete_network_testcase
-
- windows_integration = subparsers.add_parser('windows-integration',
- parents=[integration],
- help='windows integration tests')
-
- windows_integration.set_defaults(func=command_windows_integration,
- targets=walk_windows_integration_targets,
- config=WindowsIntegrationConfig)
-
- add_extra_docker_options(windows_integration, integration=False)
- add_httptester_options(windows_integration, argparse)
-
- windows_integration.add_argument('--windows',
- metavar='VERSION',
- action='append',
- help='windows version').completer = complete_windows
-
- windows_integration.add_argument('--inventory',
- metavar='PATH',
- help='path to inventory used for tests')
-
- units = subparsers.add_parser('units',
- parents=[test],
- help='unit tests')
-
- units.set_defaults(func=command_units,
- targets=walk_units_targets,
- config=UnitsConfig)
-
- units.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- units.add_argument('--collect-only',
- action='store_true',
- help='collect tests but do not execute them')
-
- # noinspection PyTypeChecker
- units.add_argument('--num-workers',
- type=int,
- help='number of workers to use (default: auto)')
-
- units.add_argument('--requirements-mode',
- choices=('only', 'skip'),
- help=argparse.SUPPRESS)
-
- add_extra_docker_options(units, integration=False)
-
- sanity = subparsers.add_parser('sanity',
- parents=[test],
- help='sanity tests')
-
- sanity.set_defaults(func=command_sanity,
- targets=walk_sanity_targets,
- config=SanityConfig)
-
- sanity.add_argument('--test',
- metavar='TEST',
- action='append',
- choices=[test.name for test in sanity_get_tests()],
- help='tests to run').completer = complete_sanity_test
-
- sanity.add_argument('--skip-test',
- metavar='TEST',
- action='append',
- choices=[test.name for test in sanity_get_tests()],
- help='tests to skip').completer = complete_sanity_test
-
- sanity.add_argument('--allow-disabled',
- action='store_true',
- help='allow tests to run which are disabled by default')
-
- sanity.add_argument('--list-tests',
- action='store_true',
- help='list available tests')
-
- sanity.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- sanity.add_argument('--enable-optional-errors',
- action='store_true',
- help='enable optional errors')
-
- add_lint(sanity)
- add_extra_docker_options(sanity, integration=False)
-
- shell = subparsers.add_parser('shell',
- parents=[common],
- help='open an interactive shell')
-
- shell.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- shell.set_defaults(func=command_shell,
- config=ShellConfig)
-
- shell.add_argument('--raw',
- action='store_true',
- help='direct to shell with no setup')
-
- add_environments(shell)
- add_extra_docker_options(shell)
- add_httptester_options(shell, argparse)
-
- coverage_common = argparse.ArgumentParser(add_help=False, parents=[common])
-
- add_environments(coverage_common, isolated_delegation=False)
-
- coverage = subparsers.add_parser('coverage',
- help='code coverage management and reporting')
-
- coverage_subparsers = coverage.add_subparsers(metavar='COMMAND')
- coverage_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- add_coverage_analyze(coverage_subparsers, coverage_common)
-
- coverage_combine = coverage_subparsers.add_parser('combine',
- parents=[coverage_common],
- help='combine coverage data and rewrite remote paths')
-
- coverage_combine.set_defaults(func=command_coverage_combine,
- config=CoverageConfig)
-
- coverage_combine.add_argument('--export',
- help='directory to export combined coverage files to')
-
- add_extra_coverage_options(coverage_combine)
-
- coverage_erase = coverage_subparsers.add_parser('erase',
- parents=[coverage_common],
- help='erase coverage data files')
-
- coverage_erase.set_defaults(func=command_coverage_erase,
- config=CoverageConfig)
-
- coverage_report = coverage_subparsers.add_parser('report',
- parents=[coverage_common],
- help='generate console coverage report')
-
- coverage_report.set_defaults(func=command_coverage_report,
- config=CoverageReportConfig)
-
- coverage_report.add_argument('--show-missing',
- action='store_true',
- help='show line numbers of statements not executed')
- coverage_report.add_argument('--include',
- metavar='PAT1,PAT2,...',
- help='include only files whose paths match one of these '
- 'patterns. Accepts shell-style wildcards, which must be '
- 'quoted.')
- coverage_report.add_argument('--omit',
- metavar='PAT1,PAT2,...',
- help='omit files whose paths match one of these patterns. '
- 'Accepts shell-style wildcards, which must be quoted.')
-
- add_extra_coverage_options(coverage_report)
-
- coverage_html = coverage_subparsers.add_parser('html',
- parents=[coverage_common],
- help='generate html coverage report')
-
- coverage_html.set_defaults(func=command_coverage_html,
- config=CoverageConfig)
-
- add_extra_coverage_options(coverage_html)
-
- coverage_xml = coverage_subparsers.add_parser('xml',
- parents=[coverage_common],
- help='generate xml coverage report')
-
- coverage_xml.set_defaults(func=command_coverage_xml,
- config=CoverageConfig)
-
- add_extra_coverage_options(coverage_xml)
-
- env = subparsers.add_parser('env',
- parents=[common],
- help='show information about the test environment')
-
- env.set_defaults(func=command_env,
- config=EnvConfig)
-
- env.add_argument('--show',
- action='store_true',
- help='show environment on stdout')
-
- env.add_argument('--dump',
- action='store_true',
- help='dump environment to disk')
-
- env.add_argument('--list-files',
- action='store_true',
- help='list files on stdout')
-
- # noinspection PyTypeChecker
- env.add_argument('--timeout',
- type=int,
- metavar='MINUTES',
- help='timeout for future ansible-test commands (0 clears)')
-
- if argcomplete:
- argcomplete.autocomplete(parser, always_complete_options=False, validator=lambda i, k: True)
-
- args = parser.parse_args()
-
- if args.explain and not args.verbosity:
- args.verbosity = 1
-
- if args.color == 'yes':
- args.color = True
- elif args.color == 'no':
- args.color = False
- else:
- args.color = sys.stdout.isatty()
-
- return args
-
-
-def key_value(argparse, value): # type: (argparse_module, str) -> t.Tuple[str, str]
- """Type parsing and validation for argparse key/value pairs separated by an '=' character."""
- parts = value.split('=')
-
- if len(parts) != 2:
- raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
-
- return parts[0], parts[1]
-
-
-# noinspection PyProtectedMember,PyUnresolvedReferences
-def add_coverage_analyze(coverage_subparsers, coverage_common): # type: (argparse_module._SubParsersAction, argparse_module.ArgumentParser) -> None
- """Add the `coverage analyze` subcommand."""
- analyze = coverage_subparsers.add_parser(
- 'analyze',
- help='analyze collected coverage data',
- )
-
- analyze_subparsers = analyze.add_subparsers(metavar='COMMAND')
- analyze_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- targets = analyze_subparsers.add_parser(
- 'targets',
- help='analyze integration test target coverage',
- )
-
- targets_subparsers = targets.add_subparsers(metavar='COMMAND')
- targets_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- targets_generate = targets_subparsers.add_parser(
- 'generate',
- parents=[coverage_common],
- help='aggregate coverage by integration test target',
- )
-
- targets_generate.set_defaults(
- func=command_coverage_analyze_targets_generate,
- config=CoverageAnalyzeTargetsGenerateConfig,
- )
-
- targets_generate.add_argument(
- 'input_dir',
- nargs='?',
- help='directory to read coverage from',
- )
-
- targets_generate.add_argument(
- 'output_file',
- help='output file for aggregated coverage',
- )
-
- targets_expand = targets_subparsers.add_parser(
- 'expand',
- parents=[coverage_common],
- help='expand target names from integers in aggregated coverage',
- )
-
- targets_expand.set_defaults(
- func=command_coverage_analyze_targets_expand,
- config=CoverageAnalyzeTargetsExpandConfig,
- )
-
- targets_expand.add_argument(
- 'input_file',
- help='input file to read aggregated coverage from',
- )
-
- targets_expand.add_argument(
- 'output_file',
- help='output file to write expanded coverage to',
- )
-
- targets_filter = targets_subparsers.add_parser(
- 'filter',
- parents=[coverage_common],
- help='filter aggregated coverage data',
- )
-
- targets_filter.set_defaults(
- func=command_coverage_analyze_targets_filter,
- config=CoverageAnalyzeTargetsFilterConfig,
- )
-
- targets_filter.add_argument(
- 'input_file',
- help='input file to read aggregated coverage from',
- )
-
- targets_filter.add_argument(
- 'output_file',
- help='output file to write filtered coverage to',
- )
-
- targets_filter.add_argument(
- '--include-target',
- dest='include_targets',
- action='append',
- help='include the specified targets',
- )
-
- targets_filter.add_argument(
- '--exclude-target',
- dest='exclude_targets',
- action='append',
- help='exclude the specified targets',
- )
-
- targets_filter.add_argument(
- '--include-path',
- help='include paths matching the given regex',
- )
-
- targets_filter.add_argument(
- '--exclude-path',
- help='exclude paths matching the given regex',
- )
-
- targets_combine = targets_subparsers.add_parser(
- 'combine',
- parents=[coverage_common],
- help='combine multiple aggregated coverage files',
- )
-
- targets_combine.set_defaults(
- func=command_coverage_analyze_targets_combine,
- config=CoverageAnalyzeTargetsCombineConfig,
- )
-
- targets_combine.add_argument(
- 'input_file',
- nargs='+',
- help='input file to read aggregated coverage from',
- )
-
- targets_combine.add_argument(
- 'output_file',
- help='output file to write aggregated coverage to',
- )
-
- targets_missing = targets_subparsers.add_parser(
- 'missing',
- parents=[coverage_common],
- help='identify coverage in one file missing in another',
- )
-
- targets_missing.set_defaults(
- func=command_coverage_analyze_targets_missing,
- config=CoverageAnalyzeTargetsMissingConfig,
- )
-
- targets_missing.add_argument(
- 'from_file',
- help='input file containing aggregated coverage',
- )
-
- targets_missing.add_argument(
- 'to_file',
- help='input file containing aggregated coverage',
- )
-
- targets_missing.add_argument(
- 'output_file',
- help='output file to write aggregated coverage to',
- )
-
- targets_missing.add_argument(
- '--only-gaps',
- action='store_true',
- help='report only arcs/lines not hit by any target',
- )
-
- targets_missing.add_argument(
- '--only-exists',
- action='store_true',
- help='limit results to files that exist',
- )
-
-
-def add_lint(parser):
- """
- :type parser: argparse.ArgumentParser
- """
- parser.add_argument('--lint',
- action='store_true',
- help='write lint output to stdout, everything else stderr')
-
- parser.add_argument('--junit',
- action='store_true',
- help='write test failures to junit xml files')
-
- parser.add_argument('--failure-ok',
- action='store_true',
- help='exit successfully on failed tests after saving results')
-
-
-def add_changes(parser, argparse):
- """
- :type parser: argparse.ArgumentParser
- :type argparse: argparse
- """
- parser.add_argument('--changed', action='store_true', help='limit targets based on changes')
-
- changes = parser.add_argument_group(title='change detection arguments')
-
- changes.add_argument('--tracked', action='store_true', help=argparse.SUPPRESS)
- changes.add_argument('--untracked', action='store_true', help='include untracked files')
- changes.add_argument('--ignore-committed', dest='committed', action='store_false', help='exclude committed files')
- changes.add_argument('--ignore-staged', dest='staged', action='store_false', help='exclude staged files')
- changes.add_argument('--ignore-unstaged', dest='unstaged', action='store_false', help='exclude unstaged files')
-
- changes.add_argument('--changed-from', metavar='PATH', help=argparse.SUPPRESS)
- changes.add_argument('--changed-path', metavar='PATH', action='append', help=argparse.SUPPRESS)
-
-
-def add_environments(parser, isolated_delegation=True):
- """
- :type parser: argparse.ArgumentParser
- :type isolated_delegation: bool
- """
- parser.add_argument('--requirements',
- action='store_true',
- help='install command requirements')
-
- parser.add_argument('--python-interpreter',
- metavar='PATH',
- default=None,
- help='path to the docker or remote python interpreter')
-
- parser.add_argument('--no-pip-check',
- dest='pip_check',
- default=True,
- action='store_false',
- help='do not run "pip check" to verify requirements')
-
- environments = parser.add_mutually_exclusive_group()
-
- environments.add_argument('--local',
- action='store_true',
- help='run from the local environment')
-
- environments.add_argument('--venv',
- action='store_true',
- help='run from ansible-test managed virtual environments')
-
- venv = parser.add_argument_group(title='venv arguments')
-
- venv.add_argument('--venv-system-site-packages',
- action='store_true',
- help='enable system site packages')
-
- if not isolated_delegation:
- environments.set_defaults(
- docker=None,
- remote=None,
- remote_stage=None,
- remote_provider=None,
- remote_terminate=None,
- remote_endpoint=None,
- python_interpreter=None,
- )
-
- return
-
- environments.add_argument('--docker',
- metavar='IMAGE',
- nargs='?',
- default=None,
- const='default',
- help='run from a docker container').completer = complete_docker
-
- environments.add_argument('--remote',
- metavar='PLATFORM',
- default=None,
- help='run from a remote instance').completer = complete_remote_shell if parser.prog.endswith(' shell') else complete_remote
-
- remote = parser.add_argument_group(title='remote arguments')
-
- remote.add_argument('--remote-stage',
- metavar='STAGE',
- help='remote stage to use: prod, dev',
- default='prod').completer = complete_remote_stage
-
- remote.add_argument('--remote-provider',
- metavar='PROVIDER',
- help='remote provider to use: %(choices)s',
- choices=['default'] + sorted(AnsibleCoreCI.PROVIDERS.keys()),
- default='default')
-
- remote.add_argument('--remote-endpoint',
- metavar='ENDPOINT',
- help='remote provisioning endpoint to use (default: auto)',
- default=None)
-
- remote.add_argument('--remote-terminate',
- metavar='WHEN',
- help='terminate remote instance: %(choices)s (default: %(default)s)',
- choices=['never', 'always', 'success'],
- default='never')
-
-
-def add_extra_coverage_options(parser):
- """
- :type parser: argparse.ArgumentParser
- """
- parser.add_argument('--group-by',
- metavar='GROUP',
- action='append',
- choices=COVERAGE_GROUPS,
- help='group output by: %s' % ', '.join(COVERAGE_GROUPS))
-
- parser.add_argument('--all',
- action='store_true',
- help='include all python/powershell source files')
-
- parser.add_argument('--stub',
- action='store_true',
- help='generate empty report of all python/powershell source files')
-
-
-def add_httptester_options(parser, argparse):
- """
- :type parser: argparse.ArgumentParser
- :type argparse: argparse
- """
- group = parser.add_mutually_exclusive_group()
-
- group.add_argument('--httptester',
- metavar='IMAGE',
- default='quay.io/ansible/http-test-container:1.3.0',
- help='docker image to use for the httptester container')
-
- group.add_argument('--disable-httptester',
- dest='httptester',
- action='store_const',
- const='',
- help='do not use the httptester container')
-
- parser.add_argument('--inject-httptester',
- action='store_true',
- help=argparse.SUPPRESS) # internal use only
-
- parser.add_argument('--httptester-krb5-password',
- help=argparse.SUPPRESS) # internal use only
-
-
-def add_extra_docker_options(parser, integration=True):
- """
- :type parser: argparse.ArgumentParser
- :type integration: bool
- """
- docker = parser.add_argument_group(title='docker arguments')
-
- docker.add_argument('--docker-no-pull',
- action='store_false',
- dest='docker_pull',
- help='do not explicitly pull the latest docker images')
-
- if data_context().content.is_ansible:
- docker.add_argument('--docker-keep-git',
- action='store_true',
- help='transfer git related files into the docker container')
- else:
- docker.set_defaults(
- docker_keep_git=False,
- )
-
- docker.add_argument('--docker-seccomp',
- metavar='SC',
- choices=('default', 'unconfined'),
- default=None,
- help='set seccomp confinement for the test container: %(choices)s')
-
- docker.add_argument('--docker-terminate',
- metavar='WHEN',
- help='terminate docker container: %(choices)s (default: %(default)s)',
- choices=['never', 'always', 'success'],
- default='always')
-
- if not integration:
- return
-
- docker.add_argument('--docker-privileged',
- action='store_true',
- help='run docker container in privileged mode')
-
- docker.add_argument('--docker-network',
- help='run using the specified docker network')
-
- # noinspection PyTypeChecker
- docker.add_argument('--docker-memory',
- help='memory limit for docker in bytes', type=int)
-
-
-# noinspection PyUnusedLocal
-def complete_remote_stage(prefix, parsed_args, **_): # pylint: disable=unused-argument
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
-
-
-def complete_target(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- return find_target_completion(parsed_args.targets, prefix)
-
-
-# noinspection PyUnusedLocal
-def complete_remote(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_remote_completion().keys())
-
- return [i for i in images if i.startswith(prefix)]
-
-
-# noinspection PyUnusedLocal
-def complete_remote_shell(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_remote_completion().keys())
-
- # 2008 doesn't support SSH, so we do not add it to the list of valid images
- windows_completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
- images.extend(["windows/%s" % i for i in read_lines_without_comments(windows_completion_path, remove_blank_lines=True) if i != '2008'])
-
- return [i for i in images if i.startswith(prefix)]
-
-
-# noinspection PyUnusedLocal
-def complete_docker(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_docker_completion().keys())
-
- return [i for i in images if i.startswith(prefix)]
-
-
-def complete_windows(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- images = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt'), remove_blank_lines=True)
-
- return [i for i in images if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
-
-
-def complete_network_platform(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- images = sorted(get_network_completion())
-
- return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
-
-
-def complete_network_platform_collection(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- left = prefix.split('=')[0]
- images = sorted(set(image.split('/')[0] for image in get_network_completion()))
-
- return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
-
-
-def complete_network_platform_connection(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- left = prefix.split('=')[0]
- images = sorted(set(image.split('/')[0] for image in get_network_completion()))
-
- return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
-
-
-def complete_network_testcase(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- testcases = []
-
- # since testcases are module specific, don't autocomplete if more than one
- # module is specified
- if len(parsed_args.include) != 1:
- return []
-
- test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')
- connection_dirs = data_context().content.get_dirs(test_dir)
-
- for connection_dir in connection_dirs:
- for testcase in [os.path.basename(path) for path in data_context().content.get_files(connection_dir)]:
- if testcase.startswith(prefix):
- testcases.append(testcase.split('.')[0])
-
- return testcases
-
-
-# noinspection PyUnusedLocal
-def complete_sanity_test(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- tests = sorted(test.name for test in sanity_get_tests())
-
- return [i for i in tests if i.startswith(prefix)]
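
The removed key_value helper is small enough to show standalone; a self-contained sketch of the same key=value validation it applied to options such as --platform-collection:

    import argparse

    def key_value(value):  # type: (str) -> tuple
        parts = value.split('=')
        if len(parts) != 2:
            raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
        return parts[0], parts[1]

    parser = argparse.ArgumentParser()
    parser.add_argument('--platform-collection', type=key_value, action='append')

    args = parser.parse_args(['--platform-collection', 'ios=cisco.ios'])
    print(args.platform_collection)  # -> [('ios', 'cisco.ios')]
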
diff --git a/test/lib/ansible_test/_internal/cli/__init__.py b/test/lib/ansible_test/_internal/cli/__init__.py
new file mode 100644
index 00000000..21c45b6e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/__init__.py
@@ -0,0 +1,55 @@
+"""Command line parsing."""
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+
+from .argparsing import (
+ CompositeActionCompletionFinder,
+)
+
+from .commands import (
+ do_commands,
+)
+
+
+from .compat import (
+ HostSettings,
+ convert_legacy_args,
+)
+
+
+def parse_args(): # type: () -> argparse.Namespace
+ """Parse command line arguments."""
+ completer = CompositeActionCompletionFinder()
+
+ if completer.enabled:
+ epilog = 'Tab completion available using the "argcomplete" python package.'
+ else:
+ epilog = 'Install the "argcomplete" python package to enable tab completion.'
+
+ parser = argparse.ArgumentParser(epilog=epilog)
+
+ do_commands(parser, completer)
+
+ completer(
+ parser,
+ always_complete_options=False,
+ )
+
+ argv = sys.argv[1:]
+ args = parser.parse_args(argv)
+
+ if args.explain and not args.verbosity:
+ args.verbosity = 1
+
+ if args.no_environment:
+ pass
+ elif args.host_path:
+ args.host_settings = HostSettings.deserialize(os.path.join(args.host_path, 'settings.dat'))
+ else:
+ args.host_settings = convert_legacy_args(argv, args, args.target_mode)
+ args.host_settings.apply_defaults()
+
+ return args
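
The explain/verbosity coupling at the end of parse_args is inherited from the removed cli.py; a minimal sketch using the same flag definitions shows the effect:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-e', '--explain', action='store_true')
    parser.add_argument('-v', '--verbose', dest='verbosity', action='count', default=0)

    args = parser.parse_args(['--explain'])

    if args.explain and not args.verbosity:
        args.verbosity = 1  # explain mode needs at least one level of verbosity to print anything

    print(args.verbosity)  # -> 1
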
diff --git a/test/lib/ansible_test/_internal/cli/actions.py b/test/lib/ansible_test/_internal/cli/actions.py
new file mode 100644
index 00000000..e22a7b0e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/actions.py
@@ -0,0 +1,90 @@
+"""Actions for handling composite arguments with argparse."""
+from __future__ import annotations
+
+from .argparsing import (
+ CompositeAction,
+ NamespaceParser,
+)
+
+from .parsers import (
+ DelegatedControllerParser,
+ NetworkSshTargetParser,
+ NetworkTargetParser,
+ OriginControllerParser,
+ PosixSshTargetParser,
+ PosixTargetParser,
+ SanityPythonTargetParser,
+ UnitsPythonTargetParser,
+ WindowsSshTargetParser,
+ WindowsTargetParser,
+)
+
+
+class OriginControllerAction(CompositeAction):
+ """Composite action parser for the controller when the only option is `origin`."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return OriginControllerParser()
+
+
+class DelegatedControllerAction(CompositeAction):
+ """Composite action parser for the controller when delegation is supported."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return DelegatedControllerParser()
+
+
+class PosixTargetAction(CompositeAction):
+ """Composite action parser for a POSIX target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return PosixTargetParser()
+
+
+class WindowsTargetAction(CompositeAction):
+ """Composite action parser for a Windows target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return WindowsTargetParser()
+
+
+class NetworkTargetAction(CompositeAction):
+ """Composite action parser for a network target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return NetworkTargetParser()
+
+
+class SanityPythonTargetAction(CompositeAction):
+ """Composite action parser for a sanity target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return SanityPythonTargetParser()
+
+
+class UnitsPythonTargetAction(CompositeAction):
+ """Composite action parser for a units target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return UnitsPythonTargetParser()
+
+
+class PosixSshTargetAction(CompositeAction):
+ """Composite action parser for a POSIX SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return PosixSshTargetParser()
+
+
+class WindowsSshTargetAction(CompositeAction):
+ """Composite action parser for a Windows SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return WindowsSshTargetParser()
+
+
+class NetworkSshTargetAction(CompositeAction):
+ """Composite action parser for a network SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return NetworkSshTargetParser()
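
All ten actions follow one template-method pattern: the base class owns the argparse plumbing and each subclass contributes only its parser. A self-contained sketch of that pattern follows; the names here are hypothetical stand-ins, and the real CompositeAction and NamespaceParser in .argparsing carry much more state:

    import abc

    class NamespaceParserSketch(metaclass=abc.ABCMeta):  # stand-in for NamespaceParser
        @abc.abstractmethod
        def parse(self, value):
            """Parse the given value."""

    class HostParserSketch(NamespaceParserSketch):
        def parse(self, value):
            return {'host': value}

    class ActionSketch(metaclass=abc.ABCMeta):  # stand-in for CompositeAction
        def __init__(self):
            self.definition = self.create_parser()  # subclasses decide which parser to use

        @abc.abstractmethod
        def create_parser(self):
            """Return a parser to parse the argument associated with this action."""

    class HostActionSketch(ActionSketch):
        def create_parser(self):
            return HostParserSketch()

    print(HostActionSketch().definition.parse('origin'))  # -> {'host': 'origin'}
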
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
new file mode 100644
index 00000000..8a087ebf
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
@@ -0,0 +1,263 @@
+"""Completion finder which brings together custom options and completion logic."""
+from __future__ import annotations
+
+import abc
+import argparse
+import os
+import re
+import typing as t
+
+from .argcompletion import (
+ OptionCompletionFinder,
+ get_comp_type,
+ register_safe_action,
+ warn,
+)
+
+from .parsers import (
+ Completion,
+ CompletionError,
+ CompletionSuccess,
+ CompletionUnavailable,
+ DocumentationState,
+ NamespaceParser,
+ Parser,
+ ParserError,
+ ParserMode,
+ ParserState,
+)
+
+
+class RegisteredCompletionFinder(OptionCompletionFinder):
+ """
+ Custom option completion finder for argcomplete which allows completion results to be registered.
+ These registered completions, if provided, are used to filter the final completion results.
+ This works around a known bug: https://github.com/kislyuk/argcomplete/issues/221
+ """
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ self.registered_completions = None # type: t.Optional[t.List[str]]
+
+ def completer(
+ self,
+ prefix, # type: str
+ action, # type: argparse.Action
+ parsed_args, # type: argparse.Namespace
+ **kwargs,
+ ): # type: (...) -> t.List[str]
+ """
+ Return a list of completions for the specified prefix and action.
+ Use this as the completer function for argcomplete.
+ """
+ kwargs.clear()
+ del kwargs
+
+ completions = self.get_completions(prefix, action, parsed_args)
+
+ if action.nargs and not isinstance(action.nargs, int):
+ # prevent argcomplete from including unrelated arguments in the completion results
+ self.registered_completions = completions
+
+ return completions
+
+ @abc.abstractmethod
+ def get_completions(
+ self,
+ prefix, # type: str
+ action, # type: argparse.Action
+ parsed_args, # type: argparse.Namespace
+ ): # type: (...) -> t.List[str]
+ """
+ Return a list of completions for the specified prefix and action.
+ Called by the complete function.
+ """
+
+ def quote_completions(self, completions, cword_prequote, last_wordbreak_pos):
+ """Modify completion results before returning them."""
+ if self.registered_completions is not None:
+ # If one of the completion handlers registered their results, only allow those exact results to be returned.
+ # This prevents argcomplete from adding results from other completers when they are known to be invalid.
+ allowed_completions = set(self.registered_completions)
+ completions = [completion for completion in completions if completion in allowed_completions]
+
+ return super().quote_completions(completions, cword_prequote, last_wordbreak_pos)
+
+
+class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
+ """Base class for actions that parse composite arguments."""
+ documentation_state = {} # type: t.Dict[t.Type[CompositeAction], DocumentationState]
+
+ # noinspection PyUnusedLocal
+ def __init__(
+ self,
+ *args,
+ dest, # type: str
+ **kwargs,
+ ):
+ del dest
+
+ self.definition = self.create_parser()
+ self.documentation_state[type(self)] = documentation_state = DocumentationState()
+ self.definition.document(documentation_state)
+
+ super().__init__(*args, dest=self.definition.dest, **kwargs)
+
+ register_safe_action(type(self))
+
+ @abc.abstractmethod
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+
+ def __call__(
+ self,
+ parser,
+ namespace,
+ values,
+ option_string=None,
+ ):
+ state = ParserState(mode=ParserMode.PARSE, namespaces=[namespace], remainder=values)
+
+ try:
+ self.definition.parse(state)
+ except ParserError as ex:
+ error = str(ex)
+ except CompletionError as ex:
+ error = ex.message
+ else:
+ return
+
+ if get_comp_type():
+ # FUTURE: It may be possible to enhance error handling by surfacing this error message during downstream completion.
+ return # ignore parse errors during completion to avoid breaking downstream completion
+
+ raise argparse.ArgumentError(self, error)
+
+
+class CompositeActionCompletionFinder(RegisteredCompletionFinder):
+ """Completion finder with support for composite argument parsing."""
+ def get_completions(
+ self,
+ prefix, # type: str
+ action, # type: CompositeAction
+ parsed_args, # type: argparse.Namespace
+ ): # type: (...) -> t.List[str]
+ """Return a list of completions appropriate for the given prefix and action, taking into account the arguments that have already been parsed."""
+ state = ParserState(
+ mode=ParserMode.LIST if self.list_mode else ParserMode.COMPLETE,
+ remainder=prefix,
+ namespaces=[parsed_args],
+ )
+
+ answer = complete(action.definition, state)
+
+ completions = []
+
+ if isinstance(answer, CompletionSuccess):
+ self.disable_completion_mangling = answer.preserve
+ completions = answer.completions
+
+ if isinstance(answer, CompletionError):
+ warn(answer.message)
+
+ return completions
+
+
+def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool
+ """
+ Return True if Bash will show a file listing and redraw the prompt, otherwise return False.
+
+ If there are no list results, a file listing will be shown if the value after the last `=` or `:` character:
+
+ - is empty
+ - matches a full path
+ - matches a partial path
+
+ Otherwise Bash will play the bell sound and display nothing.
+
+ see: https://github.com/kislyuk/argcomplete/issues/328
+ see: https://github.com/kislyuk/argcomplete/pull/284
+ """
+ listing = False
+
+ if mode == ParserMode.LIST:
+ right = re.split('[=:]', value)[-1]
+ listing = not right or os.path.exists(right)
+
+ if not listing:
+ directory = os.path.dirname(right)
+
+ # noinspection PyBroadException
+ try:
+ filenames = os.listdir(directory or '.')
+ except Exception: # pylint: disable=broad-except
+ pass
+ else:
+ listing = any(filename.startswith(right) for filename in filenames)
+
+ return listing
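+
+# Example (illustrative): in LIST mode with a working directory containing
+# "inventory.yml", the value "--inventory=inv" ends in a partial path, so
+# detect_file_listing('--inventory=inv', ParserMode.LIST) returns True;
+# a value ending in "zzz" (matching nothing on disk) returns False.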
+
+
+def detect_false_file_completion(value, mode): # type: (str, ParserMode) -> bool
+ """
+ Return True if Bash will provide an incorrect file completion, otherwise return False.
+
+ If there are no completion results, a filename will be automatically completed if the value after the last `=` or `:` character:
+
+ - matches exactly one partial path
+
+ Otherwise, Bash will sound the bell and display nothing.
+
+ see: https://github.com/kislyuk/argcomplete/issues/328
+ see: https://github.com/kislyuk/argcomplete/pull/284
+ """
+ completion = False
+
+ if mode == ParserMode.COMPLETE:
+ completion = True
+
+ right = re.split('[=:]', value)[-1]
+ directory, prefix = os.path.split(right)
+
+ # noinspection PyBroadException
+ try:
+ filenames = os.listdir(directory or '.')
+ except Exception: # pylint: disable=broad-except
+ pass
+ else:
+ matches = [filename for filename in filenames if filename.startswith(prefix)]
+ completion = len(matches) == 1
+
+ return completion
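+
+# Example (illustrative): in COMPLETE mode with a working directory containing
+# only "main.yml" and "meta", a value ending in "ma" yields two matches and
+# returns False, while "mai" matches exactly one file and returns True.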
+
+
+def complete(
+ completer, # type: Parser
+ state, # type: ParserState
+): # type: (...) -> Completion
+ """Perform argument completion using the given completer and return the completion result."""
+ value = state.remainder
+
+ try:
+ completer.parse(state)
+ raise ParserError('completion expected')
+ except CompletionUnavailable as ex:
+ if detect_file_listing(value, state.mode):
+ # Displaying a warning before the file listing informs the user it is invalid. Bash will redraw the prompt after the list.
+ # If the file listing is not shown, a warning could be helpful, but would introduce noise on the terminal since the prompt is not redrawn.
+ answer = CompletionError(ex.message)
+ elif detect_false_file_completion(value, state.mode):
+ # When the current prefix provides no completion matches, but matches exactly one file on disk, Bash will perform an incorrect completion.
+ # Returning multiple invalid matches instead of no matches will prevent Bash from using its own completion logic in this case.
+ answer = CompletionSuccess(
+ list_mode=True, # abuse list mode to enable preservation of the literal results
+ consumed='',
+ continuation='',
+ matches=['completion', 'invalid']
+ )
+ else:
+ answer = ex
+ except Completion as ex:
+ answer = ex
+
+ return answer
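+
+# Example (illustrative): completing the prefix "co" against a static
+# ChoicesParser (defined in parsers.py below) surfaces a CompletionSuccess,
+# which complete() catches and returns:
+#
+#   state = ParserState(mode=ParserMode.COMPLETE, remainder='co')
+#   answer = complete(ChoicesParser(['combine', 'report']), state)
+#   assert answer.matches == ['combine']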
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/actions.py b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
new file mode 100644
index 00000000..c2b573e6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
@@ -0,0 +1,18 @@
+"""Actions for argparse."""
+from __future__ import annotations
+
+import argparse
+import enum
+import typing as t
+
+
+class EnumAction(argparse.Action):
+ """Parse an enum using the lowercases enum names."""
+ def __init__(self, **kwargs): # type: (t.Dict[str, t.Any]) -> None
+ self.enum_type = kwargs.pop('type', None) # type: t.Type[enum.Enum]
+ kwargs.setdefault('choices', tuple(e.name.lower() for e in self.enum_type))
+ super().__init__(**kwargs)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ value = self.enum_type[values.upper()]
+ setattr(namespace, self.dest, value)
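+
+# Example (illustrative, using a hypothetical Color enum):
+#
+#   class Color(enum.Enum):
+#       RED = 1
+#       BLUE = 2
+#
+#   parser = argparse.ArgumentParser()
+#   parser.add_argument('--color', action=EnumAction, type=Color)
+#   assert parser.parse_args(['--color', 'red']).color is Color.RED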
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
new file mode 100644
index 00000000..ca502c53
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
@@ -0,0 +1,124 @@
+"""Wrapper around argcomplete providing bug fixes and additional features."""
+from __future__ import annotations
+
+import argparse
+import enum
+import os
+import typing as t
+
+
+class Substitute:
+ """Substitute for missing class which accepts all arguments."""
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+try:
+ import argcomplete
+
+ from argcomplete import (
+ CompletionFinder,
+ default_validator,
+ )
+
+ warn = argcomplete.warn # pylint: disable=invalid-name
+except ImportError:
+ argcomplete = None
+
+ CompletionFinder = Substitute
+ default_validator = Substitute # pylint: disable=invalid-name
+ warn = Substitute # pylint: disable=invalid-name
+
+
+class CompType(enum.Enum):
+ """
+ Bash COMP_TYPE argument completion types.
+ For documentation, see: https://www.gnu.org/software/bash/manual/html_node/Bash-Variables.html#index-COMP_005fTYPE
+ """
+ COMPLETION = '\t'
+ """
+ Standard completion, typically triggered by a single tab.
+ """
+ MENU_COMPLETION = '%'
+ """
+ Menu completion, which cycles through each completion instead of showing a list.
+ For help using this feature, see: https://stackoverflow.com/questions/12044574/getting-complete-and-menu-complete-to-work-together
+ """
+ LIST = '?'
+ """
+ Standard list, typically triggered by a double tab.
+ """
+ LIST_AMBIGUOUS = '!'
+ """
+ Listing with `show-all-if-ambiguous` set.
+ For documentation, see https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html#index-show_002dall_002dif_002dambiguous
+ For additional details, see: https://unix.stackexchange.com/questions/614123/explanation-of-bash-completion-comp-type
+ """
+ LIST_UNMODIFIED = '@'
+ """
+ Listing with `show-all-if-unmodified` set.
+ For documentation, see https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html#index-show_002dall_002dif_002dunmodified
+ For additional details, see: https://unix.stackexchange.com/questions/614123/explanation-of-bash-completion-comp-type
+ """
+
+ @property
+ def list_mode(self): # type: () -> bool
+ """True if completion is running in list mode, otherwise False."""
+ return self in (CompType.LIST, CompType.LIST_AMBIGUOUS, CompType.LIST_UNMODIFIED)
+
+
+def register_safe_action(action_type): # type: (t.Type[argparse.Action]) -> None
+ """Register the given action as a safe action for argcomplete to use during completion if it is not already registered."""
+ if argcomplete and action_type not in argcomplete.safe_actions:
+ argcomplete.safe_actions += (action_type,)
+
+
+def get_comp_type(): # type: () -> t.Optional[CompType]
+ """Parse the COMP_TYPE environment variable (if present) and return the associated CompType enum value."""
+ value = os.environ.get('COMP_TYPE')
+ comp_type = CompType(chr(int(value))) if value else None
+ return comp_type
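+
+# Example (illustrative): a double tab sets COMP_TYPE to 63 (ord('?')), so
+# get_comp_type() returns CompType.LIST and its list_mode property is True.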
+
+
+class OptionCompletionFinder(CompletionFinder):
+ """
+ Custom completion finder for argcomplete.
+ It provides support for running completion in list mode, which argcomplete otherwise handles the same as standard completion.
+ """
+ enabled = bool(argcomplete)
+
+ def __init__(self, *args, validator=None, **kwargs):
+ if validator:
+ raise ValueError('a custom validator cannot be specified')
+
+ self.comp_type = get_comp_type()
+ self.list_mode = self.comp_type.list_mode if self.comp_type else False
+ self.disable_completion_mangling = False
+
+ finder = self
+
+ def custom_validator(completion, prefix):
+ """Completion validator used to optionally bypass validation."""
+ if finder.disable_completion_mangling:
+ return True
+
+ return default_validator(completion, prefix)
+
+ super().__init__(
+ *args,
+ validator=custom_validator,
+ **kwargs,
+ )
+
+ def __call__(self, *args, **kwargs):
+ if self.enabled:
+ super().__call__(*args, **kwargs)
+
+ def quote_completions(self, completions, cword_prequote, last_wordbreak_pos):
+ """Intercept default quoting behavior to optionally block mangling of completion entries."""
+ if self.disable_completion_mangling:
+ # Word breaks have already been handled when generating completions, don't mangle them further.
+ # This is needed in many cases when returning completion lists which lack the existing completion prefix.
+ last_wordbreak_pos = None
+
+ return super().quote_completions(completions, cword_prequote, last_wordbreak_pos)
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
new file mode 100644
index 00000000..fe80a68e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
@@ -0,0 +1,581 @@
+"""General purpose composite argument parsing and completion."""
+from __future__ import annotations
+
+import abc
+import contextlib
+import dataclasses
+import enum
+import os
+import re
+import typing as t
+
+# NOTE: When choosing delimiters, take into account Bash and argcomplete behavior.
+#
+# Recommended characters for assignment and/or continuation: `/` `:` `=`
+#
+# The recommended assignment_character list is due to how argcomplete handles continuation characters.
+# see: https://github.com/kislyuk/argcomplete/blob/5a20d6165fbb4d4d58559378919b05964870cc16/argcomplete/__init__.py#L557-L558
+
+PAIR_DELIMITER = ','
+ASSIGNMENT_DELIMITER = '='
+PATH_DELIMITER = '/'
+
+
+@dataclasses.dataclass(frozen=True)
+class Completion(Exception):
+ """Base class for argument completion results."""
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionUnavailable(Completion):
+ """Argument completion unavailable."""
+ message: str = 'No completions available.'
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionError(Completion):
+ """Argument completion error."""
+ message: t.Optional[str] = None
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionSuccess(Completion):
+ """Successful argument completion result."""
+ list_mode: bool
+ consumed: str
+ continuation: str
+ matches: t.List[str] = dataclasses.field(default_factory=list)
+
+ @property
+ def preserve(self): # type: () -> bool
+ """
+ True if argcomplete should not mangle completion values, otherwise False.
+ Only used when more than one completion exists to avoid overwriting the word undergoing completion.
+ """
+ return len(self.matches) > 1 and self.list_mode
+
+ @property
+ def completions(self): # type: () -> t.List[str]
+ """List of completion values to return to argcomplete."""
+ completions = self.matches
+ continuation = '' if self.list_mode else self.continuation
+
+ if not self.preserve:
+ # include the existing prefix to avoid rewriting the word undergoing completion
+ completions = [f'{self.consumed}{completion}{continuation}' for completion in completions]
+
+ return completions
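+
+# Example (illustrative): the consumed prefix and continuation are re-attached
+# so the word undergoing completion is not rewritten:
+#
+#   success = CompletionSuccess(list_mode=False, consumed='windows/',
+#                               continuation='', matches=['2022'])
+#   assert success.completions == ['windows/2022']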
+
+
+class ParserMode(enum.Enum):
+ """Mode the parser is operating in."""
+ PARSE = enum.auto()
+ COMPLETE = enum.auto()
+ LIST = enum.auto()
+
+
+class ParserError(Exception):
+ """Base class for all parsing exceptions."""
+
+
+@dataclasses.dataclass
+class ParserBoundary:
+ """Boundary details for parsing composite input."""
+ delimiters: str
+ required: bool
+ match: t.Optional[str] = None
+ ready: bool = True
+
+
+@dataclasses.dataclass
+class ParserState:
+ """State of the composite argument parser."""
+ mode: ParserMode
+ remainder: str = ''
+ consumed: str = ''
+ boundaries: t.List[ParserBoundary] = dataclasses.field(default_factory=list)
+ namespaces: t.List[t.Any] = dataclasses.field(default_factory=list)
+ parts: t.List[str] = dataclasses.field(default_factory=list)
+
+ @property
+ def incomplete(self): # type: () -> bool
+ """True if parsing is incomplete (unparsed input remains), otherwise False."""
+ return self.remainder is not None
+
+ def match(self, value, choices): # type: (str, t.List[str]) -> bool
+ """Return True if the given value matches the provided choices, taking into account parsing boundaries, otherwise return False."""
+ if self.current_boundary:
+ delimiters, delimiter = self.current_boundary.delimiters, self.current_boundary.match
+ else:
+ delimiters, delimiter = '', None
+
+ for choice in choices:
+ if choice.rstrip(delimiters) == choice:
+ # choice is not delimited
+ if value == choice:
+ return True # value matched
+ else:
+ # choice is delimited
+ if f'{value}{delimiter}' == choice:
+ return True # value and delimiter matched
+
+ return False
+
+ def read(self): # type: () -> str
+ """Read and return the next input segment, taking into account parsing boundaries."""
+ delimiters = "".join(boundary.delimiters for boundary in self.boundaries)
+
+ if delimiters:
+ pattern = '([' + re.escape(delimiters) + '])'
+ regex = re.compile(pattern)
+ parts = regex.split(self.remainder, 1)
+ else:
+ parts = [self.remainder]
+
+ if len(parts) > 1:
+ value, delimiter, remainder = parts
+ else:
+ value, delimiter, remainder = parts[0], None, None
+
+ for boundary in reversed(self.boundaries):
+ if delimiter and delimiter in boundary.delimiters:
+ boundary.match = delimiter
+ self.consumed += value + delimiter
+ break
+
+ boundary.match = None
+ boundary.ready = False
+
+ if boundary.required:
+ break
+
+ self.remainder = remainder
+
+ return value
+
+ @property
+ def root_namespace(self): # type: () -> t.Any
+ """THe root namespace."""
+ return self.namespaces[0]
+
+ @property
+ def current_namespace(self): # type: () -> t.Any
+ """The current namespace."""
+ return self.namespaces[-1]
+
+ @property
+ def current_boundary(self): # type: () -> t.Optional[ParserBoundary]
+ """The current parser boundary, if any, otherwise None."""
+ return self.boundaries[-1] if self.boundaries else None
+
+ def set_namespace(self, namespace): # type: (t.Any) -> None
+ """Set the current namespace."""
+ self.namespaces.append(namespace)
+
+ @contextlib.contextmanager
+ def delimit(self, delimiters, required=True): # type: (str, bool) -> t.ContextManager[ParserBoundary]
+ """Context manager for delimiting parsing of input."""
+ boundary = ParserBoundary(delimiters=delimiters, required=required)
+
+ self.boundaries.append(boundary)
+
+ try:
+ yield boundary
+ finally:
+ self.boundaries.pop()
+
+ if boundary.required and not boundary.match:
+ raise ParserError('required delimiter not found, hit up-level delimiter or end of input instead')
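+
+# Example (illustrative): read() honors the innermost boundary pushed by
+# delimit(); reading "html:/tmp" inside a ':' boundary returns "html",
+# records the matched delimiter and leaves the rest as the remainder:
+#
+#   state = ParserState(mode=ParserMode.PARSE, remainder='html:/tmp')
+#   with state.delimit(':') as boundary:
+#       value = state.read()  # value == 'html', boundary.match == ':'
+#   assert state.remainder == '/tmp'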
+
+
+@dataclasses.dataclass
+class DocumentationState:
+ """State of the composite argument parser's generated documentation."""
+ sections: t.Dict[str, str] = dataclasses.field(default_factory=dict)
+
+
+class Parser(metaclass=abc.ABCMeta):
+ """Base class for all composite argument parsers."""
+ @abc.abstractmethod
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ raise Exception(f'Undocumented parser: {type(self)}')
+
+
+class MatchConditions(enum.Flag):
+ """Acceptable condition(s) for matching user input to available choices."""
+ CHOICE = enum.auto()
+ """Match any choice."""
+ ANY = enum.auto()
+ """Match any non-empty string."""
+ NOTHING = enum.auto()
+ """Match an empty string which is not followed by a boundary match."""
+
+
+class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers which use a list of choices that can be generated during completion."""
+ def __init__(self, conditions=MatchConditions.CHOICE): # type: (MatchConditions) -> None
+ self.conditions = conditions
+
+ @abc.abstractmethod
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+
+ def no_completion_match(self, value): # type: (str) -> CompletionUnavailable # pylint: disable=unused-argument
+ """Return an instance of CompletionUnavailable when no match was found for the given value."""
+ return CompletionUnavailable()
+
+ def no_choices_available(self, value): # type: (str) -> ParserError # pylint: disable=unused-argument
+ """Return an instance of ParserError when parsing fails and no choices are available."""
+ return ParserError('No choices available.')
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = state.read()
+ choices = self.get_choices(value)
+
+ if state.mode == ParserMode.PARSE or state.incomplete:
+ if self.conditions & MatchConditions.CHOICE and state.match(value, choices):
+ return value
+
+ if self.conditions & MatchConditions.ANY and value:
+ return value
+
+ if self.conditions & MatchConditions.NOTHING and not value and state.current_boundary and not state.current_boundary.match:
+ return value
+
+ if state.mode == ParserMode.PARSE:
+ if choices:
+ raise ParserError(f'"{value}" not in: {", ".join(choices)}')
+
+ raise self.no_choices_available(value)
+
+ raise CompletionUnavailable()
+
+ matches = [choice for choice in choices if choice.startswith(value)]
+
+ if not matches:
+ raise self.no_completion_match(value)
+
+ continuation = state.current_boundary.delimiters if state.current_boundary and state.current_boundary.required else ''
+
+ raise CompletionSuccess(
+ list_mode=state.mode == ParserMode.LIST,
+ consumed=state.consumed,
+ continuation=continuation,
+ matches=matches,
+ )
+
+
+class ChoicesParser(DynamicChoicesParser):
+ """Composite argument parser which relies on a static list of choices."""
+ def __init__(self, choices, conditions=MatchConditions.CHOICE): # type: (t.List[str], MatchConditions) -> None
+ self.choices = choices
+
+ super().__init__(conditions=conditions)
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ return self.choices
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '|'.join(self.choices)
+
+
+class IntegerParser(DynamicChoicesParser):
+ """Composite argument parser for integers."""
+ PATTERN = re.compile('^[1-9][0-9]*$')
+
+ def __init__(self, maximum=None): # type: (t.Optional[int]) -> None
+ self.maximum = maximum
+
+ super().__init__()
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ if not value:
+ numbers = list(range(1, 10))
+ elif self.PATTERN.search(value):
+ int_prefix = int(value)
+ base = int_prefix * 10
+ numbers = [int_prefix] + [base + i for i in range(0, 10)]
+ else:
+ numbers = []
+
+ # NOTE: the minimum is currently fixed at 1
+
+ if self.maximum is not None:
+ numbers = [n for n in numbers if n <= self.maximum]
+
+ return [str(n) for n in numbers]
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state)
+ return int(value)
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '{integer}'
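+
+# Example (illustrative): completion offers the typed prefix plus one more
+# digit, capped at the maximum:
+#
+#   assert IntegerParser(maximum=45).get_choices('4') == \
+#       ['4', '40', '41', '42', '43', '44', '45']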
+
+
+class BooleanParser(ChoicesParser):
+ """Composite argument parser for boolean (yes/no) values."""
+ def __init__(self):
+ super().__init__(['yes', 'no'])
+
+ def parse(self, state): # type: (ParserState) -> bool
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state)
+ return value == 'yes'
+
+
+class AnyParser(ChoicesParser):
+ """Composite argument parser which accepts any input value."""
+ def __init__(self, nothing=False, no_match_message=None): # type: (bool, t.Optional[str]) -> None
+ self.no_match_message = no_match_message
+
+ conditions = MatchConditions.ANY
+
+ if nothing:
+ conditions |= MatchConditions.NOTHING
+
+ super().__init__([], conditions=conditions)
+
+ def no_completion_match(self, value): # type: (str) -> CompletionUnavailable
+ """Return an instance of CompletionUnavailable when no match was found for the given value."""
+ if self.no_match_message:
+ return CompletionUnavailable(message=self.no_match_message)
+
+ return super().no_completion_match(value)
+
+ def no_choices_available(self, value): # type: (str) -> ParserError
+ """Return an instance of ParserError when parsing fails and no choices are available."""
+ if self.no_match_message:
+ return ParserError(self.no_match_message)
+
+ return super().no_choices_available(value)
+
+
+class RelativePathNameParser(DynamicChoicesParser):
+ """Composite argument parser for relative path names."""
+ RELATIVE_NAMES = ['.', '..']
+
+ def __init__(self, choices): # type: (t.List[str]) -> None
+ self.choices = choices
+
+ super().__init__()
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ choices = list(self.choices)
+
+ if value in self.RELATIVE_NAMES:
+ # complete relative names, but avoid suggesting them unless the current name is relative
+ # unfortunately this will be sorted in reverse of what bash presents ("../ ./" instead of "./ ../")
+ choices.extend(f'{item}{PATH_DELIMITER}' for item in self.RELATIVE_NAMES)
+
+ return choices
+
+
+class FileParser(Parser):
+ """Composite argument parser for absolute or relative file paths."""
+ def parse(self, state): # type: (ParserState) -> str
+ """Parse the input from the given state and return the result."""
+ if state.mode == ParserMode.PARSE:
+ path = AnyParser().parse(state)
+
+ if not os.path.isfile(path):
+ raise ParserError(f'Not a file: {path}')
+ else:
+ path = ''
+
+ with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ while boundary.ready:
+ directory = path or '.'
+
+ try:
+ with os.scandir(directory) as scan: # type: t.Iterator[os.DirEntry]
+ choices = [f'{item.name}{PATH_DELIMITER}' if item.is_dir() else item.name for item in scan]
+ except OSError:
+ choices = []
+
+ if not path:
+ choices.append(PATH_DELIMITER) # allow absolute paths
+ choices.append('../') # suggest relative paths
+
+ part = RelativePathNameParser(choices).parse(state)
+ path += f'{part}{boundary.match or ""}'
+
+ return path
+
+
+class AbsolutePathParser(Parser):
+ """Composite argument parser for absolute file paths. Paths are only verified for proper syntax, not for existence."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ path = ''
+
+ with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ while boundary.ready:
+ if path:
+ path += AnyParser(nothing=True).parse(state)
+ else:
+ path += ChoicesParser([PATH_DELIMITER]).parse(state)
+
+ path += (boundary.match or '')
+
+ return path
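+
+# Example (illustrative): only the syntax is checked, so parsing the input
+# "/tmp/results" in PARSE mode returns "/tmp/results"; the leading '/' is
+# required because the first segment only accepts the path delimiter.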
+
+
+class NamespaceParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers that store their results in a namespace."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = state.current_namespace
+ current = getattr(namespace, self.dest)
+
+ if current and self.limit_one:
+ if state.mode == ParserMode.PARSE:
+ raise ParserError('Option cannot be specified more than once.')
+
+ raise CompletionError('Option cannot be specified more than once.')
+
+ value = self.get_value(state)
+
+ if self.use_list:
+ if not current:
+ current = []
+ setattr(namespace, self.dest, current)
+
+ current.append(value)
+ else:
+ setattr(namespace, self.dest, value)
+
+ return value
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ return super().parse(state)
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return not self.use_list
+
+ @property
+ @abc.abstractmethod
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+
+
+class NamespaceWrappedParser(NamespaceParser):
+ """Composite argument parser that wraps a non-namespace parser and stores the result in a namespace."""
+ def __init__(self, dest, parser): # type: (str, Parser) -> None
+ self._dest = dest
+ self.parser = parser
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ return self.parser.parse(state)
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return self._dest
+
+
+class KeyValueParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for key/value composite argument parsers."""
+ @abc.abstractmethod
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = state.current_namespace
+ parsers = self.get_parsers(state)
+ keys = list(parsers)
+
+ with state.delimit(PAIR_DELIMITER, required=False) as pair:
+ while pair.ready:
+ with state.delimit(ASSIGNMENT_DELIMITER):
+ key = ChoicesParser(keys).parse(state)
+
+ value = parsers[key].parse(state)
+
+ setattr(namespace, key, value)
+
+ keys.remove(key)
+
+ return namespace
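+
+# Example (illustrative sketch): a minimal subclass parsing input such as
+# "timeout=30,verbose=yes" into attributes on the current namespace:
+#
+#   class OptionsParser(KeyValueParser):
+#       def get_parsers(self, state):
+#           return dict(timeout=IntegerParser(), verbose=BooleanParser())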
+
+
+class PairParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers consisting of a left and right argument parser, with input separated by a delimiter."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = self.create_namespace()
+
+ state.set_namespace(namespace)
+
+ with state.delimit(self.delimiter, self.required) as boundary:
+ choice = self.get_left_parser(state).parse(state)
+
+ if boundary.match:
+ self.get_right_parser(choice).parse(state)
+
+ return namespace
+
+ @property
+ def required(self): # type: () -> bool
+ """True if the delimiter (and thus right parser) is required, otherwise False."""
+ return False
+
+ @property
+ def delimiter(self): # type: () -> str
+ """The delimiter to use between the left and right parser."""
+ return PAIR_DELIMITER
+
+ @abc.abstractmethod
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+
+ @abc.abstractmethod
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+
+ @abc.abstractmethod
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+
+
+class TypeParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] # pylint: disable=unused-argument
+ """Return a dictionary of type names and type parsers."""
+ return self.get_stateless_parsers()
+
+ @abc.abstractmethod
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ parsers = self.get_parsers(state)
+
+ with state.delimit(':'):
+ key = ChoicesParser(list(parsers)).parse(state)
+
+ value = parsers[key].parse(state)
+
+ return value
diff --git a/test/lib/ansible_test/_internal/cli/commands/__init__.py b/test/lib/ansible_test/_internal/cli/commands/__init__.py
new file mode 100644
index 00000000..5cd37f4f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/__init__.py
@@ -0,0 +1,240 @@
+"""Command line parsing for all commands."""
+from __future__ import annotations
+
+import argparse
+import functools
+import sys
+
+from ...util import (
+ display,
+)
+
+from ..completers import (
+ complete_target,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .coverage import (
+ do_coverage,
+)
+
+from .env import (
+ do_env,
+)
+
+from .integration import (
+ do_integration,
+)
+
+from .sanity import (
+ do_sanity,
+)
+
+from .shell import (
+ do_shell,
+)
+
+from .units import (
+ do_units,
+)
+
+
+def do_commands(
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all commands."""
+ common = argparse.ArgumentParser(add_help=False)
+
+ common.add_argument(
+ '-e',
+ '--explain',
+ action='store_true',
+ help='explain commands that would be executed',
+ )
+
+ common.add_argument(
+ '-v',
+ '--verbose',
+ dest='verbosity',
+ action='count',
+ default=0,
+ help='display more output',
+ )
+
+ common.add_argument(
+ '--color',
+ metavar='COLOR',
+ nargs='?',
+ help='generate color output: yes, no, auto',
+ const='yes',
+ default='auto',
+ type=color,
+ )
+
+ common.add_argument(
+ '--debug',
+ action='store_true',
+ help='run ansible commands in debug mode',
+ )
+
+ common.add_argument(
+ '--truncate',
+ dest='truncate',
+ metavar='COLUMNS',
+ type=int,
+ default=display.columns,
+ help='truncate some long output (0=disabled) (default: auto)',
+ )
+
+ common.add_argument(
+ '--redact',
+ dest='redact',
+ action='store_true',
+ default=True,
+ help=argparse.SUPPRESS, # kept for backwards compatibility, but no point in advertising since it's the default
+ )
+
+ common.add_argument(
+ '--no-redact',
+ dest='redact',
+ action='store_false',
+ default=False,
+ help='show sensitive values in output',
+ )
+
+ test = argparse.ArgumentParser(add_help=False, parents=[common])
+
+ testing = test.add_argument_group(title='common testing arguments')
+
+ testing.add_argument(
+ 'include',
+ metavar='TARGET',
+ nargs='*',
+ help='test the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--include',
+ metavar='TARGET',
+ action='append',
+ help='include the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--exclude',
+ metavar='TARGET',
+ action='append',
+ help='exclude the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--require',
+ metavar='TARGET',
+ action='append',
+ help='require the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--coverage',
+ action='store_true',
+ help='analyze code coverage when running tests',
+ )
+
+ testing.add_argument(
+ '--coverage-check',
+ action='store_true',
+ help='only verify code coverage can be enabled',
+ )
+
+ testing.add_argument(
+ '--metadata',
+ help=argparse.SUPPRESS,
+ )
+
+ testing.add_argument(
+ '--base-branch',
+ metavar='BRANCH',
+ help='base branch used for change detection',
+ )
+
+ testing.add_argument(
+ '--changed',
+ action='store_true',
+ help='limit targets based on changes',
+ )
+
+ changes = test.add_argument_group(title='change detection arguments')
+
+ changes.add_argument(
+ '--tracked',
+ action='store_true',
+ help=argparse.SUPPRESS,
+ )
+
+ changes.add_argument(
+ '--untracked',
+ action='store_true',
+ help='include untracked files',
+ )
+
+ changes.add_argument(
+ '--ignore-committed',
+ dest='committed',
+ action='store_false',
+ help='exclude committed files',
+ )
+
+ changes.add_argument(
+ '--ignore-staged',
+ dest='staged',
+ action='store_false',
+ help='exclude staged files',
+ )
+
+ changes.add_argument(
+ '--ignore-unstaged',
+ dest='unstaged',
+ action='store_false',
+ help='exclude unstaged files',
+ )
+
+ changes.add_argument(
+ '--changed-from',
+ metavar='PATH',
+ help=argparse.SUPPRESS,
+ )
+
+ changes.add_argument(
+ '--changed-path',
+ metavar='PATH',
+ action='append',
+ help=argparse.SUPPRESS,
+ )
+
+ subparsers = parent.add_subparsers(metavar='COMMAND', required=True)
+
+ do_coverage(subparsers, common, completer)
+ do_env(subparsers, common, completer)
+ do_shell(subparsers, common, completer)
+
+ do_integration(subparsers, test, completer)
+ do_sanity(subparsers, test, completer)
+ do_units(subparsers, test, completer)
+
+
+def color(value): # type: (str) -> bool
+ """Strict converter for color option."""
+ if value == 'yes':
+ return True
+
+ if value == 'no':
+ return False
+
+ if value == 'auto':
+ return sys.stdout.isatty()
+
+ raise argparse.ArgumentTypeError(f"invalid choice: '{value}' (choose from 'yes', 'no', 'auto')")
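+
+# Example (illustrative): color('yes') is True, color('no') is False and
+# color('auto') follows sys.stdout.isatty(); any other value raises
+# argparse.ArgumentTypeError, unlike a plain bool() conversion which would
+# accept any non-empty string.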
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py
new file mode 100644
index 00000000..a57ed126
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py
@@ -0,0 +1,85 @@
+"""Command line parsing for all `coverage` commands."""
+from __future__ import annotations
+
+import argparse
+
+from ....commands.coverage import (
+ COVERAGE_GROUPS,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .analyze import (
+ do_analyze,
+)
+
+from .combine import (
+ do_combine,
+)
+
+from .erase import (
+ do_erase,
+)
+
+from .html import (
+ do_html,
+)
+
+from .report import (
+ do_report,
+)
+
+from .xml import (
+ do_xml,
+)
+
+
+def do_coverage(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage` commands."""
+ coverage_common = argparse.ArgumentParser(add_help=False, parents=[parent])
+
+ parser = subparsers.add_parser(
+ 'coverage',
+ help='code coverage management and reporting',
+ )
+
+ coverage_subparsers = parser.add_subparsers(metavar='COMMAND', required=True)
+
+ do_analyze(coverage_subparsers, coverage_common, completer)
+ do_erase(coverage_subparsers, coverage_common, completer)
+
+ do_combine(coverage_subparsers, parent, add_coverage_common, completer)
+ do_report(coverage_subparsers, parent, add_coverage_common, completer)
+ do_html(coverage_subparsers, parent, add_coverage_common, completer)
+ do_xml(coverage_subparsers, parent, add_coverage_common, completer)
+
+
+def add_coverage_common(
+ parser, # type: argparse.ArgumentParser
+):
+ """Add common coverage arguments."""
+ parser.add_argument(
+ '--group-by',
+ metavar='GROUP',
+ action='append',
+ choices=COVERAGE_GROUPS,
+ help='group output by: %s' % ', '.join(COVERAGE_GROUPS),
+ )
+
+ parser.add_argument(
+ '--all',
+ action='store_true',
+ help='include all python/powershell source files',
+ )
+
+ parser.add_argument(
+ '--stub',
+ action='store_true',
+ help='generate empty report of all python/powershell source files',
+ )
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py
new file mode 100644
index 00000000..0f4568dc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py
@@ -0,0 +1,28 @@
+"""Command line parsing for all `coverage analyze` commands."""
+from __future__ import annotations
+
+import argparse
+
+from .targets import (
+ do_targets,
+)
+
+from ....environments import (
+ CompositeActionCompletionFinder,
+)
+
+
+def do_analyze(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage analyze` commands."""
+ parser = subparsers.add_parser(
+ 'analyze',
+ help='analyze collected coverage data',
+ ) # type: argparse.ArgumentParser
+
+ analyze_subparsers = parser.add_subparsers(metavar='COMMAND', required=True)
+
+ do_targets(analyze_subparsers, parent, completer)
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py
new file mode 100644
index 00000000..c572b3bb
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py
@@ -0,0 +1,48 @@
+"""Command line parsing for all `coverage analyze targets` commands."""
+from __future__ import annotations
+
+import argparse
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .combine import (
+ do_combine,
+)
+
+from .expand import (
+ do_expand,
+)
+
+from .filter import (
+ do_filter,
+)
+
+from .generate import (
+ do_generate,
+)
+
+from .missing import (
+ do_missing,
+)
+
+
+def do_targets(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage analyze targets` commands."""
+ targets = subparsers.add_parser(
+ 'targets',
+ help='analyze integration test target coverage',
+ )
+
+ targets_subparsers = targets.add_subparsers(metavar='COMMAND', required=True)
+
+ do_generate(targets_subparsers, parent, completer)
+ do_expand(targets_subparsers, parent, completer)
+ do_filter(targets_subparsers, parent, completer)
+ do_combine(targets_subparsers, parent, completer)
+ do_missing(targets_subparsers, parent, completer)
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py
new file mode 100644
index 00000000..c5b666f6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py
@@ -0,0 +1,49 @@
+"""Command line parsing for the `coverage analyze targets combine` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.combine import (
+ command_coverage_analyze_targets_combine,
+ CoverageAnalyzeTargetsCombineConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_combine(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets combine` command."""
+ parser = subparsers.add_parser(
+ 'combine',
+ parents=[parent],
+ help='combine multiple aggregated coverage files',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_combine,
+ config=CoverageAnalyzeTargetsCombineConfig,
+ )
+
+ targets_combine = parser.add_argument_group('coverage arguments')
+
+ targets_combine.add_argument(
+ 'input_file',
+ nargs='+',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_combine.add_argument(
+ 'output_file',
+ help='output file to write aggregated coverage to',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets combine
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py
new file mode 100644
index 00000000..ec74cab6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py
@@ -0,0 +1,48 @@
+"""Command line parsing for the `coverage analyze targets expand` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.expand import (
+ command_coverage_analyze_targets_expand,
+ CoverageAnalyzeTargetsExpandConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_expand(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets expand` command."""
+ parser = subparsers.add_parser(
+ 'expand',
+ parents=[parent],
+ help='expand target names from integers in aggregated coverage',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_expand,
+ config=CoverageAnalyzeTargetsExpandConfig,
+ )
+
+ targets_expand = parser.add_argument_group(title='coverage arguments')
+
+ targets_expand.add_argument(
+ 'input_file',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_expand.add_argument(
+ 'output_file',
+ help='output file to write expanded coverage to',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets expand
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py
new file mode 100644
index 00000000..b746fe7b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py
@@ -0,0 +1,76 @@
+"""Command line parsing for the `coverage analyze targets filter` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.filter import (
+ command_coverage_analyze_targets_filter,
+ CoverageAnalyzeTargetsFilterConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_filter(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets filter` command."""
+ parser = subparsers.add_parser(
+ 'filter',
+ parents=[parent],
+ help='filter aggregated coverage data',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_filter,
+ config=CoverageAnalyzeTargetsFilterConfig,
+ )
+
+ targets_filter = parser.add_argument_group(title='coverage arguments')
+
+ targets_filter.add_argument(
+ 'input_file',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_filter.add_argument(
+ 'output_file',
+ help='output file to write filtered coverage to',
+ )
+
+ targets_filter.add_argument(
+ '--include-target',
+ metavar='TGT',
+ dest='include_targets',
+ action='append',
+ help='include the specified targets',
+ )
+
+ targets_filter.add_argument(
+ '--exclude-target',
+ metavar='TGT',
+ dest='exclude_targets',
+ action='append',
+ help='exclude the specified targets',
+ )
+
+ targets_filter.add_argument(
+ '--include-path',
+ metavar='REGEX',
+ help='include paths matching the given regex',
+ )
+
+ targets_filter.add_argument(
+ '--exclude-path',
+ metavar='REGEX',
+ help='exclude paths matching the given regex',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets filter
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py
new file mode 100644
index 00000000..ed7be95d
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py
@@ -0,0 +1,49 @@
+"""Command line parsing for the `coverage analyze targets generate` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.generate import (
+ command_coverage_analyze_targets_generate,
+ CoverageAnalyzeTargetsGenerateConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_generate(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets generate` command."""
+ parser = subparsers.add_parser(
+ 'generate',
+ parents=[parent],
+ help='aggregate coverage by integration test target',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_generate,
+ config=CoverageAnalyzeTargetsGenerateConfig,
+ )
+
+ targets_generate = parser.add_argument_group(title='coverage arguments')
+
+ targets_generate.add_argument(
+ 'input_dir',
+ nargs='?',
+ help='directory to read coverage from',
+ )
+
+ targets_generate.add_argument(
+ 'output_file',
+ help='output file for aggregated coverage',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets generate
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py
new file mode 100644
index 00000000..45db16e0
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py
@@ -0,0 +1,65 @@
+"""Command line parsing for the `coverage analyze targets missing` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.missing import (
+ command_coverage_analyze_targets_missing,
+ CoverageAnalyzeTargetsMissingConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_missing(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets missing` command."""
+ parser = subparsers.add_parser(
+ 'missing',
+ parents=[parent],
+ help='identify coverage in one file missing in another',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_missing,
+ config=CoverageAnalyzeTargetsMissingConfig,
+ )
+
+ targets_missing = parser.add_argument_group(title='coverage arguments')
+
+ targets_missing.add_argument(
+ 'from_file',
+ help='input file containing aggregated coverage',
+ )
+
+ targets_missing.add_argument(
+ 'to_file',
+ help='input file containing aggregated coverage',
+ )
+
+ targets_missing.add_argument(
+ 'output_file',
+ help='output file to write aggregated coverage to',
+ )
+
+ targets_missing.add_argument(
+ '--only-gaps',
+ action='store_true',
+ help='report only arcs/lines not hit by any target',
+ )
+
+ targets_missing.add_argument(
+ '--only-exists',
+ action='store_true',
+ help='limit results to files that exist',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets missing
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py
new file mode 100644
index 00000000..fd4b0003
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py
@@ -0,0 +1,48 @@
+"""Command line parsing for the `coverage combine` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.combine import (
+ command_coverage_combine,
+ CoverageCombineConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_combine(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage combine` command."""
+ parser = subparsers.add_parser(
+ 'combine',
+ parents=[parent],
+ help='combine coverage data and rewrite remote paths',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_combine,
+ config=CoverageCombineConfig,
+ )
+
+ coverage_combine = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_combine)
+
+ coverage_combine.add_argument(
+ '--export',
+ metavar='DIR',
+ help='directory to export combined coverage files to',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage combine
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py
new file mode 100644
index 00000000..31432849
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py
@@ -0,0 +1,36 @@
+"""Command line parsing for the `coverage erase` command."""
+from __future__ import annotations
+
+import argparse
+
+from ....commands.coverage.erase import (
+ command_coverage_erase,
+ CoverageEraseConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_erase(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage erase` command."""
+ parser = subparsers.add_parser(
+ 'erase',
+ parents=[parent],
+ help='erase coverage data files',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_erase,
+ config=CoverageEraseConfig,
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage erase
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/html.py b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py
new file mode 100644
index 00000000..e4b023ff
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py
@@ -0,0 +1,42 @@
+"""Command line parsing for the `coverage html` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.html import (
+ command_coverage_html,
+ CoverageHtmlConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_html(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage html` command."""
+ parser = subparsers.add_parser(
+ 'html',
+ parents=[parent],
+ help='generate html coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_html,
+ config=CoverageHtmlConfig,
+ )
+
+ coverage_html = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_html)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage html
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/report.py b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py
new file mode 100644
index 00000000..af5950b3
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py
@@ -0,0 +1,60 @@
+"""Command line parsing for the `coverage report` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.report import (
+ command_coverage_report,
+ CoverageReportConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_report(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage report` command."""
+ parser = subparsers.add_parser(
+ 'report',
+ parents=[parent],
+ help='generate console coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_report,
+ config=CoverageReportConfig,
+ )
+
+ coverage_report = t.cast(argparse.ArgumentParser, parser.add_argument_group('coverage arguments'))
+
+ add_coverage_common(coverage_report)
+
+ coverage_report.add_argument(
+ '--show-missing',
+ action='store_true',
+ help='show line numbers of statements not executed',
+ )
+
+ coverage_report.add_argument(
+ '--include',
+ metavar='PAT[,...]',
+ help='only include paths that match a pattern (accepts quoted shell wildcards)',
+ )
+
+ coverage_report.add_argument(
+ '--omit',
+ metavar='PAT[,...]',
+ help='omit paths that match a pattern (accepts quoted shell wildcards)',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage report
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py
new file mode 100644
index 00000000..5079c8f7
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py
@@ -0,0 +1,42 @@
+"""Command line parsing for the `coverage xml` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.xml import (
+ command_coverage_xml,
+ CoverageXmlConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_xml(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage xml` command."""
+ parser = subparsers.add_parser(
+ 'xml',
+ parents=[parent],
+ help='generate xml coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_xml,
+ config=CoverageXmlConfig,
+ )
+
+ coverage_xml = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_xml)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage xml
diff --git a/test/lib/ansible_test/_internal/cli/commands/env.py b/test/lib/ansible_test/_internal/cli/commands/env.py
new file mode 100644
index 00000000..53437a1f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/env.py
@@ -0,0 +1,63 @@
+"""Command line parsing for the `env` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...commands.env import (
+ EnvConfig,
+ command_env,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_env(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `env` command."""
+ parser = subparsers.add_parser(
+ 'env',
+ parents=[parent],
+ help='show information about the test environment',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_env,
+ config=EnvConfig,
+ )
+
+ env = parser.add_argument_group(title='env arguments')
+
+ env.add_argument(
+ '--show',
+ action='store_true',
+ help='show environment on stdout',
+ )
+
+ env.add_argument(
+ '--dump',
+ action='store_true',
+ help='dump environment to disk',
+ )
+
+ env.add_argument(
+ '--list-files',
+ action='store_true',
+ help='list files on stdout',
+ )
+
+ env.add_argument(
+ '--timeout',
+ type=int,
+ metavar='MINUTES',
+ help='timeout for future ansible-test commands (0 clears)',
+ )
+
+ add_environments(parser, completer, ControllerMode.NO_DELEGATION, TargetMode.NO_TARGETS) # env
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
new file mode 100644
index 00000000..f79fb1cf
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
@@ -0,0 +1,161 @@
+"""Command line parsing for all integration commands."""
+from __future__ import annotations
+
+import argparse
+
+from ...completers import (
+ complete_target,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .network import (
+ do_network_integration,
+)
+
+from .posix import (
+ do_posix_integration,
+)
+
+from .windows import (
+ do_windows_integration,
+)
+
+
+def do_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for all integration commands."""
+ parser = argparse.ArgumentParser(
+ add_help=False,
+ parents=[parent],
+ ) # type: argparse.ArgumentParser
+
+ do_posix_integration(subparsers, parser, add_integration_common, completer)
+ do_network_integration(subparsers, parser, add_integration_common, completer)
+ do_windows_integration(subparsers, parser, add_integration_common, completer)
+
+
+def add_integration_common(
+ parser, # type: argparse.ArgumentParser
+):
+ """Add common integration argumetns."""
+ parser.add_argument(
+ '--start-at',
+ metavar='TARGET',
+ help='start at the specified target',
+ ).completer = complete_target
+
+ parser.add_argument(
+ '--start-at-task',
+ metavar='TASK',
+ help='start at the specified task',
+ )
+
+ parser.add_argument(
+ '--tags',
+ metavar='TAGS',
+ help='only run plays and tasks tagged with these values',
+ )
+
+ parser.add_argument(
+ '--skip-tags',
+ metavar='TAGS',
+ help='only run plays and tasks whose tags do not match these values',
+ )
+
+ parser.add_argument(
+ '--diff',
+ action='store_true',
+ help='show diff output',
+ )
+
+ parser.add_argument(
+ '--allow-destructive',
+ action='store_true',
+ help='allow destructive tests',
+ )
+
+ parser.add_argument(
+ '--allow-root',
+ action='store_true',
+ help='allow tests requiring root when not root',
+ )
+
+ parser.add_argument(
+ '--allow-disabled',
+ action='store_true',
+ help='allow tests which have been marked as disabled',
+ )
+
+ parser.add_argument(
+ '--allow-unstable',
+ action='store_true',
+ help='allow tests which have been marked as unstable',
+ )
+
+ parser.add_argument(
+ '--allow-unstable-changed',
+ action='store_true',
+ help='allow tests which have been marked as unstable when focused changes are detected',
+ )
+
+ parser.add_argument(
+ '--allow-unsupported',
+ action='store_true',
+ help='allow tests which have been marked as unsupported',
+ )
+
+ parser.add_argument(
+ '--retry-on-error',
+ action='store_true',
+ help='retry failed test with increased verbosity',
+ )
+
+ parser.add_argument(
+ '--continue-on-error',
+ action='store_true',
+ help='continue after failed test',
+ )
+
+ parser.add_argument(
+ '--debug-strategy',
+ action='store_true',
+ help='run test playbooks using the debug strategy',
+ )
+
+ parser.add_argument(
+ '--changed-all-target',
+ metavar='TARGET',
+ default='all',
+ help='target to run when all tests are needed',
+ )
+
+ parser.add_argument(
+ '--changed-all-mode',
+ metavar='MODE',
+ choices=('default', 'include', 'exclude'),
+ help='include/exclude behavior with --changed-all-target: %(choices)s',
+ )
+
+ parser.add_argument(
+ '--list-targets',
+ action='store_true',
+ help='list matching targets instead of running tests',
+ )
+
+ parser.add_argument(
+ '--no-temp-workdir',
+ action='store_true',
+ help='do not run tests from a temporary directory (use only for verifying broken tests)',
+ )
+
+ parser.add_argument(
+ '--no-temp-unicode',
+ action='store_true',
+ help='avoid unicode characters in temporary directory (use only for verifying broken tests)',
+ )
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/network.py b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
new file mode 100644
index 00000000..d070afda
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
@@ -0,0 +1,81 @@
+"""Command line parsing for the `network-integration` command."""
+from __future__ import annotations
+
+import argparse
+import os
+import typing as t
+
+from ....commands.integration.network import (
+ command_network_integration,
+)
+
+from ....config import (
+ NetworkIntegrationConfig,
+)
+
+from ....target import (
+ walk_network_integration_targets,
+)
+
+from ....data import (
+ data_context,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_network_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `network-integration` command."""
+ parser = subparsers.add_parser(
+ 'network-integration',
+ parents=[parent],
+ help='network integration tests',
+ ) # type: argparse.ArgumentParser
+
+    parser.set_defaults(
+        func=command_network_integration,
+        targets_func=walk_network_integration_targets,
+        config=NetworkIntegrationConfig,
+    )
+
+ network_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='network integration test arguments'))
+
+ add_integration_common(network_integration)
+
+ network_integration.add_argument(
+ '--testcase',
+ metavar='TESTCASE',
+ help='limit a test to a specified testcase',
+ ).completer = complete_network_testcase
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NETWORK_INTEGRATION) # network-integration
+
+
+def complete_network_testcase(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of test cases matching the given prefix if only one target was parsed from the command line, otherwise return an empty list."""
+ testcases = []
+
+ # since testcases are module specific, don't autocomplete if more than one
+    # module is specified
+ if len(parsed_args.include) != 1:
+ return []
+
+ target = parsed_args.include[0]
+ test_dir = os.path.join(data_context().content.integration_targets_path, target, 'tests')
+ connection_dirs = data_context().content.get_dirs(test_dir)
+
+ for connection_dir in connection_dirs:
+ for testcase in [os.path.basename(path) for path in data_context().content.get_files(connection_dir)]:
+ if testcase.startswith(prefix):
+ testcases.append(testcase.split('.', 1)[0])
+
+ return testcases
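+
+# Directory layout assumed by the completer above (illustrative):
+#   <integration_targets_path>/<target>/tests/<connection>/<testcase>.<ext>
+# Only the portion of each file name before the first '.' is offered as a completion.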
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/posix.py b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py
new file mode 100644
index 00000000..01d906b2
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py
@@ -0,0 +1,50 @@
+"""Command line parsing for the `integration` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.integration.posix import (
+ command_posix_integration,
+)
+
+from ....config import (
+ PosixIntegrationConfig,
+)
+
+from ....target import (
+ walk_posix_integration_targets,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_posix_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `integration` command."""
+ parser = subparsers.add_parser(
+ 'integration',
+ parents=[parent],
+ help='posix integration tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_posix_integration,
+ targets_func=walk_posix_integration_targets,
+ config=PosixIntegrationConfig,
+ )
+
+ posix_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='integration test arguments'))
+
+ add_integration_common(posix_integration)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.POSIX_INTEGRATION) # integration
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/windows.py b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py
new file mode 100644
index 00000000..6fef9334
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py
@@ -0,0 +1,50 @@
+"""Command line parsing for the `windows-integration` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.integration.windows import (
+ command_windows_integration,
+)
+
+from ....config import (
+ WindowsIntegrationConfig,
+)
+
+from ....target import (
+ walk_windows_integration_targets,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_windows_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `windows-integration` command."""
+ parser = subparsers.add_parser(
+ 'windows-integration',
+ parents=[parent],
+ help='windows integration tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_windows_integration,
+ targets_func=walk_windows_integration_targets,
+ config=WindowsIntegrationConfig,
+ )
+
+ windows_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='windows integration test arguments'))
+
+ add_integration_common(windows_integration)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.WINDOWS_INTEGRATION) # windows-integration
diff --git a/test/lib/ansible_test/_internal/cli/commands/sanity.py b/test/lib/ansible_test/_internal/cli/commands/sanity.py
new file mode 100644
index 00000000..009be08b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/sanity.py
@@ -0,0 +1,119 @@
+"""Command line parsing for the `sanity` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...config import (
+ SanityConfig,
+)
+
+from ...commands.sanity import (
+ command_sanity,
+ sanity_get_tests,
+)
+
+from ...target import (
+ walk_sanity_targets,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_sanity(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `sanity` command."""
+ parser = subparsers.add_parser(
+ 'sanity',
+ parents=[parent],
+ help='sanity tests',
+ ) # type: argparse.ArgumentParser
+
+    parser.set_defaults(
+        func=command_sanity,
+        targets_func=walk_sanity_targets,
+        config=SanityConfig,
+    )
+
+ sanity = parser.add_argument_group(title='sanity test arguments')
+
+ sanity.add_argument(
+ '--test',
+ metavar='TEST',
+ action='append',
+ choices=[test.name for test in sanity_get_tests()],
+ help='tests to run',
+ )
+
+ sanity.add_argument(
+ '--skip-test',
+ metavar='TEST',
+ action='append',
+ choices=[test.name for test in sanity_get_tests()],
+ help='tests to skip',
+ )
+
+ sanity.add_argument(
+ '--allow-disabled',
+ action='store_true',
+ help='allow tests to run which are disabled by default',
+ )
+
+ sanity.add_argument(
+ '--list-tests',
+ action='store_true',
+ help='list available tests',
+ )
+
+ sanity.add_argument(
+ '--enable-optional-errors',
+ action='store_true',
+ help='enable optional errors',
+ )
+
+ if data_context().content.is_ansible:
+ sanity.add_argument(
+ '--keep-git',
+ action='store_true',
+ help='transfer git related files to the remote host/container',
+ )
+ else:
+ sanity.set_defaults(
+ keep_git=False,
+ )
+
+ sanity.add_argument(
+ '--lint',
+ action='store_true',
+        help='write lint output to stdout, everything else to stderr',
+ )
+
+ sanity.add_argument(
+ '--junit',
+ action='store_true',
+ help='write test failures to junit xml files',
+ )
+
+ sanity.add_argument(
+ '--failure-ok',
+ action='store_true',
+ help='exit successfully on failed tests after saving results',
+ )
+
+ sanity.add_argument(
+ '--prime-venvs',
+ action='store_true',
+        help='prepare virtual environments without running tests',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.SANITY) # sanity
diff --git a/test/lib/ansible_test/_internal/cli/commands/shell.py b/test/lib/ansible_test/_internal/cli/commands/shell.py
new file mode 100644
index 00000000..301ff70e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/shell.py
@@ -0,0 +1,47 @@
+"""Command line parsing for the `shell` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...commands.shell import (
+ command_shell,
+)
+
+from ...config import (
+ ShellConfig,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_shell(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `shell` command."""
+ parser = subparsers.add_parser(
+ 'shell',
+ parents=[parent],
+ help='open an interactive shell',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_shell,
+ config=ShellConfig,
+ )
+
+ shell = parser.add_argument_group(title='shell arguments')
+
+ shell.add_argument(
+ '--raw',
+ action='store_true',
+ help='direct to shell with no setup',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.SHELL) # shell
diff --git a/test/lib/ansible_test/_internal/cli/commands/units.py b/test/lib/ansible_test/_internal/cli/commands/units.py
new file mode 100644
index 00000000..fdbbbc49
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/units.py
@@ -0,0 +1,65 @@
+"""Command line parsing for the `units` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...config import (
+ UnitsConfig,
+)
+
+from ...commands.units import (
+ command_units,
+)
+
+from ...target import (
+ walk_units_targets,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_units(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `units` command."""
+ parser = subparsers.add_parser(
+ 'units',
+ parents=[parent],
+ help='unit tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_units,
+ targets_func=walk_units_targets,
+ config=UnitsConfig,
+ )
+
+ units = parser.add_argument_group(title='unit test arguments')
+
+ units.add_argument(
+ '--collect-only',
+ action='store_true',
+ help='collect tests but do not execute them',
+ )
+
+ units.add_argument(
+ '--num-workers',
+ metavar='INT',
+ type=int,
+ help='number of workers to use (default: auto)',
+ )
+
+ units.add_argument(
+ '--requirements-mode',
+ choices=('only', 'skip'),
+ help=argparse.SUPPRESS,
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.UNITS) # units
diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py
new file mode 100644
index 00000000..cf6c01f1
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/compat.py
@@ -0,0 +1,482 @@
+"""Provides compatibility with first-generation host delegation options in ansible-test."""
+from __future__ import annotations
+
+import argparse
+import dataclasses
+import enum
+import os
+import types
+import typing as t
+
+from ..constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ..util import (
+ ApplicationError,
+ display,
+ filter_args,
+ sorted_versions,
+ str_to_version,
+)
+
+from ..docker_util import (
+ docker_available,
+)
+
+from ..completion import (
+ DOCKER_COMPLETION,
+ REMOTE_COMPLETION,
+ filter_completion,
+)
+
+from ..host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ DockerConfig,
+ FallbackDetail,
+ FallbackReason,
+ HostConfig,
+ HostContext,
+ HostSettings,
+ NativePythonConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ VirtualPythonConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ..data import (
+ data_context,
+)
+
+
+def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.List[str]]) -> t.Optional[str]
+ """If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
+ return version if version in versions else None
+
+
+def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str]
+ """If a Python version is given and is supported by the controller, return that Python version, otherwise return None."""
+ return filter_python(version, CONTROLLER_PYTHON_VERSIONS)
+
+
+def get_fallback_remote_controller(): # type: () -> str
+ """Return the remote fallback platform for the controller."""
+ platform = 'freebsd' # lower cost than RHEL and macOS
+ candidates = [item for item in filter_completion(REMOTE_COMPLETION).values() if item.controller_supported and item.platform == platform]
+ fallback = sorted(candidates, key=lambda value: str_to_version(value.version), reverse=True)[0]
+ return fallback.name
+
+
+def get_option_name(name): # type: (str) -> str
+ """Return a command-line option name from the given option name."""
+ if name == 'targets':
+ name = 'target'
+
+ return f'--{name.replace("_", "-")}'
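+
+# Illustrative examples (for exposition only, not part of the original module):
+#   get_option_name('python_interpreter') -> '--python-interpreter'
+#   get_option_name('targets')            -> '--target'  (special case above)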
+
+
+class PythonVersionUnsupportedError(ApplicationError):
+ """A Python version was requested for a context which does not support that version."""
+ def __init__(self, context, version, versions):
+ super().__init__(f'Python {version} is not supported by environment `{context}`. Supported Python version(s) are: {", ".join(versions)}')
+
+
+class PythonVersionUnspecifiedError(ApplicationError):
+ """A Python version was not specified for a context which is unknown, thus the Python version is unknown."""
+ def __init__(self, context):
+ super().__init__(f'A Python version was not specified for environment `{context}`. Use the `--python` option to specify a Python version.')
+
+
+class ControllerNotSupportedError(ApplicationError):
+ """Option(s) were specified which do not provide support for the controller and would be ignored because they are irrelevant for the target."""
+ def __init__(self, context):
+ super().__init__(f'Environment `{context}` does not provide a Python version supported by the controller.')
+
+
+class OptionsConflictError(ApplicationError):
+ """Option(s) were specified which conflict with other options."""
+ def __init__(self, first, second):
+ super().__init__(f'Options `{" ".join(first)}` cannot be combined with options `{" ".join(second)}`.')
+
+
+@dataclasses.dataclass(frozen=True)
+class LegacyHostOptions:
+ """Legacy host options used prior to the availability of separate controller and target host configuration."""
+ python: t.Optional[str] = None
+ python_interpreter: t.Optional[str] = None
+ local: t.Optional[bool] = None
+ venv: t.Optional[bool] = None
+ venv_system_site_packages: t.Optional[bool] = None
+ remote: t.Optional[str] = None
+ remote_provider: t.Optional[str] = None
+ docker: t.Optional[str] = None
+ docker_privileged: t.Optional[bool] = None
+ docker_seccomp: t.Optional[str] = None
+ docker_memory: t.Optional[int] = None
+ windows: t.Optional[t.List[str]] = None
+ platform: t.Optional[t.List[str]] = None
+ platform_collection: t.Optional[t.List[t.Tuple[str, str]]] = None
+ platform_connection: t.Optional[t.List[t.Tuple[str, str]]] = None
+ inventory: t.Optional[str] = None
+
+ @staticmethod
+ def create(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> LegacyHostOptions
+ """Create legacy host options from the given namespace."""
+ kwargs = {field.name: getattr(namespace, field.name, None) for field in dataclasses.fields(LegacyHostOptions)}
+
+ if kwargs['python'] == 'default':
+ kwargs['python'] = None
+
+ return LegacyHostOptions(**kwargs)
+
+ @staticmethod
+ def purge_namespace(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> None
+ """Purge legacy host options fields from the given namespace."""
+ for field in dataclasses.fields(LegacyHostOptions): # type: dataclasses.Field
+ if hasattr(namespace, field.name):
+ delattr(namespace, field.name)
+
+ @staticmethod
+ def purge_args(args): # type: (t.List[str]) -> t.List[str]
+ """Purge legacy host options from the given command line arguments."""
+ fields = dataclasses.fields(LegacyHostOptions) # type: t.Tuple[dataclasses.Field, ...]
+        # Note: with `from __future__ import annotations` in effect, field.type may be the
+        # annotation string rather than the type object, so compare against both forms.
+        filters = {get_option_name(field.name): 0 if field.type in (t.Optional[bool], 't.Optional[bool]') else 1 for field in fields}  # type: t.Dict[str, int]
+
+ return filter_args(args, filters)
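+
+    # Illustrative shape of the computed filters mapping (assuming the field types above):
+    # boolean flags such as '--local' consume no trailing value (0), while value options
+    # such as '--python' consume one (1), e.g. {'--local': 0, '--python': 1, ...}.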
+
+ def get_options_used(self): # type: () -> t.Tuple[str, ...]
+ """Return a tuple of the command line options used."""
+ fields = dataclasses.fields(self) # type: t.Tuple[dataclasses.Field, ...]
+ options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name)))
+ return options
+
+
+class TargetMode(enum.Enum):
+ """Type of provisioning to use for the targets."""
+ WINDOWS_INTEGRATION = enum.auto() # windows-integration
+ NETWORK_INTEGRATION = enum.auto() # network-integration
+ POSIX_INTEGRATION = enum.auto() # integration
+ SANITY = enum.auto() # sanity
+ UNITS = enum.auto() # units
+ SHELL = enum.auto() # shell
+ NO_TARGETS = enum.auto() # coverage
+
+ @property
+ def one_host(self):
+ """Return True if only one host (the controller) should be used, otherwise return False."""
+ return self in (TargetMode.SANITY, TargetMode.UNITS, TargetMode.NO_TARGETS)
+
+ @property
+ def no_fallback(self):
+ """Return True if no fallback is acceptable for the controller (due to options not applying to the target), otherwise return False."""
+ return self in (TargetMode.WINDOWS_INTEGRATION, TargetMode.NETWORK_INTEGRATION, TargetMode.NO_TARGETS)
+
+ @property
+ def multiple_pythons(self):
+ """Return True if multiple Python versions are allowed, otherwise False."""
+ return self in (TargetMode.SANITY, TargetMode.UNITS)
+
+ @property
+ def has_python(self):
+ """Return True if this mode uses Python, otherwise False."""
+ return self in (TargetMode.POSIX_INTEGRATION, TargetMode.SANITY, TargetMode.UNITS, TargetMode.SHELL)
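+
+    # For example (illustrative): TargetMode.SANITY.one_host, .multiple_pythons and
+    # .has_python are all True, while .no_fallback is False.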
+
+
+def convert_legacy_args(
+ argv, # type: t.List[str]
+ args, # type: t.Union[argparse.Namespace, types.SimpleNamespace]
+ mode, # type: TargetMode
+): # type: (...) -> HostSettings
+ """Convert pre-split host arguments in the given namespace to their split counterparts."""
+ old_options = LegacyHostOptions.create(args)
+ old_options.purge_namespace(args)
+
+ new_options = [
+ '--controller',
+ '--target',
+ '--target-python',
+ ]
+
+ used_old_options = old_options.get_options_used()
+ used_new_options = [name for name in new_options if name in argv]
+
+ if used_old_options:
+ if used_new_options:
+ raise OptionsConflictError(used_old_options, used_new_options)
+
+ controller, targets, controller_fallback = get_legacy_host_config(mode, old_options)
+
+ if controller_fallback:
+ if mode.one_host:
+ display.info(controller_fallback.message, verbosity=1)
+ else:
+ display.warning(controller_fallback.message)
+
+ used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS) and not native_python(old_options)
+ else:
+ controller = args.controller or OriginConfig()
+ controller_fallback = None
+
+ if mode == TargetMode.NO_TARGETS:
+ targets = []
+ used_default_pythons = False
+ elif args.targets:
+ targets = args.targets
+ used_default_pythons = False
+ else:
+ targets = default_targets(mode, controller)
+ used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS)
+
+ args.controller = controller
+ args.targets = targets
+
+ if used_default_pythons:
+ targets = t.cast(t.List[ControllerConfig], targets)
+ skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in targets}))
+ else:
+ skipped_python_versions = []
+
+ filtered_args = old_options.purge_args(argv)
+ filtered_args = filter_args(filtered_args, {name: 1 for name in new_options})
+
+ host_settings = HostSettings(
+ controller=controller,
+ targets=targets,
+ skipped_python_versions=skipped_python_versions,
+ filtered_args=filtered_args,
+ controller_fallback=controller_fallback,
+ )
+
+ return host_settings
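+
+# Rough sketch of the conversion (illustrative, assuming the `default` docker image supports
+# the requested Python): legacy `units --docker default --python 3.9` produces a DockerConfig
+# controller using Python 3.9 plus a single matching ControllerConfig target.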
+
+
+def controller_targets(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+ controller, # type: ControllerHostConfig
+): # type: (...) -> t.List[ControllerConfig]
+ """Return the configuration for controller targets."""
+ python = native_python(options)
+
+ if python:
+ targets = [ControllerConfig(python=python)]
+ else:
+ targets = default_targets(mode, controller)
+
+ return targets
+
+
+def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePythonConfig]
+ """Return a NativePythonConfig for the given version if it is not None, otherwise return None."""
+ if not options.python and not options.python_interpreter:
+ return None
+
+ return NativePythonConfig(version=options.python, path=options.python_interpreter)
+
+
+def get_legacy_host_config(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+): # type: (...) -> t.Tuple[HostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]
+ """
+ Returns controller and target host configs derived from the provided legacy host options.
+ The goal is to match the original behavior, by using non-split testing whenever possible.
+ When the options support the controller, use the options for the controller and use ControllerConfig for the targets.
+ When the options do not support the controller, use the options for the targets and use a default controller config influenced by the options.
+ """
+ venv_fallback = 'venv/default'
+ docker_fallback = 'default'
+ remote_fallback = get_fallback_remote_controller()
+
+ controller_fallback = None # type: t.Optional[t.Tuple[str, str, FallbackReason]]
+
+ if options.venv:
+ if controller_python(options.python) or not options.python:
+ controller = OriginConfig(python=VirtualPythonConfig(version=options.python or 'default', system_site_packages=options.venv_system_site_packages))
+ else:
+ controller_fallback = f'origin:python={venv_fallback}', f'--venv --python {options.python}', FallbackReason.PYTHON
+ controller = OriginConfig(python=VirtualPythonConfig(version='default', system_site_packages=options.venv_system_site_packages))
+
+ if mode in (TargetMode.SANITY, TargetMode.UNITS):
+ targets = controller_targets(mode, options, controller)
+
+ # Target sanity tests either have no Python requirements or manage their own virtual environments.
+ # Thus there is no point in setting up virtual environments ahead of time for them.
+
+ if mode == TargetMode.UNITS:
+ targets = [ControllerConfig(python=VirtualPythonConfig(version=target.python.version, path=target.python.path,
+ system_site_packages=options.venv_system_site_packages)) for target in targets]
+ else:
+ targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default',
+ system_site_packages=options.venv_system_site_packages))]
+ elif options.docker:
+ docker_config = filter_completion(DOCKER_COMPLETION).get(options.docker)
+
+ if docker_config:
+ if options.python and options.python not in docker_config.supported_pythons:
+ raise PythonVersionUnsupportedError(f'--docker {options.docker}', options.python, docker_config.supported_pythons)
+
+ if docker_config.controller_supported:
+ if controller_python(options.python) or not options.python:
+ controller = DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{options.docker}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
+ controller = DockerConfig(name=options.docker)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker}', FallbackReason.ENVIRONMENT
+ controller = DockerConfig(name=docker_fallback)
+ targets = [DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
+ else:
+ if not options.python:
+ raise PythonVersionUnspecifiedError(f'--docker {options.docker}')
+
+ if controller_python(options.python):
+ controller = DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
+ controller = DockerConfig(name=docker_fallback)
+ targets = [DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
+ elif options.remote:
+ remote_config = filter_completion(REMOTE_COMPLETION).get(options.remote)
+ context, reason = None, None
+
+ if remote_config:
+ if options.python and options.python not in remote_config.supported_pythons:
+ raise PythonVersionUnsupportedError(f'--remote {options.remote}', options.python, remote_config.supported_pythons)
+
+ if remote_config.controller_supported:
+ if controller_python(options.python) or not options.python:
+ controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'remote:{options.remote}', f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
+ controller = PosixRemoteConfig(name=options.remote, provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ context, reason = f'--remote {options.remote}', FallbackReason.ENVIRONMENT
+ controller = None
+ targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
+ elif mode == TargetMode.SHELL and options.remote.startswith('windows/'):
+ if options.python and options.python not in CONTROLLER_PYTHON_VERSIONS:
+ raise ControllerNotSupportedError(f'--python {options.python}')
+
+ controller = OriginConfig(python=native_python(options))
+ targets = [WindowsRemoteConfig(name=options.remote, provider=options.remote_provider)]
+ else:
+ if not options.python:
+ raise PythonVersionUnspecifiedError(f'--remote {options.remote}')
+
+ if controller_python(options.python):
+ controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ context, reason = f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
+ controller = None
+ targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
+
+ if not controller:
+ if docker_available():
+ controller_fallback = f'docker:{docker_fallback}', context, reason
+ controller = DockerConfig(name=docker_fallback)
+ else:
+ controller_fallback = f'remote:{remote_fallback}', context, reason
+ controller = PosixRemoteConfig(name=remote_fallback)
+ else: # local/unspecified
+ # There are several changes in behavior from the legacy implementation when using no delegation (or the `--local` option).
+ # These changes are due to ansible-test now maintaining consistency between its own Python and that of controller Python subprocesses.
+ #
+ # 1) The `--python-interpreter` option (if different from sys.executable) now affects controller subprocesses and triggers re-execution of ansible-test.
+ # Previously this option was completely ignored except when used with the `--docker` or `--remote` options.
+ # 2) The `--python` option now triggers re-execution of ansible-test if it differs from sys.version_info.
+ # Previously it affected Python subprocesses, but not ansible-test itself.
+
+ if controller_python(options.python) or not options.python:
+ controller = OriginConfig(python=native_python(options))
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = 'origin:python=default', f'--python {options.python}', FallbackReason.PYTHON
+ controller = OriginConfig()
+ targets = controller_targets(mode, options, controller)
+
+ if controller_fallback:
+ controller_option, context, reason = controller_fallback
+
+ if mode.no_fallback:
+ raise ControllerNotSupportedError(context)
+
+ fallback_detail = FallbackDetail(
+ reason=reason,
+ message=f'Using `--controller {controller_option}` since `{context}` does not support the controller.',
+ )
+ else:
+ fallback_detail = None
+
+ if mode.one_host and any(not isinstance(target, ControllerConfig) for target in targets):
+ raise ControllerNotSupportedError(controller_fallback[1])
+
+ if mode == TargetMode.NO_TARGETS:
+ targets = []
+ else:
+ targets = handle_non_posix_targets(mode, options, targets)
+
+ return controller, targets, fallback_detail
+
+
+def handle_non_posix_targets(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+ targets, # type: t.List[HostConfig]
+): # type: (...) -> t.List[HostConfig]
+ """Return a list of non-POSIX targets if the target mode is non-POSIX."""
+ if mode == TargetMode.WINDOWS_INTEGRATION:
+ if options.windows:
+ targets = [WindowsRemoteConfig(name=f'windows/{version}', provider=options.remote_provider) for version in options.windows]
+ else:
+ targets = [WindowsInventoryConfig(path=options.inventory)]
+ elif mode == TargetMode.NETWORK_INTEGRATION:
+ if options.platform:
+ targets = [NetworkRemoteConfig(name=platform, provider=options.remote_provider) for platform in options.platform]
+
+ for platform, collection in options.platform_collection or []:
+ for entry in targets:
+ if entry.platform == platform:
+ entry.collection = collection
+
+ for platform, connection in options.platform_connection or []:
+ for entry in targets:
+ if entry.platform == platform:
+ entry.connection = connection
+ else:
+ targets = [NetworkInventoryConfig(path=options.inventory)]
+
+ return targets
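+
+# For example (illustrative): with mode=TargetMode.WINDOWS_INTEGRATION and
+# options.windows == ['2016'], the result is a single target:
+#   WindowsRemoteConfig(name='windows/2016', provider=options.remote_provider)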
+
+
+def default_targets(
+ mode, # type: TargetMode
+ controller, # type: ControllerHostConfig
+): # type: (...) -> t.List[HostConfig]
+ """Return a list of default targets for the given target mode."""
+ if mode == TargetMode.WINDOWS_INTEGRATION:
+ targets = [WindowsInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.winrm')))]
+ elif mode == TargetMode.NETWORK_INTEGRATION:
+ targets = [NetworkInventoryConfig(path=os.path.abspath(os.path.join(data_context().content.integration_path, 'inventory.networking')))]
+ elif mode.multiple_pythons:
+ targets = controller.get_default_targets(HostContext(controller_config=controller))
+ else:
+ targets = [ControllerConfig()]
+
+ return targets
diff --git a/test/lib/ansible_test/_internal/cli/completers.py b/test/lib/ansible_test/_internal/cli/completers.py
new file mode 100644
index 00000000..a4b9c04f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/completers.py
@@ -0,0 +1,26 @@
+"""Completers for use with argcomplete."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ..target import (
+ find_target_completion,
+)
+
+from .argparsing.argcompletion import (
+ OptionCompletionFinder,
+)
+
+
+def complete_target(completer, prefix, parsed_args, **_): # type: (OptionCompletionFinder, str, argparse.Namespace, ...) -> t.List[str]
+ """Perform completion for the targets configured for the command being parsed."""
+ matches = find_target_completion(parsed_args.targets_func, prefix, completer.list_mode)
+ completer.disable_completion_mangling = completer.list_mode and len(matches) > 1
+ return matches
+
+
+def complete_choices(choices, prefix, **_): # type: (t.List[str], str, ...) -> t.List[str]
+ """Perform completion using the provided choices."""
+ matches = [choice for choice in choices if choice.startswith(prefix)]
+ return matches
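+
+# Example (illustrative): complete_choices(['prod', 'dev'], 'p') -> ['prod'].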
diff --git a/test/lib/ansible_test/_internal/cli/converters.py b/test/lib/ansible_test/_internal/cli/converters.py
new file mode 100644
index 00000000..46562738
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/converters.py
@@ -0,0 +1,20 @@
+"""Converters for use as the type argument for arparse's add_argument method."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+
+def key_value_type(value): # type: (str) -> t.Tuple[str, str]
+ """Wrapper around key_value."""
+ return key_value(value)
+
+
+def key_value(value): # type: (str) -> t.Tuple[str, str]
+ """Type parsing and validation for argparse key/value pairs separated by an '=' character."""
+ parts = value.split('=')
+
+ if len(parts) != 2:
+ raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
+
+ return parts[0], parts[1]
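+
+# A minimal sketch of the expected behavior (illustrative, not part of the module):
+#   key_value('ios=cisco.ios') -> ('ios', 'cisco.ios')
+#   key_value('ios')           -> raises argparse.ArgumentTypeError
+#   key_value('a=b=c')         -> raises argparse.ArgumentTypeError (split yields three parts)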
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py
new file mode 100644
index 00000000..640ff56b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/environments.py
@@ -0,0 +1,574 @@
+"""Command line parsing for test environments."""
+from __future__ import annotations
+
+import argparse
+import enum
+import functools
+import typing as t
+
+from ..constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_PROVIDERS,
+ SECCOMP_CHOICES,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ..completion import (
+ DOCKER_COMPLETION,
+ NETWORK_COMPLETION,
+ REMOTE_COMPLETION,
+ WINDOWS_COMPLETION,
+ filter_completion,
+)
+
+from ..cli.argparsing import (
+ CompositeAction,
+ CompositeActionCompletionFinder,
+)
+
+from ..cli.argparsing.actions import (
+ EnumAction,
+)
+
+from ..cli.actions import (
+ DelegatedControllerAction,
+ NetworkSshTargetAction,
+ NetworkTargetAction,
+ OriginControllerAction,
+ PosixSshTargetAction,
+ PosixTargetAction,
+ SanityPythonTargetAction,
+ UnitsPythonTargetAction,
+ WindowsSshTargetAction,
+ WindowsTargetAction,
+)
+
+from ..cli.compat import (
+ TargetMode,
+)
+
+from ..config import (
+ TerminateMode,
+)
+
+from .completers import (
+ complete_choices,
+)
+
+from .converters import (
+ key_value_type,
+)
+
+from ..ci import (
+ get_ci_provider,
+)
+
+
+class ControllerMode(enum.Enum):
+ """Type of provisioning to use for the controller."""
+ NO_DELEGATION = enum.auto()
+ ORIGIN = enum.auto()
+ DELEGATED = enum.auto()
+
+
+def add_environments(
+ parser, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add arguments for the environments used to run ansible-test and commands it invokes."""
+ no_environment = controller_mode == ControllerMode.NO_DELEGATION and target_mode == TargetMode.NO_TARGETS
+
+ parser.set_defaults(no_environment=no_environment)
+
+ if no_environment:
+ return
+
+ parser.set_defaults(target_mode=target_mode)
+
+ add_global_options(parser, controller_mode)
+ add_legacy_environment_options(parser, controller_mode, target_mode)
+ action_types = add_composite_environment_options(parser, completer, controller_mode, target_mode)
+
+ sections = [f'{heading}\n{content}'
+ for action_type, documentation_state in CompositeAction.documentation_state.items() if action_type in action_types
+ for heading, content in documentation_state.sections.items()]
+
+ if not get_ci_provider().supports_core_ci_auth():
+ sections.append('Remote provisioning options have been hidden since no Ansible Core CI API key was found.')
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.epilog = '\n\n'.join(sections)
+
+
+def add_global_options(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+):
+ """Add global options for controlling the test environment that work with both the legacy and composite options."""
+ global_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='global environment arguments'))
+
+ global_parser.add_argument(
+ '--containers',
+ metavar='JSON',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--pypi-proxy',
+ action='store_true',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--pypi-endpoint',
+ metavar='URI',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--requirements',
+ action='store_true',
+ default=False,
+ help='install command requirements',
+ )
+
+ global_parser.add_argument(
+ '--no-pip-check',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
+ add_global_remote(global_parser, controller_mode)
+ add_global_docker(global_parser, controller_mode)
+
+
+def add_composite_environment_options(
+ parser, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+): # type: (...) -> t.List[t.Type[CompositeAction]]
+ """Add composite options for controlling the test environment."""
+ composite_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(
+ title='composite environment arguments (mutually exclusive with "environment arguments" above)'))
+
+ composite_parser.add_argument(
+ '--host-path',
+ help=argparse.SUPPRESS,
+ )
+
+ action_types = [] # type: t.List[t.Type[CompositeAction]]
+
+ def register_action_type(action_type): # type: (t.Type[CompositeAction]) -> t.Type[CompositeAction]
+ """Register the provided composite action type and return it."""
+ action_types.append(action_type)
+ return action_type
+
+ if controller_mode == ControllerMode.NO_DELEGATION:
+ composite_parser.set_defaults(controller=None)
+ else:
+ composite_parser.add_argument(
+ '--controller',
+ metavar='OPT',
+ action=register_action_type(DelegatedControllerAction if controller_mode == ControllerMode.DELEGATED else OriginControllerAction),
+ help='configuration for the controller',
+ ).completer = completer.completer
+
+ if target_mode == TargetMode.NO_TARGETS:
+ composite_parser.set_defaults(targets=[])
+ elif target_mode == TargetMode.SHELL:
+ group = composite_parser.add_mutually_exclusive_group()
+
+ group.add_argument(
+ '--target-posix',
+ metavar='OPT',
+ action=register_action_type(PosixSshTargetAction),
+ help='configuration for the target',
+ ).completer = completer.completer
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ group.add_argument(
+ '--target-windows',
+ metavar='OPT',
+ action=WindowsSshTargetAction if suppress else register_action_type(WindowsSshTargetAction),
+ help=suppress or 'configuration for the target',
+ ).completer = completer.completer
+
+ group.add_argument(
+ '--target-network',
+ metavar='OPT',
+ action=NetworkSshTargetAction if suppress else register_action_type(NetworkSshTargetAction),
+ help=suppress or 'configuration for the target',
+ ).completer = completer.completer
+ else:
+ if target_mode.multiple_pythons:
+ target_option = '--target-python'
+ target_help = 'configuration for the target python interpreter(s)'
+ elif target_mode == TargetMode.POSIX_INTEGRATION:
+ target_option = '--target'
+ target_help = 'configuration for the target'
+ else:
+ target_option = '--target'
+ target_help = 'configuration for the target(s)'
+
+ target_actions = {
+ TargetMode.POSIX_INTEGRATION: PosixTargetAction,
+ TargetMode.WINDOWS_INTEGRATION: WindowsTargetAction,
+ TargetMode.NETWORK_INTEGRATION: NetworkTargetAction,
+ TargetMode.SANITY: SanityPythonTargetAction,
+ TargetMode.UNITS: UnitsPythonTargetAction,
+ }
+
+ target_action = target_actions[target_mode]
+
+ composite_parser.add_argument(
+ target_option,
+ metavar='OPT',
+ action=register_action_type(target_action),
+ help=target_help,
+ ).completer = completer.completer
+
+ return action_types
+
+
+def add_legacy_environment_options(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+):
+ """Add legacy options for controlling the test environment."""
+ # noinspection PyTypeChecker
+ environment = parser.add_argument_group(
+ title='environment arguments (mutually exclusive with "composite environment arguments" below)') # type: argparse.ArgumentParser
+
+ add_environments_python(environment, target_mode)
+ add_environments_host(environment, controller_mode, target_mode)
+
+
+def add_environments_python(
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments to control the Python version(s) used."""
+ if target_mode.has_python:
+ python_versions = SUPPORTED_PYTHON_VERSIONS
+ else:
+ python_versions = CONTROLLER_PYTHON_VERSIONS
+
+ environments_parser.add_argument(
+ '--python',
+ metavar='X.Y',
+ choices=python_versions + ('default',),
+ help='python version: %s' % ', '.join(python_versions),
+ )
+
+ environments_parser.add_argument(
+ '--python-interpreter',
+ metavar='PATH',
+ help='path to the python interpreter',
+ )
+
+
+def add_environments_host(
+ environments_parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+ target_mode # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for the given host and argument modes."""
+ # noinspection PyTypeChecker
+ environments_exclusive_group = environments_parser.add_mutually_exclusive_group() # type: argparse.ArgumentParser
+
+ add_environment_local(environments_exclusive_group)
+ add_environment_venv(environments_exclusive_group, environments_parser)
+
+ if controller_mode == ControllerMode.DELEGATED:
+ add_environment_remote(environments_exclusive_group, environments_parser, target_mode)
+ add_environment_docker(environments_exclusive_group, environments_parser, target_mode)
+
+ if target_mode == TargetMode.WINDOWS_INTEGRATION:
+ add_environment_windows(environments_parser)
+
+ if target_mode == TargetMode.NETWORK_INTEGRATION:
+ add_environment_network(environments_parser)
+
+
+def add_environment_network(
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on a windows host."""
+ environments_parser.add_argument(
+ '--platform',
+ metavar='PLATFORM',
+ action='append',
+ help='network platform/version',
+ ).completer = complete_network_platform
+
+ environments_parser.add_argument(
+ '--platform-collection',
+ type=key_value_type,
+ metavar='PLATFORM=COLLECTION',
+ action='append',
+ help='collection used to test platform',
+ ).completer = complete_network_platform_collection
+
+ environments_parser.add_argument(
+ '--platform-connection',
+ type=key_value_type,
+ metavar='PLATFORM=CONNECTION',
+ action='append',
+ help='connection used to test platform',
+ ).completer = complete_network_platform_connection
+
+ environments_parser.add_argument(
+ '--inventory',
+ metavar='PATH',
+ help='path to inventory used for tests',
+ )
+
+
+def add_environment_windows(
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on a windows host."""
+ environments_parser.add_argument(
+ '--windows',
+ metavar='VERSION',
+ action='append',
+ help='windows version',
+ ).completer = complete_windows
+
+ environments_parser.add_argument(
+ '--inventory',
+ metavar='PATH',
+ help='path to inventory used for tests',
+ )
+
+
+def add_environment_local(
+ exclusive_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on the local (origin) host."""
+ exclusive_parser.add_argument(
+ '--local',
+ action='store_true',
+ help='run from the local environment',
+ )
+
+
+def add_environment_venv(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running in ansible-test managed virtual environments."""
+ exclusive_parser.add_argument(
+ '--venv',
+ action='store_true',
+ help='run from a virtual environment',
+ )
+
+ environments_parser.add_argument(
+ '--venv-system-site-packages',
+ action='store_true',
+        help='enable system site packages',
+    )
+
+
+def add_global_docker(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+): # type: (...) -> None
+ """Add global options for Docker."""
+ if controller_mode != ControllerMode.DELEGATED:
+ parser.set_defaults(
+ docker_no_pull=False,
+ docker_network=None,
+ docker_terminate=None,
+ prime_containers=False,
+ )
+
+ return
+
+ parser.add_argument(
+ '--docker-no-pull',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
+ parser.add_argument(
+ '--docker-network',
+ metavar='NET',
+ help='run using the specified network',
+ )
+
+ parser.add_argument(
+ '--docker-terminate',
+ metavar='T',
+ default=TerminateMode.ALWAYS,
+ type=TerminateMode,
+ action=EnumAction,
+ help='terminate the container: %(choices)s (default: %(default)s)',
+ )
+
+ parser.add_argument(
+ '--prime-containers',
+ action='store_true',
+ help='download containers without running tests',
+ )
+
+
+def add_environment_docker(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for running in docker containers."""
+ if target_mode in (TargetMode.POSIX_INTEGRATION, TargetMode.SHELL):
+ docker_images = sorted(filter_completion(DOCKER_COMPLETION))
+ else:
+ docker_images = sorted(filter_completion(DOCKER_COMPLETION, controller_only=True))
+
+ exclusive_parser.add_argument(
+ '--docker',
+ metavar='IMAGE',
+ nargs='?',
+ const='default',
+ help='run from a docker container',
+ ).completer = functools.partial(complete_choices, docker_images)
+
+ environments_parser.add_argument(
+ '--docker-privileged',
+ action='store_true',
+ help='run docker container in privileged mode',
+ )
+
+ environments_parser.add_argument(
+ '--docker-seccomp',
+ metavar='SC',
+ choices=SECCOMP_CHOICES,
+ help='set seccomp confinement for the test container: %(choices)s',
+ )
+
+ environments_parser.add_argument(
+ '--docker-memory',
+ metavar='INT',
+ type=int,
+ help='memory limit for docker in bytes',
+ )
+
+
+def add_global_remote(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+): # type: (...) -> None
+ """Add global options for remote instances."""
+ if controller_mode != ControllerMode.DELEGATED:
+ parser.set_defaults(
+ remote_stage=None,
+ remote_endpoint=None,
+ remote_terminate=None,
+ )
+
+ return
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ parser.add_argument(
+ '--remote-stage',
+ metavar='STAGE',
+ default='prod',
+ help=suppress or 'remote stage to use: prod, dev',
+ ).completer = complete_remote_stage
+
+ parser.add_argument(
+ '--remote-endpoint',
+ metavar='EP',
+ help=suppress or 'remote provisioning endpoint to use',
+ )
+
+ parser.add_argument(
+ '--remote-terminate',
+ metavar='T',
+ default=TerminateMode.NEVER,
+ type=TerminateMode,
+ action=EnumAction,
+ help=suppress or 'terminate the remote instance: %(choices)s (default: %(default)s)',
+ )
+
+
+def add_environment_remote(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for running in ansible-core-ci provisioned remote virtual machines."""
+ if target_mode == TargetMode.POSIX_INTEGRATION:
+ remote_platforms = get_remote_platform_choices()
+ elif target_mode == TargetMode.SHELL:
+ remote_platforms = sorted(set(get_remote_platform_choices()) | set(get_windows_platform_choices()))
+ else:
+ remote_platforms = get_remote_platform_choices(True)
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ exclusive_parser.add_argument(
+ '--remote',
+ metavar='NAME',
+ help=suppress or 'run from a remote instance',
+ ).completer = functools.partial(complete_choices, remote_platforms)
+
+ environments_parser.add_argument(
+ '--remote-provider',
+ metavar='PR',
+ choices=REMOTE_PROVIDERS,
+ help=suppress or 'remote provider to use: %(choices)s',
+ )
+
+
+def complete_remote_stage(prefix, **_): # type: (str, ...) -> t.List[str]
+ """Return a list of supported stages matching the given prefix."""
+ return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
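+
+# Example (illustrative): complete_remote_stage('p') -> ['prod'].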
+
+
+def complete_windows(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported Windows versions matching the given prefix, excluding versions already parsed from the command line."""
+ return [i for i in get_windows_version_choices() if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
+
+
+def complete_network_platform(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding platforms already parsed from the command line."""
+ images = sorted(filter_completion(NETWORK_COMPLETION))
+
+ return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
+
+
+def complete_network_platform_collection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding collection platforms already parsed from the command line."""
+ left = prefix.split('=')[0]
+ images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values()))
+
+ return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
+
+
+def complete_network_platform_connection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding connection platforms already parsed from the command line."""
+ left = prefix.split('=')[0]
+ images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values()))
+
+ return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
+
+
+def get_remote_platform_choices(controller=False): # type: (bool) -> t.List[str]
+ """Return a list of supported remote platforms matching the given prefix."""
+ return sorted(filter_completion(REMOTE_COMPLETION, controller_only=controller))
+
+
+def get_windows_platform_choices(): # type: () -> t.List[str]
+ """Return a list of supported Windows versions matching the given prefix."""
+ return sorted(f'windows/{windows.version}' for windows in filter_completion(WINDOWS_COMPLETION).values())
+
+
+def get_windows_version_choices(): # type: () -> t.List[str]
+ """Return a list of supported Windows versions."""
+ return sorted(windows.version for windows in filter_completion(WINDOWS_COMPLETION).values())
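+
+# Example result shapes (version numbers hypothetical):
+#   get_windows_platform_choices() -> ['windows/2016', 'windows/2019']
+#   get_windows_version_choices()  -> ['2016', '2019']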
diff --git a/test/lib/ansible_test/_internal/cli/parsers/__init__.py b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
new file mode 100644
index 00000000..25bac916
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
@@ -0,0 +1,303 @@
+"""Composite argument parsers for ansible-test specific command-line arguments."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...ci import (
+ get_ci_provider,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ NetworkConfig,
+ NetworkInventoryConfig,
+ PosixConfig,
+ WindowsConfig,
+ WindowsInventoryConfig,
+)
+
+from ..argparsing.parsers import (
+ DocumentationState,
+ Parser,
+ ParserState,
+ TypeParser,
+)
+
+from .value_parsers import (
+ PythonParser,
+)
+
+from .host_config_parsers import (
+ ControllerParser,
+ DockerParser,
+ NetworkInventoryParser,
+ NetworkRemoteParser,
+ OriginParser,
+ PosixRemoteParser,
+ PosixSshParser,
+ WindowsInventoryParser,
+ WindowsRemoteParser,
+)
+
+
+from .base_argument_parsers import (
+ ControllerNamespaceParser,
+ TargetNamespaceParser,
+ TargetsNamespaceParser,
+)
+
+
+class OriginControllerParser(ControllerNamespaceParser, TypeParser):
+ """Composite argument parser for the controller when delegation is not supported."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return dict(
+ origin=OriginParser(),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = '--controller options:'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
+ """Composite argument parser for the controller when delegation is supported."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = dict(
+ origin=OriginParser(),
+ docker=DockerParser(controller=True),
+ )
+
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=PosixRemoteParser(controller=True),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = '--controller options:'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class PosixTargetParser(TargetNamespaceParser, TypeParser):
+ """Composite argument parser for a POSIX target."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = dict(
+ controller=ControllerParser(),
+ docker=DockerParser(controller=False),
+ )
+
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=PosixRemoteParser(controller=False),
+ )
+
+ parsers.update(
+ ssh=PosixSshParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
+ """Composite argument parser for a Windows target."""
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return True
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers(state.root_namespace.targets)
+
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers([])
+
+ def get_internal_parsers(self, targets): # type: (t.List[WindowsConfig]) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = {}
+
+ if self.allow_inventory and not targets:
+ parsers.update(
+ inventory=WindowsInventoryParser(),
+ )
+
+ if not targets or not any(isinstance(target, WindowsInventoryConfig) for target in targets):
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=WindowsRemoteParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
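+# Example (documentation only): with WindowsTargetParser above, the first
+# --target may be `inventory:{path}` or `remote:windows/2019`; once any target
+# exists the inventory option is withheld, and once an inventory target exists
+# no further target types are offered, per get_internal_parsers. The values
+# shown are illustrative.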
+
+class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
+ """Composite argument parser for a network target."""
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return True
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers(state.root_namespace.targets)
+
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers([])
+
+ def get_internal_parsers(self, targets): # type: (t.List[NetworkConfig]) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = {}
+
+ if self.allow_inventory and not targets:
+ parsers.update(
+ inventory=NetworkInventoryParser(),
+ )
+
+ if not targets or not any(isinstance(target, NetworkInventoryConfig) for target in targets):
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=NetworkRemoteParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class PythonTargetParser(TargetsNamespaceParser, Parser):
+ """Composite argument parser for a Python target."""
+ def __init__(self, allow_venv): # type: (bool) -> None
+ super().__init__()
+
+ self.allow_venv = allow_venv
+
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-python'
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ versions = list(SUPPORTED_PYTHON_VERSIONS)
+
+ for target in state.root_namespace.targets or []: # type: PosixConfig
+ versions.remove(target.python.version)
+
+ parser = PythonParser(versions, allow_venv=self.allow_venv, allow_default=True)
+ python = parser.parse(state)
+
+ value = ControllerConfig(python=python)
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '\n'.join([
+ f' {PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=True).document(state)} # non-origin controller',
+ f' {PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=True, allow_default=True).document(state)} # origin controller',
+ ])
+
+ return None
+
+
+class SanityPythonTargetParser(PythonTargetParser):
+ """Composite argument parser for a sanity Python target."""
+ def __init__(self): # type: () -> None
+ super().__init__(allow_venv=False)
+
+
+class UnitsPythonTargetParser(PythonTargetParser):
+ """Composite argument parser for a units Python target."""
+ def __init__(self): # type: () -> None
+ super().__init__(allow_venv=True)
+
+
+class PosixSshTargetParser(PosixTargetParser):
+ """Composite argument parser for a POSIX SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-posix'
+
+
+class WindowsSshTargetParser(WindowsTargetParser):
+ """Composite argument parser for a Windows SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-windows'
+
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
+
+
+class NetworkSshTargetParser(NetworkTargetParser):
+ """Composite argument parser for a network SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-network'
+
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
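+
+
+# Example (documentation only): for units tests, `--target-python venv/3.9`
+# parses into a ControllerConfig whose python is a VirtualPythonConfig, while
+# SanityPythonTargetParser rejects the `venv/` prefix since it is constructed
+# with allow_venv=False. The version 3.9 is illustrative and must appear in
+# SUPPORTED_PYTHON_VERSIONS.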
diff --git a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
new file mode 100644
index 00000000..2f17affa
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
@@ -0,0 +1,73 @@
+"""Base classes for the primary parsers for composite command line arguments."""
+from __future__ import annotations
+
+import abc
+import typing as t
+
+from ..argparsing.parsers import (
+ CompletionError,
+ NamespaceParser,
+ ParserState,
+)
+
+
+class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for controller namespace parsers."""
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'controller'
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ if state.root_namespace.targets:
+ raise ControllerRequiredFirstError()
+
+ return super().parse(state)
+
+
+class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for target namespace parsers involving a single target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target'
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'targets'
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return True
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
+
+
+class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for controller namespace parsers involving multiple targets."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target'
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'targets'
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return True
+
+
+class ControllerRequiredFirstError(CompletionError):
+ """Exception raised when controller and target options are specified out-of-order."""
+ def __init__(self):
+ super().__init__('The `--controller` option must be specified before `--target` option(s).')
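+
+
+# Example (documentation only): `--controller docker:default --target docker:fedora34`
+# parses in order, while giving any `--target` before `--controller` raises
+# ControllerRequiredFirstError, because ControllerNamespaceParser.parse checks
+# root_namespace.targets first. The image names are illustrative.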
diff --git a/test/lib/ansible_test/_internal/cli/parsers/helpers.py b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
new file mode 100644
index 00000000..0cf13f8d
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
@@ -0,0 +1,59 @@
+"""Helper functions for composite parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...completion import (
+ DOCKER_COMPLETION,
+ REMOTE_COMPLETION,
+ filter_completion,
+)
+
+from ...host_configs import (
+ DockerConfig,
+ HostConfig,
+ PosixRemoteConfig,
+)
+
+
+def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
+ """Return a list of docker instance Python versions supported by the specified host config."""
+ image_config = filter_completion(DOCKER_COMPLETION).get(name)
+ available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+
+ if not image_config:
+ return [] if strict else list(available_pythons)
+
+ supported_pythons = [python for python in image_config.supported_pythons if python in available_pythons]
+
+ return supported_pythons
+
+
+def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
+ """Return a list of remote instance Python versions supported by the specified host config."""
+ platform_config = filter_completion(REMOTE_COMPLETION).get(name)
+ available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+
+ if not platform_config:
+ return [] if strict else list(available_pythons)
+
+ supported_pythons = [python for python in platform_config.supported_pythons if python in available_pythons]
+
+ return supported_pythons
+
+
+def get_controller_pythons(controller_config, strict): # type: (HostConfig, bool) -> t.List[str]
+ """Return a list of controller Python versions supported by the specified host config."""
+ if isinstance(controller_config, DockerConfig):
+ pythons = get_docker_pythons(controller_config.name, False, strict)
+ elif isinstance(controller_config, PosixRemoteConfig):
+ pythons = get_remote_pythons(controller_config.name, False, strict)
+ else:
+ pythons = list(SUPPORTED_PYTHON_VERSIONS)
+
+ return pythons
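+
+
+# Example (documentation only): for a hypothetical image entry 'ubuntu2004'
+# whose completion config lists supported_pythons ['3.8', '3.9'],
+# get_docker_pythons('ubuntu2004', True, False) returns the subset of those
+# versions also present in CONTROLLER_PYTHON_VERSIONS. For an unknown image,
+# strict=True yields [] and strict=False yields every available version.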
diff --git a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
new file mode 100644
index 00000000..37322630
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
@@ -0,0 +1,310 @@
+"""Composite parsers for the various types of hosts."""
+from __future__ import annotations
+
+import typing as t
+
+from ...completion import (
+ DOCKER_COMPLETION,
+ NETWORK_COMPLETION,
+ REMOTE_COMPLETION,
+ WINDOWS_COMPLETION,
+ filter_completion,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ..compat import (
+ get_fallback_remote_controller,
+)
+
+from ..argparsing.parsers import (
+ ChoicesParser,
+ DocumentationState,
+ FileParser,
+ MatchConditions,
+ NamespaceWrappedParser,
+ PairParser,
+ Parser,
+ ParserError,
+ ParserState,
+)
+
+from .value_parsers import (
+ PlatformParser,
+ SshConnectionParser,
+)
+
+from .key_value_parsers import (
+ ControllerKeyValueParser,
+ DockerKeyValueParser,
+ EmptyKeyValueParser,
+ NetworkRemoteKeyValueParser,
+ OriginKeyValueParser,
+ PosixRemoteKeyValueParser,
+ PosixSshKeyValueParser,
+ WindowsRemoteKeyValueParser,
+)
+
+from .helpers import (
+ get_docker_pythons,
+ get_remote_pythons,
+)
+
+
+class OriginParser(Parser):
+ """Composite argument parser for the origin."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = OriginConfig()
+
+ state.set_namespace(namespace)
+
+ parser = OriginKeyValueParser()
+ parser.parse(state)
+
+ return namespace
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return OriginKeyValueParser().document(state)
+
+
+class ControllerParser(Parser):
+ """Composite argument parser for the controller."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = ControllerConfig()
+
+ state.set_namespace(namespace)
+
+ parser = ControllerKeyValueParser()
+ parser.parse(state)
+
+ return namespace
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return ControllerKeyValueParser().document(state)
+
+
+class DockerParser(PairParser):
+ """Composite argument parser for a docker host."""
+ def __init__(self, controller): # type: (bool) -> None
+ self.controller = controller
+
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return DockerConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(DOCKER_COMPLETION, controller_only=self.controller)),
+ conditions=MatchConditions.CHOICE | MatchConditions.ANY))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return DockerKeyValueParser(choice, self.controller)
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state) # type: DockerConfig
+
+ if not value.python and not get_docker_pythons(value.name, self.controller, True):
+ raise ParserError(f'Python version required for docker image: {value.name}')
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ default = 'default'
+ content = '\n'.join([f' {image} ({", ".join(get_docker_pythons(image, self.controller, False))})'
+ for image, item in filter_completion(DOCKER_COMPLETION, controller_only=self.controller).items()])
+
+ content += '\n'.join([
+ '',
+ ' {image} # python must be specified for custom images',
+ ])
+
+ state.sections[f'{"controller" if self.controller else "target"} docker images and supported python version (choose one):'] = content
+
+ return f'{{image}}[,{DockerKeyValueParser(default, self.controller).document(state)}]'
+
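+# Example (documentation only): after the enclosing TypeParser consumes the
+# `docker:` prefix, DockerParser above splits input such as `ubuntu2004,python=3.9`
+# into the image name (left side) and key/value options (right side); the
+# image name and version are illustrative.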
+
+class PosixRemoteParser(PairParser):
+ """Composite argument parser for a POSIX remote host."""
+ def __init__(self, controller): # type: (bool) -> None
+ self.controller = controller
+
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return PosixRemoteConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(REMOTE_COMPLETION, controller_only=self.controller))))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return PosixRemoteKeyValueParser(choice, self.controller)
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state) # type: PosixRemoteConfig
+
+ if not value.python and not get_remote_pythons(value.name, self.controller, True):
+ raise ParserError(f'Python version required for remote: {value.name}')
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ default = get_fallback_remote_controller()
+ content = '\n'.join([f' {name} ({", ".join(get_remote_pythons(name, self.controller, False))})'
+ for name, item in filter_completion(REMOTE_COMPLETION, controller_only=self.controller).items()])
+
+ content += '\n'.join([
+ '',
+ ' {platform}/{version} # python must be specified for unknown systems',
+ ])
+
+ state.sections[f'{"controller" if self.controller else "target"} remote systems and supported python versions (choose one):'] = content
+
+ return f'{{system}}[,{PosixRemoteKeyValueParser(default, self.controller).document(state)}]'
+
+
+class WindowsRemoteParser(PairParser):
+ """Composite argument parser for a Windows remote host."""
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return WindowsRemoteConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ names = list(filter_completion(WINDOWS_COMPLETION))
+
+ for target in state.root_namespace.targets or []: # type: WindowsRemoteConfig
+ names.remove(target.name)
+
+ return NamespaceWrappedParser('name', PlatformParser(names))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return WindowsRemoteKeyValueParser()
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ content = '\n'.join([f' {name}' for name, item in filter_completion(WINDOWS_COMPLETION).items()])
+
+ content += '\n'.join([
+ '',
+ ' windows/{version} # use an unknown windows version',
+ ])
+
+ state.sections['target remote systems (choose one):'] = content
+
+ return f'{{system}}[,{WindowsRemoteKeyValueParser().document(state)}]'
+
+
+class NetworkRemoteParser(PairParser):
+ """Composite argument parser for a network remote host."""
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return NetworkRemoteConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ names = list(filter_completion(NETWORK_COMPLETION))
+
+ for target in state.root_namespace.targets or []: # type: NetworkRemoteConfig
+ names.remove(target.name)
+
+ return NamespaceWrappedParser('name', PlatformParser(names))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return NetworkRemoteKeyValueParser()
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ content = '\n'.join([f' {name}' for name, item in filter_completion(NETWORK_COMPLETION).items()])
+
+ content += '\n'.join([
+ '',
+ ' {platform}/{version} # use an unknown platform and version',
+ ])
+
+ state.sections['target remote systems (choose one):'] = content
+
+ return f'{{system}}[,{NetworkRemoteKeyValueParser().document(state)}]'
+
+
+class WindowsInventoryParser(PairParser):
+ """Composite argument parser for a Windows inventory."""
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return WindowsInventoryConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('path', FileParser())
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return EmptyKeyValueParser()
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '{path} # INI format inventory file'
+
+
+class NetworkInventoryParser(PairParser):
+ """Composite argument parser for a network inventory."""
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return NetworkInventoryConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('path', FileParser())
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return EmptyKeyValueParser()
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '{path} # INI format inventory file'
+
+
+class PosixSshParser(PairParser):
+ """Composite argument parser for a POSIX SSH host."""
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return PosixSshConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return SshConnectionParser()
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return PosixSshKeyValueParser()
+
+ @property
+ def required(self): # type: () -> bool
+ """True if the delimiter (and thus right parser) is required, otherwise False."""
+ return True
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return f'{SshConnectionParser().document(state)}[,{PosixSshKeyValueParser().document(state)}]'
diff --git a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
new file mode 100644
index 00000000..b22705f7
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
@@ -0,0 +1,213 @@
+"""Composite argument key-value parsers used by other parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_PROVIDERS,
+ SECCOMP_CHOICES,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...host_configs import (
+ OriginConfig,
+)
+
+from ..argparsing.parsers import (
+ AnyParser,
+ BooleanParser,
+ ChoicesParser,
+ DocumentationState,
+ IntegerParser,
+ KeyValueParser,
+ Parser,
+ ParserState,
+)
+
+from .value_parsers import (
+ PythonParser,
+)
+
+from .helpers import (
+ get_controller_pythons,
+ get_remote_pythons,
+ get_docker_pythons,
+)
+
+
+class OriginKeyValueParser(KeyValueParser):
+ """Composite argument parser for origin key/value pairs."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ versions = CONTROLLER_PYTHON_VERSIONS
+
+ return dict(
+ python=PythonParser(versions=versions, allow_venv=True, allow_default=True),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ python_parser = PythonParser(versions=CONTROLLER_PYTHON_VERSIONS, allow_venv=True, allow_default=True)
+
+ section_name = 'origin options'
+
+ state.sections[f'controller {section_name} (comma separated):'] = '\n'.join([
+ f' python={python_parser.document(state)}',
+ ])
+
+ return f'{{{section_name}}} # default'
+
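+# Example (documentation only): `origin:python=venv/3.9` selects an
+# ansible-test managed virtual environment based on Python 3.9 for the origin
+# controller; the version is illustrative.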
+
+class ControllerKeyValueParser(KeyValueParser):
+ """Composite argument parser for controller key/value pairs."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ versions = get_controller_pythons(state.root_namespace.controller, False)
+ allow_default = bool(get_controller_pythons(state.root_namespace.controller, True))
+ allow_venv = isinstance(state.root_namespace.controller, OriginConfig) or not state.root_namespace.controller
+
+ return dict(
+ python=PythonParser(versions=versions, allow_venv=allow_venv, allow_default=allow_default),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section_name = 'controller options'
+
+ state.sections[f'target {section_name} (comma separated):'] = '\n'.join([
+ f' python={PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=True).document(state)} # non-origin controller',
+ f' python={PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=True, allow_default=True).document(state)} # origin controller',
+ ])
+
+ return f'{{{section_name}}} # default'
+
+
+class DockerKeyValueParser(KeyValueParser):
+ """Composite argument parser for docker key/value pairs."""
+ def __init__(self, image, controller): # type: (str, bool) -> None
+ self.controller = controller
+ self.versions = get_docker_pythons(image, controller, False)
+ self.allow_default = bool(get_docker_pythons(image, controller, True))
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return dict(
+ python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
+ seccomp=ChoicesParser(SECCOMP_CHOICES),
+ privileged=BooleanParser(),
+ memory=IntegerParser(),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)
+
+ section_name = 'docker options'
+
+ state.sections[f'{"controller" if self.controller else "target"} {section_name} (comma separated):'] = '\n'.join([
+ f' python={python_parser.document(state)}',
+ f' seccomp={ChoicesParser(SECCOMP_CHOICES).document(state)}',
+ f' privileged={BooleanParser().document(state)}',
+ f' memory={IntegerParser().document(state)} # bytes',
+ ])
+
+ return f'{{{section_name}}}'
+
+
+class PosixRemoteKeyValueParser(KeyValueParser):
+ """Composite argument parser for POSIX remote key/value pairs."""
+ def __init__(self, name, controller): # type: (str, bool) -> None
+ self.controller = controller
+ self.versions = get_remote_pythons(name, controller, False)
+ self.allow_default = bool(get_remote_pythons(name, controller, True))
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return dict(
+ provider=ChoicesParser(REMOTE_PROVIDERS),
+ python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)
+
+ section_name = 'remote options'
+
+ state.sections[f'{"controller" if self.controller else "target"} {section_name} (comma separated):'] = '\n'.join([
+ f' provider={ChoicesParser(REMOTE_PROVIDERS).document(state)}',
+ f' python={python_parser.document(state)}',
+ ])
+
+ return f'{{{section_name}}}'
+
+
+class WindowsRemoteKeyValueParser(KeyValueParser):
+ """Composite argument parser for Windows remote key/value pairs."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return dict(
+ provider=ChoicesParser(REMOTE_PROVIDERS),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section_name = 'remote options'
+
+ state.sections[f'target {section_name} (comma separated):'] = '\n'.join([
+ f' provider={ChoicesParser(REMOTE_PROVIDERS).document(state)}',
+ ])
+
+ return f'{{{section_name}}}'
+
+
+class NetworkRemoteKeyValueParser(KeyValueParser):
+ """Composite argument parser for network remote key/value pairs."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return dict(
+ provider=ChoicesParser(REMOTE_PROVIDERS),
+ collection=AnyParser(),
+ connection=AnyParser(),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section_name = 'remote options'
+
+ state.sections[f'target {section_name} (comma separated):'] = '\n'.join([
+ f' provider={ChoicesParser(REMOTE_PROVIDERS).document(state)}',
+ ' collection={collection}',
+ ' connection={connection}',
+ ])
+
+ return f'{{{section_name}}}'
+
+
+class PosixSshKeyValueParser(KeyValueParser):
+ """Composite argument parser for POSIX SSH host key/value pairs."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return dict(
+ python=PythonParser(versions=list(SUPPORTED_PYTHON_VERSIONS), allow_venv=False, allow_default=False),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ python_parser = PythonParser(versions=SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=False)
+
+ section_name = 'ssh options'
+
+ state.sections[f'target {section_name} (comma separated):'] = '\n'.join([
+ f' python={python_parser.document(state)}',
+ ])
+
+ return f'{{{section_name}}}'
+
+
+class EmptyKeyValueParser(KeyValueParser):
+ """Composite argument parser when a key/value parser is required but there are no keys available."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+ return {}
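+
+
+# Example (documentation only): the key/value portion of a docker host, such as
+# `python=3.9,seccomp=unconfined,privileged=true,memory=4294967296`, is handled
+# by DockerKeyValueParser, each key being dispatched to the matching value
+# parser from get_parsers(); the values shown are illustrative.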
diff --git a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
new file mode 100644
index 00000000..1aae8821
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
@@ -0,0 +1,172 @@
+"""Composite argument value parsers used by other parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...host_configs import (
+ NativePythonConfig,
+ VirtualPythonConfig,
+)
+
+from ..argparsing.parsers import (
+ AbsolutePathParser,
+ AnyParser,
+ ChoicesParser,
+ DocumentationState,
+ IntegerParser,
+ MatchConditions,
+ Parser,
+ ParserError,
+ ParserState,
+)
+
+
+class PythonParser(Parser):
+ """
+ Composite argument parser for Python versions, with support for specifying paths and using virtual environments.
+
+ Allowed formats:
+
+ {version}
+ venv/{version}
+ venv/system-site-packages/{version}
+
+ The `{version}` has two possible formats:
+
+ X.Y
+ X.Y@{path}
+
+ Where `X.Y` is the Python major and minor version number and `{path}` is an absolute path with one of the following formats:
+
+ /path/to/python
+ /path/to/python/directory/
+
+ When a trailing slash is present, it is considered a directory, and `python{version}` will be appended to it automatically.
+
+ The default path depends on the context:
+
+ - Known docker/remote environments can declare their own path.
+ - The origin host uses `sys.executable` if `{version}` matches the current version in `sys.version_info`.
+ - The origin host (as a controller or target) uses the `$PATH` environment variable to find `python{version}`.
+ - As a fallback/default, the path `/usr/bin/python{version}` is used.
+
+ NOTE: The Python path determines where to find the Python interpreter.
+ In the case of an ansible-test managed virtual environment, that Python interpreter will be used to create the virtual environment.
+ So the path given will not be the one actually used for the controller or target.
+
+ Known docker/remote environments limit the available Python versions to configured values known to be valid.
+ The origin host and unknown environments assume all relevant Python versions are available.
+ """
+ def __init__(self,
+ versions, # type: t.List[str]
+ *,
+ allow_default, # type: bool
+ allow_venv, # type: bool
+ ):
+ version_choices = list(versions)
+
+ if allow_default:
+ version_choices.append('default')
+
+ first_choices = list(version_choices)
+
+ if allow_venv:
+ first_choices.append('venv/')
+
+ venv_choices = list(version_choices) + ['system-site-packages/']
+
+ self.versions = versions
+ self.allow_default = allow_default
+ self.allow_venv = allow_venv
+ self.version_choices = version_choices
+ self.first_choices = first_choices
+ self.venv_choices = venv_choices
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ with state.delimit('@/', required=False) as boundary:
+ version = ChoicesParser(self.first_choices).parse(state)
+
+ if version == 'venv':
+ with state.delimit('@/', required=False) as boundary:
+ version = ChoicesParser(self.venv_choices).parse(state)
+
+ if version == 'system-site-packages':
+ system_site_packages = True
+
+ with state.delimit('@', required=False) as boundary:
+ version = ChoicesParser(self.version_choices).parse(state)
+ else:
+ system_site_packages = False
+
+ python = VirtualPythonConfig(version=version, system_site_packages=system_site_packages)
+ else:
+ python = NativePythonConfig(version=version)
+
+ if boundary.match == '@':
+ # FUTURE: For OriginConfig or ControllerConfig->OriginConfig the path could be validated with an absolute path parser (file or directory).
+ python.path = AbsolutePathParser().parse(state)
+
+ return python
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+
+ docs = '[venv/[system-site-packages/]]' if self.allow_venv else ''
+
+ if self.versions:
+ docs += '|'.join(self.version_choices)
+ else:
+ docs += '{X.Y}'
+
+ docs += '[@{path|dir/}]'
+
+ return docs
+
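+# Example (documentation only): assuming '3.9' is among the configured versions
+# for PythonParser above, '3.9' parses to NativePythonConfig(version='3.9'),
+# '3.9@/usr/bin/python3.9' additionally sets the interpreter path, and
+# 'venv/system-site-packages/3.9' yields a VirtualPythonConfig with
+# system_site_packages=True.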
+
+class PlatformParser(ChoicesParser):
+ """Composite argument parser for "{platform}/{version}" formatted choices."""
+ def __init__(self, choices): # type: (t.List[str]) -> None
+ super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY)
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state)
+
+ if len(value.split('/')) != 2:
+ raise ParserError(f'invalid platform format: {value}')
+
+ return value
+
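+# Example (documentation only): PlatformParser above accepts 'rhel/8.4' as
+# {platform}/{version}, while a bare 'rhel' fails the split check and raises
+# ParserError; the platform name is illustrative.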
+
+class SshConnectionParser(Parser):
+ """
+ Composite argument parser for connecting to a host using SSH.
+ Format: user@host[:port]
+ """
+ EXPECTED_FORMAT = '{user}@{host}[:{port}]'
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = state.current_namespace
+
+ with state.delimit('@'):
+ user = AnyParser(no_match_message=f'Expected {{user}} from: {self.EXPECTED_FORMAT}').parse(state)
+
+ setattr(namespace, 'user', user)
+
+ with state.delimit(':', required=False) as colon:
+ host = AnyParser(no_match_message=f'Expected {{host}} from: {self.EXPECTED_FORMAT}').parse(state)
+
+ setattr(namespace, 'host', host)
+
+ if colon.match:
+ port = IntegerParser(65535).parse(state)
+ setattr(namespace, 'port', port)
+
+ return namespace
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return self.EXPECTED_FORMAT
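+
+
+# Example (documentation only): parsing 'root@example.com:2222' sets
+# user='root' and host='example.com' on the current namespace and, because the
+# ':' delimiter matched, parses 2222 as the port via IntegerParser(65535).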
diff --git a/test/lib/ansible_test/_internal/cloud/acme.py b/test/lib/ansible_test/_internal/cloud/acme.py
deleted file mode 100644
index 3d0ace24..00000000
--- a/test/lib/ansible_test/_internal/cloud/acme.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""ACME plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import time
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- find_executable,
- display,
- ApplicationError,
- SubprocessError,
-)
-
-from ..http import (
- HttpClient,
-)
-
-from ..docker_util import (
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- get_docker_container_id,
- get_docker_hostname,
- get_docker_container_ip,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
-)
-
-
-class ACMEProvider(CloudProvider):
- """ACME plugin. Sets up cloud resources for tests."""
- DOCKER_SIMULATOR_NAME = 'acme-simulator'
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(ACMEProvider, self).__init__(args)
-
- # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
- if os.environ.get('ANSIBLE_ACME_CONTAINER'):
- self.image = os.environ.get('ANSIBLE_ACME_CONTAINER')
- else:
- self.image = 'quay.io/ansible/acme-test-container:2.0.0'
- self.container_name = ''
-
- def _wait_for_service(self, protocol, acme_host, port, local_part, name):
- """Wait for an endpoint to accept connections."""
- if self.args.explain:
- return
-
- client = HttpClient(self.args, always=True, insecure=True)
- endpoint = '%s://%s:%d/%s' % (protocol, acme_host, port, local_part)
-
- for dummy in range(1, 30):
- display.info('Waiting for %s: %s' % (name, endpoint), verbosity=1)
-
- try:
- client.get(endpoint)
- return
- except SubprocessError:
- pass
-
- time.sleep(1)
-
- raise ApplicationError('Timeout waiting for %s.' % name)
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- docker = find_executable('docker', required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require the "docker" command: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(ACMEProvider, self).setup()
-
- if self._use_static_config():
- self._setup_static()
- else:
- self._setup_dynamic()
-
- def get_docker_run_options(self):
- """Get any additional options needed when delegating tests to a docker container.
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_SIMULATOR_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the cloud resource and any temporary configuration files after tests complete."""
- if self.container_name:
- docker_rm(self.args, self.container_name)
-
- super(ACMEProvider, self).cleanup()
-
- def _setup_dynamic(self):
- """Create a ACME test container using docker."""
- container_id = get_docker_container_id()
-
- self.container_name = self.DOCKER_SIMULATOR_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0].get('State', {}).get('Running'):
- docker_rm(self.args, self.container_name)
- results = []
-
- if results:
- display.info('Using the existing ACME docker test container.', verbosity=1)
- else:
- display.info('Starting a new ACME docker test container.', verbosity=1)
-
- if not container_id:
- # publish the simulator ports when not running inside docker
- publish_ports = [
- '-p', '5000:5000', # control port for flask app in container
- '-p', '14000:14000', # Pebble ACME CA
- ]
- else:
- publish_ports = []
-
- if not os.environ.get('ANSIBLE_ACME_CONTAINER'):
- docker_pull(self.args, self.image)
-
- docker_run(
- self.args,
- self.image,
- ['-d', '--name', self.container_name] + publish_ports,
- )
-
- if self.args.docker:
- acme_host = self.DOCKER_SIMULATOR_NAME
- elif container_id:
- acme_host = self._get_simulator_address()
- display.info('Found ACME test container address: %s' % acme_host, verbosity=1)
- else:
- acme_host = get_docker_hostname()
-
- if container_id:
- acme_host_ip = self._get_simulator_address()
- else:
- acme_host_ip = get_docker_hostname()
-
- self._set_cloud_config('acme_host', acme_host)
-
- self._wait_for_service('http', acme_host_ip, 5000, '', 'ACME controller')
- self._wait_for_service('https', acme_host_ip, 14000, 'dir', 'ACME CA endpoint')
-
- def _get_simulator_address(self):
- return get_docker_container_ip(self.args, self.container_name)
-
- def _setup_static(self):
- raise NotImplementedError()
-
-
-class ACMEEnvironment(CloudEnvironment):
- """ACME environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- ansible_vars = dict(
- acme_host=self._get_cloud_config('acme_host'),
- )
-
- return CloudEnvironmentConfig(
- ansible_vars=ansible_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/cloudscale.py b/test/lib/ansible_test/_internal/cloud/cloudscale.py
deleted file mode 100644
index 8e5885b2..00000000
--- a/test/lib/ansible_test/_internal/cloud/cloudscale.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# (c) 2018, Gaudenz Steinlin <gaudenz.steinlin@cloudscale.ch>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-"""Cloudscale plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import ConfigParser, display
-
-
-class CloudscaleCloudProvider(CloudProvider):
- """Cloudscale cloud provider plugin. Sets up cloud resources before
- delegation.
- """
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(CloudscaleCloudProvider, self).__init__(args)
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
-
- super(CloudscaleCloudProvider, self).filter(targets, exclude)
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(CloudscaleCloudProvider, self).setup()
-
- if os.path.isfile(self.config_static_path):
- display.info('Using existing %s cloud config: %s'
- % (self.platform, self.config_static_path),
- verbosity=1)
- self.config_path = self.config_static_path
- self.managed = False
-
-
-class CloudscaleCloudEnvironment(CloudEnvironment):
- """Cloudscale cloud environment plugin. Updates integration test environment
- after delegation.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
- parser.read(self.config_path)
-
- env_vars = dict(
- CLOUDSCALE_API_TOKEN=parser.get('default', 'cloudscale_api_token'),
- )
-
- display.sensitive.add(env_vars['CLOUDSCALE_API_TOKEN'])
-
- ansible_vars = dict(
- cloudscale_resource_prefix=self.resource_prefix,
- )
-
- ansible_vars.update(dict((key.lower(), value) for key, value in env_vars.items()))
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- ansible_vars=ansible_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/cs.py b/test/lib/ansible_test/_internal/cloud/cs.py
deleted file mode 100644
index 1f30b984..00000000
--- a/test/lib/ansible_test/_internal/cloud/cs.py
+++ /dev/null
@@ -1,299 +0,0 @@
-"""CloudStack plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import json
-import os
-import re
-import time
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- find_executable,
- ApplicationError,
- display,
- SubprocessError,
- ConfigParser,
-)
-
-from ..http import (
- HttpClient,
- HttpError,
- urlparse,
-)
-
-from ..docker_util import (
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- docker_network_inspect,
- docker_exec,
- get_docker_container_id,
- get_docker_preferred_network_name,
- get_docker_hostname,
- is_docker_user_defined_network,
-)
-
-
-class CsCloudProvider(CloudProvider):
- """CloudStack cloud provider plugin. Sets up cloud resources before delegation."""
- DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(CsCloudProvider, self).__init__(args)
-
- self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0')
- self.container_name = ''
- self.endpoint = ''
- self.host = ''
- self.port = 0
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
-
- docker = find_executable('docker', required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require the "docker" command or config (see "%s"): %s'
- % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(CsCloudProvider, self).setup()
-
- if self._use_static_config():
- self._setup_static()
- else:
- self._setup_dynamic()
-
- def get_remote_ssh_options(self):
- """Get any additional options needed when delegating tests to a remote instance via SSH.
- :rtype: list[str]
- """
- if self.managed:
- return ['-R', '8888:%s:8888' % get_docker_hostname()]
-
- return []
-
- def get_docker_run_options(self):
- """Get any additional options needed when delegating tests to a docker container.
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_SIMULATOR_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the cloud resource and any temporary configuration files after tests complete."""
- if self.container_name:
- if self.ci_provider.code:
- docker_rm(self.args, self.container_name)
- elif not self.args.explain:
- display.notice('Remember to run `docker rm -f %s` when finished testing.' % self.container_name)
-
- super(CsCloudProvider, self).cleanup()
-
- def _setup_static(self):
- """Configure CloudStack tests for use with static configuration."""
- parser = ConfigParser()
- parser.read(self.config_static_path)
-
- self.endpoint = parser.get('cloudstack', 'endpoint')
-
- parts = urlparse(self.endpoint)
-
- self.host = parts.hostname
-
- if not self.host:
- raise ApplicationError('Could not determine host from endpoint: %s' % self.endpoint)
-
- if parts.port:
- self.port = parts.port
- elif parts.scheme == 'http':
- self.port = 80
- elif parts.scheme == 'https':
- self.port = 443
- else:
- raise ApplicationError('Could not determine port from endpoint: %s' % self.endpoint)
-
- display.info('Read cs host "%s" and port %d from config: %s' % (self.host, self.port, self.config_static_path), verbosity=1)
-
- self._wait_for_service()
-
- def _setup_dynamic(self):
- """Create a CloudStack simulator using docker."""
- config = self._read_config_template()
-
- self.container_name = self.DOCKER_SIMULATOR_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0]['State']['Running']:
- docker_rm(self.args, self.container_name)
- results = []
-
- if results:
- display.info('Using the existing CloudStack simulator docker container.', verbosity=1)
- else:
- display.info('Starting a new CloudStack simulator docker container.', verbosity=1)
- docker_pull(self.args, self.image)
- docker_run(self.args, self.image, ['-d', '-p', '8888:8888', '--name', self.container_name])
-
- # apply work-around for OverlayFS issue
- # https://github.com/docker/for-linux/issues/72#issuecomment-319904698
- docker_exec(self.args, self.container_name, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'])
-
- if not self.args.explain:
- display.notice('The CloudStack simulator will probably be ready in 2 - 4 minutes.')
-
- container_id = get_docker_container_id()
-
- if container_id:
- self.host = self._get_simulator_address()
- display.info('Found CloudStack simulator container address: %s' % self.host, verbosity=1)
- else:
- self.host = get_docker_hostname()
-
- self.port = 8888
- self.endpoint = 'http://%s:%d' % (self.host, self.port)
-
- self._wait_for_service()
-
- if self.args.explain:
- values = dict(
- HOST=self.host,
- PORT=str(self.port),
- )
- else:
- credentials = self._get_credentials()
-
- if self.args.docker:
- host = self.DOCKER_SIMULATOR_NAME
- elif self.args.remote:
- host = 'localhost'
- else:
- host = self.host
-
- values = dict(
- HOST=host,
- PORT=str(self.port),
- KEY=credentials['apikey'],
- SECRET=credentials['secretkey'],
- )
-
- display.sensitive.add(values['SECRET'])
-
- config = self._populate_config_template(config, values)
-
- self._write_config(config)
-
- def _get_simulator_address(self):
- current_network = get_docker_preferred_network_name(self.args)
- networks = docker_network_inspect(self.args, current_network)
-
- try:
- network = [network for network in networks if network['Name'] == current_network][0]
- containers = network['Containers']
- container = [containers[container] for container in containers if containers[container]['Name'] == self.DOCKER_SIMULATOR_NAME][0]
- return re.sub(r'/[0-9]+$', '', container['IPv4Address'])
- except Exception:
- display.error('Failed to process the following docker network inspect output:\n%s' %
- json.dumps(networks, indent=4, sort_keys=True))
- raise
-
- def _wait_for_service(self):
- """Wait for the CloudStack service endpoint to accept connections."""
- if self.args.explain:
- return
-
- client = HttpClient(self.args, always=True)
- endpoint = self.endpoint
-
- for _iteration in range(1, 30):
- display.info('Waiting for CloudStack service: %s' % endpoint, verbosity=1)
-
- try:
- client.get(endpoint)
- return
- except SubprocessError:
- pass
-
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for CloudStack service.')
-
- def _get_credentials(self):
- """Wait for the CloudStack simulator to return credentials.
- :rtype: dict[str, str]
- """
- client = HttpClient(self.args, always=True)
- endpoint = '%s/admin.json' % self.endpoint
-
- for _iteration in range(1, 30):
- display.info('Waiting for CloudStack credentials: %s' % endpoint, verbosity=1)
-
- response = client.get(endpoint)
-
- if response.status_code == 200:
- try:
- return response.json()
- except HttpError as ex:
- display.error(ex)
-
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for CloudStack credentials.')
-
-
-class CsCloudEnvironment(CloudEnvironment):
- """CloudStack cloud environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
- parser.read(self.config_path)
-
- config = dict(parser.items('default'))
-
- env_vars = dict(
- CLOUDSTACK_ENDPOINT=config['endpoint'],
- CLOUDSTACK_KEY=config['key'],
- CLOUDSTACK_SECRET=config['secret'],
- CLOUDSTACK_TIMEOUT=config['timeout'],
- )
-
- display.sensitive.add(env_vars['CLOUDSTACK_SECRET'])
-
- ansible_vars = dict(
- cs_resource_prefix=self.resource_prefix,
- )
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- ansible_vars=ansible_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/foreman.py b/test/lib/ansible_test/_internal/cloud/foreman.py
deleted file mode 100644
index 7517f1f6..00000000
--- a/test/lib/ansible_test/_internal/cloud/foreman.py
+++ /dev/null
@@ -1,191 +0,0 @@
-"""Foreman plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- find_executable,
- display,
-)
-
-from ..docker_util import (
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- get_docker_container_id,
- get_docker_hostname,
- get_docker_container_ip,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
-)
-
-
-class ForemanProvider(CloudProvider):
- """Foreman plugin.
-
- Sets up Foreman stub server for tests.
- """
-
- DOCKER_SIMULATOR_NAME = 'foreman-stub'
-
- DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0'
- """Default image to run Foreman stub from.
-
- The simulator must be pinned to a specific version
- to guarantee CI passes with the version used.
-
- Its source itself resides at:
- https://github.com/ansible/foreman-test-container
- """
-
- def __init__(self, args):
- """Set up container references for provider.
-
- :type args: TestConfig
- """
- super(ForemanProvider, self).__init__(args)
-
- self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER')
- """Overrides target container, might be used for development.
-
- Use ANSIBLE_FRMNSIM_CONTAINER=whatever_you_want if you want
- to use other image. Omit/empty otherwise.
- """
-
- self.image = self.__container_from_env or self.DOCKER_IMAGE
- self.container_name = ''
-
- def filter(self, targets, exclude):
- """Filter out the tests with the necessary config and res unavailable.
-
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- docker_cmd = 'docker'
- docker = find_executable(docker_cmd, required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning(
- 'Excluding tests marked "%s" '
- 'which require the "%s" command: %s'
- % (skip.rstrip('/'), docker_cmd, ', '.join(skipped))
- )
-
- def setup(self):
- """Setup cloud resource before delegation and reg cleanup callback."""
- super(ForemanProvider, self).setup()
-
- if self._use_static_config():
- self._setup_static()
- else:
- self._setup_dynamic()
-
- def get_docker_run_options(self):
- """Get additional options needed when delegating tests to a container.
-
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_SIMULATOR_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the resource and temporary configs files after tests."""
- if self.container_name:
- docker_rm(self.args, self.container_name)
-
- super(ForemanProvider, self).cleanup()
-
- def _setup_dynamic(self):
- """Spawn a Foreman stub within docker container."""
- foreman_port = 8080
- container_id = get_docker_container_id()
-
- self.container_name = self.DOCKER_SIMULATOR_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0].get('State', {}).get('Running'):
- docker_rm(self.args, self.container_name)
- results = []
-
- display.info(
- '%s Foreman simulator docker container.'
- % ('Using the existing' if results else 'Starting a new'),
- verbosity=1,
- )
-
- if not results:
- if self.args.docker or container_id:
- publish_ports = []
- else:
- # publish the simulator ports when not running inside docker
- publish_ports = [
- '-p', ':'.join((str(foreman_port), ) * 2),
- ]
-
- if not self.__container_from_env:
- docker_pull(self.args, self.image)
-
- docker_run(
- self.args,
- self.image,
- ['-d', '--name', self.container_name] + publish_ports,
- )
-
- if self.args.docker:
- foreman_host = self.DOCKER_SIMULATOR_NAME
- elif container_id:
- foreman_host = self._get_simulator_address()
- display.info(
- 'Found Foreman simulator container address: %s'
- % foreman_host, verbosity=1
- )
- else:
- foreman_host = get_docker_hostname()
-
- self._set_cloud_config('FOREMAN_HOST', foreman_host)
- self._set_cloud_config('FOREMAN_PORT', str(foreman_port))
-
- def _get_simulator_address(self):
- return get_docker_container_ip(self.args, self.container_name)
-
- def _setup_static(self):
- raise NotImplementedError
-
-
-class ForemanEnvironment(CloudEnvironment):
- """Foreman environment plugin.
-
- Updates integration test environment after delegation.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- env_vars = dict(
- FOREMAN_HOST=self._get_cloud_config('FOREMAN_HOST'),
- FOREMAN_PORT=self._get_cloud_config('FOREMAN_PORT'),
- )
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/nios.py b/test/lib/ansible_test/_internal/cloud/nios.py
deleted file mode 100644
index b9a1a4e4..00000000
--- a/test/lib/ansible_test/_internal/cloud/nios.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""NIOS plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- find_executable,
- display,
-)
-
-from ..docker_util import (
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- get_docker_container_id,
- get_docker_hostname,
- get_docker_container_ip,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
-)
-
-
-class NiosProvider(CloudProvider):
- """Nios plugin.
-
- Sets up NIOS mock server for tests.
- """
-
- DOCKER_SIMULATOR_NAME = 'nios-simulator'
-
- DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.3.0'
- """Default image to run the nios simulator.
-
- The simulator must be pinned to a specific version
- to guarantee CI passes with the version used.
-
- Its source resides at:
- https://github.com/ansible/nios-test-container
- """
-
- def __init__(self, args):
- """Set up container references for provider.
-
- :type args: TestConfig
- """
- super(NiosProvider, self).__init__(args)
-
- self.__container_from_env = os.environ.get('ANSIBLE_NIOSSIM_CONTAINER')
- """Overrides target container, might be used for development.
-
- Use ANSIBLE_NIOSSIM_CONTAINER=whatever_you_want if you want
- to use other image. Omit/empty otherwise.
- """
-
- self.image = self.__container_from_env or self.DOCKER_IMAGE
- self.container_name = ''
-
- def filter(self, targets, exclude):
- """Filter out the tests with the necessary config and res unavailable.
-
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- docker_cmd = 'docker'
- docker = find_executable(docker_cmd, required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning(
- 'Excluding tests marked "%s" '
- 'which require the "%s" command: %s'
- % (skip.rstrip('/'), docker_cmd, ', '.join(skipped))
- )
-
- def setup(self):
- """Setup cloud resource before delegation and reg cleanup callback."""
- super(NiosProvider, self).setup()
-
- if self._use_static_config():
- self._setup_static()
- else:
- self._setup_dynamic()
-
- def get_docker_run_options(self):
- """Get additional options needed when delegating tests to a container.
-
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_SIMULATOR_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the resource and temporary configs files after tests."""
- if self.container_name:
- docker_rm(self.args, self.container_name)
-
- super(NiosProvider, self).cleanup()
-
- def _setup_dynamic(self):
- """Spawn a NIOS simulator within docker container."""
- nios_port = 443
- container_id = get_docker_container_id()
-
- self.container_name = self.DOCKER_SIMULATOR_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0].get('State', {}).get('Running'):
- docker_rm(self.args, self.container_name)
- results = []
-
- display.info(
- '%s NIOS simulator docker container.'
- % ('Using the existing' if results else 'Starting a new'),
- verbosity=1,
- )
-
- if not results:
- if self.args.docker or container_id:
- publish_ports = []
- else:
- # publish the simulator ports when not running inside docker
- publish_ports = [
- '-p', ':'.join((str(nios_port), ) * 2),
- ]
-
- if not self.__container_from_env:
- docker_pull(self.args, self.image)
-
- docker_run(
- self.args,
- self.image,
- ['-d', '--name', self.container_name] + publish_ports,
- )
-
- if self.args.docker:
- nios_host = self.DOCKER_SIMULATOR_NAME
- elif container_id:
- nios_host = self._get_simulator_address()
- display.info(
- 'Found NIOS simulator container address: %s'
- % nios_host, verbosity=1
- )
- else:
- nios_host = get_docker_hostname()
-
- self._set_cloud_config('NIOS_HOST', nios_host)
-
- def _get_simulator_address(self):
- return get_docker_container_ip(self.args, self.container_name)
-
- def _setup_static(self):
- raise NotImplementedError
-
-
-class NiosEnvironment(CloudEnvironment):
- """NIOS environment plugin.
-
- Updates integration test environment after delegation.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- ansible_vars = dict(
- nios_provider=dict(
- host=self._get_cloud_config('NIOS_HOST'),
- username='admin',
- password='infoblox',
- ),
- )
-
- return CloudEnvironmentConfig(
- ansible_vars=ansible_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/openshift.py b/test/lib/ansible_test/_internal/cloud/openshift.py
deleted file mode 100644
index 450816bf..00000000
--- a/test/lib/ansible_test/_internal/cloud/openshift.py
+++ /dev/null
@@ -1,236 +0,0 @@
-"""OpenShift plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import json
-import os
-import re
-import time
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..io import (
- read_text_file,
-)
-
-from ..util import (
- find_executable,
- ApplicationError,
- display,
- SubprocessError,
-)
-
-from ..http import (
- HttpClient,
-)
-
-from ..docker_util import (
- docker_exec,
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- docker_network_inspect,
- get_docker_container_id,
- get_docker_preferred_network_name,
- get_docker_hostname,
- is_docker_user_defined_network,
-)
-
-
-class OpenShiftCloudProvider(CloudProvider):
- """OpenShift cloud provider plugin. Sets up cloud resources before delegation."""
- DOCKER_CONTAINER_NAME = 'openshift-origin'
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(OpenShiftCloudProvider, self).__init__(args, config_extension='.kubeconfig')
-
- # The image must be pinned to a specific version to guarantee CI passes with the version used.
- self.image = 'openshift/origin:v3.9.0'
- self.container_name = ''
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
-
- docker = find_executable('docker', required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require the "docker" command or config (see "%s"): %s'
- % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(OpenShiftCloudProvider, self).setup()
-
- if self._use_static_config():
- self._setup_static()
- else:
- self._setup_dynamic()
-
- def get_remote_ssh_options(self):
- """Get any additional options needed when delegating tests to a remote instance via SSH.
- :rtype: list[str]
- """
- if self.managed:
- return ['-R', '8443:%s:8443' % get_docker_hostname()]
-
- return []
-
- def get_docker_run_options(self):
- """Get any additional options needed when delegating tests to a docker container.
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_CONTAINER_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the cloud resource and any temporary configuration files after tests complete."""
- if self.container_name:
- docker_rm(self.args, self.container_name)
-
- super(OpenShiftCloudProvider, self).cleanup()
-
- def _setup_static(self):
- """Configure OpenShift tests for use with static configuration."""
- config = read_text_file(self.config_static_path)
-
- match = re.search(r'^ *server: (?P<server>.*)$', config, flags=re.MULTILINE)
-
- if match:
- endpoint = match.group('server')
- self._wait_for_service(endpoint)
- else:
- display.warning('Could not find OpenShift endpoint in kubeconfig. Skipping check for OpenShift service availability.')
-
- def _setup_dynamic(self):
- """Create a OpenShift container using docker."""
- self.container_name = self.DOCKER_CONTAINER_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0]['State']['Running']:
- docker_rm(self.args, self.container_name)
- results = []
-
- if results:
- display.info('Using the existing OpenShift docker container.', verbosity=1)
- else:
- display.info('Starting a new OpenShift docker container.', verbosity=1)
- docker_pull(self.args, self.image)
- cmd = ['start', 'master', '--listen', 'https://0.0.0.0:8443']
- docker_run(self.args, self.image, ['-d', '-p', '8443:8443', '--name', self.container_name], cmd)
-
- container_id = get_docker_container_id()
-
- if container_id:
- host = self._get_container_address()
- display.info('Found OpenShift container address: %s' % host, verbosity=1)
- else:
- host = get_docker_hostname()
-
- port = 8443
- endpoint = 'https://%s:%s/' % (host, port)
-
- self._wait_for_service(endpoint)
-
- if self.args.explain:
- config = '# Unknown'
- else:
- if self.args.docker:
- host = self.DOCKER_CONTAINER_NAME
- elif self.args.remote:
- host = 'localhost'
-
- server = 'https://%s:%s' % (host, port)
- config = self._get_config(server)
-
- self._write_config(config)
-
- def _get_container_address(self):
- current_network = get_docker_preferred_network_name(self.args)
- networks = docker_network_inspect(self.args, current_network)
-
- try:
- network = [network for network in networks if network['Name'] == current_network][0]
- containers = network['Containers']
- container = [containers[container] for container in containers if containers[container]['Name'] == self.DOCKER_CONTAINER_NAME][0]
- return re.sub(r'/[0-9]+$', '', container['IPv4Address'])
- except Exception:
- display.error('Failed to process the following docker network inspect output:\n%s' %
- json.dumps(networks, indent=4, sort_keys=True))
- raise
-
- def _wait_for_service(self, endpoint):
- """Wait for the OpenShift service endpoint to accept connections.
- :type endpoint: str
- """
- if self.args.explain:
- return
-
- client = HttpClient(self.args, always=True, insecure=True)
-
- for dummy in range(1, 30):
- display.info('Waiting for OpenShift service: %s' % endpoint, verbosity=1)
-
- try:
- client.get(endpoint)
- return
- except SubprocessError:
- pass
-
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for OpenShift service.')
-
- def _get_config(self, server):
- """Get OpenShift config from container.
- :type server: str
- :rtype: dict[str, str]
- """
- cmd = ['cat', '/var/lib/origin/openshift.local.config/master/admin.kubeconfig']
-
- stdout, dummy = docker_exec(self.args, self.container_name, cmd, capture=True)
-
- config = stdout
- config = re.sub(r'^( *)certificate-authority-data: .*$', r'\1insecure-skip-tls-verify: true', config, flags=re.MULTILINE)
- config = re.sub(r'^( *)server: .*$', r'\1server: %s' % server, config, flags=re.MULTILINE)
-
- return config
-
-
-class OpenShiftCloudEnvironment(CloudEnvironment):
- """OpenShift cloud environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- env_vars = dict(
- K8S_AUTH_KUBECONFIG=self.config_path,
- )
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- )
diff --git a/test/lib/ansible_test/_internal/cloud/scaleway.py b/test/lib/ansible_test/_internal/cloud/scaleway.py
deleted file mode 100644
index 22abe197..00000000
--- a/test/lib/ansible_test/_internal/cloud/scaleway.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""Scaleway plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- ConfigParser,
- display,
-)
-
-
-class ScalewayCloudProvider(CloudProvider):
- """Checks if a configuration file has been passed or fixtures are going to be used for testing"""
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(ScalewayCloudProvider, self).__init__(args)
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
-
- super(ScalewayCloudProvider, self).filter(targets, exclude)
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(ScalewayCloudProvider, self).setup()
-
- if os.path.isfile(self.config_static_path):
- self.config_path = self.config_static_path
- self.managed = False
-
-
-class ScalewayCloudEnvironment(CloudEnvironment):
- """
- Updates the integration test environment after delegation. Sets up the config file as a parameter.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
- parser.read(self.config_path)
-
- env_vars = dict(
- SCW_API_KEY=parser.get('default', 'key'),
- SCW_ORG=parser.get('default', 'org')
- )
-
- display.sensitive.add(env_vars['SCW_API_KEY'])
-
- ansible_vars = dict(
- scw_org=parser.get('default', 'org'),
- )
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- ansible_vars=ansible_vars,
- )
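
The static config consumed by ScalewayCloudEnvironment above is a plain ini file
with a [default] section. A sketch of the expected shape (the stdlib configparser is
used here for illustration; the option names come from the parser.get() calls above,
the values are invented):

    import io
    from configparser import ConfigParser

    parser = ConfigParser()
    parser.read_file(io.StringIO('[default]\nkey = EXAMPLE_API_KEY\norg = example-org\n'))

    assert parser.get('default', 'key') == 'EXAMPLE_API_KEY'
    assert parser.get('default', 'org') == 'example-org'
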
diff --git a/test/lib/ansible_test/_internal/cloud/vcenter.py b/test/lib/ansible_test/_internal/cloud/vcenter.py
deleted file mode 100644
index 3b38a19e..00000000
--- a/test/lib/ansible_test/_internal/cloud/vcenter.py
+++ /dev/null
@@ -1,232 +0,0 @@
-"""VMware vCenter plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- find_executable,
- display,
- ConfigParser,
- ApplicationError,
-)
-
-from ..docker_util import (
- docker_run,
- docker_rm,
- docker_inspect,
- docker_pull,
- get_docker_container_id,
- get_docker_hostname,
- get_docker_container_ip,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
-)
-
-
-class VcenterProvider(CloudProvider):
- """VMware vcenter/esx plugin. Sets up cloud resources for tests."""
- DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(VcenterProvider, self).__init__(args)
-
- # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
- if os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
- self.image = os.environ.get('ANSIBLE_VCSIM_CONTAINER')
- else:
- self.image = 'quay.io/ansible/vcenter-test-container:1.7.0'
- self.container_name = ''
-
- # VMware tests can be run on govcsim or BYO with a static config file.
- # The simulator is the default if no config is provided.
- self.vmware_test_platform = os.environ.get('VMWARE_TEST_PLATFORM', 'govcsim')
- self.insecure = False
- self.proxy = None
- self.platform = 'vcenter'
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if self.vmware_test_platform == 'govcsim' or (self.vmware_test_platform == '' and not os.path.isfile(self.config_static_path)):
- docker = find_executable('docker', required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
-
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require the "docker" command or config (see "%s"): %s'
- % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
- elif self.vmware_test_platform == 'static':
- if os.path.isfile(self.config_static_path):
- return
-
- super(VcenterProvider, self).filter(targets, exclude)
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(VcenterProvider, self).setup()
-
- self._set_cloud_config('vmware_test_platform', self.vmware_test_platform)
- if self.vmware_test_platform == 'govcsim':
- self._setup_dynamic_simulator()
- self.managed = True
- elif self.vmware_test_platform == 'static':
- self._use_static_config()
- self._setup_static()
- else:
- raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform)
-
- def get_docker_run_options(self):
- """Get any additional options needed when delegating tests to a docker container.
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if self.managed and not is_docker_user_defined_network(network):
- return ['--link', self.DOCKER_SIMULATOR_NAME]
-
- return []
-
- def cleanup(self):
- """Clean up the cloud resource and any temporary configuration files after tests complete."""
- if self.container_name:
- docker_rm(self.args, self.container_name)
-
- super(VcenterProvider, self).cleanup()
-
- def _setup_dynamic_simulator(self):
- """Create a vcenter simulator using docker."""
- container_id = get_docker_container_id()
-
- self.container_name = self.DOCKER_SIMULATOR_NAME
-
- results = docker_inspect(self.args, self.container_name)
-
- if results and not results[0].get('State', {}).get('Running'):
- docker_rm(self.args, self.container_name)
- results = []
-
- if results:
- display.info('Using the existing vCenter simulator docker container.', verbosity=1)
- else:
- display.info('Starting a new vCenter simulator docker container.', verbosity=1)
-
- if not self.args.docker and not container_id:
- # publish the simulator ports when not running inside docker
- publish_ports = [
- '-p', '1443:443',
- '-p', '8080:8080',
- '-p', '8989:8989',
- '-p', '5000:5000', # control port for flask app in simulator
- ]
- else:
- publish_ports = []
-
- if not os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
- docker_pull(self.args, self.image)
-
- docker_run(
- self.args,
- self.image,
- ['-d', '--name', self.container_name] + publish_ports,
- )
-
- if self.args.docker:
- vcenter_hostname = self.DOCKER_SIMULATOR_NAME
- elif container_id:
- vcenter_hostname = self._get_simulator_address()
- display.info('Found vCenter simulator container address: %s' % vcenter_hostname, verbosity=1)
- else:
- vcenter_hostname = get_docker_hostname()
-
- self._set_cloud_config('vcenter_hostname', vcenter_hostname)
-
- def _get_simulator_address(self):
- return get_docker_container_ip(self.args, self.container_name)
-
- def _setup_static(self):
- if not os.path.exists(self.config_static_path):
- raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path)
-
- parser = ConfigParser({
- 'vcenter_port': '443',
- 'vmware_proxy_host': '',
- 'vmware_proxy_port': '8080'})
- parser.read(self.config_static_path)
-
- if parser.get('DEFAULT', 'vmware_validate_certs').lower() in ('no', 'false'):
- self.insecure = True
- proxy_host = parser.get('DEFAULT', 'vmware_proxy_host')
- proxy_port = int(parser.get('DEFAULT', 'vmware_proxy_port'))
- if proxy_host and proxy_port:
- self.proxy = 'http://%s:%d' % (proxy_host, proxy_port)
-
-
-class VcenterEnvironment(CloudEnvironment):
- """VMware vcenter/esx environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- try:
- # We may be running in a container, so we cannot simply check VMWARE_TEST_PLATFORM;
- # we use a try/except instead.
- parser = ConfigParser()
- parser.read(self.config_path) # static
-
- env_vars = dict()
- ansible_vars = dict(
- resource_prefix=self.resource_prefix,
- )
- ansible_vars.update(dict(parser.items('DEFAULT', raw=True)))
- except KeyError: # govcsim
- env_vars = dict(
- VCENTER_HOSTNAME=self._get_cloud_config('vcenter_hostname'),
- VCENTER_USERNAME='user',
- VCENTER_PASSWORD='pass',
- )
-
- ansible_vars = dict(
- vcsim=self._get_cloud_config('vcenter_hostname'),
- vcenter_hostname=self._get_cloud_config('vcenter_hostname'),
- vcenter_username='user',
- vcenter_password='pass',
- )
- # Shippable starts ansible-test from within an existing container,
- # and in that case we don't have to change the vCenter port.
- if not self.args.docker and not get_docker_container_id():
- ansible_vars['vcenter_port'] = '1443'
-
- for key, value in ansible_vars.items():
- if key.endswith('_password'):
- display.sensitive.add(value)
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- ansible_vars=ansible_vars,
- module_defaults={
- 'group/vmware': {
- 'hostname': ansible_vars['vcenter_hostname'],
- 'username': ansible_vars['vcenter_username'],
- 'password': ansible_vars['vcenter_password'],
- 'port': ansible_vars.get('vcenter_port', '443'),
- 'validate_certs': ansible_vars.get('vmware_validate_certs', 'no'),
- },
- },
- )
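
The vCenter environment is the only plugin in this diff that also seeds
module_defaults, so individual VMware tasks do not need to repeat connection
arguments. The govcsim case produces a structure like this (a sketch mirroring the
wiring above; every module in the vmware action group inherits these defaults):

    module_defaults = {
        'group/vmware': {
            'hostname': 'vcenter-simulator',
            'username': 'user',
            'password': 'pass',
            'port': '443',
            'validate_certs': 'no',
        },
    }
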
diff --git a/test/lib/ansible_test/_internal/cloud/vultr.py b/test/lib/ansible_test/_internal/cloud/vultr.py
deleted file mode 100644
index ce6184f7..00000000
--- a/test/lib/ansible_test/_internal/cloud/vultr.py
+++ /dev/null
@@ -1,71 +0,0 @@
-"""Vultr plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
-)
-
-from ..util import (
- ConfigParser,
- display,
-)
-
-
-class VultrCloudProvider(CloudProvider):
- """Checks if a configuration file has been passed or fixtures are going to be used for testing"""
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(VultrCloudProvider, self).__init__(args)
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
-
- super(VultrCloudProvider, self).filter(targets, exclude)
-
- def setup(self):
- """Setup the cloud resource before delegation and register a cleanup callback."""
- super(VultrCloudProvider, self).setup()
-
- if os.path.isfile(self.config_static_path):
- self.config_path = self.config_static_path
- self.managed = False
-
-
-class VultrCloudEnvironment(CloudEnvironment):
- """
- Updates the integration test environment after delegation. Sets up the config file as a parameter.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
- parser.read(self.config_path)
-
- env_vars = dict(
- VULTR_API_KEY=parser.get('default', 'key'),
- )
-
- display.sensitive.add(env_vars['VULTR_API_KEY'])
-
- ansible_vars = dict(
- vultr_resource_prefix=self.resource_prefix,
- )
-
- return CloudEnvironmentConfig(
- env_vars=env_vars,
- ansible_vars=ansible_vars,
- )
diff --git a/test/lib/ansible_test/_internal/commands/__init__.py b/test/lib/ansible_test/_internal/commands/__init__.py
new file mode 100644
index 00000000..e9cb6816
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/__init__.py
@@ -0,0 +1,2 @@
+"""Nearly empty __init__.py to keep pylint happy."""
+from __future__ import annotations
diff --git a/test/lib/ansible_test/_internal/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
index 183efa67..50bc8263 100644
--- a/test/lib/ansible_test/_internal/coverage/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
@@ -1,50 +1,60 @@
"""Common logic for the coverage subcommand."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import errno
import os
import re
+import typing as t
-from .. import types as t
+from ...constants import (
+ COVERAGE_REQUIRED_VERSION,
+)
-from ..encoding import (
+from ...encoding import (
to_bytes,
)
-from ..io import (
+from ...io import (
open_binary_file,
read_json_file,
)
-from ..util import (
+from ...util import (
ApplicationError,
common_environment,
display,
ANSIBLE_TEST_DATA_ROOT,
)
-from ..util_common import (
- intercept_command,
+from ...util_common import (
+ intercept_python,
ResultType,
)
-from ..config import (
+from ...config import (
EnvironmentConfig,
)
-from ..executor import (
- Delegate,
- install_command_requirements,
+from ...python_requirements import (
+ install_requirements,
)
-from .. target import (
+from ... target import (
walk_module_targets,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...provisioning import (
+ HostState,
+)
+
if t.TYPE_CHECKING:
import coverage as coverage_module
@@ -56,22 +66,13 @@ COVERAGE_OUTPUT_FILE_NAME = 'coverage'
class CoverageConfig(EnvironmentConfig):
"""Configuration for the coverage command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageConfig, self).__init__(args, 'coverage')
-
- self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set() # type: t.FrozenSet[str]
- self.all = args.all if 'all' in args else False # type: bool
- self.stub = args.stub if 'stub' in args else False # type: bool
- self.export = args.export if 'export' in args else None # type: str
- self.coverage = False # temporary work-around to support intercept_command in cover.py
+ super().__init__(args, 'coverage')
-def initialize_coverage(args): # type: (CoverageConfig) -> coverage_module
+def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState) -> coverage_module
"""Delegate execution if requested, install requirements, then import and return the coverage module. Raises an exception if coverage is not available."""
- if args.delegate:
- raise Delegate()
-
- if args.requirements:
- install_command_requirements(args)
+ configure_pypi_proxy(args, host_state.controller_profile) # coverage
+ install_requirements(args, host_state.controller_profile.python, coverage=True) # coverage
try:
import coverage
@@ -79,35 +80,27 @@ def initialize_coverage(args): # type: (CoverageConfig) -> coverage_module
coverage = None
if not coverage:
- raise ApplicationError('You must install the "coverage" python module to use this command.')
-
- coverage_version_string = coverage.__version__
- coverage_version = tuple(int(v) for v in coverage_version_string.split('.'))
+ raise ApplicationError(f'Version {COVERAGE_REQUIRED_VERSION} of the Python "coverage" module must be installed to use this command.')
- min_version = (4, 2)
- max_version = (5, 0)
-
- supported_version = True
- recommended_version = '4.5.4'
-
- if coverage_version < min_version or coverage_version >= max_version:
- supported_version = False
-
- if not supported_version:
- raise ApplicationError('Version %s of "coverage" is not supported. Version %s is known to work and is recommended.' % (
- coverage_version_string, recommended_version))
+ if coverage.__version__ != COVERAGE_REQUIRED_VERSION:
+ raise ApplicationError(f'Version {COVERAGE_REQUIRED_VERSION} of the Python "coverage" module is required. Version {coverage.__version__} was found.')
return coverage
-def run_coverage(args, output_file, command, cmd): # type: (CoverageConfig, str, str, t.List[str]) -> None
+def run_coverage(args, host_state, output_file, command, cmd): # type: (CoverageConfig, HostState, str, str, t.List[str]) -> None
"""Run the coverage cli tool with the specified options."""
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
cmd = ['python', '-m', 'coverage.__main__', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd
- intercept_command(args, target_name='coverage', env=env, cmd=cmd, disable_coverage=True)
+ intercept_python(args, host_state.controller_profile.python, cmd, env)
+
+
+def get_all_coverage_files(): # type: () -> t.List[str]
+ """Return a list of all coverage file paths."""
+ return get_python_coverage_files() + get_powershell_coverage_files()
def get_python_coverage_files(path=None): # type: (t.Optional[str]) -> t.List[str]
@@ -123,8 +116,15 @@ def get_powershell_coverage_files(path=None): # type: (t.Optional[str]) -> t.Li
def get_coverage_files(language, path=None): # type: (str, t.Optional[str]) -> t.List[str]
"""Return the list of coverage file paths for the given language."""
coverage_dir = path or ResultType.COVERAGE.path
- coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
- if '=coverage.' in f and '=%s' % language in f]
+
+ try:
+ coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
+ if '=coverage.' in f and '=%s' % language in f]
+ except IOError as ex:
+ if ex.errno == errno.ENOENT:
+ return []
+
+ raise
return coverage_files
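
The filename filter above depends on the '='-delimited naming scheme used for
coverage artifacts: a file is selected when it carries the '=coverage.' marker and
an '=<language>' segment. A quick illustration (filenames invented for the example):

    names = [
        'integration=ping=python-3.8=coverage.python.1234',      # matches for language='python'
        'integration=ping=powershell=coverage.powershell.1234',  # matches for language='powershell'
        'README.txt',                                            # ignored: no '=coverage.' marker
    ]

    language = 'python'
    matches = [n for n in names if '=coverage.' in n and '=%s' % language in n]
    assert matches == ['integration=ping=python-3.8=coverage.python.1234']
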
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
index 45770373..db169fd7 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
@@ -1,8 +1,6 @@
"""Common logic for the `coverage analyze` subcommand."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ... import types as t
+from __future__ import annotations
+import typing as t
from .. import (
CoverageConfig,
@@ -12,7 +10,7 @@ from .. import (
class CoverageAnalyzeConfig(CoverageConfig):
"""Configuration for the `coverage analyze` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeConfig, self).__init__(args)
+ super().__init__(args)
# avoid mixing log messages with file output when using `/dev/stdout` for the output file on commands
# this may be worth considering as the default behavior in the future, instead of being dependent on the command or options used
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
index 8fe571b8..a39d12c8 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
@@ -1,17 +1,15 @@
"""Analyze integration test target code coverage."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .... import types as t
-
-from ....io import (
+from .....io import (
read_json_file,
write_json_file,
)
-from ....util import (
+from .....util import (
ApplicationError,
display,
)
@@ -33,7 +31,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
"""Configuration for the `coverage analyze targets` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsConfig, self).__init__(args)
+ super().__init__(args)
self.info_stderr = True
@@ -121,7 +119,7 @@ def get_target_index(name, target_indexes): # type: (str, TargetIndexes) -> int
def expand_indexes(
source_data, # type: IndexedPoints
source_index, # type: t.List[str]
- format_func, # type: t.Callable[t.Tuple[t.Any], str]
+ format_func, # type: t.Callable[[TargetKey], str]
): # type: (...) -> NamedPoints
"""Expand indexes from the source into target names for easier processing of the data (arcs or lines)."""
combined_data = {} # type: t.Dict[str, t.Dict[t.Any, t.Set[str]]]
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
index 35148ff6..d68edc02 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
@@ -1,8 +1,14 @@
"""Combine integration test target code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import typing as t
-from .... import types as t
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
from . import (
CoverageAnalyzeTargetsConfig,
@@ -24,7 +30,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets combine` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsCombineConfig, self).__init__(args)
+ super().__init__(args)
self.input_files = args.input_file # type: t.List[str]
self.output_file = args.output_file # type: str
@@ -32,6 +38,11 @@ class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_combine(args): # type: (CoverageAnalyzeTargetsCombineConfig) -> None
"""Combine integration test target code coverage reports."""
+ host_state = prepare_profiles(args) # coverage analyze targets combine
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
combined_target_indexes = {} # type: TargetIndexes
combined_path_arcs = {} # type: Arcs
combined_path_lines = {} # type: Lines
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
index 388dd6cb..6ca6e6d3 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/expand.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
@@ -1,14 +1,20 @@
"""Expand target names in an aggregated coverage file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import typing as t
-from .... import types as t
-
-from ....io import (
+from .....io import (
SortedSetEncoder,
write_json_file,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
+
from . import (
CoverageAnalyzeTargetsConfig,
expand_indexes,
@@ -20,7 +26,7 @@ from . import (
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsExpandConfig, self).__init__(args)
+ super().__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
@@ -28,6 +34,11 @@ class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None
"""Expand target names in an aggregated coverage file."""
+ host_state = prepare_profiles(args) # coverage analyze targets expand
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
report = dict(
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
index e90fb227..e5d2f500 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/filter.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
@@ -1,10 +1,16 @@
"""Filter an aggregated coverage file, keeping only the specified targets."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
+import typing as t
-from .... import types as t
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
from . import (
CoverageAnalyzeTargetsConfig,
@@ -25,7 +31,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets filter` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsFilterConfig, self).__init__(args)
+ super().__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
@@ -37,6 +43,11 @@ class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTargetsFilterConfig) -> None
"""Filter target names in an aggregated coverage file."""
+ host_state = prepare_profiles(args) # coverage analyze targets filter
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
filtered_path_arcs = expand_indexes(covered_path_arcs, covered_targets, lambda v: v)
@@ -49,6 +60,7 @@ def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTarg
exclude_path = re.compile(args.exclude_path) if args.exclude_path else None
def path_filter_func(path):
+ """Return True if the given path should be included, otherwise return False."""
if include_path and not re.search(include_path, path):
return False
@@ -58,6 +70,7 @@ def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTarg
return True
def target_filter_func(targets):
+ """Filter the given targets and return the result based on the defined includes and excludes."""
if include_targets:
targets &= include_targets
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
index a14b6f55..3f9bca74 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/generate.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
@@ -1,23 +1,30 @@
"""Analyze code coverage data to determine which integration test targets provide coverage for each arc or line."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .... import types as t
-
-from ....encoding import (
+from .....encoding import (
to_text,
)
-from ....data import (
+from .....data import (
data_context,
)
-from ....util_common import (
+from .....util_common import (
ResultType,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+ HostState,
+)
+
from ... import (
enumerate_powershell_lines,
enumerate_python_arcs,
@@ -47,7 +54,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets generate` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsGenerateConfig, self).__init__(args)
+ super().__init__(args)
self.input_dir = args.input_dir or ResultType.COVERAGE.path # type: str
self.output_file = args.output_file # type: str
@@ -55,9 +62,14 @@ class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTargetsGenerateConfig) -> None
"""Analyze code coverage data to determine which integration test targets provide coverage for each arc or line."""
+ host_state = prepare_profiles(args) # coverage analyze targets generate
+
+ if args.delegate:
+ raise Delegate(host_state)
+
root = data_context().content.root
target_indexes = {}
- arcs = dict((os.path.relpath(path, root), data) for path, data in analyze_python_coverage(args, args.input_dir, target_indexes).items())
+ arcs = dict((os.path.relpath(path, root), data) for path, data in analyze_python_coverage(args, host_state, args.input_dir, target_indexes).items())
lines = dict((os.path.relpath(path, root), data) for path, data in analyze_powershell_coverage(args, args.input_dir, target_indexes).items())
report = make_report(target_indexes, arcs, lines)
write_report(args, report, args.output_file)
@@ -65,6 +77,7 @@ def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTa
def analyze_python_coverage(
args, # type: CoverageAnalyzeTargetsGenerateConfig
+ host_state, # type: HostState
path, # type: str
target_indexes, # type: TargetIndexes
): # type: (...) -> Arcs
@@ -73,7 +86,7 @@ def analyze_python_coverage(
collection_search_re, collection_sub_re = get_collection_path_regexes()
modules = get_python_modules()
python_files = get_python_coverage_files(path)
- coverage = initialize_coverage(args)
+ coverage = initialize_coverage(args, host_state)
for python_file in python_files:
if not is_integration_coverage_file(python_file):
diff --git a/test/lib/ansible_test/_internal/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
index 613a0ef2..9b6d696d 100644
--- a/test/lib/ansible_test/_internal/coverage/analyze/targets/missing.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
@@ -1,15 +1,21 @@
"""Identify aggregated coverage in one file missing from another."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .... import types as t
-
-from ....encoding import (
+from .....encoding import (
to_bytes,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
+
from . import (
CoverageAnalyzeTargetsConfig,
get_target_index,
@@ -28,7 +34,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets missing` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsMissingConfig, self).__init__(args)
+ super().__init__(args)
self.from_file = args.from_file # type: str
self.to_file = args.to_file # type: str
@@ -40,6 +46,11 @@ class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_missing(args): # type: (CoverageAnalyzeTargetsMissingConfig) -> None
"""Identify aggregated coverage in one file missing from another."""
+ host_state = prepare_profiles(args) # coverage analyze targets missing
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
from_targets, from_path_arcs, from_path_lines = read_report(args.from_file)
to_targets, to_path_arcs, to_path_lines = read_report(args.to_file)
target_indexes = {}
diff --git a/test/lib/ansible_test/_internal/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py
index 7f726267..b240df46 100644
--- a/test/lib/ansible_test/_internal/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py
@@ -1,32 +1,55 @@
"""Combine code coverage files."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import json
+import typing as t
-from ..target import (
+from ...target import (
walk_compile_targets,
walk_powershell_targets,
)
-from ..io import (
+from ...io import (
read_text_file,
)
-from ..util import (
+from ...util import (
+ ANSIBLE_TEST_TOOLS_ROOT,
display,
+ ApplicationError,
)
-from ..util_common import (
+from ...util_common import (
ResultType,
+ run_command,
write_json_file,
write_json_test_results,
)
+from ...executor import (
+ Delegate,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_configs import (
+ DockerConfig,
+ RemoteConfig,
+)
+
+from ...provisioning import (
+ HostState,
+ prepare_profiles,
+)
+
from . import (
enumerate_python_arcs,
enumerate_powershell_lines,
get_collection_path_regexes,
+ get_all_coverage_files,
get_python_coverage_files,
get_python_modules,
get_powershell_coverage_files,
@@ -38,12 +61,34 @@ from . import (
)
-def command_coverage_combine(args):
- """Patch paths in coverage files and merge into a single file.
- :type args: CoverageConfig
- :rtype: list[str]
- """
- paths = _command_coverage_combine_powershell(args) + _command_coverage_combine_python(args)
+def command_coverage_combine(args): # type: (CoverageCombineConfig) -> None
+ """Patch paths in coverage files and merge into a single file."""
+ host_state = prepare_profiles(args) # coverage combine
+ combine_coverage_files(args, host_state)
+
+
+def combine_coverage_files(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str]
+ """Combine coverage and return a list of the resulting files."""
+ if args.delegate:
+ if isinstance(args.controller, (DockerConfig, RemoteConfig)):
+ paths = get_all_coverage_files()
+ exported_paths = [path for path in paths if os.path.basename(path).split('=')[-1].split('.')[:2] == ['coverage', 'combined']]
+
+ if not exported_paths:
+ raise ExportedCoverageDataNotFound()
+
+ pairs = [(path, os.path.relpath(path, data_context().content.root)) for path in exported_paths]
+
+ def coverage_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """Add the coverage files to the payload file list."""
+ display.info('Including %d exported coverage file(s) in payload.' % len(pairs), verbosity=1)
+ files.extend(pairs)
+
+ data_context().register_payload_callback(coverage_callback)
+
+ raise Delegate(host_state=host_state)
+
+ paths = _command_coverage_combine_powershell(args) + _command_coverage_combine_python(args, host_state)
for path in paths:
display.info('Generated combined output: %s' % path, verbosity=1)
@@ -51,12 +96,18 @@ def command_coverage_combine(args):
return paths
-def _command_coverage_combine_python(args):
- """
- :type args: CoverageConfig
- :rtype: list[str]
- """
- coverage = initialize_coverage(args)
+class ExportedCoverageDataNotFound(ApplicationError):
+ """Exception when no combined coverage data is present yet is required."""
+ def __init__(self):
+ super().__init__(
+ 'Coverage data must be exported before processing with the `--docker` or `--remote` option.\n'
+ 'Export coverage with `ansible-test coverage combine` using the `--export` option.\n'
+ 'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)
+
+
+def _command_coverage_combine_python(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str]
+ """Combine Python coverage files and return a list of the output files."""
+ coverage = initialize_coverage(args, host_state)
modules = get_python_modules()
@@ -64,7 +115,7 @@ def _command_coverage_combine_python(args):
counter = 0
sources = _get_coverage_targets(args, walk_compile_targets)
- groups = _build_stub_groups(args, sources, lambda line_count: set())
+ groups = _build_stub_groups(args, sources, lambda s: dict((name, set()) for name in s))
collection_search_re, collection_sub_re = get_collection_path_regexes()
@@ -130,18 +181,17 @@ def _command_coverage_combine_python(args):
return sorted(output_files)
-def _command_coverage_combine_powershell(args):
- """
- :type args: CoverageConfig
- :rtype: list[str]
- """
+def _command_coverage_combine_powershell(args): # type: (CoverageCombineConfig) -> t.List[str]
+ """Combine PowerShell coverage files and return a list of the output files."""
coverage_files = get_powershell_coverage_files()
- def _default_stub_value(lines):
- val = {}
- for line in range(lines):
- val[line] = 0
- return val
+ def _default_stub_value(source_paths):
+ cmd = ['pwsh', os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'coverage_stub.ps1')]
+ cmd.extend(source_paths)
+
+ stubs = json.loads(run_command(args, cmd, capture=True, always=True)[0])
+
+ return dict((d['Path'], dict((line, 0) for line in d['Lines'])) for d in stubs)
counter = 0
sources = _get_coverage_targets(args, walk_powershell_targets)
@@ -209,12 +259,8 @@ def _command_coverage_combine_powershell(args):
return sorted(output_files)
-def _get_coverage_targets(args, walk_func):
- """
- :type args: CoverageConfig
- :type walk_func: Func
- :rtype: list[tuple[str, int]]
- """
+def _get_coverage_targets(args, walk_func): # type: (CoverageCombineConfig, t.Callable) -> t.List[t.Tuple[str, int]]
+ """Return a list of files to cover and the number of lines in each file, using the given function as the source of the files."""
sources = []
if args.all or args.stub:
@@ -234,9 +280,9 @@ def _get_coverage_targets(args, walk_func):
def _build_stub_groups(args, sources, default_stub_value):
"""
- :type args: CoverageConfig
+ :type args: CoverageCombineConfig
:type sources: List[tuple[str, int]]
- :type default_stub_value: Func[int]
+ :type default_stub_value: Func[List[str]]
:rtype: dict
"""
groups = {}
@@ -248,7 +294,7 @@ def _build_stub_groups(args, sources, default_stub_value):
stub_line_count = 0
for source, source_line_count in sources:
- stub_group.append((source, source_line_count))
+ stub_group.append(source)
stub_line_count += source_line_count
if stub_line_count > stub_line_limit:
@@ -260,18 +306,13 @@ def _build_stub_groups(args, sources, default_stub_value):
if not stub_group:
continue
- groups['=stub-%02d' % (stub_index + 1)] = dict((source, default_stub_value(line_count))
- for source, line_count in stub_group)
+ groups['=stub-%02d' % (stub_index + 1)] = default_stub_value(stub_group)
return groups
-def get_coverage_group(args, coverage_file):
- """
- :type args: CoverageConfig
- :type coverage_file: str
- :rtype: str
- """
+def get_coverage_group(args, coverage_file): # type: (CoverageCombineConfig, str) -> t.Optional[str]
+ """Return the name of the coverage group for the specified coverage file, or None if no group was found."""
parts = os.path.basename(coverage_file).split('=', 4)
# noinspection PyTypeChecker
@@ -301,3 +342,16 @@ def get_coverage_group(args, coverage_file):
group = group.lstrip('=')
return group
+
+
+class CoverageCombineConfig(CoverageConfig):
+ """Configuration for the coverage combine command."""
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args)
+
+ self.group_by = frozenset(args.group_by) if args.group_by else frozenset() # type: t.FrozenSet[str]
+ self.all = args.all # type: bool
+ self.stub = args.stub # type: bool
+
+ # only available to coverage combine
+ self.export = args.export if 'export' in args else False # type: str
diff --git a/test/lib/ansible_test/_internal/commands/coverage/erase.py b/test/lib/ansible_test/_internal/commands/coverage/erase.py
new file mode 100644
index 00000000..9a459a38
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/coverage/erase.py
@@ -0,0 +1,43 @@
+"""Erase code coverage files."""
+from __future__ import annotations
+
+import os
+
+from ...util_common import (
+ ResultType,
+)
+
+from ...executor import (
+ Delegate,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from . import (
+ CoverageConfig,
+)
+
+
+def command_coverage_erase(args): # type: (CoverageEraseConfig) -> None
+ """Erase code coverage data files collected during test runs."""
+ host_state = prepare_profiles(args) # coverage erase
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
+ coverage_dir = ResultType.COVERAGE.path
+
+ for name in os.listdir(coverage_dir):
+ if not name.startswith('coverage') and '=coverage.' not in name:
+ continue
+
+ path = os.path.join(coverage_dir, name)
+
+ if not args.explain:
+ os.remove(path)
+
+
+class CoverageEraseConfig(CoverageConfig):
+ """Configuration for the coverage erase command."""
diff --git a/test/lib/ansible_test/_internal/coverage/html.py b/test/lib/ansible_test/_internal/commands/coverage/html.py
index 63956a19..12caa179 100644
--- a/test/lib/ansible_test/_internal/coverage/html.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/html.py
@@ -1,36 +1,38 @@
"""Generate HTML code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-from ..io import (
+from ...io import (
make_dirs,
)
-from ..util import (
+from ...util import (
display,
)
-from ..util_common import (
+from ...util_common import (
ResultType,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
- command_coverage_combine,
+ combine_coverage_files,
+ CoverageCombineConfig,
)
from . import (
run_coverage,
- CoverageConfig,
)
-def command_coverage_html(args):
- """
- :type args: CoverageConfig
- """
- output_files = command_coverage_combine(args)
+def command_coverage_html(args): # type: (CoverageHtmlConfig) -> None
+ """Generate an HTML coverage report."""
+ host_state = prepare_profiles(args) # coverage html
+ output_files = combine_coverage_files(args, host_state)
for output_file in output_files:
if output_file.endswith('-powershell'):
@@ -40,6 +42,10 @@ def command_coverage_html(args):
dir_name = os.path.join(ResultType.REPORTS.path, os.path.basename(output_file))
make_dirs(dir_name)
- run_coverage(args, output_file, 'html', ['-i', '-d', dir_name])
+ run_coverage(args, host_state, output_file, 'html', ['-i', '-d', dir_name])
display.info('HTML report generated: file:///%s' % os.path.join(dir_name, 'index.html'))
+
+
+class CoverageHtmlConfig(CoverageCombineConfig):
+ """Configuration for the coverage html command."""
diff --git a/test/lib/ansible_test/_internal/coverage/report.py b/test/lib/ansible_test/_internal/commands/coverage/report.py
index 24efa637..2d53362e 100644
--- a/test/lib/ansible_test/_internal/coverage/report.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/report.py
@@ -1,36 +1,39 @@
"""Generate console code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from ..io import (
+from ...io import (
read_json_file,
)
-from ..util import (
+from ...util import (
display,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
- command_coverage_combine,
+ combine_coverage_files,
+ CoverageCombineConfig,
)
from . import (
run_coverage,
- CoverageConfig,
)
-def command_coverage_report(args):
- """
- :type args: CoverageReportConfig
- """
- output_files = command_coverage_combine(args)
+def command_coverage_report(args): # type: (CoverageReportConfig) -> None
+ """Generate a console coverage report."""
+ host_state = prepare_profiles(args) # coverage report
+ output_files = combine_coverage_files(args, host_state)
for output_file in output_files:
if args.group_by or args.stub:
@@ -50,15 +53,11 @@ def command_coverage_report(args):
if args.omit:
options.extend(['--omit', args.omit])
- run_coverage(args, output_file, 'report', options)
+ run_coverage(args, host_state, output_file, 'report', options)
-def _generate_powershell_output_report(args, coverage_file):
- """
- :type args: CoverageReportConfig
- :type coverage_file: str
- :rtype: str
- """
+def _generate_powershell_output_report(args, coverage_file): # type: (CoverageReportConfig, str) -> str
+ """Generate and return a PowerShell coverage report for the given coverage file."""
coverage_info = read_json_file(coverage_file)
root_path = data_context().content.root + '/'
@@ -143,13 +142,10 @@ def _generate_powershell_output_report(args, coverage_file):
return report
-class CoverageReportConfig(CoverageConfig):
+class CoverageReportConfig(CoverageCombineConfig):
"""Configuration for the coverage report command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(CoverageReportConfig, self).__init__(args)
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args)
self.show_missing = args.show_missing # type: bool
self.include = args.include # type: str
diff --git a/test/lib/ansible_test/_internal/coverage/xml.py b/test/lib/ansible_test/_internal/commands/coverage/xml.py
index 94b5abc5..ed9603c2 100644
--- a/test/lib/ansible_test/_internal/coverage/xml.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/xml.py
@@ -1,9 +1,9 @@
"""Generate XML code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import time
+import typing as t
from xml.etree.ElementTree import (
Comment,
@@ -16,39 +16,42 @@ from xml.dom import (
minidom,
)
-from ..io import (
+from ...io import (
make_dirs,
read_json_file,
)
-from ..util_common import (
+from ...util_common import (
ResultType,
write_text_test_results,
)
-from ..env import (
+from ...util import (
get_ansible_version,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
- command_coverage_combine,
+ combine_coverage_files,
+ CoverageCombineConfig,
)
from . import (
run_coverage,
- CoverageConfig,
)
-def command_coverage_xml(args):
- """
- :type args: CoverageConfig
- """
- output_files = command_coverage_combine(args)
+def command_coverage_xml(args): # type: (CoverageXmlConfig) -> None
+ """Generate an XML coverage report."""
+ host_state = prepare_profiles(args) # coverage xml
+ output_files = combine_coverage_files(args, host_state)
for output_file in output_files:
xml_name = '%s.xml' % os.path.basename(output_file)
@@ -63,14 +66,11 @@ def command_coverage_xml(args):
else:
xml_path = os.path.join(ResultType.REPORTS.path, xml_name)
make_dirs(ResultType.REPORTS.path)
- run_coverage(args, output_file, 'xml', ['-i', '-o', xml_path])
+ run_coverage(args, host_state, output_file, 'xml', ['-i', '-o', xml_path])
-def _generate_powershell_xml(coverage_file):
- """
- :type coverage_file: str
- :rtype: Element
- """
+def _generate_powershell_xml(coverage_file): # type: (str) -> Element
+ """Generate a PowerShell coverage report XML element from the specified coverage file and return it."""
coverage_info = read_json_file(coverage_file)
content_root = data_context().content.root
@@ -131,13 +131,8 @@ def _generate_powershell_xml(coverage_file):
return elem_coverage
-def _add_cobertura_package(packages, package_name, package_data):
- """
- :type packages: SubElement
- :type package_name: str
- :type package_data: Dict[str, Dict[str, int]]
- :rtype: Tuple[int, int]
- """
+def _add_cobertura_package(packages, package_name, package_data): # type: (SubElement, str, t.Dict[str, t.Dict[str, int]]) -> t.Tuple[int, int]
+ """Add a package element to the given packages element."""
elem_package = SubElement(packages, 'package')
elem_classes = SubElement(elem_package, 'classes')
@@ -189,3 +184,7 @@ def _add_cobertura_package(packages, package_name, package_data):
})
return total_lines_hit, total_line_count
+
+
+class CoverageXmlConfig(CoverageCombineConfig):
+ """Configuration for the coverage xml command."""
diff --git a/test/lib/ansible_test/_internal/env.py b/test/lib/ansible_test/_internal/commands/env/__init__.py
index 60c0245e..c625209c 100644
--- a/test/lib/ansible_test/_internal/env.py
+++ b/test/lib/ansible_test/_internal/commands/env/__init__.py
@@ -1,73 +1,52 @@
"""Show information about the test environment."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
-import functools
import os
import platform
-import signal
import sys
-import time
+import typing as t
-from .config import (
+from ...config import (
CommonConfig,
- TestConfig,
)
-from .io import (
+from ...io import (
write_json_file,
- read_json_file,
)
-from .util import (
+from ...util import (
display,
- find_executable,
SubprocessError,
- ApplicationError,
get_ansible_version,
get_available_python_versions,
)
-from .util_common import (
+from ...util_common import (
data_context,
write_json_test_results,
ResultType,
)
-from .docker_util import (
+from ...docker_util import (
+ get_docker_command,
docker_info,
docker_version
)
-from .thread import (
- WrappedThread,
-)
-
-from .constants import (
+from ...constants import (
TIMEOUT_PATH,
)
-from .test import (
- TestTimeout,
-)
-
-from .executor import (
- SUPPORTED_PYTHON_VERSIONS,
-)
-
-from .ci import (
+from ...ci import (
get_ci_provider,
)
class EnvConfig(CommonConfig):
- """Configuration for the tools command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(EnvConfig, self).__init__(args, 'env')
+ """Configuration for the `env` command."""
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'env')
self.show = args.show
self.dump = args.dump
@@ -79,19 +58,15 @@ class EnvConfig(CommonConfig):
self.show = True
-def command_env(args):
- """
- :type args: EnvConfig
- """
+def command_env(args): # type: (EnvConfig) -> None
+ """Entry point for the `env` command."""
show_dump_env(args)
list_files_env(args)
set_timeout(args)
-def show_dump_env(args):
- """
- :type args: EnvConfig
- """
+def show_dump_env(args): # type: (EnvConfig) -> None
+ """Show information about the current environment and/or write the information to disk."""
if not args.show and not args.dump:
return
@@ -115,7 +90,7 @@ def show_dump_env(args):
executable=sys.executable,
version=platform.python_version(),
),
- interpreters=get_available_python_versions(SUPPORTED_PYTHON_VERSIONS),
+ interpreters=get_available_python_versions(),
)
if args.show:
@@ -141,10 +116,8 @@ def list_files_env(args): # type: (EnvConfig) -> None
display.info(path)
-def set_timeout(args):
- """
- :type args: EnvConfig
- """
+def set_timeout(args): # type: (EnvConfig) -> None
+ """Set an execution timeout for subsequent ansible-test invocations."""
if args.timeout is None:
return
@@ -171,79 +144,8 @@ def set_timeout(args):
os.remove(TIMEOUT_PATH)
-def get_timeout():
- """
- :rtype: dict[str, any] | None
- """
- if not os.path.exists(TIMEOUT_PATH):
- return None
-
- data = read_json_file(TIMEOUT_PATH)
- data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
-
- return data
-
-
-def configure_timeout(args):
- """
- :type args: CommonConfig
- """
- if isinstance(args, TestConfig):
- configure_test_timeout(args) # only tests are subject to the timeout
-
-
-def configure_test_timeout(args):
- """
- :type args: TestConfig
- """
- timeout = get_timeout()
-
- if not timeout:
- return
-
- timeout_start = datetime.datetime.utcnow()
- timeout_duration = timeout['duration']
- timeout_deadline = timeout['deadline']
- timeout_remaining = timeout_deadline - timeout_start
-
- test_timeout = TestTimeout(timeout_duration)
-
- if timeout_remaining <= datetime.timedelta():
- test_timeout.write(args)
-
- raise ApplicationError('The %d minute test timeout expired %s ago at %s.' % (
- timeout_duration, timeout_remaining * -1, timeout_deadline))
-
- display.info('The %d minute test timeout expires in %s at %s.' % (
- timeout_duration, timeout_remaining, timeout_deadline), verbosity=1)
-
- def timeout_handler(_dummy1, _dummy2):
- """Runs when SIGUSR1 is received."""
- test_timeout.write(args)
-
- raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' % timeout_duration)
-
- def timeout_waiter(timeout_seconds):
- """
- :type timeout_seconds: int
- """
- time.sleep(timeout_seconds)
- os.kill(os.getpid(), signal.SIGUSR1)
-
- signal.signal(signal.SIGUSR1, timeout_handler)
-
- instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.seconds))
- instance.daemon = True
- instance.start()
-
-
-def show_dict(data, verbose, root_verbosity=0, path=None):
- """
- :type data: dict[str, any]
- :type verbose: dict[str, int]
- :type root_verbosity: int
- :type path: list[str] | None
- """
+def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str, t.Any], t.Dict[str, int], int, t.Optional[t.List[str]]) -> None
+ """Show a dict with varying levels of verbosity."""
path = path if path else []
for key, value in sorted(data.items()):
@@ -264,16 +166,17 @@ def show_dict(data, verbose, root_verbosity=0, path=None):
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
-def get_docker_details(args):
- """
- :type args: CommonConfig
- :rtype: dict[str, any]
- """
- docker = find_executable('docker', required=False)
+def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, str]
+ """Return details about docker."""
+ docker = get_docker_command()
+
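+ # details remain None when the docker command is not available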
+ executable = None
info = None
version = None
if docker:
+ executable = docker.executable
+
try:
info = docker_info(args)
except SubprocessError as ex:
@@ -285,7 +188,7 @@ def get_docker_details(args):
display.warning('Failed to collect docker version:\n%s' % ex)
docker_details = dict(
- executable=docker,
+ executable=executable,
info=info,
version=version,
)
diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py
new file mode 100644
index 00000000..09eb889c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py
@@ -0,0 +1,950 @@
+"""Ansible integration test infrastructure."""
+from __future__ import annotations
+
+import contextlib
+import datetime
+import json
+import os
+import re
+import shutil
+import tempfile
+import time
+import typing as t
+
+from ...encoding import (
+ to_bytes,
+)
+
+from ...ansible_util import (
+ ansible_environment,
+)
+
+from ...executor import (
+ get_changes_filter,
+ AllTargetsSkipped,
+ Delegate,
+ ListTargets,
+)
+
+from ...python_requirements import (
+ install_requirements,
+)
+
+from ...ci import (
+ get_ci_provider,
+)
+
+from ...target import (
+ analyze_integration_target_dependencies,
+ walk_integration_targets,
+ IntegrationTarget,
+ walk_internal_targets,
+ TIntegrationTarget,
+ IntegrationTargetType,
+)
+
+from ...config import (
+ IntegrationConfig,
+ NetworkIntegrationConfig,
+ PosixIntegrationConfig,
+ WindowsIntegrationConfig,
+ TIntegrationConfig,
+)
+
+from ...io import (
+ make_dirs,
+ read_text_file,
+)
+
+from ...util import (
+ ApplicationError,
+ display,
+ SubprocessError,
+ remove_tree,
+)
+
+from ...util_common import (
+ named_temporary_file,
+ ResultType,
+ run_command,
+ write_json_test_results,
+ check_pyyaml,
+)
+
+from ...coverage_util import (
+ cover_python,
+)
+
+from ...cache import (
+ CommonCache,
+)
+
+from .cloud import (
+ CloudEnvironmentConfig,
+ cloud_filter,
+ cloud_init,
+ get_cloud_environment,
+ get_cloud_platforms,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_configs import (
+ OriginConfig,
+)
+
+from ...host_profiles import (
+ ControllerProfile,
+ HostProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
+
+from ...provisioning import (
+ HostState,
+ prepare_profiles,
+)
+
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...inventory import (
+ create_controller_inventory,
+ create_windows_inventory,
+ create_network_inventory,
+ create_posix_inventory,
+)
+
+from .filters import (
+ get_target_filter,
+)
+
+from .coverage import (
+ CoverageManager,
+)
+
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
+
+
+def generate_dependency_map(integration_targets): # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[IntegrationTarget]]
+ """Analyze the given list of integration test targets and return a dictionary expressing target names and the targets on which they depend."""
+ targets_dict = dict((target.name, target) for target in integration_targets)
+ target_dependencies = analyze_integration_target_dependencies(integration_targets)
+ dependency_map = {}
+
+ invalid_targets = set()
+
+ for dependency, dependents in target_dependencies.items():
+ dependency_target = targets_dict.get(dependency)
+
+ if not dependency_target:
+ invalid_targets.add(dependency)
+ continue
+
+ for dependent in dependents:
+ if dependent not in dependency_map:
+ dependency_map[dependent] = set()
+
+ dependency_map[dependent].add(dependency_target)
+
+ if invalid_targets:
+ raise ApplicationError('Non-existent target dependencies: %s' % ', '.join(sorted(invalid_targets)))
+
+ return dependency_map
+
+
+def get_files_needed(target_dependencies): # type: (t.List[IntegrationTarget]) -> t.List[str]
+ """Return a list of files needed by the given list of target dependencies."""
+ files_needed = []
+
+ for target_dependency in target_dependencies:
+ files_needed += target_dependency.needs_file
+
+ files_needed = sorted(set(files_needed))
+
+ invalid_paths = [path for path in files_needed if not os.path.isfile(path)]
+
+ if invalid_paths:
+ raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(invalid_paths))
+
+ return files_needed
+
+
+def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None
+ """Check the given inventory for issues."""
+ if not isinstance(args.controller, OriginConfig):
+ if os.path.exists(inventory_path):
+ inventory = read_text_file(inventory_path)
+
+ if 'ansible_ssh_private_key_file' in inventory:
+ display.warning('Use of "ansible_ssh_private_key_file" in inventory with the --docker or --remote option is unsupported and will likely fail.')
+
+
+def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
+ """Return the inventory path used for the given integration configuration relative to the content root."""
+ inventory_names = {
+ PosixIntegrationConfig: 'inventory',
+ WindowsIntegrationConfig: 'inventory.winrm',
+ NetworkIntegrationConfig: 'inventory.networking',
+ } # type: t.Dict[t.Type[IntegrationConfig], str]
+
+ return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
+
+
+def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None
+ """Make the given inventory available during delegation."""
+ if isinstance(args, PosixIntegrationConfig):
+ return
+
+ def inventory_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """
+ Add the inventory file to the payload file list.
+ This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
+ """
+ inventory_path = get_inventory_relative_path(args)
+ inventory_tuple = inventory_path_src, inventory_path
+
+ if os.path.isfile(inventory_path_src) and inventory_tuple not in files:
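+ # remove any file already queued for the same destination so the source inventory takes precedence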
+ originals = [item for item in files if item[1] == inventory_path]
+
+ if originals:
+ for original in originals:
+ files.remove(original)
+
+ display.warning('Overriding inventory file "%s" with "%s".' % (inventory_path, inventory_path_src))
+ else:
+ display.notice('Sourcing inventory file "%s" from "%s".' % (inventory_path, inventory_path_src))
+
+ files.append(inventory_tuple)
+
+ data_context().register_payload_callback(inventory_callback)
+
+
+@contextlib.contextmanager
+def integration_test_environment(
+ args, # type: IntegrationConfig
+ target, # type: IntegrationTarget
+ inventory_path_src, # type: str
+): # type: (...) -> t.ContextManager[IntegrationEnvironment]
+ """Context manager that prepares the integration test environment and cleans it up."""
+ ansible_config_src = args.get_ansible_config()
+ ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
+
+ if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
+ display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
+
+ integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
+ targets_dir = os.path.join(data_context().content.root, data_context().content.integration_targets_path)
+ inventory_path = inventory_path_src
+ ansible_config = ansible_config_src
+ vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
+
+ yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
+ return
+
+ # When testing a collection, the temporary directory must reside within the collection.
+ # This is necessary to enable support for the default collection for non-collection content (playbooks and roles).
+ root_temp_dir = os.path.join(ResultType.TMP.path, 'integration')
+
+ prefix = '%s-' % target.name
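+ # use a unicode suffix for the temp dir so tests exercise non-ASCII paths (see the no/temp_unicode alias below)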
+ suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8'
+
+ if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases:
+ display.warning('Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
+ suffix = '-ansible'
+
+ if args.explain:
+ temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix))
+ else:
+ make_dirs(root_temp_dir)
+ temp_dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
+
+ try:
+ display.info('Preparing temporary directory: %s' % temp_dir, verbosity=2)
+
+ inventory_relative_path = get_inventory_relative_path(args)
+ inventory_path = os.path.join(temp_dir, inventory_relative_path)
+
+ cache = IntegrationCache(args)
+
+ target_dependencies = sorted([target] + list(cache.dependency_map.get(target.name, set())))
+
+ files_needed = get_files_needed(target_dependencies)
+
+ integration_dir = os.path.join(temp_dir, data_context().content.integration_path)
+ targets_dir = os.path.join(temp_dir, data_context().content.integration_targets_path)
+ ansible_config = os.path.join(temp_dir, ansible_config_relative)
+
+ vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
+ vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path)
+
+ file_copies = [
+ (ansible_config_src, ansible_config),
+ (inventory_path_src, inventory_path),
+ ]
+
+ if os.path.exists(vars_file_src):
+ file_copies.append((vars_file_src, vars_file))
+
+ file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed]
+
+ integration_targets_relative_path = data_context().content.integration_targets_path
+
+ directory_copies = [
+ (
+ os.path.join(integration_targets_relative_path, target.relative_path),
+ os.path.join(temp_dir, integration_targets_relative_path, target.relative_path)
+ )
+ for target in target_dependencies
+ ]
+
+ directory_copies = sorted(set(directory_copies))
+ file_copies = sorted(set(file_copies))
+
+ if not args.explain:
+ make_dirs(integration_dir)
+
+ for dir_src, dir_dst in directory_copies:
+ display.info('Copying %s/ to %s/' % (dir_src, dir_dst), verbosity=2)
+
+ if not args.explain:
+ shutil.copytree(to_bytes(dir_src), to_bytes(dir_dst), symlinks=True)
+
+ for file_src, file_dst in file_copies:
+ display.info('Copying %s to %s' % (file_src, file_dst), verbosity=2)
+
+ if not args.explain:
+ make_dirs(os.path.dirname(file_dst))
+ shutil.copy2(file_src, file_dst)
+
+ yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
+ finally:
+ if not args.explain:
+ remove_tree(temp_dir)
+
+
+@contextlib.contextmanager
+def integration_test_config_file(
+ args, # type: IntegrationConfig
+ env_config, # type: CloudEnvironmentConfig
+ integration_dir, # type: str
+): # type: (...) -> t.ContextManager[t.Optional[str]]
+ """Context manager that provides a config file for integration tests, if needed."""
+ if not env_config:
+ yield None
+ return
+
+ config_vars = (env_config.ansible_vars or {}).copy()
+
+ config_vars.update(dict(
+ ansible_test=dict(
+ environment=env_config.env_vars,
+ module_defaults=env_config.module_defaults,
+ )
+ ))
+
+ config_file = json.dumps(config_vars, indent=4, sort_keys=True)
+
+ with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path:
+ filename = os.path.relpath(path, integration_dir)
+
+ display.info('>>> Config File: %s\n%s' % (filename, config_file), verbosity=3)
+
+ yield path
+
+
+def create_inventory(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ inventory_path, # type: str
+ target, # type: IntegrationTarget
+): # type: (...) -> None
+ """Create inventory."""
+ if isinstance(args, PosixIntegrationConfig):
+ if target.target_type == IntegrationTargetType.CONTROLLER:
+ display.info('Configuring controller inventory.', verbosity=1)
+ create_controller_inventory(args, inventory_path, host_state.controller_profile)
+ elif target.target_type == IntegrationTargetType.TARGET:
+ display.info('Configuring target inventory.', verbosity=1)
+ create_posix_inventory(args, inventory_path, host_state.target_profiles, 'needs/ssh/' in target.aliases)
+ else:
+ raise Exception(f'Unhandled test type for target "{target.name}": {target.target_type.name.lower()}')
+ elif isinstance(args, WindowsIntegrationConfig):
+ display.info('Configuring target inventory.', verbosity=1)
+ target_profiles = filter_profiles_for_target(args, host_state.target_profiles, target)
+ create_windows_inventory(args, inventory_path, target_profiles)
+ elif isinstance(args, NetworkIntegrationConfig):
+ display.info('Configuring target inventory.', verbosity=1)
+ target_profiles = filter_profiles_for_target(args, host_state.target_profiles, target)
+ create_network_inventory(args, inventory_path, target_profiles)
+
+
+def command_integration_filtered(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ targets, # type: t.Tuple[IntegrationTarget, ...]
+ all_targets, # type: t.Tuple[IntegrationTarget, ...]
+ inventory_path, # type: str
+ pre_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
+ post_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
+): # type: (...) -> None
+ """Run integration tests for the specified targets."""
+ found = False
+ passed = []
+ failed = []
+
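+ # iterate explicitly so the next remaining target can be reported if a failure aborts the run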
+ targets_iter = iter(targets)
+ all_targets_dict = dict((target.name, target) for target in all_targets)
+
+ setup_errors = []
+ setup_targets_executed = set()
+
+ for target in all_targets:
+ for setup_target in target.setup_once + target.setup_always:
+ if setup_target not in all_targets_dict:
+ setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))
+
+ if setup_errors:
+ raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))
+
+ check_pyyaml(host_state.controller_profile.python)
+
+ test_dir = os.path.join(ResultType.TMP.path, 'output_dir')
+
+ if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
+ max_tries = 20
+ display.info('SSH connection to controller required by tests. Checking the connection.')
+ for i in range(1, max_tries + 1):
+ try:
+ run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
+ display.info('SSH service responded.')
+ break
+ except SubprocessError:
+ if i == max_tries:
+ raise
+ seconds = 3
+ display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
+ time.sleep(seconds)
+
+ start_at_task = args.start_at_task
+
+ results = {}
+
+ target_profile = host_state.target_profiles[0]
+
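+ # install the command requirements on the target host unless it shares the controller's Python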
+ if isinstance(target_profile, PosixProfile):
+ target_python = target_profile.python
+
+ if isinstance(target_profile, ControllerProfile):
+ if host_state.controller_profile.python.path != target_profile.python.path:
+ install_requirements(args, target_python, command=True) # integration
+ elif isinstance(target_profile, SshTargetHostProfile):
+ install_requirements(args, target_python, command=True, connection=target_profile.get_controller_target_connections()[0]) # integration
+
+ coverage_manager = CoverageManager(args, host_state, inventory_path)
+ coverage_manager.setup()
+
+ try:
+ for target in targets_iter:
+ if args.start_at and not found:
+ found = target.name == args.start_at
+
+ if not found:
+ continue
+
+ create_inventory(args, host_state, inventory_path, target)
+
+ tries = 2 if args.retry_on_error else 1
+ verbosity = args.verbosity
+
+ cloud_environment = get_cloud_environment(args, target)
+
+ try:
+ while tries:
+ tries -= 1
+
+ try:
+ if cloud_environment:
+ cloud_environment.setup_once()
+
+ run_setup_targets(args, host_state, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path,
+ coverage_manager, False)
+
+ start_time = time.time()
+
+ if pre_target:
+ pre_target(target)
+
+ run_setup_targets(args, host_state, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path,
+ coverage_manager, True)
+
+ if not args.explain:
+ # create a fresh test directory for each test target
+ remove_tree(test_dir)
+ make_dirs(test_dir)
+
+ try:
+ if target.script_path:
+ command_integration_script(args, host_state, target, test_dir, inventory_path, coverage_manager)
+ else:
+ command_integration_role(args, host_state, target, start_at_task, test_dir, inventory_path, coverage_manager)
+ start_at_task = None
+ finally:
+ if post_target:
+ post_target(target)
+
+ end_time = time.time()
+
+ results[target.name] = dict(
+ name=target.name,
+ type=target.type,
+ aliases=target.aliases,
+ modules=target.modules,
+ run_time_seconds=int(end_time - start_time),
+ setup_once=target.setup_once,
+ setup_always=target.setup_always,
+ )
+
+ break
+ except SubprocessError:
+ if cloud_environment:
+ cloud_environment.on_failure(target, tries)
+
+ if not tries:
+ raise
+
+ display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
+ display.verbosity = args.verbosity = 6
+
+ passed.append(target)
+ except Exception as ex:
+ failed.append(target)
+
+ if args.continue_on_error:
+ display.error(ex)
+ continue
+
+ display.notice('To resume at this test target, use the option: --start-at %s' % target.name)
+
+ next_target = next(targets_iter, None)
+
+ if next_target:
+ display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)
+
+ raise
+ finally:
+ display.verbosity = args.verbosity = verbosity
+
+ finally:
+ if not args.explain:
+ coverage_manager.teardown()
+
+ result_name = '%s-%s.json' % (
+ args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
+
+ data = dict(
+ targets=results,
+ )
+
+ write_json_test_results(ResultType.DATA, result_name, data)
+
+ if failed:
+ raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
+ len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
+
+
+def command_integration_script(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ target, # type: IntegrationTarget
+ test_dir, # type: str
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+): # type: (...) -> None
+ """Run an integration test script."""
+ display.info('Running %s integration test script' % target.name)
+
+ env_config = None
+
+ if isinstance(args, PosixIntegrationConfig):
+ cloud_environment = get_cloud_environment(args, target)
+
+ if cloud_environment:
+ env_config = cloud_environment.get_environment_config()
+
+ if env_config:
+ display.info('>>> Environment Config\n%s' % json.dumps(dict(
+ env_vars=env_config.env_vars,
+ ansible_vars=env_config.ansible_vars,
+ callback_plugins=env_config.callback_plugins,
+ module_defaults=env_config.module_defaults,
+ ), indent=4, sort_keys=True), verbosity=3)
+
+ with integration_test_environment(args, target, inventory_path) as test_env:
+ cmd = ['./%s' % os.path.basename(target.script_path)]
+
+ if args.verbosity:
+ cmd.append('-' + ('v' * args.verbosity))
+
+ env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
+ cwd = os.path.join(test_env.targets_dir, target.relative_path)
+
+ env.update(dict(
+ # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
+ ANSIBLE_PLAYBOOK_DIR=cwd,
+ ))
+
+ if env_config and env_config.env_vars:
+ env.update(env_config.env_vars)
+
+ with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
+ if config_path:
+ cmd += ['-e', '@%s' % config_path]
+
+ env.update(coverage_manager.get_environment(target.name, target.aliases))
+ cover_python(args, host_state.controller_profile.python, cmd, target.name, env, cwd=cwd)
+
+
+def command_integration_role(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ target, # type: IntegrationTarget
+ start_at_task, # type: t.Optional[str]
+ test_dir, # type: str
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+): # type: (...) -> None
+ """Run an integration test role."""
+ display.info('Running %s integration test role' % target.name)
+
+ env_config = None
+
+ vars_files = []
+ variables = dict(
+ output_dir=test_dir,
+ )
+
+ if isinstance(args, WindowsIntegrationConfig):
+ hosts = 'windows'
+ gather_facts = False
+ variables.update(dict(
+ win_output_dir=r'C:\ansible_testing',
+ ))
+ elif isinstance(args, NetworkIntegrationConfig):
+ hosts = target.network_platform
+ gather_facts = False
+ else:
+ hosts = 'testhost'
+ gather_facts = True
+
+ if 'gather_facts/yes/' in target.aliases:
+ gather_facts = True
+ elif 'gather_facts/no/' in target.aliases:
+ gather_facts = False
+
+ if not isinstance(args, NetworkIntegrationConfig):
+ cloud_environment = get_cloud_environment(args, target)
+
+ if cloud_environment:
+ env_config = cloud_environment.get_environment_config()
+
+ if env_config:
+ display.info('>>> Environment Config\n%s' % json.dumps(dict(
+ env_vars=env_config.env_vars,
+ ansible_vars=env_config.ansible_vars,
+ callback_plugins=env_config.callback_plugins,
+ module_defaults=env_config.module_defaults,
+ ), indent=4, sort_keys=True), verbosity=3)
+
+ with integration_test_environment(args, target, inventory_path) as test_env:
+ if os.path.exists(test_env.vars_file):
+ vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))
+
+ play = dict(
+ hosts=hosts,
+ gather_facts=gather_facts,
+ vars_files=vars_files,
+ vars=variables,
+ roles=[
+ target.name,
+ ],
+ )
+
+ if env_config:
+ if env_config.ansible_vars:
+ variables.update(env_config.ansible_vars)
+
+ play.update(dict(
+ environment=env_config.env_vars,
+ module_defaults=env_config.module_defaults,
+ ))
+
+ playbook = json.dumps([play], indent=4, sort_keys=True)
+
+ with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
+ filename = os.path.basename(playbook_path)
+
+ display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)
+
+ cmd = ['ansible-playbook', filename, '-i', os.path.relpath(test_env.inventory_path, test_env.integration_dir)]
+
+ if start_at_task:
+ cmd += ['--start-at-task', start_at_task]
+
+ if args.tags:
+ cmd += ['--tags', args.tags]
+
+ if args.skip_tags:
+ cmd += ['--skip-tags', args.skip_tags]
+
+ if args.diff:
+ cmd += ['--diff']
+
+ if isinstance(args, NetworkIntegrationConfig):
+ if args.testcase:
+ cmd += ['-e', 'testcase=%s' % args.testcase]
+
+ if args.verbosity:
+ cmd.append('-' + ('v' * args.verbosity))
+
+ env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
+ cwd = test_env.integration_dir
+
+ env.update(dict(
+ # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
+ ANSIBLE_PLAYBOOK_DIR=cwd,
+ ))
+
+ if env_config and env_config.env_vars:
+ env.update(env_config.env_vars)
+
+ env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir
+
+ env.update(coverage_manager.get_environment(target.name, target.aliases))
+ cover_python(args, host_state.controller_profile.python, cmd, target.name, env, cwd=cwd)
+
+
+def run_setup_targets(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ test_dir, # type: str
+ target_names, # type: t.List[str]
+ targets_dict, # type: t.Dict[str, IntegrationTarget]
+ targets_executed, # type: t.Set[str]
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+ always, # type: bool
+): # type: (...) -> None
+ """Run setup targets."""
+ for target_name in target_names:
+ if not always and target_name in targets_executed:
+ continue
+
+ target = targets_dict[target_name]
+
+ if not args.explain:
+ # create a fresh test directory for each test target
+ remove_tree(test_dir)
+ make_dirs(test_dir)
+
+ if target.script_path:
+ command_integration_script(args, host_state, target, test_dir, inventory_path, coverage_manager)
+ else:
+ command_integration_role(args, host_state, target, None, test_dir, inventory_path, coverage_manager)
+
+ targets_executed.add(target_name)
+
+
+def integration_environment(
+ args, # type: IntegrationConfig
+ target, # type: IntegrationTarget
+ test_dir, # type: str
+ inventory_path, # type: str
+ ansible_config, # type: t.Optional[str]
+ env_config, # type: t.Optional[CloudEnvironmentConfig]
+): # type: (...) -> t.Dict[str, str]
+ """Return a dictionary of environment variables to use when running the given integration test target."""
+ env = ansible_environment(args, ansible_config=ansible_config)
+
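+ # always enable the junit callback, plus any callback plugins requested by the cloud environment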
+ callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])
+
+ integration = dict(
+ JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
+ ANSIBLE_CALLBACKS_ENABLED=','.join(sorted(set(callback_plugins))),
+ ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
+ ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
+ OUTPUT_DIR=test_dir,
+ INVENTORY_PATH=os.path.abspath(inventory_path),
+ )
+
+ if args.debug_strategy:
+ env.update(dict(ANSIBLE_STRATEGY='debug'))
+
+ if 'non_local/' in target.aliases:
+ if args.coverage:
+ display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)
+
+ env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))
+
+ env.update(integration)
+
+ return env
+
+
+class IntegrationEnvironment:
+ """Details about the integration environment."""
+ def __init__(self, integration_dir, targets_dir, inventory_path, ansible_config, vars_file): # type: (str, str, str, str, str) -> None
+ self.integration_dir = integration_dir
+ self.targets_dir = targets_dir
+ self.inventory_path = inventory_path
+ self.ansible_config = ansible_config
+ self.vars_file = vars_file
+
+
+class IntegrationCache(CommonCache):
+ """Integration cache."""
+ @property
+ def integration_targets(self): # type: () -> t.List[IntegrationTarget]
+ """The list of integration test targets."""
+ return self.get('integration_targets', lambda: list(walk_integration_targets()))
+
+ @property
+ def dependency_map(self): # type: () -> t.Dict[str, t.Set[IntegrationTarget]]
+ """The dependency map of the integration test targets."""
+ return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
+
+
+def filter_profiles_for_target(args, profiles, target): # type: (IntegrationConfig, t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Return a list of profiles after applying target filters."""
+ if target.target_type == IntegrationTargetType.CONTROLLER:
+ profile_filter = get_target_filter(args, [args.controller], True)
+ elif target.target_type == IntegrationTargetType.TARGET:
+ profile_filter = get_target_filter(args, args.targets, False)
+ else:
+ raise Exception(f'Unhandled test type for target "{target.name}": {target.target_type.name.lower()}')
+
+ profiles = profile_filter.filter_profiles(profiles, target)
+
+ return profiles
+
+
+def get_integration_filter(args, targets): # type: (IntegrationConfig, t.List[IntegrationTarget]) -> t.Set[str]
+ """Return a list of test targets to skip based on the host(s) that will be used to run the specified test targets."""
+ invalid_targets = sorted(target.name for target in targets if target.target_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
+
+ if invalid_targets and not args.list_targets:
+ message = f'''Unable to determine context for the following test targets: {", ".join(invalid_targets)}
+
+Make sure the test targets are correctly named:
+
+ - Modules - The target name should match the module name.
+ - Plugins - The target name should be "{{plugin_type}}_{{plugin_name}}".
+
+If necessary, context can be controlled by adding entries to the "aliases" file for a test target:
+
+ - Add the name(s) of modules which are tested.
+ - Add "context/target" for module and module_utils tests (these will run on the target host).
+ - Add "context/controller" for other test types (these will run on the controller).'''
+
+ raise ApplicationError(message)
+
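+ # targets whose type could not be determined fall back to a default context; warn instead of failing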
+ invalid_targets = sorted(target.name for target in targets if target.actual_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
+
+ if invalid_targets:
+ if data_context().content.is_ansible:
+ display.warning(f'Unable to determine context for the following test targets: {", ".join(invalid_targets)}')
+ else:
+ display.warning(f'Unable to determine context for the following test targets, they will be run on the target host: {", ".join(invalid_targets)}')
+
+ exclude = set() # type: t.Set[str]
+
+ controller_targets = [target for target in targets if target.target_type == IntegrationTargetType.CONTROLLER]
+ target_targets = [target for target in targets if target.target_type == IntegrationTargetType.TARGET]
+
+ controller_filter = get_target_filter(args, [args.controller], True)
+ target_filter = get_target_filter(args, args.targets, False)
+
+ controller_filter.filter_targets(controller_targets, exclude)
+ target_filter.filter_targets(target_targets, exclude)
+
+ return exclude
+
+
+def command_integration_filter(args, # type: TIntegrationConfig
+ targets, # type: t.Iterable[TIntegrationTarget]
+ ): # type: (...) -> t.Tuple[HostState, t.Tuple[TIntegrationTarget, ...]]
+ """Filter the given integration test targets."""
+ targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
+ changes = get_changes_filter(args)
+
+ # special behavior when the --changed-all-target target is selected based on changes
+ if args.changed_all_target in changes:
+ # act as though the --changed-all-target target was in the include list
+ if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
+ args.include.append(args.changed_all_target)
+ args.delegate_args += ['--include', args.changed_all_target]
+ # act as though the --changed-all-target target was in the exclude list
+ elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
+ args.exclude.append(args.changed_all_target)
+
+ require = args.require + changes
+ exclude = args.exclude
+
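+ # select the requested targets, then select again after excluding targets the host environment cannot support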
+ internal_targets = walk_internal_targets(targets, args.include, exclude, require)
+ environment_exclude = get_integration_filter(args, list(internal_targets))
+
+ environment_exclude |= set(cloud_filter(args, internal_targets))
+
+ if environment_exclude:
+ exclude = sorted(set(exclude) | environment_exclude)
+ internal_targets = walk_internal_targets(targets, args.include, exclude, require)
+
+ if not internal_targets:
+ raise AllTargetsSkipped()
+
+ if args.start_at and not any(target.name == args.start_at for target in internal_targets):
+ raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
+
+ cloud_init(args, internal_targets)
+
+ vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
+
+ if os.path.exists(vars_file_src):
+ def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """
+ Add the integration config vars file to the payload file list.
+ This will preserve the file during delegation even if the file is ignored by source control.
+ """
+ files.append((vars_file_src, data_context().content.integration_vars_path))
+
+ data_context().register_payload_callback(integration_config_callback)
+
+ if args.list_targets:
+ raise ListTargets([target.name for target in internal_targets])
+
+ # requirements are installed using a callback since the windows-integration and network-integration host status checks depend on them
+ host_state = prepare_profiles(args, targets_use_pypi=True, requirements=requirements) # integration, windows-integration, network-integration
+
+ if args.delegate:
+ raise Delegate(host_state=host_state, require=require, exclude=exclude)
+
+ return host_state, internal_targets
+
+
+def requirements(args, host_state): # type: (IntegrationConfig, HostState) -> None
+ """Install requirements."""
+ target_profile = host_state.target_profiles[0]
+
+ configure_pypi_proxy(args, host_state.controller_profile) # integration, windows-integration, network-integration
+
+ if isinstance(target_profile, PosixProfile) and not isinstance(target_profile, ControllerProfile):
+ configure_pypi_proxy(args, target_profile) # integration
+
+ install_requirements(args, host_state.controller_profile.python, ansible=True, command=True) # integration, windows-integration, network-integration
diff --git a/test/lib/ansible_test/_internal/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
index 08a1183e..70f8afaf 100644
--- a/test/lib/ansible_test/_internal/cloud/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
@@ -1,73 +1,87 @@
"""Plugin system for cloud providers and environments for use in integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import atexit
import datetime
-import time
import os
import re
import tempfile
+import time
+import typing as t
-from .. import types as t
-
-from ..encoding import (
+from ....encoding import (
to_bytes,
)
-from ..io import (
+from ....io import (
read_text_file,
)
-from ..util import (
+from ....util import (
+ ANSIBLE_TEST_CONFIG_ROOT,
ApplicationError,
display,
import_plugins,
load_plugins,
- ABC,
- ANSIBLE_TEST_CONFIG_ROOT,
+ cache,
)
-from ..util_common import (
- write_json_test_results,
+from ....util_common import (
ResultType,
+ write_json_test_results,
)
-from ..target import (
- TestTarget,
+from ....target import (
+ IntegrationTarget,
)
-from ..config import (
+from ....config import (
IntegrationConfig,
+ TestConfig,
)
-from ..ci import (
+from ....ci import (
get_ci_provider,
)
-from ..data import (
+from ....data import (
data_context,
)
-PROVIDERS = {}
-ENVIRONMENTS = {}
+from ....docker_util import (
+ docker_available,
+)
-def initialize_cloud_plugins():
+@cache
+def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]
"""Import cloud plugins and load them into the plugin dictionaries."""
- import_plugins('cloud')
+ import_plugins('commands/integration/cloud')
+
+ providers = {}
+ environments = {}
+
+ load_plugins(CloudProvider, providers)
+ load_plugins(CloudEnvironment, environments)
+
+ return providers, environments
+
+
+@cache
+def get_provider_plugins(): # type: () -> t.Dict[str, t.Type[CloudProvider]]
+ """Return a dictionary of the available cloud provider plugins."""
+ return get_cloud_plugins()[0]
+
- load_plugins(CloudProvider, PROVIDERS)
- load_plugins(CloudEnvironment, ENVIRONMENTS)
+@cache
+def get_environment_plugins(): # type: () -> t.Dict[str, t.Type[CloudEnvironment]]
+ """Return a dictionary of the available cloud environment plugins."""
+ return get_cloud_plugins()[1]
-def get_cloud_platforms(args, targets=None):
- """
- :type args: TestConfig
- :type targets: tuple[IntegrationTarget] | None
- :rtype: list[str]
- """
+def get_cloud_platforms(args, targets=None): # type: (TestConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[str]
+ """Return cloud platform names for the specified targets."""
if isinstance(args, IntegrationConfig):
if args.list_targets:
return []
@@ -82,11 +96,8 @@ def get_cloud_platforms(args, targets=None):
return sorted(cloud_platforms)
-def get_cloud_platform(target):
- """
- :type target: IntegrationTarget
- :rtype: str | None
- """
+def get_cloud_platform(target): # type: (IntegrationTarget) -> t.Optional[str]
+ """Return the name of the cloud platform used for the given target, or None if no cloud platform is used."""
cloud_platforms = set(a.split('/')[1] for a in target.aliases if a.startswith('cloud/') and a.endswith('/') and a != 'cloud/')
if not cloud_platforms:
@@ -95,7 +106,7 @@ def get_cloud_platform(target):
if len(cloud_platforms) == 1:
cloud_platform = cloud_platforms.pop()
- if cloud_platform not in PROVIDERS:
+ if cloud_platform not in get_provider_plugins():
raise ApplicationError('Target %s aliases contains unknown cloud platform: %s' % (target.name, cloud_platform))
return cloud_platform
@@ -103,35 +114,23 @@ def get_cloud_platform(target):
raise ApplicationError('Target %s aliases contains multiple cloud platforms: %s' % (target.name, ', '.join(sorted(cloud_platforms))))
-def get_cloud_providers(args, targets=None):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget] | None
- :rtype: list[CloudProvider]
- """
- return [PROVIDERS[p](args) for p in get_cloud_platforms(args, targets)]
+def get_cloud_providers(args, targets=None): # type: (IntegrationConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[CloudProvider]
+ """Return a list of cloud providers for the given targets."""
+ return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)]
-def get_cloud_environment(args, target):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :rtype: CloudEnvironment
- """
+def get_cloud_environment(args, target): # type: (IntegrationConfig, IntegrationTarget) -> t.Optional[CloudEnvironment]
+ """Return the cloud environment for the given target, or None if no cloud environment is used for the target."""
cloud_platform = get_cloud_platform(target)
if not cloud_platform:
return None
- return ENVIRONMENTS[cloud_platform](args)
+ return get_environment_plugins()[cloud_platform](args)
-def cloud_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :return: list[str]
- """
+def cloud_filter(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> t.List[str]
+ """Return a list of target names to exclude based on the given targets."""
if args.metadata.cloud_config is not None:
return [] # cloud filter already performed prior to delegation
@@ -143,11 +142,8 @@ def cloud_filter(args, targets):
return exclude
-def cloud_init(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- """
+def cloud_init(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
+ """Initialize cloud plugins for the given targets."""
if args.metadata.cloud_config is not None:
return # cloud configuration already established prior to delegation
@@ -155,7 +151,10 @@ def cloud_init(args, targets):
results = {}
- for provider in get_cloud_providers(args, targets):
+ for provider in get_cloud_providers(args, targets): # type: CloudProvider
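+ # when only priming containers, skip providers which do not use docker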
+ if args.prime_containers and not provider.uses_docker:
+ continue
+
args.metadata.cloud_config[provider.platform] = {}
start_time = time.time()
@@ -179,24 +178,22 @@ def cloud_init(args, targets):
write_json_test_results(ResultType.DATA, result_name, data)
-class CloudBase(ABC):
+class CloudBase(metaclass=abc.ABCMeta):
"""Base class for cloud plugins."""
- __metaclass__ = abc.ABCMeta
-
_CONFIG_PATH = 'config_path'
_RESOURCE_PREFIX = 'resource_prefix'
_MANAGED = 'managed'
_SETUP_EXECUTED = 'setup_executed'
- def __init__(self, args):
- """
- :type args: IntegrationConfig
- """
+ def __init__(self, args): # type: (IntegrationConfig) -> None
self.args = args
- self.platform = self.__module__.split('.')[-1]
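+ # the platform name is the last component of the plugin module name (e.g. "aws" for the aws plugin)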
+ self.platform = self.__module__.rsplit('.', 1)[-1]
def config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
"""Add the config file to the payload file list."""
+ if self.platform not in self.args.metadata.cloud_config:
+ return # platform was initialized, but not used -- such as being skipped due to all tests being disabled
+
if self._get_cloud_config(self._CONFIG_PATH, ''):
pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))
@@ -207,88 +204,61 @@ class CloudBase(ABC):
data_context().register_payload_callback(config_callback)
@property
- def setup_executed(self):
- """
- :rtype: bool
- """
+ def setup_executed(self): # type: () -> bool
+ """True if setup has been executed, otherwise False."""
return self._get_cloud_config(self._SETUP_EXECUTED, False)
@setup_executed.setter
- def setup_executed(self, value):
- """
- :type value: bool
- """
+ def setup_executed(self, value): # type: (bool) -> None
+ """True if setup has been executed, otherwise False."""
self._set_cloud_config(self._SETUP_EXECUTED, value)
@property
- def config_path(self):
- """
- :rtype: str
- """
+ def config_path(self): # type: () -> str
+ """Path to the configuration file."""
return os.path.join(data_context().content.root, self._get_cloud_config(self._CONFIG_PATH))
@config_path.setter
- def config_path(self, value):
- """
- :type value: str
- """
+ def config_path(self, value): # type: (str) -> None
+ """Path to the configuration file."""
self._set_cloud_config(self._CONFIG_PATH, value)
@property
- def resource_prefix(self):
- """
- :rtype: str
- """
+ def resource_prefix(self): # type: () -> str
+ """Resource prefix."""
return self._get_cloud_config(self._RESOURCE_PREFIX)
@resource_prefix.setter
- def resource_prefix(self, value):
- """
- :type value: str
- """
+ def resource_prefix(self, value): # type: (str) -> None
+ """Resource prefix."""
self._set_cloud_config(self._RESOURCE_PREFIX, value)
@property
- def managed(self):
- """
- :rtype: bool
- """
+ def managed(self): # type: () -> bool
+ """True if resources are managed by ansible-test, otherwise False."""
return self._get_cloud_config(self._MANAGED)
@managed.setter
- def managed(self, value):
- """
- :type value: bool
- """
+ def managed(self, value): # type: (bool) -> None
+ """True if resources are managed by ansible-test, otherwise False."""
self._set_cloud_config(self._MANAGED, value)
- def _get_cloud_config(self, key, default=None):
- """
- :type key: str
- :type default: str | int | bool | None
- :rtype: str | int | bool
- """
+ def _get_cloud_config(self, key, default=None): # type: (str, t.Optional[t.Union[str, int, bool]]) -> t.Union[str, int, bool]
+ """Return the specified value from the internal configuration."""
if default is not None:
return self.args.metadata.cloud_config[self.platform].get(key, default)
return self.args.metadata.cloud_config[self.platform][key]
- def _set_cloud_config(self, key, value):
- """
- :type key: str
- :type value: str | int | bool
- """
+ def _set_cloud_config(self, key, value): # type: (str, t.Union[str, int, bool]) -> None
+ """Set the specified key and value in the internal configuration."""
self.args.metadata.cloud_config[self.platform][key] = value
class CloudProvider(CloudBase):
"""Base class for cloud provider plugins. Sets up cloud resources before delegation."""
- def __init__(self, args, config_extension='.ini'):
- """
- :type args: IntegrationConfig
- :type config_extension: str
- """
- super(CloudProvider, self).__init__(args)
+ def __init__(self, args, config_extension='.ini'): # type: (IntegrationConfig, str) -> None
+ super().__init__(args)
self.ci_provider = get_ci_provider()
self.remove_config = False
@@ -297,47 +267,50 @@ class CloudProvider(CloudBase):
self.config_template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, '%s.template' % self.config_static_name)
self.config_extension = config_extension
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
+ self.uses_config = False
+ self.uses_docker = False
+
+ def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
+ """Filter out the cloud tests when the necessary config and resources are not available."""
+ if not self.uses_docker and not self.uses_config:
+ return
+
+ if self.uses_docker and docker_available():
+ return
+
+ if self.uses_config and os.path.exists(self.config_static_path):
+ return
+
skip = 'cloud/%s/' % self.platform
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require config (see "%s"): %s'
- % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
- def setup(self):
+ if not self.uses_docker and self.uses_config:
+ display.warning('Excluding tests marked "%s" which require config (see "%s"): %s'
+ % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
+ elif self.uses_docker and not self.uses_config:
+ display.warning('Excluding tests marked "%s" which requires container support: %s'
+ % (skip.rstrip('/'), ', '.join(skipped)))
+ elif self.uses_docker and self.uses_config:
+ display.warning('Excluding tests marked "%s" which requires container support or config (see "%s"): %s'
+ % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped)))
+
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
self.resource_prefix = self.ci_provider.generate_resource_prefix()
self.resource_prefix = re.sub(r'[^a-zA-Z0-9]+', '-', self.resource_prefix)[:63].lower().rstrip('-')
atexit.register(self.cleanup)
- def get_remote_ssh_options(self):
- """Get any additional options needed when delegating tests to a remote instance via SSH.
- :rtype: list[str]
- """
- return []
-
- def get_docker_run_options(self):
- """Get any additional options needed when delegating tests to a docker container.
- :rtype: list[str]
- """
- return []
-
- def cleanup(self):
+ def cleanup(self): # type: () -> None
"""Clean up the cloud resource and any temporary configuration files after tests complete."""
if self.remove_config:
os.remove(self.config_path)
- def _use_static_config(self):
- """
- :rtype: bool
- """
+ def _use_static_config(self): # type: () -> bool
+ """Use a static config file if available. Returns True if static config is used, otherwise returns False."""
if os.path.isfile(self.config_static_path):
display.info('Using existing %s cloud config: %s' % (self.platform, self.config_static_path), verbosity=1)
self.config_path = self.config_static_path
@@ -349,10 +322,8 @@ class CloudProvider(CloudBase):
return static
- def _write_config(self, content):
- """
- :type content: str
- """
+ def _write_config(self, content): # type: (t.Text) -> None
+ """Write the given content to the config file."""
prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0]
with tempfile.NamedTemporaryFile(dir=data_context().content.integration_path, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd:
@@ -366,22 +337,16 @@ class CloudProvider(CloudBase):
config_fd.write(to_bytes(content))
config_fd.flush()
- def _read_config_template(self):
- """
- :rtype: str
- """
+ def _read_config_template(self): # type: () -> t.Text
+ """Read and return the configuration template."""
lines = read_text_file(self.config_template_path).splitlines()
lines = [line for line in lines if not line.startswith('#')]
config = '\n'.join(lines).strip() + '\n'
return config
@staticmethod
- def _populate_config_template(template, values):
- """
- :type template: str
- :type values: dict[str, str]
- :rtype: str
- """
+ def _populate_config_template(template, values): # type: (t.Text, t.Dict[str, str]) -> t.Text
+ """Populate and return the given template with the provided values."""
for key in sorted(values):
value = values[key]
template = template.replace('@%s' % key, value)
@@ -391,7 +356,7 @@ class CloudProvider(CloudBase):
class CloudEnvironment(CloudBase):
"""Base class for cloud environment plugins. Updates integration test environment after delegation."""
- def setup_once(self):
+ def setup_once(self): # type: () -> None
"""Run setup if it has not already been run."""
if self.setup_executed:
return
@@ -399,31 +364,25 @@ class CloudEnvironment(CloudBase):
self.setup()
self.setup_executed = True
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup which should be done once per environment instead of once per test target."""
@abc.abstractmethod
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
- def on_failure(self, target, tries):
- """
- :type target: IntegrationTarget
- :type tries: int
- """
+ def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
+ """Callback to run when an integration target fails."""
class CloudEnvironmentConfig:
"""Configuration for the environment."""
- def __init__(self, env_vars=None, ansible_vars=None, module_defaults=None, callback_plugins=None):
- """
- :type env_vars: dict[str, str] | None
- :type ansible_vars: dict[str, any] | None
- :type module_defaults: dict[str, dict[str, any]] | None
- :type callback_plugins: list[str] | None
- """
+ def __init__(self,
+ env_vars=None, # type: t.Optional[t.Dict[str, str]]
+ ansible_vars=None, # type: t.Optional[t.Dict[str, t.Any]]
+ module_defaults=None, # type: t.Optional[t.Dict[str, t.Dict[str, t.Any]]]
+ callback_plugins=None, # type: t.Optional[t.List[str]]
+ ):
self.env_vars = env_vars
self.ansible_vars = ansible_vars
self.module_defaults = module_defaults
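For orientation, a minimal sketch of how a plugin is expected to build this object; the values below are hypothetical, and real plugins read them from a config file or a support container:

    # Hypothetical values for illustration only.
    config = CloudEnvironmentConfig(
        env_vars={'EXAMPLE_API_KEY': 'secret'},          # exported to the test process
        ansible_vars={'resource_prefix': 'example-ci'},  # injected as extra vars
    )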
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
new file mode 100644
index 00000000..42d6f0bc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
@@ -0,0 +1,79 @@
+"""ACME plugin for integration tests."""
+from __future__ import annotations
+
+import os
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class ACMEProvider(CloudProvider):
+ """ACME plugin. Sets up cloud resources for tests."""
+ DOCKER_SIMULATOR_NAME = 'acme-simulator'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
+ if os.environ.get('ANSIBLE_ACME_CONTAINER'):
+ self.image = os.environ.get('ANSIBLE_ACME_CONTAINER')
+ else:
+ self.image = 'quay.io/ansible/acme-test-container:2.0.0'
+
+ self.uses_docker = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_dynamic(self): # type: () -> None
+ """Create a ACME test container using docker."""
+ ports = [
+ 5000, # control port for flask app in container
+ 14000, # Pebble ACME CA
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME)
+
+ def _setup_static(self): # type: () -> None
+ raise NotImplementedError()
+
+
+class ACMEEnvironment(CloudEnvironment):
+ """ACME environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ ansible_vars = dict(
+ acme_host=self._get_cloud_config('acme_host'),
+ )
+
+ return CloudEnvironmentConfig(
+ ansible_vars=ansible_vars,
+ )
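The ANSIBLE_ACME_CONTAINER override above is a pattern several plugins share: a developer can point the tests at a locally built simulator image. A sketch, with the fallback mirroring the pinned default:

    import os

    # ANSIBLE_ACME_CONTAINER is the real override variable; when unset or empty,
    # the pinned default image is used.
    image = os.environ.get('ANSIBLE_ACME_CONTAINER') or 'quay.io/ansible/acme-test-container:2.0.0'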
diff --git a/test/lib/ansible_test/_internal/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
index e3c811b6..f4493933 100644
--- a/test/lib/ansible_test/_internal/cloud/aws.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
@@ -1,57 +1,68 @@
"""AWS plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import uuid
+import configparser
+import typing as t
-from ..util import (
+from ....util import (
ApplicationError,
display,
- ConfigParser,
)
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
+from ....config import (
+ IntegrationConfig,
)
-from ..core_ci import (
+from ....target import (
+ IntegrationTarget,
+)
+
+from ....core_ci import (
AnsibleCoreCI,
)
+from ....host_configs import (
+ OriginConfig,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+ def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
+ """Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
if aci.available:
return
- super(AwsCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(AwsCloudProvider, self).setup()
+ super().setup()
aws_config_path = os.path.expanduser('~/.aws')
- if os.path.exists(aws_config_path) and not self.args.docker and not self.args.remote:
+ if os.path.exists(aws_config_path) and isinstance(self.args.controller, OriginConfig):
raise ApplicationError('Rename "%s" or use the --docker or --remote option to isolate tests.' % aws_config_path)
if not self._use_static_config():
self._setup_dynamic()
- def _setup_dynamic(self):
+ def _setup_dynamic(self): # type: () -> None
"""Request AWS credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@@ -78,20 +89,16 @@ class AwsCloudProvider(CloudProvider):
self._write_config(config)
- def _create_ansible_core_ci(self):
- """
- :rtype: AnsibleCoreCI
- """
- return AnsibleCoreCI(self.args, 'aws', 'aws', persist=False, stage=self.args.remote_stage, provider='aws', internal=True)
+ def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
+ """Return an AWS instance of AnsibleCoreCI."""
+ return AnsibleCoreCI(self.args, 'aws', 'aws', 'aws', persist=False)
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
@@ -116,11 +123,8 @@ class AwsCloudEnvironment(CloudEnvironment):
callback_plugins=['aws_resource_actions'],
)
- def on_failure(self, target, tries):
- """
- :type target: TestTarget
- :type tries: int
- """
+ def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
+ """Callback to run when an integration target fails."""
if not tries and self.managed:
display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. '
'https://docs.ansible.com/ansible/devel/dev_guide/platforms/aws_guidelines.html#aws-permissions-for-integration-tests.'
diff --git a/test/lib/ansible_test/_internal/cloud/azure.py b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
index 2efe96f8..002fa581 100644
--- a/test/lib/ansible_test/_internal/cloud/azure.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
@@ -1,57 +1,56 @@
"""Azure plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import configparser
import os
+import urllib.parse
+import typing as t
-from ..io import (
+from ....io import (
read_text_file,
)
-from ..util import (
+from ....util import (
ApplicationError,
display,
- ConfigParser,
)
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
+from ....config import (
+ IntegrationConfig,
)
-from ..http import (
+from ....target import (
+ IntegrationTarget,
+)
+
+from ....http import (
HttpClient,
- urlparse,
- urlunparse,
- parse_qs,
)
-from ..core_ci import (
+from ....core_ci import (
AnsibleCoreCI,
)
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
class AzureCloudProvider(CloudProvider):
"""Azure cloud provider plugin. Sets up cloud resources before delegation."""
SHERLOCK_CONFIG_PATH = os.path.expanduser('~/.ansible-sherlock-ci.cfg')
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(AzureCloudProvider, self).__init__(args)
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
self.aci = None
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
+ self.uses_config = True
+ def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
+ """Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
if aci.available:
@@ -60,25 +59,25 @@ class AzureCloudProvider(CloudProvider):
if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
return
- super(AzureCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(AzureCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
get_config(self.config_path) # check required variables
- def cleanup(self):
+ def cleanup(self): # type: () -> None
"""Clean up the cloud resource and any temporary configuration files after tests complete."""
if self.aci:
self.aci.stop()
- super(AzureCloudProvider, self).cleanup()
+ super().cleanup()
- def _setup_dynamic(self):
+ def _setup_dynamic(self): # type: () -> None
"""Request Azure credentials through Sherlock."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@@ -88,9 +87,9 @@ class AzureCloudProvider(CloudProvider):
if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
sherlock_uri = read_text_file(self.SHERLOCK_CONFIG_PATH).splitlines()[0].strip() + '&rgcount=2'
- parts = urlparse(sherlock_uri)
- query_string = parse_qs(parts.query)
- base_uri = urlunparse(parts[:4] + ('', ''))
+ parts = urllib.parse.urlparse(sherlock_uri)
+ query_string = urllib.parse.parse_qs(parts.query)
+ base_uri = urllib.parse.urlunparse(parts[:4] + ('', ''))
if 'code' not in query_string:
example_uri = 'https://example.azurewebsites.net/api/sandbox-provisioning'
@@ -132,19 +131,15 @@ class AzureCloudProvider(CloudProvider):
self._write_config(config)
- def _create_ansible_core_ci(self):
- """
- :rtype: AnsibleCoreCI
- """
- return AnsibleCoreCI(self.args, 'azure', 'azure', persist=False, stage=self.args.remote_stage, provider='azure', internal=True)
+ def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
+ """Return an Azure instance of AnsibleCoreCI."""
+ return AnsibleCoreCI(self.args, 'azure', 'azure', 'azure', persist=False)
class AzureCloudEnvironment(CloudEnvironment):
"""Azure cloud environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
env_vars = get_config(self.config_path)
display.sensitive.add(env_vars.get('AZURE_SECRET'))
@@ -161,22 +156,15 @@ class AzureCloudEnvironment(CloudEnvironment):
ansible_vars=ansible_vars,
)
- def on_failure(self, target, tries):
- """
- :type target: TestTarget
- :type tries: int
- """
+ def on_failure(self, target, tries): # type: (IntegrationTarget, int) -> None
+ """Callback to run when an integration target fails."""
if not tries and self.managed:
- display.notice('If %s failed due to permissions, the test policy may need to be updated. '
- 'For help, consult @mattclay or @gundalow on GitHub or #ansible-devel on IRC.' % target.name)
+ display.notice('If %s failed due to permissions, the test policy may need to be updated.' % target.name)
-def get_config(config_path):
- """
- :type config_path: str
- :rtype: dict[str, str]
- """
- parser = ConfigParser()
+def get_config(config_path): # type: (str) -> t.Dict[str, str]
+ """Return a configuration dictionary parsed from the given configuration path."""
+ parser = configparser.ConfigParser()
parser.read(config_path)
config = dict((key.upper(), value) for key, value in parser.items('default'))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
new file mode 100644
index 00000000..0a17fb25
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+#
+# (c) 2018, Gaudenz Steinlin <gaudenz.steinlin@cloudscale.ch>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Cloudscale plugin for integration tests."""
+from __future__ import annotations
+
+import configparser
+
+from ....util import (
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class CloudscaleCloudProvider(CloudProvider):
+ """Cloudscale cloud provider plugin. Sets up cloud resources before delegation."""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ self._use_static_config()
+
+
+class CloudscaleCloudEnvironment(CloudEnvironment):
+ """Cloudscale cloud environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path)
+
+ env_vars = dict(
+ CLOUDSCALE_API_TOKEN=parser.get('default', 'cloudscale_api_token'),
+ )
+
+ display.sensitive.add(env_vars['CLOUDSCALE_API_TOKEN'])
+
+ ansible_vars = dict(
+ cloudscale_resource_prefix=self.resource_prefix,
+ )
+
+ ansible_vars.update(dict((key.lower(), value) for key, value in env_vars.items()))
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ )
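The env-var to ansible-var mirroring above is a small recurring idiom; a standalone illustration:

    # Mirror upper-case environment variables into lower-case Ansible variables.
    env_vars = dict(CLOUDSCALE_API_TOKEN='token')
    ansible_vars = dict((key.lower(), value) for key, value in env_vars.items())
    assert ansible_vars == {'cloudscale_api_token': 'token'}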
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
new file mode 100644
index 00000000..f20a7d88
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
@@ -0,0 +1,174 @@
+"""CloudStack plugin for integration tests."""
+from __future__ import annotations
+
+import json
+import configparser
+import os
+import urllib.parse
+import typing as t
+
+from ....util import (
+ ApplicationError,
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....docker_util import (
+ docker_exec,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+ wait_for_file,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class CsCloudProvider(CloudProvider):
+ """CloudStack cloud provider plugin. Sets up cloud resources before delegation."""
+ DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0')
+ self.host = ''
+ self.port = 0
+
+ self.uses_docker = True
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_static(self): # type: () -> None
+ """Configure CloudStack tests for use with static configuration."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_static_path)
+
+ endpoint = parser.get('cloudstack', 'endpoint')
+
+ parts = urllib.parse.urlparse(endpoint)
+
+ self.host = parts.hostname
+
+ if not self.host:
+ raise ApplicationError('Could not determine host from endpoint: %s' % endpoint)
+
+ if parts.port:
+ self.port = parts.port
+ elif parts.scheme == 'http':
+ self.port = 80
+ elif parts.scheme == 'https':
+ self.port = 443
+ else:
+ raise ApplicationError('Could not determine port from endpoint: %s' % endpoint)
+
+ display.info('Read cs host "%s" and port %d from config: %s' % (self.host, self.port, self.config_static_path), verbosity=1)
+
+ def _setup_dynamic(self): # type: () -> None
+ """Create a CloudStack simulator using docker."""
+ config = self._read_config_template()
+
+ self.port = 8888
+
+ ports = [
+ self.port,
+ ]
+
+ descriptor = run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ if not descriptor:
+ return
+
+ # apply work-around for OverlayFS issue
+ # https://github.com/docker/for-linux/issues/72#issuecomment-319904698
+ docker_exec(self.args, self.DOCKER_SIMULATOR_NAME, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'])
+
+ if self.args.explain:
+ values = dict(
+ HOST=self.host,
+ PORT=str(self.port),
+ )
+ else:
+ credentials = self._get_credentials(self.DOCKER_SIMULATOR_NAME)
+
+ values = dict(
+ HOST=self.DOCKER_SIMULATOR_NAME,
+ PORT=str(self.port),
+ KEY=credentials['apikey'],
+ SECRET=credentials['secretkey'],
+ )
+
+ display.sensitive.add(values['SECRET'])
+
+ config = self._populate_config_template(config, values)
+
+ self._write_config(config)
+
+ def _get_credentials(self, container_name): # type: (str) -> t.Dict[str, t.Any]
+ """Wait for the CloudStack simulator to return credentials."""
+ def check(value):
+ """Return True if the given configuration is valid JSON, otherwise return False."""
+ # noinspection PyBroadException
+ try:
+ json.loads(value)
+ except Exception: # pylint: disable=broad-except
+ return False # sometimes the file exists but is not yet valid JSON
+
+ return True
+
+ stdout = wait_for_file(self.args, container_name, '/var/www/html/admin.json', sleep=10, tries=30, check=check)
+
+ return json.loads(stdout)
+
+
+class CsCloudEnvironment(CloudEnvironment):
+ """CloudStack cloud environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path)
+
+ config = dict(parser.items('default'))
+
+ env_vars = dict(
+ CLOUDSTACK_ENDPOINT=config['endpoint'],
+ CLOUDSTACK_KEY=config['key'],
+ CLOUDSTACK_SECRET=config['secret'],
+ CLOUDSTACK_TIMEOUT=config['timeout'],
+ )
+
+ display.sensitive.add(env_vars['CLOUDSTACK_SECRET'])
+
+ ansible_vars = dict(
+ cs_resource_prefix=self.resource_prefix,
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ )
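The credential polling in _get_credentials above deserves a note: the simulator writes admin.json asynchronously, so a file that exists but is not yet valid JSON must count as not ready. A simplified standalone equivalent of the wait_for_file plus check combination (wait_for_json and read_file are hypothetical names):

    import json
    import time

    def wait_for_json(read_file, tries=30, sleep=10):
        """Poll until read_file() returns valid JSON, mirroring the check above."""
        for _ in range(tries):
            try:
                return json.loads(read_file())
            except ValueError:  # the file may exist before it is fully written
                time.sleep(sleep)
        raise RuntimeError('file never became valid JSON')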
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
new file mode 100644
index 00000000..00b05d75
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
@@ -0,0 +1,55 @@
+"""DigitalOcean plugin for integration tests."""
+from __future__ import annotations
+
+import configparser
+
+from ....util import (
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class DigitalOceanCloudProvider(CloudProvider):
+ """Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ self._use_static_config()
+
+
+class DigitalOceanCloudEnvironment(CloudEnvironment):
+ """Updates integration test environment after delegation. Will setup the config file as parameter."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path)
+
+ env_vars = dict(
+ DO_API_KEY=parser.get('default', 'key'),
+ )
+
+ display.sensitive.add(env_vars['DO_API_KEY'])
+
+ ansible_vars = dict(
+ resource_prefix=self.resource_prefix,
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
new file mode 100644
index 00000000..b4ca48f7
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
@@ -0,0 +1,94 @@
+"""Foreman plugin for integration tests."""
+from __future__ import annotations
+
+import os
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class ForemanProvider(CloudProvider):
+ """Foreman plugin. Sets up Foreman stub server for tests."""
+ DOCKER_SIMULATOR_NAME = 'foreman-stub'
+
+ # Default image to run Foreman stub from.
+ #
+ # The simulator must be pinned to a specific version
+ # to guarantee CI passes with the version used.
+ #
+    # Its source resides at:
+ # https://github.com/ansible/foreman-test-container
+ DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER')
+ """
+    Overrides the default target container; intended for development use.
+
+    Set ANSIBLE_FRMNSIM_CONTAINER to the image you want to use.
+    Omit or leave it empty to use the default image.
+ """
+ self.image = self.__container_from_env or self.DOCKER_IMAGE
+
+ self.uses_docker = True
+
+ def setup(self): # type: () -> None
+ """Setup cloud resource before delegation and reg cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_dynamic(self): # type: () -> None
+ """Spawn a Foreman stub within docker container."""
+ foreman_port = 8080
+
+ ports = [
+ foreman_port,
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('FOREMAN_HOST', self.DOCKER_SIMULATOR_NAME)
+ self._set_cloud_config('FOREMAN_PORT', str(foreman_port))
+
+ def _setup_static(self): # type: () -> None
+ raise NotImplementedError()
+
+
+class ForemanEnvironment(CloudEnvironment):
+ """Foreman environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ env_vars = dict(
+ FOREMAN_HOST=self._get_cloud_config('FOREMAN_HOST'),
+ FOREMAN_PORT=self._get_cloud_config('FOREMAN_PORT'),
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
index c045a362..9c900071 100644
--- a/test/lib/ansible_test/_internal/cloud/galaxy.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
@@ -1,33 +1,25 @@
"""Galaxy (ansible-galaxy) plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import tempfile
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....docker_util import (
+ docker_cp_to,
)
-from ..util import (
- find_executable,
- display,
+from ....containers import (
+ run_support_container,
)
-from ..docker_util import (
- docker_command,
- docker_run,
- docker_start,
- docker_rm,
- docker_inspect,
- docker_pull,
- get_docker_container_id,
- get_docker_hostname,
- get_docker_container_ip,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
)
@@ -81,18 +73,12 @@ foreground {
class GalaxyProvider(CloudProvider):
- """Galaxy plugin.
-
- Sets up pulp (ansible-galaxy) servers for tests.
-
+ """
+ Galaxy plugin. Sets up pulp (ansible-galaxy) servers for tests.
The pulp source itself resides at: https://github.com/pulp/pulp-oci-images
"""
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(GalaxyProvider, self).__init__(args)
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
        # Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporadic issues with
# dropping published collections in CI. Try running the tests multiple times when updating. Will also need to
@@ -103,68 +89,37 @@ class GalaxyProvider(CloudProvider):
'docker.io/pulp/pulp-galaxy-ng@sha256:b79a7be64eff86d8f58db9ca83ed4967bd8b4e45c99addb17a91d11926480cf1'
)
- self.containers = []
-
- def filter(self, targets, exclude):
- """Filter out the tests with the necessary config and res unavailable.
-
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- docker_cmd = 'docker'
- docker = find_executable(docker_cmd, required=False)
-
- if docker:
- return
-
- skip = 'cloud/%s/' % self.platform
- skipped = [target.name for target in targets if skip in target.aliases]
+ self.uses_docker = True
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require the "%s" command: %s'
- % (skip.rstrip('/'), docker_cmd, ', '.join(skipped)))
-
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup cloud resource before delegation and reg cleanup callback."""
- super(GalaxyProvider, self).setup()
-
- container_id = get_docker_container_id()
-
- p_results = docker_inspect(self.args, 'ansible-ci-pulp')
-
- if p_results and not p_results[0].get('State', {}).get('Running'):
- docker_rm(self.args, 'ansible-ci-pulp')
- p_results = []
-
- display.info('%s ansible-ci-pulp docker container.'
- % ('Using the existing' if p_results else 'Starting a new'),
- verbosity=1)
+ super().setup()
galaxy_port = 80
+ pulp_host = 'ansible-ci-pulp'
pulp_port = 24817
- if not p_results:
- if self.args.docker or container_id:
- publish_ports = []
- else:
- # publish the simulator ports when not running inside docker
- publish_ports = [
- '-p', ':'.join((str(galaxy_port),) * 2),
- '-p', ':'.join((str(pulp_port),) * 2),
- ]
-
- docker_pull(self.args, self.pulp)
+ ports = [
+ galaxy_port,
+ pulp_port,
+ ]
+
+        # Create the container but don't run it; we need to inject configs before it starts
+ descriptor = run_support_container(
+ self.args,
+ self.platform,
+ self.pulp,
+ pulp_host,
+ ports,
+ start=False,
+ allow_existing=True,
+ )
- # Create the container, don't run it, we need to inject configs before it starts
- stdout, _dummy = docker_run(
- self.args,
- self.pulp,
- ['--name', 'ansible-ci-pulp'] + publish_ports,
- create_only=True
- )
+ if not descriptor:
+ return
- pulp_id = stdout.strip()
+ if not descriptor.running:
+ pulp_id = descriptor.container_id
injected_files = {
'/etc/pulp/settings.py': SETTINGS,
@@ -175,20 +130,9 @@ class GalaxyProvider(CloudProvider):
with tempfile.NamedTemporaryFile() as temp_fd:
temp_fd.write(content)
temp_fd.flush()
- docker_command(self.args, ['cp', temp_fd.name, '%s:%s' % (pulp_id, path)])
-
- # Start the container
- docker_start(self.args, 'ansible-ci-pulp', [])
-
- self.containers.append('ansible-ci-pulp')
+ docker_cp_to(self.args, pulp_id, temp_fd.name, path)
- if self.args.docker:
- pulp_host = 'ansible-ci-pulp'
- elif container_id:
- pulp_host = self._get_simulator_address('ansible-ci-pulp')
- display.info('Found Galaxy simulator container address: %s' % pulp_host, verbosity=1)
- else:
- pulp_host = get_docker_hostname()
+ descriptor.start(self.args)
self._set_cloud_config('PULP_HOST', pulp_host)
self._set_cloud_config('PULP_PORT', str(pulp_port))
@@ -196,38 +140,11 @@ class GalaxyProvider(CloudProvider):
self._set_cloud_config('PULP_USER', 'admin')
self._set_cloud_config('PULP_PASSWORD', 'password')
- def get_docker_run_options(self):
- """Get additional options needed when delegating tests to a container.
-
- :rtype: list[str]
- """
- network = get_docker_preferred_network_name(self.args)
-
- if not is_docker_user_defined_network(network):
- return ['--link', 'ansible-ci-pulp']
-
- return []
-
- def cleanup(self):
- """Clean up the resource and temporary configs files after tests."""
- for container_name in self.containers:
- docker_rm(self.args, container_name)
-
- super(GalaxyProvider, self).cleanup()
-
- def _get_simulator_address(self, container_name):
- return get_docker_container_ip(self.args, container_name)
-
class GalaxyEnvironment(CloudEnvironment):
- """Galaxy environment plugin.
-
- Updates integration test environment after delegation.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
+ """Galaxy environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
pulp_user = self._get_cloud_config('PULP_USER')
pulp_password = self._get_cloud_config('PULP_PASSWORD')
pulp_host = self._get_cloud_config('PULP_HOST')
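The new pulp setup relies on a create-inject-start flow: the container is created but not started, config files are copied in with docker_cp_to, and only then is it started. A condensed sketch (names come from the diff; the exact signatures are assumptions):

    descriptor = run_support_container(args, platform, image, 'ansible-ci-pulp', ports,
                                       start=False, allow_existing=True)

    if descriptor and not descriptor.running:
        for path, content in injected_files.items():
            with tempfile.NamedTemporaryFile() as temp_fd:
                temp_fd.write(content)  # bytes of one config file, e.g. /etc/pulp/settings.py
                temp_fd.flush()
                docker_cp_to(args, descriptor.container_id, temp_fd.name, path)

        descriptor.start(args)  # the container boots only after configs are in place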
diff --git a/test/lib/ansible_test/_internal/cloud/gcp.py b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
index c8de1835..b23097a7 100644
--- a/test/lib/ansible_test/_internal/cloud/gcp.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
@@ -1,40 +1,35 @@
# Copyright: (c) 2018, Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""GCP plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import os
+import configparser
-from ..util import (
+from ....util import (
display,
- ConfigParser,
+)
+
+from ....config import (
+ IntegrationConfig,
)
from . import (
- CloudProvider,
CloudEnvironment,
CloudEnvironmentConfig,
+ CloudProvider,
)
class GcpCloudProvider(CloudProvider):
"""GCP cloud provider plugin. Sets up cloud resources before delegation."""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
-
- if os.path.isfile(self.config_static_path):
- return
-
- super(GcpCloudProvider, self).filter(targets, exclude)
+ self.uses_config = True
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(GcpCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
display.notice(
@@ -44,11 +39,9 @@ class GcpCloudProvider(CloudProvider):
class GcpCloudEnvironment(CloudEnvironment):
"""GCP cloud environment plugin. Updates integration test environment after delegation."""
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/cloud/hcloud.py b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
index 3c422fb4..28b07e72 100644
--- a/test/lib/ansible_test/_internal/cloud/hcloud.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
@@ -1,59 +1,56 @@
"""Hetzner Cloud plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import os
+import configparser
+import typing as t
-from ..util import (
+from ....util import (
display,
- ConfigParser,
)
-from . import (
- CloudProvider,
- CloudEnvironment,
- CloudEnvironmentConfig,
+from ....config import (
+ IntegrationConfig,
)
-from ..core_ci import (
+from ....target import (
+ IntegrationTarget,
+)
+
+from ....core_ci import (
AnsibleCoreCI,
)
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
class HcloudCloudProvider(CloudProvider):
- """Hetzner Cloud provider plugin. Sets up cloud resources before
- delegation.
- """
-
- def __init__(self, args):
- """
- :type args: TestConfig
- """
- super(HcloudCloudProvider, self).__init__(args)
-
- def filter(self, targets, exclude):
- """Filter out the cloud tests when the necessary config and resources are not available.
- :type targets: tuple[TestTarget]
- :type exclude: list[str]
- """
- if os.path.isfile(self.config_static_path):
- return
+ """Hetzner Cloud provider plugin. Sets up cloud resources before delegation."""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+ def filter(self, targets, exclude): # type: (t.Tuple[IntegrationTarget, ...], t.List[str]) -> None
+ """Filter out the cloud tests when the necessary config and resources are not available."""
aci = self._create_ansible_core_ci()
if aci.available:
return
- super(HcloudCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(HcloudCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
- def _setup_dynamic(self):
+ def _setup_dynamic(self): # type: () -> None
"""Request Hetzner credentials through the Ansible Core CI service."""
display.info('Provisioning %s cloud environment.' % self.platform, verbosity=1)
@@ -79,23 +76,16 @@ class HcloudCloudProvider(CloudProvider):
self._write_config(config)
- def _create_ansible_core_ci(self):
- """
- :rtype: AnsibleCoreCI
- """
- return AnsibleCoreCI(self.args, 'hetzner', 'hetzner', persist=False, stage=self.args.remote_stage, provider='hetzner', internal=True)
+ def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
+ """Return a Heztner instance of AnsibleCoreCI."""
+ return AnsibleCoreCI(self.args, 'hetzner', 'hetzner', 'hetzner', persist=False)
class HcloudCloudEnvironment(CloudEnvironment):
- """Hetzner Cloud cloud environment plugin. Updates integration test environment
- after delegation.
- """
-
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
+ """Hetzner Cloud cloud environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
new file mode 100644
index 00000000..2d8217e9
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
@@ -0,0 +1,92 @@
+"""HTTP Tester plugin for integration tests."""
+from __future__ import annotations
+
+import os
+
+from ....util import (
+ display,
+ generate_password,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+KRB5_PASSWORD_ENV = 'KRB5_PASSWORD'
+
+
+class HttptesterProvider(CloudProvider):
+ """HTTP Tester provider plugin. Sets up resources before delegation."""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.image = os.environ.get('ANSIBLE_HTTP_TEST_CONTAINER', 'quay.io/ansible/http-test-container:1.3.0')
+
+ self.uses_docker = True
+
+ def setup(self): # type: () -> None
+ """Setup resources before delegation."""
+ super().setup()
+
+ ports = [
+ 80,
+ 88,
+ 443,
+ 444,
+ 749,
+ ]
+
+ aliases = [
+ 'ansible.http.tests',
+ 'sni1.ansible.http.tests',
+ 'fail.ansible.http.tests',
+ 'self-signed.ansible.http.tests',
+ ]
+
+ descriptor = run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ 'http-test-container',
+ ports,
+ aliases=aliases,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ env={
+ KRB5_PASSWORD_ENV: generate_password(),
+ },
+ )
+
+ if not descriptor:
+ return
+
+ # Read the password from the container environment.
+ # This allows the tests to work when re-using an existing container.
+ # The password is marked as sensitive, since it may differ from the one we generated.
+ krb5_password = descriptor.details.container.env_dict()[KRB5_PASSWORD_ENV]
+ display.sensitive.add(krb5_password)
+
+ self._set_cloud_config(KRB5_PASSWORD_ENV, krb5_password)
+
+
+class HttptesterEnvironment(CloudEnvironment):
+ """HTTP Tester environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ return CloudEnvironmentConfig(
+ env_vars=dict(
+ HTTPTESTER='1', # backwards compatibility for tests intended to work with or without HTTP Tester
+ KRB5_PASSWORD=self._get_cloud_config(KRB5_PASSWORD_ENV),
+ )
+ )
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
new file mode 100644
index 00000000..4c695fc6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
@@ -0,0 +1,97 @@
+"""NIOS plugin for integration tests."""
+from __future__ import annotations
+
+import os
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class NiosProvider(CloudProvider):
+ """Nios plugin. Sets up NIOS mock server for tests."""
+ DOCKER_SIMULATOR_NAME = 'nios-simulator'
+
+ # Default image to run the nios simulator.
+ #
+ # The simulator must be pinned to a specific version
+ # to guarantee CI passes with the version used.
+ #
+ # It's source source itself resides at:
+ # https://github.com/ansible/nios-test-container
+ DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.3.0'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.__container_from_env = os.environ.get('ANSIBLE_NIOSSIM_CONTAINER')
+ """
+    Overrides the default target container; intended for development use.
+
+    Set ANSIBLE_NIOSSIM_CONTAINER to the image you want to use.
+    Omit or leave it empty to use the default image.
+ """
+
+ self.image = self.__container_from_env or self.DOCKER_IMAGE
+
+ self.uses_docker = True
+
+ def setup(self): # type: () -> None
+ """Setup cloud resource before delegation and reg cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_dynamic(self): # type: () -> None
+ """Spawn a NIOS simulator within docker container."""
+ nios_port = 443
+
+ ports = [
+ nios_port,
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME)
+
+ def _setup_static(self): # type: () -> None
+ raise NotImplementedError()
+
+
+class NiosEnvironment(CloudEnvironment):
+ """NIOS environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ ansible_vars = dict(
+ nios_provider=dict(
+ host=self._get_cloud_config('NIOS_HOST'),
+ username='admin',
+ password='infoblox',
+ ),
+ )
+
+ return CloudEnvironmentConfig(
+ ansible_vars=ansible_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/cloud/opennebula.py b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
index 559093e3..3019f310 100644
--- a/test/lib/ansible_test/_internal/cloud/opennebula.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
@@ -1,33 +1,31 @@
"""OpenNebula plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
+
+from ....util import (
+ display,
+)
from . import (
- CloudProvider,
CloudEnvironment,
CloudEnvironmentConfig,
-)
-
-from ..util import (
- display,
- ConfigParser,
+ CloudProvider,
)
class OpenNebulaCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
-
- def filter(self, targets, exclude):
- """ no need to filter modules, they can either run from config file or from fixtures"""
-
- def setup(self):
+ def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(OpenNebulaCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
- def _setup_dynamic(self):
+ self.uses_config = True
+
+ def _setup_dynamic(self): # type: () -> None
display.info('No config file provided, will run test from fixtures')
config = self._read_config_template()
@@ -43,14 +41,10 @@ class OpenNebulaCloudProvider(CloudProvider):
class OpenNebulaCloudEnvironment(CloudEnvironment):
- """
- Updates integration test environment after delegation. Will setup the config file as parameter.
- """
- def get_environment_config(self):
- """
- :rtype: CloudEnvironmentConfig
- """
- parser = ConfigParser()
+ """Updates integration test environment after delegation. Will setup the config file as parameter."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
new file mode 100644
index 00000000..c30785af
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
@@ -0,0 +1,114 @@
+"""OpenShift plugin for integration tests."""
+from __future__ import annotations
+
+import re
+
+from ....io import (
+ read_text_file,
+)
+
+from ....util import (
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+ wait_for_file,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class OpenShiftCloudProvider(CloudProvider):
+ """OpenShift cloud provider plugin. Sets up cloud resources before delegation."""
+ DOCKER_CONTAINER_NAME = 'openshift-origin'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args, config_extension='.kubeconfig')
+
+ # The image must be pinned to a specific version to guarantee CI passes with the version used.
+ self.image = 'openshift/origin:v3.9.0'
+
+ self.uses_docker = True
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ if self._use_static_config():
+ self._setup_static()
+ else:
+ self._setup_dynamic()
+
+ def _setup_static(self): # type: () -> None
+ """Configure OpenShift tests for use with static configuration."""
+ config = read_text_file(self.config_static_path)
+
+ match = re.search(r'^ *server: (?P<server>.*)$', config, flags=re.MULTILINE)
+
+ if not match:
+ display.warning('Could not find OpenShift endpoint in kubeconfig.')
+
+ def _setup_dynamic(self): # type: () -> None
+ """Create a OpenShift container using docker."""
+ port = 8443
+
+ ports = [
+ port,
+ ]
+
+ cmd = ['start', 'master', '--listen', 'https://0.0.0.0:%d' % port]
+
+ descriptor = run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_CONTAINER_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ cmd=cmd,
+ )
+
+ if not descriptor:
+ return
+
+ if self.args.explain:
+ config = '# Unknown'
+ else:
+ config = self._get_config(self.DOCKER_CONTAINER_NAME, 'https://%s:%s/' % (self.DOCKER_CONTAINER_NAME, port))
+
+ self._write_config(config)
+
+ def _get_config(self, container_name, server): # type: (str, str) -> str
+ """Get OpenShift config from container."""
+ stdout = wait_for_file(self.args, container_name, '/var/lib/origin/openshift.local.config/master/admin.kubeconfig', sleep=10, tries=30)
+
+ config = stdout
+ config = re.sub(r'^( *)certificate-authority-data: .*$', r'\1insecure-skip-tls-verify: true', config, flags=re.MULTILINE)
+ config = re.sub(r'^( *)server: .*$', r'\1server: %s' % server, config, flags=re.MULTILINE)
+
+ return config
+
+
+class OpenShiftCloudEnvironment(CloudEnvironment):
+ """OpenShift cloud environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ env_vars = dict(
+ K8S_AUTH_KUBECONFIG=self.config_path,
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ )
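The kubeconfig rewriting in _get_config above is a pure regular-expression transformation; a tiny standalone illustration on a hypothetical two-line kubeconfig fragment:

    import re

    config = '    certificate-authority-data: abc123\n    server: https://10.0.0.5:8443\n'
    config = re.sub(r'^( *)certificate-authority-data: .*$', r'\1insecure-skip-tls-verify: true', config, flags=re.MULTILINE)
    config = re.sub(r'^( *)server: .*$', r'\1server: %s' % 'https://openshift-origin:8443/', config, flags=re.MULTILINE)
    # The result skips TLS verification and points at the container hostname.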
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
new file mode 100644
index 00000000..1ef158b2
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
@@ -0,0 +1,56 @@
+"""Scaleway plugin for integration tests."""
+from __future__ import annotations
+
+import configparser
+
+from ....util import (
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class ScalewayCloudProvider(CloudProvider):
+ """Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ self._use_static_config()
+
+
+class ScalewayCloudEnvironment(CloudEnvironment):
+ """Updates integration test environment after delegation. Will setup the config file as parameter."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path)
+
+ env_vars = dict(
+ SCW_API_KEY=parser.get('default', 'key'),
+ SCW_ORG=parser.get('default', 'org')
+ )
+
+ display.sensitive.add(env_vars['SCW_API_KEY'])
+
+ ansible_vars = dict(
+ scw_org=parser.get('default', 'org'),
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
new file mode 100644
index 00000000..fb69b9b2
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
@@ -0,0 +1,138 @@
+"""VMware vCenter plugin for integration tests."""
+from __future__ import annotations
+
+import configparser
+import os
+
+from ....util import (
+ ApplicationError,
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from ....containers import (
+ CleanupMode,
+ run_support_container,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class VcenterProvider(CloudProvider):
+ """VMware vcenter/esx plugin. Sets up cloud resources for tests."""
+ DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
+
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ # The simulator must be pinned to a specific version to guarantee CI passes with the version used.
+ if os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
+ self.image = os.environ.get('ANSIBLE_VCSIM_CONTAINER')
+ else:
+ self.image = 'quay.io/ansible/vcenter-test-container:1.7.0'
+
+ # VMware tests can be run on govcsim or BYO with a static config file.
+ # The simulator is the default if no config is provided.
+ self.vmware_test_platform = os.environ.get('VMWARE_TEST_PLATFORM', 'govcsim')
+
+ if self.vmware_test_platform == 'govcsim':
+ self.uses_docker = True
+ self.uses_config = False
+ elif self.vmware_test_platform == 'static':
+ self.uses_docker = False
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ self._set_cloud_config('vmware_test_platform', self.vmware_test_platform)
+
+ if self.vmware_test_platform == 'govcsim':
+ self._setup_dynamic_simulator()
+ self.managed = True
+ elif self.vmware_test_platform == 'static':
+ self._use_static_config()
+ self._setup_static()
+ else:
+ raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform)
+
+ def _setup_dynamic_simulator(self): # type: () -> None
+ """Create a vcenter simulator using docker."""
+ ports = [
+ 443,
+ 8080,
+ 8989,
+ 5000, # control port for flask app in simulator
+ ]
+
+ run_support_container(
+ self.args,
+ self.platform,
+ self.image,
+ self.DOCKER_SIMULATOR_NAME,
+ ports,
+ allow_existing=True,
+ cleanup=CleanupMode.YES,
+ )
+
+ self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME)
+
+ def _setup_static(self): # type: () -> None
+ if not os.path.exists(self.config_static_path):
+ raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path)
+
+
+class VcenterEnvironment(CloudEnvironment):
+ """VMware vcenter/esx environment plugin. Updates integration test environment after delegation."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ try:
+            # We may be running in a container, so we cannot simply read VMWARE_TEST_PLATFORM;
+            # use a try/except on the static config instead.
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path) # static
+
+ env_vars = dict()
+ ansible_vars = dict(
+ resource_prefix=self.resource_prefix,
+ )
+ ansible_vars.update(dict(parser.items('DEFAULT', raw=True)))
+ except KeyError: # govcsim
+ env_vars = dict(
+ VCENTER_HOSTNAME=self._get_cloud_config('vcenter_hostname'),
+ VCENTER_USERNAME='user',
+ VCENTER_PASSWORD='pass',
+ )
+
+ ansible_vars = dict(
+ vcsim=self._get_cloud_config('vcenter_hostname'),
+ vcenter_hostname=self._get_cloud_config('vcenter_hostname'),
+ vcenter_username='user',
+ vcenter_password='pass',
+ )
+
+ for key, value in ansible_vars.items():
+ if key.endswith('_password'):
+ display.sensitive.add(value)
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ module_defaults={
+ 'group/vmware': {
+ 'hostname': ansible_vars['vcenter_hostname'],
+ 'username': ansible_vars['vcenter_username'],
+ 'password': ansible_vars['vcenter_password'],
+ 'port': ansible_vars.get('vcenter_port', '443'),
+ 'validate_certs': ansible_vars.get('vmware_validate_certs', 'no'),
+ },
+ },
+ )
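The VMWARE_TEST_PLATFORM switch above chooses between the dockerized govcsim simulator and a bring-your-own static config; a condensed sketch of the selection logic:

    import os

    # 'govcsim' (the default) runs the simulator container; 'static' expects a
    # user-supplied config file; anything else is rejected in setup().
    vmware_test_platform = os.environ.get('VMWARE_TEST_PLATFORM', 'govcsim')
    uses_docker = vmware_test_platform == 'govcsim'
    uses_config = vmware_test_platform == 'static'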
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
new file mode 100644
index 00000000..2e8b1b3f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
@@ -0,0 +1,55 @@
+"""Vultr plugin for integration tests."""
+from __future__ import annotations
+
+import configparser
+
+from ....util import (
+ display,
+)
+
+from ....config import (
+ IntegrationConfig,
+)
+
+from . import (
+ CloudEnvironment,
+ CloudEnvironmentConfig,
+ CloudProvider,
+)
+
+
+class VultrCloudProvider(CloudProvider):
+ """Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+ def __init__(self, args): # type: (IntegrationConfig) -> None
+ super().__init__(args)
+
+ self.uses_config = True
+
+ def setup(self): # type: () -> None
+ """Setup the cloud resource before delegation and register a cleanup callback."""
+ super().setup()
+
+ self._use_static_config()
+
+
+class VultrCloudEnvironment(CloudEnvironment):
+ """Updates integration test environment after delegation. Will setup the config file as parameter."""
+ def get_environment_config(self): # type: () -> CloudEnvironmentConfig
+ """Return environment configuration for use in the test environment after delegation."""
+ parser = configparser.ConfigParser()
+ parser.read(self.config_path)
+
+ env_vars = dict(
+ VULTR_API_KEY=parser.get('default', 'key'),
+ )
+
+ display.sensitive.add(env_vars['VULTR_API_KEY'])
+
+ ansible_vars = dict(
+ vultr_resource_prefix=self.resource_prefix,
+ )
+
+ return CloudEnvironmentConfig(
+ env_vars=env_vars,
+ ansible_vars=ansible_vars,
+ )
diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py
new file mode 100644
index 00000000..c36b4403
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py
@@ -0,0 +1,416 @@
+"""Code coverage support for integration tests."""
+from __future__ import annotations
+
+import abc
+import os
+import shutil
+import tempfile
+import typing as t
+import zipfile
+
+from ...io import (
+ write_text_file,
+)
+
+from ...ansible_util import (
+ run_playbook,
+)
+
+from ...config import (
+ IntegrationConfig,
+)
+
+from ...util import (
+ COVERAGE_CONFIG_NAME,
+ MODE_DIRECTORY,
+ MODE_DIRECTORY_WRITE,
+ MODE_FILE,
+ SubprocessError,
+ cache,
+ display,
+ generate_name,
+ get_generic_type,
+ get_type_map,
+ remove_tree,
+ sanitize_host_name,
+)
+
+from ...util_common import (
+ ResultType,
+)
+
+from ...coverage_util import (
+ generate_coverage_config,
+ get_coverage_platform,
+)
+
+from ...host_configs import (
+ HostConfig,
+ PosixConfig,
+ WindowsConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_profiles import (
+ ControllerProfile,
+ HostProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
+
+from ...provisioning import (
+ HostState,
+)
+
+from ...connections import (
+ LocalConnection,
+)
+
+from ...inventory import (
+ create_windows_inventory,
+ create_posix_inventory,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+
+
+class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for configuring hosts for integration test code coverage."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ self.args = args
+ self.host_state = host_state
+ self.inventory_path = inventory_path
+ self.profiles = self.get_profiles()
+
+ def get_profiles(self): # type: () -> t.List[HostProfile]
+ """Return a list of profiles relevant for this handler."""
+ profile_type = get_generic_type(type(self), HostConfig)
+ profiles = [profile for profile in self.host_state.target_profiles if isinstance(profile.config, profile_type)]
+
+ return profiles
+
+ @property
+ @abc.abstractmethod
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+
+ @abc.abstractmethod
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+
+ @abc.abstractmethod
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+
+ @abc.abstractmethod
+ def create_inventory(self): # type: () -> None
+ """Create inventory, if needed."""
+
+ @abc.abstractmethod
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ def run_playbook(self, playbook, variables): # type: (str, t.Dict[str, str]) -> None
+ """Run the specified playbook using the current inventory."""
+ self.create_inventory()
+ run_playbook(self.args, self.inventory_path, playbook, variables)
+
+
+class PosixCoverageHandler(CoverageHandler[PosixConfig]):
+ """Configure integration test code coverage for POSIX hosts."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ super().__init__(args, host_state, inventory_path)
+
+ # Common temporary directory used on all POSIX hosts that will be created world writable.
+ self.common_temp_path = f'/tmp/ansible-test-{generate_name()}'
+
+ def get_profiles(self): # type: () -> t.List[HostProfile]
+ """Return a list of profiles relevant for this handler."""
+ profiles = super().get_profiles()
+ profiles = [profile for profile in profiles if not isinstance(profile, ControllerProfile) or
+ profile.python.path != self.host_state.controller_profile.python.path]
+
+ return profiles
+
+ @property
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+ return True
+
+ @property
+ def target_profile(self): # type: () -> t.Optional[PosixProfile]
+ """The POSIX target profile, if it uses a different Python interpreter than the controller, otherwise None."""
+ return t.cast(PosixProfile, self.profiles[0]) if self.profiles else None
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ self.setup_controller()
+ self.setup_target()
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ self.teardown_controller()
+ self.teardown_target()
+
+ def setup_controller(self):
+ """Perform setup for code coverage on the controller."""
+ coverage_config_path = os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME)
+ coverage_output_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name)
+
+ coverage_config = generate_coverage_config(self.args)
+
+ write_text_file(coverage_config_path, coverage_config, create_directories=True)
+
+ os.chmod(coverage_config_path, MODE_FILE)
+ os.mkdir(coverage_output_path)
+ os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
+
+ def setup_target(self):
+ """Perform setup for code coverage on the target."""
+ if not self.target_profile:
+ return
+
+ if isinstance(self.target_profile, ControllerProfile):
+ return
+
+ self.run_playbook('posix_coverage_setup.yml', self.get_playbook_variables())
+
+ def teardown_controller(self): # type: () -> None
+ """Perform teardown for code coverage on the controller."""
+ coverage_temp_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name)
+ platform = get_coverage_platform(self.args.controller)
+
+ for filename in os.listdir(coverage_temp_path):
+ shutil.copyfile(os.path.join(coverage_temp_path, filename), os.path.join(ResultType.COVERAGE.path, update_coverage_filename(filename, platform)))
+
+ remove_tree(self.common_temp_path)
+
+ def teardown_target(self): # type: () -> None
+ """Perform teardown for code coverage on the target."""
+ if not self.target_profile:
+ return
+
+ if isinstance(self.target_profile, ControllerProfile):
+ return
+
+ profile = t.cast(SshTargetHostProfile, self.target_profile)
+ platform = get_coverage_platform(profile.config)
+ con = profile.get_controller_target_connections()[0]
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-coverage-', suffix='.tgz') as coverage_tgz:
+ try:
+ con.create_archive(chdir=self.common_temp_path, name=ResultType.COVERAGE.name, dst=coverage_tgz)
+ except SubprocessError as ex:
+ display.warning(f'Failed to download coverage results: {ex}')
+ else:
+ coverage_tgz.seek(0)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ local_con = LocalConnection(self.args)
+ local_con.extract_archive(chdir=temp_dir, src=coverage_tgz)
+
+ base_dir = os.path.join(temp_dir, ResultType.COVERAGE.name)
+
+ for filename in os.listdir(base_dir):
+ shutil.copyfile(os.path.join(base_dir, filename), os.path.join(ResultType.COVERAGE.path, update_coverage_filename(filename, platform)))
+
+ self.run_playbook('posix_coverage_teardown.yml', self.get_playbook_variables())
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ # Enable code coverage collection on Ansible modules (both local and remote).
+ # Used by the AnsiballZ wrapper generator in lib/ansible/executor/module_common.py to support code coverage.
+ config_file = os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME)
+
+ # Include the command, target and platform marker so the remote host can create a filename with that info.
+ # The generated AnsiballZ wrapper is responsible for adding '=python-{X.Y}=coverage.{hostname}.{pid}.{id}'
+ coverage_file = os.path.join(self.common_temp_path, ResultType.COVERAGE.name, '='.join((self.args.command, target_name, 'platform')))
+
+ if self.args.coverage_check:
+ # cause the 'coverage' module to be found, but not imported or enabled
+ coverage_file = ''
+
+ variables = dict(
+ _ANSIBLE_COVERAGE_CONFIG=config_file,
+ _ANSIBLE_COVERAGE_OUTPUT=coverage_file,
+ )
+
+ return variables
+
+ def create_inventory(self): # type: () -> None
+ """Create inventory."""
+ create_posix_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
+
+ def get_playbook_variables(self): # type: () -> t.Dict[str, str]
+ """Return a dictionary of variables for setup and teardown of POSIX coverage."""
+ return dict(
+ common_temp_dir=self.common_temp_path,
+ coverage_config=generate_coverage_config(self.args),
+ coverage_config_path=os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME),
+ coverage_output_path=os.path.join(self.common_temp_path, ResultType.COVERAGE.name),
+ mode_directory=f'{MODE_DIRECTORY:04o}',
+ mode_directory_write=f'{MODE_DIRECTORY_WRITE:04o}',
+ mode_file=f'{MODE_FILE:04o}',
+ )
+
+
+class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
+ """Configure integration test code coverage for Windows hosts."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ super().__init__(args, host_state, inventory_path)
+
+ # Common temporary directory used on all Windows hosts that will be created writable by everyone.
+ self.remote_temp_path = f'C:\\ansible_test_coverage_{generate_name()}'
+
+ @property
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+ return bool(self.profiles) and not self.args.coverage_check
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ self.run_playbook('windows_coverage_setup.yml', self.get_playbook_variables())
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ with tempfile.TemporaryDirectory() as local_temp_path:
+ variables = self.get_playbook_variables()
+ variables.update(
+ local_temp_path=local_temp_path,
+ )
+
+ self.run_playbook('windows_coverage_teardown.yml', variables)
+
+ for filename in os.listdir(local_temp_path):
+ if all(isinstance(profile.config, WindowsRemoteConfig) for profile in self.profiles):
+ prefix = 'remote'
+ elif all(isinstance(profile.config, WindowsInventoryConfig) for profile in self.profiles):
+ prefix = 'inventory'
+ else:
+ raise NotImplementedError()
+
+ platform = f'{prefix}-{sanitize_host_name(os.path.splitext(filename)[0])}'
+
+ with zipfile.ZipFile(os.path.join(local_temp_path, filename)) as coverage_zip:
+ for item in coverage_zip.infolist():
+ if item.is_dir():
+ raise Exception(f'Unexpected directory in zip file: {item.filename}')
+
+ item.filename = update_coverage_filename(item.filename, platform)
+
+ coverage_zip.extract(item, ResultType.COVERAGE.path)
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ # Include the command, target and platform marker so the remote host can create a filename with that info.
+ # The remote is responsible for adding '={language-version}=coverage.{hostname}.{pid}.{id}'
+ coverage_name = '='.join((self.args.command, target_name, 'platform'))
+
+ variables = dict(
+ _ANSIBLE_COVERAGE_REMOTE_OUTPUT=os.path.join(self.remote_temp_path, coverage_name),
+ _ANSIBLE_COVERAGE_REMOTE_PATH_FILTER=os.path.join(data_context().content.root, '*'),
+ )
+
+ return variables
+
+ def create_inventory(self): # type: () -> None
+ """Create inventory."""
+ create_windows_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
+
+ def get_playbook_variables(self): # type: () -> t.Dict[str, str]
+ """Return a dictionary of variables for setup and teardown of Windows coverage."""
+ return dict(
+ remote_temp_path=self.remote_temp_path,
+ )
+
+
+class CoverageManager:
+ """Manager for code coverage configuration and state."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ self.args = args
+ self.host_state = host_state
+ self.inventory_path = inventory_path
+
+ if self.args.coverage:
+ handler_types = set(get_handler_type(type(profile.config)) for profile in host_state.profiles)
+ handler_types.discard(None)
+ else:
+ handler_types = set()
+
+ handlers = [handler_type(args=args, host_state=host_state, inventory_path=inventory_path) for handler_type in handler_types]
+
+ self.handlers = [handler for handler in handlers if handler.is_active]
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ if not self.args.coverage:
+ return
+
+ for handler in self.handlers:
+ handler.setup()
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ if not self.args.coverage:
+ return
+
+ for handler in self.handlers:
+ handler.teardown()
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+ if not self.args.coverage or 'non_local/' in aliases:
+ return {}
+
+ env = {}
+
+ for handler in self.handlers:
+ env.update(handler.get_environment(target_name, aliases))
+
+ return env
+
+
+@cache
+def get_config_handler_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]
+ """Create and return a mapping of HostConfig types to CoverageHandler types."""
+ return get_type_map(CoverageHandler, HostConfig)
+
+
+def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]]
+ """Return the coverage handler type associated with the given host config type if found, otherwise return None."""
+ queue = [config_type]
+ type_map = get_config_handler_type_map()
+
+ while queue:
+ config_type = queue.pop(0)
+ handler_type = type_map.get(config_type)
+
+ if handler_type:
+ return handler_type
+
+ queue.extend(config_type.__bases__)
+
+ return None
+
+
+def update_coverage_filename(original_filename, platform): # type: (str, str) -> str
+ """Validate the given filename and insert the specified platform, then return the result."""
+ parts = original_filename.split('=')
+
+ if original_filename != os.path.basename(original_filename) or len(parts) != 5 or parts[2] != 'platform':
+ raise Exception(f'Unexpected coverage filename: {original_filename}')
+
+ parts[2] = platform
+
+ updated_filename = '='.join(parts)
+
+ display.info(f'Coverage file for platform "{platform}": {original_filename} -> {updated_filename}', verbosity=3)
+
+ return updated_filename
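
Note: a worked example of the renaming performed by update_coverage_filename() above, using a hypothetical input filename and platform label (the real platform string comes from get_coverage_platform()):

original = 'integration=vault=platform=python-3.9=coverage.host.1234.0a1b'
parts = original.split('=')   # 5 parts; parts[2] must be 'platform'
parts[2] = 'docker-default'   # hypothetical platform label
assert '='.join(parts) == 'integration=vault=docker-default=python-3.9=coverage.host.1234.0a1b'
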
diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py
new file mode 100644
index 00000000..9854de57
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/filters.py
@@ -0,0 +1,273 @@
+"""Logic for filtering out integration test targets which are unsupported for the currently provided arguments and available hosts."""
+from __future__ import annotations
+
+import abc
+import typing as t
+
+from ...config import (
+ IntegrationConfig,
+)
+
+from ...util import (
+ cache,
+ display,
+ get_type_map,
+)
+
+from ...target import (
+ IntegrationTarget,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ FallbackReason,
+ HostConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ RemoteConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ...host_profiles import (
+ HostProfile,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+TPosixConfig = t.TypeVar('TPosixConfig', bound=PosixConfig)
+TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
+
+
+class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for target filters."""
+ def __init__(self, args, configs, controller): # type: (IntegrationConfig, t.List[THostConfig], bool) -> None
+ self.args = args
+ self.configs = configs
+ self.controller = controller
+ self.host_type = 'controller' if controller else 'target'
+
+ # values which are not host specific
+ self.include_targets = args.include
+ self.allow_root = args.allow_root
+ self.allow_destructive = args.allow_destructive
+
+ @property
+ def config(self): # type: () -> THostConfig
+ """The configuration to filter. Only valid when there is a single config."""
+ if len(self.configs) != 1:
+ raise Exception('This property is only valid when there is exactly one config.')
+
+ return self.configs[0]
+
+ def skip(
+ self,
+ skip, # type: str
+ reason, # type: str
+ targets, # type: t.List[IntegrationTarget]
+ exclude, # type: t.Set[str]
+ override=None, # type: t.Optional[t.List[str]]
+ ): # type: (...) -> None
+ """Apply the specified skip rule to the given targets by updating the provided exclude list."""
+ if skip.startswith('skip/'):
+ skipped = [target.name for target in targets if skip in target.skips and (not override or target.name not in override)]
+ else:
+ skipped = [target.name for target in targets if f'{skip}/' in target.aliases and (not override or target.name not in override)]
+
+ self.apply_skip(f'"{skip}"', reason, skipped, exclude)
+
+ def apply_skip(self, marked, reason, skipped, exclude): # type: (str, str, t.List[str], t.Set[str]) -> None
+ """Apply the provided skips to the given exclude list."""
+ if not skipped:
+ return
+
+ exclude.update(skipped)
+ display.warning(f'Excluding {self.host_type} tests marked {marked} {reason}: {", ".join(skipped)}')
+
+ def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Filter the list of profiles, returning only those which are not skipped for the given target."""
+ del target
+ return profiles
+
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ if self.controller and self.args.host_settings.controller_fallback and targets:
+ affected_targets = [target.name for target in targets]
+ reason = self.args.host_settings.controller_fallback.reason
+
+ if reason == FallbackReason.ENVIRONMENT:
+ exclude.update(affected_targets)
+ display.warning(f'Excluding {self.host_type} tests since a fallback controller is in use: {", ".join(affected_targets)}')
+ elif reason == FallbackReason.PYTHON:
+ display.warning(f'Some {self.host_type} tests may be redundant since a fallback python is in use: {", ".join(affected_targets)}')
+
+ if not self.allow_destructive and not self.config.is_managed:
+ override_destructive = set(target for target in self.include_targets if target.startswith('destructive/'))
+ override = [target.name for target in targets if override_destructive & set(target.skips)]
+
+ self.skip('destructive', 'which require --allow-destructive or prefixing with "destructive/" to run on unmanaged hosts', targets, exclude, override)
+
+ if not self.args.allow_disabled:
+ override_disabled = set(target for target in self.args.include if target.startswith('disabled/'))
+ override = [target.name for target in targets if override_disabled & set(target.skips)]
+
+ self.skip('disabled', 'which require --allow-disabled or prefixing with "disabled/"', targets, exclude, override)
+
+ if not self.args.allow_unsupported:
+ override_unsupported = set(target for target in self.args.include if target.startswith('unsupported/'))
+ override = [target.name for target in targets if override_unsupported & set(target.skips)]
+
+ self.skip('unsupported', 'which require --allow-unsupported or prefixing with "unsupported/"', targets, exclude, override)
+
+ if not self.args.allow_unstable:
+ override_unstable = set(target for target in self.args.include if target.startswith('unstable/'))
+
+ if self.args.allow_unstable_changed:
+ override_unstable |= set(self.args.metadata.change_description.focused_targets or [])
+
+ override = [target.name for target in targets if override_unstable & set(target.skips)]
+
+ self.skip('unstable', 'which require --allow-unstable or prefixing with "unstable/"', targets, exclude, override)
+
+
+class PosixTargetFilter(TargetFilter[TPosixConfig]):
+ """Target filter for POSIX hosts."""
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ if not self.allow_root and not self.config.have_root:
+ self.skip('needs/root', 'which require --allow-root or running as root', targets, exclude)
+
+ self.skip(f'skip/python{self.config.python.version}', f'which are not supported by Python {self.config.python.version}', targets, exclude)
+ self.skip(f'skip/python{self.config.python.major_version}', f'which are not supported by Python {self.config.python.major_version}', targets, exclude)
+
+
+class DockerTargetFilter(PosixTargetFilter[DockerConfig]):
+ """Target filter for docker hosts."""
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ self.skip('skip/docker', 'which cannot run under docker', targets, exclude)
+
+ if not self.config.privileged:
+ self.skip('needs/privileged', 'which require --docker-privileged to run under docker', targets, exclude)
+
+
+class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]):
+ """Target filter for POSIX SSH hosts."""
+
+
+class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
+ """Target filter for remote Ansible Core CI managed hosts."""
+ def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Filter the list of profiles, returning only those which are not skipped for the given target."""
+ profiles = super().filter_profiles(profiles, target)
+
+ skipped_profiles = [profile for profile in profiles if any(skip in target.skips for skip in get_remote_skip_aliases(profile.config))]
+
+ if skipped_profiles:
+ configs = [profile.config for profile in skipped_profiles] # type: t.List[TRemoteConfig]
+ display.warning(f'Excluding skipped hosts from inventory: {", ".join(config.name for config in configs)}')
+
+ profiles = [profile for profile in profiles if profile not in skipped_profiles]
+
+ return profiles
+
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ if len(self.configs) > 1:
+ host_skips = {host.name: get_remote_skip_aliases(host) for host in self.configs}
+
+ # Skip only targets which skip all hosts.
+ # Targets that skip only some hosts will be handled during inventory generation.
+ skipped = [target.name for target in targets if all(any(skip in target.skips for skip in skips) for skips in host_skips.values())]
+
+ if skipped:
+ exclude.update(skipped)
+ display.warning(f'Excluding tests which do not support {", ".join(host_skips.keys())}: {", ".join(skipped)}')
+ else:
+ skips = get_remote_skip_aliases(self.config)
+
+ for skip, reason in skips.items():
+ self.skip(skip, reason, targets, exclude)
+
+
+class PosixRemoteTargetFilter(PosixTargetFilter[PosixRemoteConfig], RemoteTargetFilter[PosixRemoteConfig]):
+ """Target filter for POSIX remote hosts."""
+
+
+class WindowsRemoteTargetFilter(RemoteTargetFilter[WindowsRemoteConfig]):
+ """Target filter for remote Windows hosts."""
+
+
+class WindowsInventoryTargetFilter(TargetFilter[WindowsInventoryConfig]):
+ """Target filter for Windows inventory."""
+
+
+class NetworkRemoteTargetFilter(RemoteTargetFilter[NetworkRemoteConfig]):
+ """Target filter for remote network hosts."""
+
+
+class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
+ """Target filter for network inventory."""
+
+
+class OriginTargetFilter(TargetFilter[OriginConfig]):
+ """Target filter for localhost."""
+
+
+@cache
+def get_host_target_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]
+ """Create and return a mapping of HostConfig types to TargetFilter types."""
+ return get_type_map(TargetFilter, HostConfig)
+
+
+def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t.List[HostConfig], bool) -> TargetFilter
+ """Return an integration test target filter instance for the provided host configurations."""
+ target_type = type(configs[0])
+
+ if issubclass(target_type, ControllerConfig):
+ target_type = type(args.controller)
+ configs = [args.controller]
+
+ filter_type = get_host_target_type_map()[target_type]
+ filter_instance = filter_type(args, configs, controller)
+
+ return filter_instance
+
+
+def get_remote_skip_aliases(config): # type: (RemoteConfig) -> t.Dict[str, str]
+ """Return a dictionary of skip aliases and the reason why they apply."""
+ if isinstance(config, PosixRemoteConfig):
+ return get_platform_skip_aliases(config.platform, config.version, config.arch)
+
+ return get_platform_skip_aliases(config.platform, config.version, None)
+
+
+def get_platform_skip_aliases(platform, version, arch): # type: (str, str, t.Optional[str]) -> t.Dict[str, str]
+ """Return a dictionary of skip aliases and the reason why they apply."""
+ skips = {
+ f'skip/{platform}': platform,
+ f'skip/{platform}/{version}': f'{platform} {version}',
+ f'skip/{platform}{version}': f'{platform} {version}', # legacy syntax, use above format
+ }
+
+ if arch:
+ skips.update({
+ f'skip/{arch}': arch,
+ f'skip/{arch}/{platform}': f'{platform} on {arch}',
+ f'skip/{arch}/{platform}/{version}': f'{platform} {version} on {arch}',
+ })
+
+ skips = {alias: f'which are not supported by {description}' for alias, description in skips.items()}
+
+ return skips
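
Note: a worked example of get_platform_skip_aliases() using hypothetical values platform='rhel', version='8.4', arch='aarch64'; per the code above it returns:

{
    'skip/rhel': 'which are not supported by rhel',
    'skip/rhel/8.4': 'which are not supported by rhel 8.4',
    'skip/rhel8.4': 'which are not supported by rhel 8.4',  # legacy alias syntax
    'skip/aarch64': 'which are not supported by aarch64',
    'skip/aarch64/rhel': 'which are not supported by rhel on aarch64',
    'skip/aarch64/rhel/8.4': 'which are not supported by rhel 8.4 on aarch64',
}
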
diff --git a/test/lib/ansible_test/_internal/commands/integration/network.py b/test/lib/ansible_test/_internal/commands/integration/network.py
new file mode 100644
index 00000000..f9953144
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/network.py
@@ -0,0 +1,73 @@
+"""Network integration testing."""
+from __future__ import annotations
+
+import os
+
+from ...util import (
+ ApplicationError,
+ ANSIBLE_TEST_CONFIG_ROOT,
+)
+
+from ...util_common import (
+ handle_layout_messages,
+)
+
+from ...target import (
+ walk_network_integration_targets,
+)
+
+from ...config import (
+ NetworkIntegrationConfig,
+)
+
+from . import (
+ command_integration_filter,
+ command_integration_filtered,
+ get_inventory_relative_path,
+ check_inventory,
+ delegate_inventory,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_configs import (
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+)
+
+
+def command_network_integration(args): # type: (NetworkIntegrationConfig) -> None
+ """Entry point for the `network-integration` command."""
+ handle_layout_messages(data_context().content.integration_messages)
+
+ inventory_relative_path = get_inventory_relative_path(args)
+ template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
+
+ if issubclass(args.target_type, NetworkInventoryConfig):
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path,
+ args.only_target(NetworkInventoryConfig).path or os.path.basename(inventory_relative_path))
+ else:
+ inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
+
+ if args.no_temp_workdir:
+ # temporary solution to keep DCI tests working
+ inventory_exists = os.path.exists(inventory_path)
+ else:
+ inventory_exists = os.path.isfile(inventory_path)
+
+ if not args.explain and not issubclass(args.target_type, NetworkRemoteConfig) and not inventory_exists:
+ raise ApplicationError(
+ 'Inventory not found: %s\n'
+ 'Use --inventory to specify the inventory path.\n'
+ 'Use --platform to provision resources and generate an inventory file.\n'
+ 'See also inventory template: %s' % (inventory_path, template_path)
+ )
+
+ check_inventory(args, inventory_path)
+ delegate_inventory(args, inventory_path)
+
+ all_targets = tuple(walk_network_integration_targets(include_hidden=True))
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path)
diff --git a/test/lib/ansible_test/_internal/commands/integration/posix.py b/test/lib/ansible_test/_internal/commands/integration/posix.py
new file mode 100644
index 00000000..be78359c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/posix.py
@@ -0,0 +1,48 @@
+"""POSIX integration testing."""
+from __future__ import annotations
+
+import os
+
+from ...util_common import (
+ handle_layout_messages,
+)
+
+from ...containers import (
+ create_container_hooks,
+ local_ssh,
+ root_ssh,
+)
+
+from ...target import (
+ walk_posix_integration_targets,
+)
+
+from ...config import (
+ PosixIntegrationConfig,
+)
+
+from . import (
+ command_integration_filter,
+ command_integration_filtered,
+ get_inventory_relative_path,
+)
+
+from ...data import (
+ data_context,
+)
+
+
+def command_posix_integration(args): # type: (PosixIntegrationConfig) -> None
+ """Entry point for the `integration` command."""
+ handle_layout_messages(data_context().content.integration_messages)
+
+ inventory_relative_path = get_inventory_relative_path(args)
+ inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
+
+ all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ control_connections = [local_ssh(args, host_state.controller_profile.python)]
+ managed_connections = [root_ssh(ssh) for ssh in host_state.get_controller_target_connections()]
+ pre_target, post_target = create_container_hooks(args, control_connections, managed_connections)
+
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path, pre_target=pre_target, post_target=post_target)
diff --git a/test/lib/ansible_test/_internal/commands/integration/windows.py b/test/lib/ansible_test/_internal/commands/integration/windows.py
new file mode 100644
index 00000000..f6b44942
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/windows.py
@@ -0,0 +1,77 @@
+"""Windows integration testing."""
+from __future__ import annotations
+
+import os
+
+from ...util import (
+ ApplicationError,
+ ANSIBLE_TEST_CONFIG_ROOT,
+)
+
+from ...util_common import (
+ handle_layout_messages,
+)
+
+from ...containers import (
+ create_container_hooks,
+ local_ssh,
+ root_ssh,
+)
+
+from ...target import (
+ walk_windows_integration_targets,
+)
+
+from ...config import (
+ WindowsIntegrationConfig,
+)
+
+from ...host_configs import (
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from . import (
+ command_integration_filter,
+ command_integration_filtered,
+ get_inventory_relative_path,
+ check_inventory,
+ delegate_inventory,
+)
+
+from ...data import (
+ data_context,
+)
+
+
+def command_windows_integration(args): # type: (WindowsIntegrationConfig) -> None
+ """Entry point for the `windows-integration` command."""
+ handle_layout_messages(data_context().content.integration_messages)
+
+ inventory_relative_path = get_inventory_relative_path(args)
+ template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
+
+ if issubclass(args.target_type, WindowsInventoryConfig):
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path,
+ args.only_target(WindowsInventoryConfig).path or os.path.basename(inventory_relative_path))
+ else:
+ inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
+
+ if not args.explain and not issubclass(args.target_type, WindowsRemoteConfig) and not os.path.isfile(inventory_path):
+ raise ApplicationError(
+ 'Inventory not found: %s\n'
+ 'Use --inventory to specify the inventory path.\n'
+ 'Use --windows to provision resources and generate an inventory file.\n'
+ 'See also inventory template: %s' % (inventory_path, template_path)
+ )
+
+ check_inventory(args, inventory_path)
+ delegate_inventory(args, inventory_path)
+
+ all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ control_connections = [local_ssh(args, host_state.controller_profile.python)]
+ managed_connections = [root_ssh(ssh) for ssh in host_state.get_controller_target_connections()]
+ pre_target, post_target = create_container_hooks(args, control_connections, managed_connections)
+
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path, pre_target=pre_target, post_target=post_target)
diff --git a/test/lib/ansible_test/_internal/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index 44cdd9e6..542e078a 100644
--- a/test/lib/ansible_test/_internal/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -1,86 +1,138 @@
"""Execute Ansible sanity tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import glob
+import hashlib
+import json
import os
+import pathlib
import re
import collections
+import typing as t
-from .. import types as t
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_ONLY_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...encoding import (
+ to_bytes,
+)
-from ..io import (
+from ...io import (
read_json_file,
+ write_json_file,
+ write_text_file,
)
-from ..util import (
+from ...util import (
ApplicationError,
SubprocessError,
display,
import_plugins,
load_plugins,
parse_to_list_of_dict,
- ABC,
- ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_CONTROLLER_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
is_binary_file,
read_lines_without_comments,
- get_available_python_versions,
- find_python,
is_subdir,
paths_to_dirs,
get_ansible_version,
str_to_version,
+ cache,
+ remove_tree,
)
-from ..util_common import (
- run_command,
- intercept_command,
+from ...util_common import (
+ intercept_python,
handle_layout_messages,
+ yamlcheck,
+ create_result_directories,
)
-from ..ansible_util import (
+from ...ansible_util import (
ansible_environment,
)
-from ..target import (
+from ...target import (
walk_internal_targets,
walk_sanity_targets,
TestTarget,
)
-from ..executor import (
+from ...executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
- install_command_requirements,
- SUPPORTED_PYTHON_VERSIONS,
)
-from ..config import (
+from ...python_requirements import (
+ PipInstall,
+ collect_requirements,
+ run_pip,
+)
+
+from ...config import (
SanityConfig,
)
-from ..test import (
+from ...test import (
TestSuccess,
TestFailure,
TestSkipped,
TestMessage,
+ TestResult,
calculate_best_confidence,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...content_config import (
+ get_content_config,
+)
+
+from ...host_configs import (
+ DockerConfig,
+ PosixConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+from ...host_profiles import (
+ PosixProfile,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...venv import (
+ create_virtual_environment,
+)
+
COMMAND = 'sanity'
-SANITY_ROOT = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sanity')
+SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
+TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
+
+created_venvs = [] # type: t.List[str]
+
+
+def command_sanity(args): # type: (SanityConfig) -> None
+ """Run sanity tests."""
+ create_result_directories(args)
+ target_configs = t.cast(t.List[PosixConfig], args.targets)
+ target_versions = {target.python.version: target for target in target_configs} # type: t.Dict[str, PosixConfig]
-def command_sanity(args):
- """
- :type args: SanityConfig
- """
handle_layout_messages(data_context().content.sanity_messages)
changes = get_changes_filter(args)
@@ -90,23 +142,31 @@ def command_sanity(args):
if not targets.include:
raise AllTargetsSkipped()
- if args.delegate:
- raise Delegate(require=changes, exclude=args.exclude)
-
tests = sanity_get_tests()
if args.test:
+ disabled = []
tests = [target for target in tests if target.name in args.test]
else:
disabled = [target.name for target in tests if not target.enabled and not args.allow_disabled]
tests = [target for target in tests if target.enabled or args.allow_disabled]
- if disabled:
- display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
-
if args.skip_test:
tests = [target for target in tests if target.name not in args.skip_test]
+ targets_use_pypi = any(isinstance(test, SanityMultipleVersion) and test.needs_pypi for test in tests) and not args.list_tests
+ host_state = prepare_profiles(args, targets_use_pypi=targets_use_pypi) # sanity
+
+ if args.delegate:
+ raise Delegate(host_state=host_state, require=changes, exclude=args.exclude)
+
+ configure_pypi_proxy(args, host_state.controller_profile) # sanity
+
+ if disabled:
+ display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
+
+ target_profiles = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)} # type: t.Dict[str, PosixProfile]
+
total = 0
failed = []
@@ -115,44 +175,21 @@ def command_sanity(args):
display.info(test.name)
continue
- available_versions = sorted(get_available_python_versions(SUPPORTED_PYTHON_VERSIONS).keys())
-
- if args.python:
- # specific version selected
- versions = (args.python,)
- elif isinstance(test, SanityMultipleVersion):
- # try all supported versions for multi-version tests when a specific version has not been selected
- versions = test.supported_python_versions
- elif not test.supported_python_versions or args.python_version in test.supported_python_versions:
- # the test works with any version or the version we're already running
- versions = (args.python_version,)
- else:
- # available versions supported by the test
- versions = tuple(sorted(set(available_versions) & set(test.supported_python_versions)))
- # use the lowest available version supported by the test or the current version as a fallback (which will be skipped)
- versions = versions[:1] or (args.python_version,)
+ for version in SUPPORTED_PYTHON_VERSIONS:
+ options = ''
- for version in versions:
if isinstance(test, SanityMultipleVersion):
- skip_version = version
+ if version not in target_versions and version not in args.host_settings.skipped_python_versions:
+ continue # version was not requested, skip it silently
else:
- skip_version = None
-
- options = ''
+ if version != args.controller_python.version:
+ continue # only multi-version sanity tests use target versions, the rest use the controller version
if test.supported_python_versions and version not in test.supported_python_versions:
- display.warning("Skipping sanity test '%s' on Python %s. Supported Python versions: %s" % (
- test.name, version, ', '.join(test.supported_python_versions)))
- result = SanitySkipped(test.name, skip_version)
- elif not args.python and version not in available_versions:
- display.warning("Skipping sanity test '%s' on Python %s due to missing interpreter." % (test.name, version))
- result = SanitySkipped(test.name, skip_version)
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} because it is unsupported.' \
+ f' Supported Python versions: {", ".join(test.supported_python_versions)}'
else:
- if test.supported_python_versions:
- display.info("Running sanity test '%s' with Python %s" % (test.name, version))
- else:
- display.info("Running sanity test '%s'" % test.name)
-
if isinstance(test, SanityCodeSmellTest):
settings = test.load_processor(args)
elif isinstance(test, SanityMultipleVersion):
@@ -176,26 +213,66 @@ def command_sanity(args):
all_targets = SanityTargets.filter_and_inject_targets(test, all_targets)
usable_targets = SanityTargets.filter_and_inject_targets(test, usable_targets)
- usable_targets = sorted(test.filter_targets(list(usable_targets)))
+ usable_targets = sorted(test.filter_targets_by_version(list(usable_targets), version))
usable_targets = settings.filter_skipped_targets(usable_targets)
sanity_targets = SanityTargets(tuple(all_targets), tuple(usable_targets))
- if usable_targets or test.no_targets:
- install_command_requirements(args, version, context=test.name, enable_pyyaml_check=True)
+ test_needed = bool(usable_targets or test.no_targets or args.prime_venvs)
+ result = None
- if isinstance(test, SanityCodeSmellTest):
- result = test.test(args, sanity_targets, version)
- elif isinstance(test, SanityMultipleVersion):
- result = test.test(args, sanity_targets, version)
+ if test_needed and version in args.host_settings.skipped_python_versions:
+ # Deferred checking of Python availability. Done here since it is now known to be required for running the test.
+ # Earlier checking could cause a spurious warning to be generated for a collection which does not support the Python version.
+ # If the user specified a Python version, an error will be generated before reaching this point when the Python interpreter is not found.
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} because it could not be found.'
+
+ if not result:
+ if isinstance(test, SanityMultipleVersion):
+ display.info(f'Running sanity test "{test.name}" on Python {version}')
+ else:
+ display.info(f'Running sanity test "{test.name}"')
+
+ if test_needed and not result:
+ if isinstance(test, SanityMultipleVersion):
+ # multi-version sanity tests handle their own requirements (if any) and use the target python
+ test_profile = target_profiles[version]
+ result = test.test(args, sanity_targets, test_profile.python)
options = ' --python %s' % version
elif isinstance(test, SanitySingleVersion):
- result = test.test(args, sanity_targets, version)
+ # single version sanity tests use the controller python
+ test_profile = host_state.controller_profile
+ virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name, context=test.name)
+
+ if virtualenv_python:
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
+
+ if test.require_libyaml and not virtualenv_yaml:
+ result = SanitySkipped(test.name)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} due to missing libyaml support in PyYAML.'
+ else:
+ if virtualenv_yaml is False:
+ display.warning(f'Sanity test "{test.name}" on Python {version} may be slow due to missing libyaml support in PyYAML.')
+
+ if args.prime_venvs:
+ result = SanitySkipped(test.name)
+ else:
+ result = test.test(args, sanity_targets, virtualenv_python)
+ else:
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} due to missing virtual environment support.'
elif isinstance(test, SanityVersionNeutral):
- result = test.test(args, sanity_targets)
+ if args.prime_venvs:
+ result = SanitySkipped(test.name)
+ else:
+ # version neutral sanity tests handle their own requirements (if any)
+ result = test.test(args, sanity_targets)
else:
raise Exception('Unsupported test type: %s' % type(test))
+ elif result:
+ pass
else:
- result = SanitySkipped(test.name, skip_version)
+ result = SanitySkipped(test.name, version)
result.write(args)
@@ -204,6 +281,12 @@ def command_sanity(args):
if isinstance(result, SanityFailure):
failed.append(result.test + options)
+ controller = args.controller
+
+ if created_venvs and isinstance(controller, DockerConfig) and controller.name == 'default' and not args.prime_venvs:
+ names = ', '.join(created_venvs)
+ display.warning(f'The following sanity test virtual environments are out-of-date in the "default" container: {names}')
+
if failed:
message = 'The %d sanity test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
len(failed), total, '\n'.join(failed))
@@ -214,7 +297,8 @@ def command_sanity(args):
raise ApplicationError(message)
-def collect_code_smell_tests(): # type: () -> t.Tuple[SanityFunc, ...]
+@cache
+def collect_code_smell_tests(): # type: () -> t.Tuple[SanityTest, ...]
"""Return a tuple of available code smell sanity tests."""
paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py'))
@@ -224,19 +308,11 @@ def collect_code_smell_tests(): # type: () -> t.Tuple[SanityFunc, ...]
skip_tests = read_lines_without_comments(os.path.join(ansible_code_smell_root, 'skip.txt'), remove_blank_lines=True, optional=True)
paths.extend(path for path in glob.glob(os.path.join(ansible_code_smell_root, '*.py')) if os.path.basename(path) not in skip_tests)
- paths = sorted(p for p in paths if os.access(p, os.X_OK) and os.path.isfile(p))
tests = tuple(SanityCodeSmellTest(p) for p in paths)
return tests
-def sanity_get_tests():
- """
- :rtype: tuple[SanityFunc]
- """
- return SANITY_TESTS
-
-
class SanityIgnoreParser:
"""Parser for the consolidated sanity test ignore file."""
NO_CODE = '_'
@@ -273,13 +349,18 @@ class SanityIgnoreParser:
for test in sanity_get_tests():
test_targets = SanityTargets.filter_and_inject_targets(test, targets)
- paths_by_test[test.name] = set(target.path for target in test.filter_targets(test_targets))
-
if isinstance(test, SanityMultipleVersion):
versioned_test_names.add(test.name)
- tests_by_name.update(dict(('%s-%s' % (test.name, python_version), test) for python_version in test.supported_python_versions))
+
+ for python_version in test.supported_python_versions:
+ test_name = '%s-%s' % (test.name, python_version)
+
+ paths_by_test[test_name] = set(target.path for target in test.filter_targets_by_version(test_targets, python_version))
+ tests_by_name[test_name] = test
else:
unversioned_test_names.update(dict(('%s-%s' % (test.name, python_version), test.name) for python_version in SUPPORTED_PYTHON_VERSIONS))
+
+ paths_by_test[test.name] = set(target.path for target in test.filter_targets_by_version(test_targets, ''))
tests_by_name[test.name] = test
for line_no, line in enumerate(lines, start=1):
@@ -336,7 +417,7 @@ class SanityIgnoreParser:
unversioned_name, test_name)))
elif test_name in versioned_test_names:
self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires a Python version like '%s-%s'" % (
- test_name, test_name, args.python_version)))
+ test_name, test_name, args.controller_python.version)))
else:
self.parse_errors.append((line_no, len(path) + 2, "Sanity test '%s' does not exist" % test_name))
@@ -346,7 +427,7 @@ class SanityIgnoreParser:
self.parse_errors.append((line_no, 1, "Sanity test '%s' does not support directory paths" % test_name))
continue
- if path not in paths_by_test[test.name] and not test.no_targets:
+ if path not in paths_by_test[test_name] and not test.no_targets:
self.parse_errors.append((line_no, 1, "Sanity test '%s' does not test path '%s'" % (test_name, path)))
continue
@@ -524,34 +605,26 @@ class SanityIgnoreProcessor:
class SanitySuccess(TestSuccess):
"""Sanity test success."""
- def __init__(self, test, python_version=None):
- """
- :type test: str
- :type python_version: str
- """
- super(SanitySuccess, self).__init__(COMMAND, test, python_version)
+ def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None
+ super().__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
"""Sanity test skipped."""
- def __init__(self, test, python_version=None):
- """
- :type test: str
- :type python_version: str
- """
- super(SanitySkipped, self).__init__(COMMAND, test, python_version)
+ def __init__(self, test, python_version=None): # type: (str, t.Optional[str]) -> None
+ super().__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
"""Sanity test failure."""
- def __init__(self, test, python_version=None, messages=None, summary=None):
- """
- :type test: str
- :type python_version: str
- :type messages: list[SanityMessage]
- :type summary: unicode
- """
- super(SanityFailure, self).__init__(COMMAND, test, python_version, messages, summary)
+ def __init__(
+ self,
+ test, # type: str
+ python_version=None, # type: t.Optional[str]
+ messages=None, # type: t.Optional[t.List[SanityMessage]]
+ summary=None, # type: t.Optional[str]
+ ): # type: (...) -> None
+ super().__init__(COMMAND, test, python_version, messages, summary)
class SanityMessage(TestMessage):
@@ -605,13 +678,16 @@ class SanityTargets:
return SanityTargets.get_targets.targets
-class SanityTest(ABC):
+class SanityTest(metaclass=abc.ABCMeta):
"""Sanity test base class."""
- __metaclass__ = abc.ABCMeta
-
ansible_only = False
- def __init__(self, name):
+ def __init__(self, name=None): # type: (t.Optional[str]) -> None
+ if not name:
+ name = self.__class__.__name__
+ name = re.sub(r'Test$', '', name) # drop Test suffix
+ name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower() # use dashes instead of capitalization
+
self.name = name
self.enabled = True
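
Note: the two re.sub() calls above derive a dashed test name from a plugin class name. A standalone sketch with a hypothetical class name:

import re

name = 'MyFancyTest'
name = re.sub(r'Test$', '', name)                            # -> 'MyFancy' (drop Test suffix)
name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower()   # -> 'my-fancy' (dashes for capitalization)
assert name == 'my-fancy'
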
@@ -657,9 +733,14 @@ class SanityTest(ABC):
return False
@property
+ def py2_compat(self): # type: () -> bool
+ """True if the test only applies to code that runs on Python 2.x."""
+ return False
+
+ @property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
- return tuple(python_version for python_version in SUPPORTED_PYTHON_VERSIONS if str_to_version(python_version) >= (3, 6))
+ return CONTROLLER_PYTHON_VERSIONS
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget] # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
@@ -668,14 +749,72 @@ class SanityTest(ABC):
raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' % self.name)
+ def filter_targets_by_version(self, targets, python_version): # type: (t.List[TestTarget], str) -> t.List[TestTarget]
+ """Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
+ del python_version # python_version is not used here, but derived classes may make use of it
+
+ targets = self.filter_targets(targets)
+
+ if self.py2_compat:
+ # This sanity test is a Python 2.x compatibility test.
+ content_config = get_content_config()
+
+ if content_config.py2_support:
+ # This collection supports Python 2.x.
+ # Filter targets to include only those that require support for remote-only Python versions.
+ targets = self.filter_remote_targets(targets)
+ else:
+ # This collection does not support Python 2.x.
+ # There are no targets to test.
+ targets = []
+
+ return targets
+
+ @staticmethod
+ def filter_remote_targets(targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
+ """Return a filtered list of the given targets, including only those that require support for remote-only Python versions."""
+ targets = [target for target in targets if (
+ is_subdir(target.path, data_context().content.module_path) or
+ is_subdir(target.path, data_context().content.module_utils_path) or
+ is_subdir(target.path, data_context().content.unit_module_path) or
+ is_subdir(target.path, data_context().content.unit_module_utils_path) or
+ # include modules/module_utils within integration test library directories
+ re.search('^%s/.*/library/' % re.escape(data_context().content.integration_targets_path), target.path) or
+ # special handling for content in ansible-core
+ (data_context().content.is_ansible and (
+ # utility code that runs in target environments and requires support for remote-only Python versions
+ is_subdir(target.path, 'test/lib/ansible_test/_util/target/') or
+ # integration test support modules/module_utils continue to require support for remote-only Python versions
+ re.search('^test/support/integration/.*/(modules|module_utils)/', target.path)
+ ))
+ )]
+
+ return targets
+
-class SanityCodeSmellTest(SanityTest):
+class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta):
+ """Base class for sanity test plugins which should run on a single python version."""
+ @property
+ def require_libyaml(self): # type: () -> bool
+ """True if the test requires PyYAML to have libyaml support."""
+ return False
+
+ @abc.abstractmethod
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ """Run the sanity test and return the result."""
+
+ def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
+ """Load the ignore processor for this sanity test."""
+ return SanityIgnoreProcessor(args, self, None)
+
+
+class SanityCodeSmellTest(SanitySingleVersion):
"""Sanity test script."""
def __init__(self, path):
name = os.path.splitext(os.path.basename(path))[0]
config_path = os.path.splitext(path)[0] + '.json'
- super(SanityCodeSmellTest, self).__init__(name)
+ super().__init__(name=name)
self.path = path
self.config_path = config_path if os.path.exists(config_path) else None
@@ -693,13 +832,13 @@ class SanityCodeSmellTest(SanityTest):
self.files = self.config.get('files') # type: t.List[str]
self.text = self.config.get('text') # type: t.Optional[bool]
self.ignore_self = self.config.get('ignore_self') # type: bool
- self.intercept = self.config.get('intercept') # type: bool
self.minimum_python_version = self.config.get('minimum_python_version') # type: t.Optional[str]
self.__all_targets = self.config.get('all_targets') # type: bool
self.__no_targets = self.config.get('no_targets') # type: bool
self.__include_directories = self.config.get('include_directories') # type: bool
self.__include_symlinks = self.config.get('include_symlinks') # type: bool
+ self.__py2_compat = self.config.get('py2_compat', False) # type: bool
else:
self.output = None
self.extensions = []
@@ -707,13 +846,13 @@ class SanityCodeSmellTest(SanityTest):
self.files = []
self.text = None # type: t.Optional[bool]
self.ignore_self = False
- self.intercept = False
self.minimum_python_version = None # type: t.Optional[str]
self.__all_targets = False
self.__no_targets = True
self.__include_directories = False
self.__include_symlinks = False
+ self.__py2_compat = False
if self.no_targets:
mutually_exclusive = (
@@ -753,9 +892,14 @@ class SanityCodeSmellTest(SanityTest):
return self.__include_symlinks
@property
+ def py2_compat(self): # type: () -> bool
+ """True if the test only applies to code that runs on Python 2.x."""
+ return self.__py2_compat
+
+ @property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
- versions = super(SanityCodeSmellTest, self).supported_python_versions
+ versions = super().supported_python_versions
if self.minimum_python_version:
versions = tuple(version for version in versions if str_to_version(version) >= str_to_version(self.minimum_python_version))
@@ -789,14 +933,9 @@ class SanityCodeSmellTest(SanityTest):
return targets
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- cmd = [find_python(python_version), self.path]
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ """Run the sanity test and return the result."""
+ cmd = [python.path, self.path]
env = ansible_environment(args, color=False)
@@ -822,11 +961,7 @@ class SanityCodeSmellTest(SanityTest):
display.info(data, verbosity=4)
try:
- if self.intercept:
- stdout, stderr = intercept_command(args, cmd, target_name='sanity.%s' % self.name, data=data, env=env, capture=True, disable_coverage=True)
- else:
- stdout, stderr = run_command(args, cmd, data=data, env=env, capture=True)
-
+ stdout, stderr = intercept_python(args, python, cmd, data=data, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
@@ -870,25 +1005,11 @@ class SanityCodeSmellTest(SanityTest):
return SanityIgnoreProcessor(args, self, None)
-class SanityFunc(SanityTest):
- """Base class for sanity test plugins."""
- def __init__(self):
- name = self.__class__.__name__
- name = re.sub(r'Test$', '', name) # drop Test suffix
- name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower() # use dashes instead of capitalization
-
- super(SanityFunc, self).__init__(name)
-
-
-class SanityVersionNeutral(SanityFunc):
+class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which are idependent of the python version being used."""
@abc.abstractmethod
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
+ """Run the sanity test and return the result."""
def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
@@ -900,52 +1021,133 @@ class SanityVersionNeutral(SanityFunc):
return None
-class SanitySingleVersion(SanityFunc):
- """Base class for sanity test plugins which should run on a single python version."""
- @abc.abstractmethod
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
-
- def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
- """Load the ignore processor for this sanity test."""
- return SanityIgnoreProcessor(args, self, None)
-
-
-class SanityMultipleVersion(SanityFunc):
+class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on multiple python versions."""
@abc.abstractmethod
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ """Run the sanity test and return the result."""
def load_processor(self, args, python_version): # type: (SanityConfig, str) -> SanityIgnoreProcessor
"""Load the ignore processor for this sanity test."""
return SanityIgnoreProcessor(args, self, python_version)
@property
+ def needs_pypi(self): # type: () -> bool
+ """True if the test requires PyPI, otherwise False."""
+ return False
+
+ @property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return SUPPORTED_PYTHON_VERSIONS
+ def filter_targets_by_version(self, targets, python_version): # type: (t.List[TestTarget], str) -> t.List[TestTarget]
+ """Return the given list of test targets, filtered to include only those relevant for the test, taking into account the Python version."""
+ if not python_version:
+ raise Exception('python_version is required to filter multi-version tests')
+
+ targets = super().filter_targets_by_version(targets, python_version)
+
+ if python_version in REMOTE_ONLY_PYTHON_VERSIONS:
+ content_config = get_content_config()
+
+ if python_version not in content_config.modules.python_versions:
+ # when a remote-only python version is not supported there are no paths to test
+ return []
+
+ # when a remote-only python version is supported, tests must be applied only to targets that support remote-only Python versions
+ targets = self.filter_remote_targets(targets)
+
+ return targets
+
+
+@cache
+def sanity_get_tests(): # type: () -> t.Tuple[SanityTest, ...]
+ """Return a tuple of the available sanity tests."""
+ import_plugins('commands/sanity')
+ sanity_plugins = {} # type: t.Dict[str, t.Type[SanityTest]]
+ load_plugins(SanityTest, sanity_plugins)
+ sanity_plugins.pop('sanity') # SanityCodeSmellTest
+ sanity_tests = tuple(plugin() for plugin in sanity_plugins.values() if data_context().content.is_ansible or not plugin.ansible_only)
+ sanity_tests = tuple(sorted(sanity_tests + collect_code_smell_tests(), key=lambda k: k.name))
+ return sanity_tests
+
+
+def create_sanity_virtualenv(
+ args, # type: SanityConfig
+ python, # type: PythonConfig
+ name, # type: str
+ ansible=False, # type: bool
+ coverage=False, # type: bool
+ minimize=False, # type: bool
+ context=None, # type: t.Optional[str]
+): # type: (...) -> t.Optional[VirtualPythonConfig]
+ """Return an existing sanity virtual environment matching the requested parameters or create a new one."""
+ commands = collect_requirements( # create_sanity_virtualenv()
+ python=python,
+ controller=True,
+ virtualenv=False,
+ command=None,
+ # used by import tests
+ ansible=ansible,
+ cryptography=ansible,
+ coverage=coverage,
+ minimize=minimize,
+ # used by non-import tests
+ sanity=context,
+ )
+
+ if commands:
+ label = f'sanity.{name}'
+ else:
+ label = 'sanity' # use a single virtualenv name for tests which have no requirements
+
+ # The path to the virtual environment must be kept short to avoid the 127 character shebang length limit on Linux.
+ # If the limit is exceeded, generated entry point scripts from pip installed packages will fail with syntax errors.
+ virtualenv_install = json.dumps([command.serialize() for command in commands], indent=4)
+ virtualenv_hash = hashlib.sha256(to_bytes(virtualenv_install)).hexdigest()[:8]
+ virtualenv_cache = os.path.join(os.path.expanduser('~/.ansible/test/venv'))
+ virtualenv_path = os.path.join(virtualenv_cache, label, f'{python.version}', virtualenv_hash)
+ virtualenv_marker = os.path.join(virtualenv_path, 'marker.txt')
+
+ meta_install = os.path.join(virtualenv_path, 'meta.install.json')
+ meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json')
+
+ virtualenv_python = VirtualPythonConfig(
+ version=python.version,
+ path=os.path.join(virtualenv_path, 'bin', 'python'),
+ )
+
+ if not os.path.exists(virtualenv_marker):
+ # a virtualenv without a marker is assumed to have been partially created
+ remove_tree(virtualenv_path)
+
+ if not create_virtual_environment(args, python, virtualenv_path):
+ return None
+
+ run_pip(args, virtualenv_python, commands, None) # create_sanity_virtualenv()
+
+ write_text_file(meta_install, virtualenv_install)
+
+ if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands):
+ virtualenv_yaml = yamlcheck(virtualenv_python)
+ else:
+ virtualenv_yaml = None
+
+ write_json_file(meta_yaml, virtualenv_yaml)
+
+ created_venvs.append(f'{label}-{python.version}')
+
+ # touch the marker to keep track of when the virtualenv was last used
+ pathlib.Path(virtualenv_marker).touch()
+
+ return virtualenv_python
-SANITY_TESTS = (
-)
+def check_sanity_virtualenv_yaml(python): # type: (VirtualPythonConfig) -> t.Optional[bool]
+ """Return True if PyYAML has libyaml support for the given sanity virtual environment, False if it does not and None if it was not found."""
+ virtualenv_path = os.path.dirname(os.path.dirname(python.path))
+ meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json')
+ virtualenv_yaml = read_json_file(meta_yaml)
-def sanity_init():
- """Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
- import_plugins('sanity')
- sanity_plugins = {} # type: t.Dict[str, t.Type[SanityFunc]]
- load_plugins(SanityFunc, sanity_plugins)
- sanity_tests = tuple([plugin() for plugin in sanity_plugins.values() if data_context().content.is_ansible or not plugin.ansible_only])
- global SANITY_TESTS # pylint: disable=locally-disabled, global-statement
- SANITY_TESTS = tuple(sorted(sanity_tests + collect_code_smell_tests(), key=lambda k: k.name))
+ return virtualenv_yaml
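
The rewritten create_sanity_virtualenv() above keys cached virtual environments on a short SHA-256 digest of the serialized install commands: identical requirement sets reuse one venv, any change produces a new directory, and the short digest keeps the interpreter path under the 127-character shebang limit noted in the comments. A minimal standalone sketch of that keying scheme (cache_root, label and requirements are illustrative stand-ins, not ansible-test's real inputs):

    import hashlib
    import json
    import os

    def venv_cache_path(cache_root, label, python_version, requirements):
        """Map a requirement set to a stable, short cache directory."""
        serialized = json.dumps(requirements, indent=4)  # same serialization idea as the patch
        digest = hashlib.sha256(serialized.encode()).hexdigest()[:8]
        return os.path.join(cache_root, label, python_version, digest)

    print(venv_cache_path(os.path.expanduser('~/.ansible/test/venv'),
                          'sanity.import', '3.9', ['pyyaml', 'voluptuous']))
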
diff --git a/test/lib/ansible_test/_internal/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
index fc4c42f8..82d9f751 100644
--- a/test/lib/ansible_test/_internal/sanity/ansible_doc.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
@@ -1,47 +1,47 @@
"""Sanity test for ansible-doc."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
import os
import re
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanitySingleVersion,
SanityFailure,
SanitySuccess,
+ SanityTargets,
+)
+
+from ...test import (
+ TestResult,
)
-from ..target import (
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
display,
is_subdir,
)
-from ..util_common import (
- intercept_command,
-)
-
-from ..ansible_util import (
+from ...ansible_util import (
ansible_environment,
+ intercept_python,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
-from ..coverage_util import (
- coverage_context,
+from ...host_configs import (
+ PythonConfig,
)
@@ -50,7 +50,7 @@ class AnsibleDocTest(SanitySingleVersion):
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
# This should use documentable plugins from constants instead
- unsupported_plugin_types = set([
+ unsupported_plugin_types = {
# not supported by ansible-doc
'action',
'doc_fragments',
@@ -62,7 +62,7 @@ class AnsibleDocTest(SanitySingleVersion):
# (https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst#modules--plugins)
'plugin_utils',
'sub_plugins',
- ])
+ }
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type not in unsupported_plugin_types]
@@ -72,13 +72,7 @@ class AnsibleDocTest(SanitySingleVersion):
and any(is_subdir(target.path, path) for path in plugin_paths)
]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
@@ -113,9 +107,7 @@ class AnsibleDocTest(SanitySingleVersion):
cmd.extend(sorted(doc_targets[doc_type]))
try:
- with coverage_context(args):
- stdout, stderr = intercept_command(args, cmd, target_name='ansible-doc', env=env, capture=True, python_version=python_version)
-
+ stdout, stderr = intercept_python(args, python, cmd, env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
diff --git a/test/lib/ansible_test/_internal/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
index bd0ba58e..5dc582fa 100644
--- a/test/lib/ansible_test/_internal/sanity/bin_symlinks.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
@@ -1,34 +1,39 @@
"""Sanity test for symlinks in the bin directory."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
)
-from ..config import (
+from ...constants import (
+ __file__ as symlink_map_full_path,
+)
+
+from ...test import (
+ TestResult,
+)
+
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
-from ..payload import (
+from ...payload import (
ANSIBLE_BIN_SYMLINK_MAP,
- __file__ as symlink_map_full_path,
)
-from ..util import (
+from ...util import (
ANSIBLE_BIN_PATH,
- ANSIBLE_TEST_DATA_ROOT,
)
@@ -46,20 +51,11 @@ class BinSymlinksTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
bin_root = ANSIBLE_BIN_PATH
bin_names = os.listdir(bin_root)
bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)
- injector_root = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector')
- injector_names = os.listdir(injector_root)
-
errors = [] # type: t.List[t.Tuple[str, str]]
symlink_map_path = os.path.relpath(symlink_map_full_path, data_context().content.root)
@@ -98,10 +94,6 @@ class BinSymlinksTest(SanityVersionNeutral):
bin_path = os.path.join(bin_root, bin_name)
errors.append((bin_path, 'missing symlink to "%s" defined in ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, symlink_map_path)))
- if bin_name not in injector_names:
- injector_path = os.path.join(injector_root, bin_name)
- errors.append((injector_path, 'missing symlink to "python.py"'))
-
messages = [SanityMessage(message=message, path=os.path.relpath(path, data_context().content.root), confidence=100) for path, message in errors]
if errors:
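
With the injector checks removed, BinSymlinksTest now validates bin/ purely against ANSIBLE_BIN_SYMLINK_MAP. A rough standalone equivalent of the remaining symlink validation (the directory and map below are made up for illustration):

    import os

    def check_bin_symlinks(bin_root, symlink_map):
        """Yield (path, problem) pairs for entries that violate the map."""
        names = set(os.listdir(bin_root)) if os.path.isdir(bin_root) else set()

        for name in sorted(names):
            path = os.path.join(bin_root, name)
            if not os.path.islink(path):
                yield path, 'not a symlink'
            elif name not in symlink_map:
                yield path, 'not in symlink map'
            elif os.readlink(path) != symlink_map[name]:
                yield path, 'points to "%s" instead of "%s"' % (os.readlink(path), symlink_map[name])

        for name, dest in symlink_map.items():
            if name not in names:
                yield os.path.join(bin_root, name), 'missing symlink to "%s"' % dest

    # example map, not the real ANSIBLE_BIN_SYMLINK_MAP
    for issue in check_bin_symlinks('bin', {'ansible-playbook': 'ansible'}):
        print(issue)
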
diff --git a/test/lib/ansible_test/_internal/sanity/compile.py b/test/lib/ansible_test/_internal/commands/sanity/compile.py
index 5a517272..292f89cc 100644
--- a/test/lib/ansible_test/_internal/sanity/compile.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/compile.py
@@ -1,40 +1,46 @@
"""Sanity test for proper python syntax."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanityMultipleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
SanityTargets,
- SANITY_ROOT,
+ SanitySkipped,
+ TARGET_SANITY_ROOT,
+)
+
+from ...test import (
+ TestResult,
)
-from ..target import (
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
display,
- find_python,
parse_to_list_of_dict,
is_subdir,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class CompileTest(SanityMultipleVersion):
"""Sanity test for proper python syntax."""
@@ -42,18 +48,15 @@ class CompileTest(SanityMultipleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- settings = self.load_processor(args, python_version)
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ if args.prime_venvs:
+ return SanitySkipped(self.name, python_version=python.version)
+
+ settings = self.load_processor(args, python.version)
paths = [target.path for target in targets.include]
- cmd = [find_python(python_version), os.path.join(SANITY_ROOT, 'compile', 'compile.py')]
+ cmd = [python.path, os.path.join(TARGET_SANITY_ROOT, 'compile', 'compile.py')]
data = '\n'.join(paths)
@@ -71,7 +74,7 @@ class CompileTest(SanityMultipleVersion):
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
if args.explain:
- return SanitySuccess(self.name, python_version=python_version)
+ return SanitySuccess(self.name, python_version=python.version)
pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
@@ -87,6 +90,6 @@ class CompileTest(SanityMultipleVersion):
results = settings.process_errors(results, paths)
if results:
- return SanityFailure(self.name, messages=results, python_version=python_version)
+ return SanityFailure(self.name, messages=results, python_version=python.version)
- return SanitySuccess(self.name, python_version=python_version)
+ return SanitySuccess(self.name, python_version=python.version)
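
CompileTest pipes newline-joined file paths to a compile script on stdin and parses path:line:column: message lines from its output. A minimal stand-in for such a script, not the actual _util/target/sanity/compile/compile.py:

    """Read paths on stdin, try to compile each file, and print syntax
    errors as path:line:column: message (the same pattern the test
    later parses with parse_to_list_of_dict)."""
    import sys

    def check(path):
        try:
            with open(path, 'rb') as f:
                compile(f.read(), path, 'exec', dont_inherit=True)
        except SyntaxError as ex:
            print('%s:%d:%d: %s' % (path, ex.lineno or 0, ex.offset or 0, ex.msg))

    for line in sys.stdin.read().splitlines():
        if line:
            check(line)
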
diff --git a/test/lib/ansible_test/_internal/sanity/ignores.py b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
index 8b6df50c..9a39955a 100644
--- a/test/lib/ansible_test/_internal/sanity/ignores.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
@@ -1,23 +1,24 @@
"""Sanity test for the sanity ignore file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-from ..sanity import (
+from . import (
SanityFailure,
SanityIgnoreParser,
SanityVersionNeutral,
SanitySuccess,
SanityMessage,
+ SanityTargets,
)
-from ..test import (
+from ...test import (
calculate_confidence,
calculate_best_confidence,
+ TestResult,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
@@ -34,13 +35,7 @@ class IgnoresTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
sanity_ignore = SanityIgnoreParser.load(args)
messages = []
diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py
new file mode 100644
index 00000000..9a961015
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/sanity/import.py
@@ -0,0 +1,184 @@
+"""Sanity test for proper import exception handling."""
+from __future__ import annotations
+
+import os
+import tempfile
+import typing as t
+
+from . import (
+ SanityMultipleVersion,
+ SanityMessage,
+ SanityFailure,
+ SanitySuccess,
+ SanitySkipped,
+ TARGET_SANITY_ROOT,
+ SanityTargets,
+ create_sanity_virtualenv,
+ check_sanity_virtualenv_yaml,
+)
+
+from ...constants import (
+ REMOTE_ONLY_PYTHON_VERSIONS,
+)
+
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
+ TestTarget,
+)
+
+from ...util import (
+ SubprocessError,
+ display,
+ parse_to_list_of_dict,
+ is_subdir,
+)
+
+from ...util_common import (
+ ResultType,
+)
+
+from ...ansible_util import (
+ ansible_environment,
+)
+
+from ...python_requirements import (
+ install_requirements,
+)
+
+from ...config import (
+ SanityConfig,
+)
+
+from ...coverage_util import (
+ cover_python,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_configs import (
+ PythonConfig,
+)
+
+
+def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool]
+ """Create a predicate which tests whether a path can be used by modules or not."""
+ module_path = data_context().content.module_path
+ module_utils_path = data_context().content.module_utils_path
+ if module_restrictions:
+ return lambda path: is_subdir(path, module_path) or is_subdir(path, module_utils_path)
+ return lambda path: not (is_subdir(path, module_path) or is_subdir(path, module_utils_path))
+
+
+class ImportTest(SanityMultipleVersion):
+ """Sanity test for proper import exception handling."""
+ def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
+ """Return the given list of test targets, filtered to include only those relevant for the test."""
+ return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
+ any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]
+
+ @property
+ def needs_pypi(self): # type: () -> bool
+ """True if the test requires PyPI, otherwise False."""
+ return True
+
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ settings = self.load_processor(args, python.version)
+
+ paths = [target.path for target in targets.include]
+
+ if python.version.startswith('2.'):
+ # hack to make sure that virtualenv is available under Python 2.x
+ # on Python 3.x we can use the built-in venv
+ install_requirements(args, python, virtualenv=True) # sanity (import)
+
+ temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
+
+ messages = []
+
+ for import_type, test, controller in (
+ ('module', _get_module_test(True), False),
+ ('plugin', _get_module_test(False), True),
+ ):
+ if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS:
+ continue
+
+ data = '\n'.join([path for path in paths if test(path)])
+
+ if not data and not args.prime_venvs:
+ continue
+
+ virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', ansible=controller, coverage=args.coverage, minimize=True)
+
+ if not virtualenv_python:
+ display.warning(f'Skipping sanity test "{self.name}" on Python {python.version} due to missing virtual environment support.')
+ return SanitySkipped(self.name, python.version)
+
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
+
+ if virtualenv_yaml is False:
+ display.warning(f'Sanity test "{self.name}" ({import_type}) on Python {python.version} may be slow due to missing libyaml support in PyYAML.')
+
+ if args.prime_venvs:
+ continue
+
+ env = ansible_environment(args, color=False)
+
+ env.update(
+ SANITY_TEMP_PATH=ResultType.TMP.path,
+ SANITY_IMPORTER_TYPE=import_type,
+ )
+
+ if data_context().content.collection:
+ env.update(
+ SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
+ SANITY_EXTERNAL_PYTHON=python.path,
+ )
+
+ display.info(import_type + ': ' + data, verbosity=4)
+
+ cmd = ['importer.py']
+
+ try:
+ with tempfile.TemporaryDirectory(prefix='ansible-test', suffix='-import') as temp_dir:
+ # make the importer available in the temporary directory
+ os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'importer.py')), os.path.join(temp_dir, 'importer.py'))
+ os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'yaml_to_json.py')), os.path.join(temp_dir, 'yaml_to_json.py'))
+
+ # add the importer to the path so it can be accessed through the coverage injector
+ env['PATH'] = os.pathsep.join([temp_dir, env['PATH']])
+
+ stdout, stderr = cover_python(args, virtualenv_python, cmd, self.name, env, capture=True, data=data)
+
+ if stdout or stderr:
+ raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
+ except SubprocessError as ex:
+ if ex.status != 10 or ex.stderr or not ex.stdout:
+ raise
+
+ pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
+
+ parsed = parse_to_list_of_dict(pattern, ex.stdout)
+
+ relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep
+
+ messages += [SanityMessage(
+ message=r['message'],
+ path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
+ line=int(r['line']),
+ column=int(r['column']),
+ ) for r in parsed]
+
+ if args.prime_venvs:
+ return SanitySkipped(self.name, python_version=python.version)
+
+ results = settings.process_errors(messages, paths)
+
+ if results:
+ return SanityFailure(self.name, messages=results, python_version=python.version)
+
+ return SanitySuccess(self.name, python_version=python.version)
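
The importer reports findings through exit status 10 plus one path:line:column: message per stdout line, which ImportTest converts into SanityMessage objects via parse_to_list_of_dict. A simplified version of that parsing step (unlike the real helper, this one silently ignores non-matching lines):

    import re

    PATTERN = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'

    def parse_messages(stdout):
        """Parse importer output lines into dicts keyed by the named groups."""
        return [m.groupdict() for m in re.finditer(PATTERN, stdout, flags=re.MULTILINE)]

    sample = 'plugins/modules/foo.py:12:1: ImportError: No module named missing_dep'
    print(parse_messages(sample))
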
diff --git a/test/lib/ansible_test/_internal/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
index e21c093a..bc3ebc0d 100644
--- a/test/lib/ansible_test/_internal/sanity/integration_aliases.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
@@ -1,15 +1,13 @@
"""Sanity test to check integration test aliases."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import textwrap
import os
+import typing as t
-from .. import types as t
-
-from ..sanity import (
- SanityVersionNeutral,
+from . import (
+ SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
@@ -17,39 +15,47 @@ from ..sanity import (
SANITY_ROOT,
)
-from ..config import (
+from ...test import (
+ TestResult,
+)
+
+from ...config import (
SanityConfig,
)
-from ..target import (
+from ...target import (
filter_targets,
walk_posix_integration_targets,
walk_windows_integration_targets,
walk_integration_targets,
walk_module_targets,
+ CompletionTarget,
)
-from ..cloud import (
+from ..integration.cloud import (
get_cloud_platforms,
)
-from ..io import (
+from ...io import (
read_text_file,
)
-from ..util import (
+from ...util import (
display,
- find_python,
raw_command,
)
-from ..util_common import (
+from ...util_common import (
write_json_test_results,
ResultType,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
-class IntegrationAliasesTest(SanityVersionNeutral):
+class IntegrationAliasesTest(SanitySingleVersion):
"""Sanity test to evaluate integration test aliases."""
CI_YML = '.azure-pipelines/azure-pipelines.yml'
TEST_ALIAS_PREFIX = 'shippable' # this will be changed at some point in the future
@@ -95,7 +101,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
ansible_only = True
def __init__(self):
- super(IntegrationAliasesTest, self).__init__()
+ super().__init__()
self._ci_config = {} # type: t.Dict[str, t.Any]
self._ci_test_groups = {} # type: t.Dict[str, t.List[int]]
@@ -110,10 +116,10 @@ class IntegrationAliasesTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- def load_ci_config(self, args): # type: (SanityConfig) -> t.Dict[str, t.Any]
+ def load_ci_config(self, python): # type: (PythonConfig) -> t.Dict[str, t.Any]
"""Load and return the CI YAML configuration."""
if not self._ci_config:
- self._ci_config = self.load_yaml(args, self.CI_YML)
+ self._ci_config = self.load_yaml(python, self.CI_YML)
return self._ci_config
@@ -164,12 +170,8 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return self._ci_test_groups
- def format_test_group_alias(self, name, fallback=''):
- """
- :type name: str
- :type fallback: str
- :rtype: str
- """
+ def format_test_group_alias(self, name, fallback=''): # type: (str, str) -> str
+ """Return a test group alias using the given name and fallback."""
group_numbers = self.ci_test_groups.get(name, None)
if group_numbers:
@@ -192,19 +194,12 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return alias
- def load_yaml(self, args, path): # type: (SanityConfig, str) -> t.Dict[str, t.Any]
+ def load_yaml(self, python, path): # type: (PythonConfig, str) -> t.Dict[str, t.Any]
"""Load the specified YAML file and return the contents."""
yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py')
- python = find_python(args.python_version)
-
- return json.loads(raw_command([python, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
+ return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
if args.explain:
return SanitySuccess(self.name)
@@ -219,7 +214,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
labels={},
)
- self.load_ci_config(args)
+ self.load_ci_config(python)
self.check_changes(args, results)
write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results)
@@ -234,17 +229,14 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return SanitySuccess(self.name)
- def check_posix_targets(self, args):
- """
- :type args: SanityConfig
- :rtype: list[SanityMessage]
- """
+ def check_posix_targets(self, args): # type: (SanityConfig) -> t.List[SanityMessage]
+ """Check POSIX integration test targets and return messages with any issues found."""
posix_targets = tuple(walk_posix_integration_targets())
clouds = get_cloud_platforms(args, posix_targets)
cloud_targets = ['cloud/%s/' % cloud for cloud in clouds]
- all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], include=True, directories=False, errors=False))
+ all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], directories=False, errors=False))
invalid_cloud_targets = tuple(filter_targets(all_cloud_targets, cloud_targets, include=False, directories=False, errors=False))
messages = []
@@ -258,23 +250,28 @@ class IntegrationAliasesTest(SanityVersionNeutral):
messages.append(SanityMessage('invalid alias `%s`' % alias, '%s/aliases' % target.path))
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False,
- directories=False, errors=False)),
+ targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False, directories=False, errors=False)),
find=self.format_test_group_alias('linux').replace('linux', 'posix'),
find_incidental=['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX],
)
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], include=True, directories=False,
- errors=False)),
+ targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], directories=False, errors=False)),
find=self.format_test_group_alias('generic'),
)
for cloud in clouds:
+ if cloud == 'httptester':
+ find = self.format_test_group_alias('linux').replace('linux', 'posix')
+ find_incidental = ['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX]
+ else:
+ find = self.format_test_group_alias(cloud, 'generic')
+ find_incidental = ['%s/%s/incidental/' % (self.TEST_ALIAS_PREFIX, cloud), '%s/cloud/incidental/' % self.TEST_ALIAS_PREFIX]
+
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/%s/' % cloud], include=True, directories=False, errors=False)),
- find=self.format_test_group_alias(cloud, 'cloud'),
- find_incidental=['%s/%s/incidental/' % (self.TEST_ALIAS_PREFIX, cloud), '%s/cloud/incidental/' % self.TEST_ALIAS_PREFIX],
+ targets=tuple(filter_targets(posix_targets, ['cloud/%s/' % cloud], directories=False, errors=False)),
+ find=find,
+ find_incidental=find_incidental,
)
return messages
@@ -295,19 +292,19 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return messages
- def check_ci_group(self, targets, find, find_incidental=None):
- """
- :type targets: tuple[CompletionTarget]
- :type find: str
- :type find_incidental: list[str] | None
- :rtype: list[SanityMessage]
- """
+ def check_ci_group(
+ self,
+ targets, # type: t.Tuple[CompletionTarget, ...]
+ find, # type: str
+ find_incidental=None, # type: t.Optional[t.List[str]]
+ ): # type: (...) -> t.List[SanityMessage]
+ """Check the CI groups set in the provided targets and return a list of messages with any issues found."""
all_paths = set(target.path for target in targets)
- supported_paths = set(target.path for target in filter_targets(targets, [find], include=True, directories=False, errors=False))
- unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], include=True, directories=False, errors=False))
+ supported_paths = set(target.path for target in filter_targets(targets, [find], directories=False, errors=False))
+ unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], directories=False, errors=False))
if find_incidental:
- incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, include=True, directories=False, errors=False))
+ incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, directories=False, errors=False))
else:
incidental_paths = set()
@@ -327,11 +324,8 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return messages
- def check_changes(self, args, results):
- """
- :type args: SanityConfig
- :type results: dict[str, any]
- """
+ def check_changes(self, args, results): # type: (SanityConfig, t.Dict[str, t.Any]) -> None
+ """Check changes and store results in the provided results dictionary."""
integration_targets = list(walk_integration_targets())
module_targets = list(walk_module_targets())
@@ -378,12 +372,8 @@ class IntegrationAliasesTest(SanityVersionNeutral):
results['comments'] += comments
results['labels'].update(labels)
- def format_comment(self, template, targets):
- """
- :type template: str
- :type targets: list[str]
- :rtype: str | None
- """
+ def format_comment(self, template, targets): # type: (str, t.List[str]) -> t.Optional[str]
+ """Format and return a comment based on the given template and targets, or None if there are no targets."""
if not targets:
return None
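
check_ci_group() reduces to set arithmetic over alias prefixes: targets neither matched by the group alias nor marked unsupported (nor incidental) get flagged. A simplified sketch of that classification, using made-up target data:

    def check_group(targets, find, unsupported='unsupported/'):
        """Return target paths that belong to no CI group."""
        all_paths = {path for path, aliases in targets}
        supported = {path for path, aliases in targets if any(a.startswith(find) for a in aliases)}
        skipped = {path for path, aliases in targets if any(a.startswith(unsupported) for a in aliases)}
        return sorted(all_paths - supported - skipped)  # untagged targets

    targets = [
        ('targets/ping', ['shippable/posix/group1']),
        ('targets/setup', []),
    ]
    print(check_group(targets, 'shippable/posix/'))
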
diff --git a/test/lib/ansible_test/_internal/sanity/pep8.py b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
index 9eb40dbc..71241c91 100644
--- a/test/lib/ansible_test/_internal/sanity/pep8.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
@@ -1,39 +1,45 @@
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
read_lines_without_comments,
parse_to_list_of_dict,
- find_python,
is_subdir,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class Pep8Test(SanitySingleVersion):
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
@@ -46,13 +52,7 @@ class Pep8Test(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
current_ignore_file = os.path.join(SANITY_ROOT, 'pep8', 'current-ignore.txt')
current_ignore = sorted(read_lines_without_comments(current_ignore_file, remove_blank_lines=True))
@@ -61,7 +61,7 @@ class Pep8Test(SanitySingleVersion):
paths = [target.path for target in targets.include]
cmd = [
- find_python(python_version),
+ python.path,
'-m', 'pycodestyle',
'--max-line-length', '160',
'--config', '/dev/null',
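
With find_python() gone, Pep8Test builds its command directly from python.path and runs pycodestyle as a module. A rough standalone equivalent (assumes pycodestyle is installed; only the flags visible in this hunk are reproduced):

    import subprocess
    import sys

    def run_pycodestyle(paths):
        """Invoke pycodestyle and return its report lines."""
        cmd = [sys.executable, '-m', 'pycodestyle',
               '--max-line-length', '160', '--config', '/dev/null'] + list(paths)
        proc = subprocess.run(cmd, capture_output=True, text=True)
        # pycodestyle emits one "path:line:col: code message" per line
        return proc.stdout.splitlines()

    for message in run_pycodestyle(['setup.py']):
        print(message)
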
diff --git a/test/lib/ansible_test/_internal/sanity/pslint.py b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
index 256eee04..254b7778 100644
--- a/test/lib/ansible_test/_internal/sanity/pslint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
@@ -1,41 +1,44 @@
"""Sanity test using PSScriptAnalyzer."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
import re
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
SanitySkipped,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
find_executable,
ANSIBLE_TEST_DATA_ROOT,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
@@ -51,12 +54,7 @@ class PslintTest(SanityVersionNeutral):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')]
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
@@ -66,10 +64,10 @@ class PslintTest(SanityVersionNeutral):
cmds = []
- if args.requirements:
- cmds.append([os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.ps1')])
+ if args.controller.is_managed or args.requirements:
+ cmds.append(['pwsh', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.pslint.ps1')])
- cmds.append([os.path.join(SANITY_ROOT, 'pslint', 'pslint.ps1')] + paths)
+ cmds.append(['pwsh', os.path.join(SANITY_ROOT, 'pslint', 'pslint.ps1')] + paths)
stdout = ''
diff --git a/test/lib/ansible_test/_internal/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
index 769a1717..5bd17878 100644
--- a/test/lib/ansible_test/_internal/sanity/pylint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
@@ -1,59 +1,64 @@
"""Sanity test using pylint."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import itertools
import json
import os
import datetime
+import configparser
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
display,
- ConfigParser,
is_subdir,
- find_python,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..ansible_util import (
+from ...ansible_util import (
ansible_environment,
get_collection_detail,
CollectionDetail,
CollectionDetailError,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class PylintTest(SanitySingleVersion):
"""Sanity test using pylint."""
-
def __init__(self):
- super(PylintTest, self).__init__()
+ super().__init__()
self.optional_error_codes.update([
'ansible-deprecated-date',
'too-complex',
@@ -68,13 +73,7 @@ class PylintTest(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins')
plugin_names = sorted(p[0] for p in [
os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__')
@@ -85,7 +84,7 @@ class PylintTest(SanitySingleVersion):
module_paths = [os.path.relpath(p, data_context().content.module_path).split(os.path.sep) for p in
paths if is_subdir(p, data_context().content.module_path)]
- module_dirs = sorted(set([p[0] for p in module_paths if len(p) > 1]))
+ module_dirs = sorted({p[0] for p in module_paths if len(p) > 1})
large_module_group_threshold = 500
large_module_groups = [key for key, value in
@@ -93,31 +92,21 @@ class PylintTest(SanitySingleVersion):
large_module_group_paths = [os.path.relpath(p, data_context().content.module_path).split(os.path.sep) for p in paths
if any(is_subdir(p, os.path.join(data_context().content.module_path, g)) for g in large_module_groups)]
- large_module_group_dirs = sorted(set([os.path.sep.join(p[:2]) for p in large_module_group_paths if len(p) > 2]))
+ large_module_group_dirs = sorted({os.path.sep.join(p[:2]) for p in large_module_group_paths if len(p) > 2})
contexts = []
remaining_paths = set(paths)
- def add_context(available_paths, context_name, context_filter):
- """
- :type available_paths: set[str]
- :type context_name: str
- :type context_filter: (str) -> bool
- """
+ def add_context(available_paths, context_name, context_filter): # type: (t.Set[str], str, t.Callable[[str], bool]) -> None
+ """Add the specified context to the context list, consuming available paths that match the given context filter."""
filtered_paths = set(p for p in available_paths if context_filter(p))
contexts.append((context_name, sorted(filtered_paths)))
available_paths -= filtered_paths
- def filter_path(path_filter=None):
- """
- :type path_filter: str
- :rtype: (str) -> bool
- """
- def context_filter(path_to_filter):
- """
- :type path_to_filter: str
- :rtype: bool
- """
+ def filter_path(path_filter=None): # type: (str) -> t.Callable[[str], bool]
+ """Return a function that filters out paths which are not a subdirectory of the given path."""
+ def context_filter(path_to_filter): # type: (str) -> bool
+ """Return true if the given path matches, otherwise return False."""
return is_subdir(path_to_filter, path_filter)
return context_filter
@@ -136,9 +125,11 @@ class PylintTest(SanitySingleVersion):
if data_context().content.collection:
add_context(remaining_paths, 'collection', lambda p: True)
else:
- add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_data/sanity/validate-modules/'))
+ add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_util/controller/sanity/validate-modules/'))
add_context(remaining_paths, 'validate-modules-unit', filter_path('test/lib/ansible_test/tests/validate-modules-unit/'))
- add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_data/sanity/'))
+ add_context(remaining_paths, 'code-smell', filter_path('test/lib/ansible_test/_util/controller/sanity/code-smell/'))
+ add_context(remaining_paths, 'legacy-collection-loader', filter_path('test/lib/ansible_test/_util/target/legacy_collection_loader/'))
+ add_context(remaining_paths, 'ansible-test-target', filter_path('test/lib/ansible_test/_util/target/'))
add_context(remaining_paths, 'ansible-test', filter_path('test/lib/'))
add_context(remaining_paths, 'test', filter_path('test/'))
add_context(remaining_paths, 'hacking', filter_path('hacking/'))
@@ -147,8 +138,6 @@ class PylintTest(SanitySingleVersion):
messages = []
context_times = []
- python = find_python(python_version)
-
collection_detail = None
if data_context().content.collection:
@@ -205,7 +194,7 @@ class PylintTest(SanitySingleVersion):
paths, # type: t.List[str]
plugin_dir, # type: str
plugin_names, # type: t.List[str]
- python, # type: str
+ python, # type: PythonConfig
collection_detail, # type: CollectionDetail
): # type: (...) -> t.List[t.Dict[str, str]]
"""Run pylint using the config specified by the context on the specified paths."""
@@ -217,7 +206,7 @@ class PylintTest(SanitySingleVersion):
else:
rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', 'default.cfg')
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(rcfile)
if parser.has_section('ansible-test'):
@@ -229,7 +218,7 @@ class PylintTest(SanitySingleVersion):
load_plugins = set(plugin_names + ['pylint.extensions.mccabe']) - disable_plugins
cmd = [
- python,
+ python.path,
'-m', 'pylint',
'--jobs', '0',
'--reports', 'n',
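
The add_context() helper above consumes matching paths from a shared set, so each file is linted under exactly one pylint configuration, with ordering determining precedence. A simplified standalone version of that batching (the context names and predicates are illustrative):

    def batch_contexts(paths):
        """Partition paths into (name, paths) contexts, first match wins."""
        contexts = []
        remaining = set(paths)

        def add_context(name, predicate):
            matched = {p for p in remaining if predicate(p)}
            contexts.append((name, sorted(matched)))
            remaining.difference_update(matched)

        add_context('ansible-test', lambda p: p.startswith('test/lib/'))
        add_context('test', lambda p: p.startswith('test/'))
        add_context('default', lambda p: True)  # everything left over
        return contexts

    print(batch_contexts(['test/lib/x.py', 'test/units/y.py', 'lib/z.py']))
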
diff --git a/test/lib/ansible_test/_internal/sanity/sanity_docs.py b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
index 44638075..a663bd96 100644
--- a/test/lib/ansible_test/_internal/sanity/sanity_docs.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
@@ -1,22 +1,26 @@
"""Sanity test for documentation of sanity tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-from ..sanity import (
+from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
sanity_get_tests,
)
-from ..config import (
+from ...test import (
+ TestResult,
+)
+
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
@@ -35,13 +39,7 @@ class SanityDocsTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity'
sanity_docs = set(part[0] for part in (os.path.splitext(os.path.basename(path)) for path in data_context().content.get_files(sanity_dir))
if part[1] == '.rst')
diff --git a/test/lib/ansible_test/_internal/sanity/shellcheck.py b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
index 82689ced..19805ea9 100644
--- a/test/lib/ansible_test/_internal/sanity/shellcheck.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
@@ -1,40 +1,43 @@
"""Sanity test using shellcheck."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
from xml.etree.ElementTree import (
fromstring,
Element,
)
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
SanitySkipped,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
read_lines_without_comments,
find_executable,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
@@ -50,12 +53,7 @@ class ShellcheckTest(SanityVersionNeutral):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.sh']
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
exclude_file = os.path.join(SANITY_ROOT, 'shellcheck', 'exclude.txt')
exclude = set(read_lines_without_comments(exclude_file, remove_blank_lines=True, optional=True))
diff --git a/test/lib/ansible_test/_internal/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
index add3cdc7..0eccc01f 100644
--- a/test/lib/ansible_test/_internal/sanity/validate_modules.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
@@ -1,58 +1,65 @@
"""Sanity test using validate-modules."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
+import typing as t
-from .. import types as t
-
-from ..sanity import (
+from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
display,
- find_python,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..ansible_util import (
+from ...ansible_util import (
ansible_environment,
get_collection_detail,
CollectionDetailError,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
-from ..ci import (
+from ...ci import (
get_ci_provider,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class ValidateModulesTest(SanitySingleVersion):
"""Sanity test using validate-modules."""
def __init__(self):
- super(ValidateModulesTest, self).__init__()
+ super().__init__()
+
self.optional_error_codes.update([
'deprecated-date',
])
@@ -66,23 +73,15 @@ class ValidateModulesTest(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if target.module]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
env = ansible_environment(args, color=False)
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
- python = find_python(python_version)
-
cmd = [
- python,
+ python.path,
os.path.join(SANITY_ROOT, 'validate-modules', 'validate-modules'),
'--format', 'json',
'--arg-spec',
@@ -136,7 +135,6 @@ class ValidateModulesTest(SanitySingleVersion):
path=filename,
line=int(item['line']) if 'line' in item else 0,
column=int(item['column']) if 'column' in item else 0,
- level='error',
code='%s' % item['code'],
message=item['msg'],
))
diff --git a/test/lib/ansible_test/_internal/sanity/yamllint.py b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
index 85a576d0..4ca6dfe8 100644
--- a/test/lib/ansible_test/_internal/sanity/yamllint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
@@ -1,46 +1,49 @@
"""Sanity test using yamllint."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
+import typing as t
-from .. import types as t
-
-from ..import ansible_util
-
-from ..sanity import (
+from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
- SanitySkipped,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
-from ..target import (
+from ...test import (
+ TestResult,
+)
+
+from ...target import (
TestTarget,
)
-from ..util import (
+from ...util import (
SubprocessError,
display,
is_subdir,
- find_python,
)
-from ..util_common import (
+from ...util_common import (
run_command,
)
-from ..config import (
+from ...config import (
SanityConfig,
)
-from ..data import (
+from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class YamllintTest(SanitySingleVersion):
"""Sanity test using yamllint."""
@@ -49,6 +52,11 @@ class YamllintTest(SanitySingleVersion):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
+ @property
+ def require_libyaml(self): # type: () -> bool
+ """True if the test requires PyYAML to have libyaml support."""
+ return True
+
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]
@@ -64,25 +72,11 @@ class YamllintTest(SanitySingleVersion):
return yaml_targets
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- pyyaml_presence = ansible_util.check_pyyaml(args, python_version, quiet=True)
- if not pyyaml_presence['cloader']:
- display.warning("Skipping sanity test '%s' due to missing libyaml support in PyYAML."
- % self.name)
- return SanitySkipped(self.name)
-
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
- python = find_python(python_version)
-
results = self.test_paths(args, paths, python)
results = settings.process_errors(results, paths)
@@ -92,15 +86,10 @@ class YamllintTest(SanitySingleVersion):
return SanitySuccess(self.name)
@staticmethod
- def test_paths(args, paths, python):
- """
- :type args: SanityConfig
- :type paths: list[str]
- :type python: str
- :rtype: list[SanityMessage]
- """
+ def test_paths(args, paths, python): # type: (SanityConfig, t.List[str], PythonConfig) -> t.List[SanityMessage]
+ """Test the specified paths using the given Python and return the results."""
cmd = [
- python,
+ python.path,
os.path.join(SANITY_ROOT, 'yamllint', 'yamllinter.py'),
]
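
The new require_libyaml property lets the virtualenv layer record, via yamlcheck() at creation time, whether PyYAML was installed with libyaml bindings, replacing the per-run check_pyyaml() call that previously skipped the test. A simplified probe with the same tri-state result the meta.yaml.json marker stores (a stand-in for ansible-test's actual yamlcheck helper):

    def pyyaml_has_libyaml():
        """Return True if PyYAML has libyaml bindings, False if it is
        pure Python, and None if PyYAML is not installed at all."""
        try:
            import yaml
        except ImportError:
            return None
        return hasattr(yaml, 'CLoader')

    print(pyyaml_has_libyaml())
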
diff --git a/test/lib/ansible_test/_internal/commands/shell/__init__.py b/test/lib/ansible_test/_internal/commands/shell/__init__.py
new file mode 100644
index 00000000..7364819e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/shell/__init__.py
@@ -0,0 +1,89 @@
+"""Open a shell prompt inside an ansible-test environment."""
+from __future__ import annotations
+
+import os
+import typing as t
+
+from ...util import (
+ ApplicationError,
+ display,
+)
+
+from ...config import (
+ ShellConfig,
+)
+
+from ...executor import (
+ Delegate,
+)
+
+from ...connections import (
+ LocalConnection,
+ SshConnection,
+)
+
+from ...host_profiles import (
+ ControllerProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ OriginConfig,
+)
+
+
+def command_shell(args): # type: (ShellConfig) -> None
+ """Entry point for the `shell` command."""
+ if args.raw and isinstance(args.targets[0], ControllerConfig):
+ raise ApplicationError('The --raw option has no effect on the controller.')
+
+ host_state = prepare_profiles(args, skip_setup=args.raw) # shell
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
+ if args.raw and not isinstance(args.controller, OriginConfig):
+ display.warning('The --raw option will only be applied to the target.')
+
+ target_profile = t.cast(SshTargetHostProfile, host_state.target_profiles[0])
+
+ if isinstance(target_profile, ControllerProfile):
+ # run the shell locally unless a target was requested
+ con = LocalConnection(args)
+ else:
+ # a target was requested, connect to it over SSH
+ con = target_profile.get_controller_target_connections()[0]
+
+ if isinstance(con, SshConnection) and args.raw:
+ cmd = []
+ elif isinstance(target_profile, PosixProfile):
+ cmd = []
+
+ if args.raw:
+ shell = 'sh' # shell required for non-ssh connection
+ else:
+ shell = 'bash'
+
+ python = target_profile.python # make sure the python interpreter has been initialized before opening a shell
+ display.info(f'Target Python {python.version} is at: {python.path}')
+
+ optional_vars = (
+ 'TERM', # keep backspace working
+ )
+
+ env = {name: os.environ[name] for name in optional_vars if name in os.environ}
+
+ if env:
+ cmd = ['/usr/bin/env'] + [f'{name}={value}' for name, value in env.items()]
+
+ cmd += [shell, '-i']
+ else:
+ cmd = []
+
+ con.run(cmd)
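
command_shell() assembles the interactive command by carrying selected environment variables (currently just TERM) through /usr/bin/env before invoking the shell. The same construction, extracted into a small standalone function for illustration:

    import os

    def build_shell_command(raw=False, optional_vars=('TERM',)):
        """Mirror the command construction from command_shell(): preserve
        selected environment variables via /usr/bin/env and start an
        interactive shell (sh when raw, bash otherwise)."""
        shell = 'sh' if raw else 'bash'
        env = {name: os.environ[name] for name in optional_vars if name in os.environ}

        cmd = []
        if env:
            cmd = ['/usr/bin/env'] + ['%s=%s' % (name, value) for name, value in env.items()]
        cmd += [shell, '-i']
        return cmd

    print(build_shell_command())
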
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
new file mode 100644
index 00000000..d23d36fc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -0,0 +1,334 @@
+"""Execute unit tests using pytest."""
+from __future__ import annotations
+
+import os
+import sys
+import typing as t
+
+from ...constants import (
+ CONTROLLER_MIN_PYTHON_VERSION,
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_ONLY_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...io import (
+ write_text_file,
+ make_dirs,
+)
+
+from ...util import (
+ ANSIBLE_TEST_DATA_ROOT,
+ display,
+ is_subdir,
+ SubprocessError,
+ ANSIBLE_LIB_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
+)
+
+from ...util_common import (
+ ResultType,
+ handle_layout_messages,
+ create_temp_dir,
+)
+
+from ...ansible_util import (
+ ansible_environment,
+ get_ansible_python_path,
+)
+
+from ...target import (
+ walk_internal_targets,
+ walk_units_targets,
+)
+
+from ...config import (
+ UnitsConfig,
+)
+
+from ...coverage_util import (
+ cover_python,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...executor import (
+ AllTargetsSkipped,
+ Delegate,
+ get_changes_filter,
+)
+
+from ...python_requirements import (
+ install_requirements,
+)
+
+from ...content_config import (
+ get_content_config,
+)
+
+from ...host_configs import (
+ PosixConfig,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...host_profiles import (
+ PosixProfile,
+)
+
+
+class TestContext:
+ """Contexts that unit tests run in based on the type of content."""
+ controller = 'controller'
+ modules = 'modules'
+ module_utils = 'module_utils'
+
+
+def command_units(args): # type: (UnitsConfig) -> None
+ """Run unit tests."""
+ handle_layout_messages(data_context().content.unit_messages)
+
+ changes = get_changes_filter(args)
+ require = args.require + changes
+ include = walk_internal_targets(walk_units_targets(), args.include, args.exclude, require)
+
+ paths = [target.path for target in include]
+
+ content_config = get_content_config()
+ supported_remote_python_versions = content_config.modules.python_versions
+
+ if content_config.modules.controller_only:
+ # controller-only collections run modules/module_utils unit tests as controller-only tests
+ module_paths = []
+ module_utils_paths = []
+ else:
+ # normal collections run modules/module_utils unit tests isolated from controller code due to differences in python version requirements
+ module_paths = [path for path in paths if is_subdir(path, data_context().content.unit_module_path)]
+ module_utils_paths = [path for path in paths if is_subdir(path, data_context().content.unit_module_utils_path)]
+
+ controller_paths = sorted(path for path in set(paths) - set(module_paths) - set(module_utils_paths))
+
+ remote_paths = module_paths or module_utils_paths
+
+ test_context_paths = {
+ TestContext.modules: module_paths,
+ TestContext.module_utils: module_utils_paths,
+ TestContext.controller: controller_paths,
+ }
+
+ if not paths:
+ raise AllTargetsSkipped()
+
+ targets = t.cast(t.List[PosixConfig], args.targets)
+ target_versions = {target.python.version: target for target in targets} # type: t.Dict[str, PosixConfig]
+ skipped_versions = args.host_settings.skipped_python_versions
+ warn_versions = []
+
+ # requested python versions that are remote-only and not supported by this collection
+ test_versions = [version for version in target_versions if version in REMOTE_ONLY_PYTHON_VERSIONS and version not in supported_remote_python_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is not supported by this collection.'
+ f' Supported Python versions are: {", ".join(content_config.python_versions)}')
+
+ warn_versions.extend(test_versions)
+
+ if warn_versions == list(target_versions):
+ raise AllTargetsSkipped()
+
+ if not remote_paths:
+ # all selected unit tests are controller tests
+
+ # requested python versions that are remote-only
+ test_versions = [version for version in target_versions if version in REMOTE_ONLY_PYTHON_VERSIONS and version not in warn_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is only supported by module/module_utils unit tests.'
+ ' No module/module_utils unit tests were selected.')
+
+ warn_versions.extend(test_versions)
+
+ if warn_versions == list(target_versions):
+ raise AllTargetsSkipped()
+
+ if not controller_paths:
+ # all selected unit tests are remote tests
+
+ # requested python versions that are not supported by remote tests for this collection
+ test_versions = [version for version in target_versions if version not in supported_remote_python_versions and version not in warn_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is not supported by module/module_utils unit tests of this collection.'
+ f' Supported Python versions are: {", ".join(supported_remote_python_versions)}')
+
+ warn_versions.extend(test_versions)
+
+ if warn_versions == list(target_versions):
+ raise AllTargetsSkipped()
+
+ host_state = prepare_profiles(args, targets_use_pypi=True) # units
+
+ if args.delegate:
+ raise Delegate(host_state=host_state, require=changes, exclude=args.exclude)
+
+ test_sets = []
+
+ if args.requirements_mode != 'skip':
+ configure_pypi_proxy(args, host_state.controller_profile) # units
+
+ for version in SUPPORTED_PYTHON_VERSIONS:
+ if version not in target_versions and version not in skipped_versions:
+ continue
+
+ test_candidates = []
+
+ for test_context, paths in test_context_paths.items():
+ if test_context == TestContext.controller:
+ if version not in CONTROLLER_PYTHON_VERSIONS:
+ continue
+ else:
+ if version not in supported_remote_python_versions:
+ continue
+
+ if not paths:
+ continue
+
+ env = ansible_environment(args)
+
+ env.update(
+ PYTHONPATH=get_units_ansible_python_path(args, test_context),
+ ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION=CONTROLLER_MIN_PYTHON_VERSION,
+ )
+
+ test_candidates.append((test_context, paths, env))
+
+ if not test_candidates:
+ continue
+
+ if version in skipped_versions:
+ display.warning("Skipping unit tests on Python %s because it could not be found." % version)
+ continue
+
+ target_profiles = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)} # type: t.Dict[str, PosixProfile]
+ target_profile = target_profiles[version]
+
+ final_candidates = [(test_context, target_profile.python, paths, env) for test_context, paths, env in test_candidates]
+ controller = any(test_context == TestContext.controller for test_context, python, paths, env in final_candidates)
+
+ if args.requirements_mode != 'skip':
+ install_requirements(args, target_profile.python, ansible=controller, command=True) # units
+
+ test_sets.extend(final_candidates)
+
+ if args.requirements_mode == 'only':
+ sys.exit()
+
+ for test_context, python, paths, env in test_sets:
+ cmd = [
+ 'pytest',
+ '--boxed',
+ '-r', 'a',
+ '-n', str(args.num_workers) if args.num_workers else 'auto',
+ '--color',
+ 'yes' if args.color else 'no',
+ '-p', 'no:cacheprovider',
+ '-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
+ '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)),
+ ]
+
+ if not data_context().content.collection:
+ cmd.append('--durations=25')
+
+ if python.version == '2.6':
+ # same as --strict-markers in older versions of pytest which still support python 2.6
+ cmd.append('--strict')
+ else:
+ # added in pytest 4.5.0, which requires python 2.7+
+ cmd.append('--strict-markers')
+
+ plugins = []
+
+ if args.coverage:
+ plugins.append('ansible_pytest_coverage')
+
+ if data_context().content.collection:
+ plugins.append('ansible_pytest_collections')
+
+ if plugins:
+ env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'pytest/plugins')
+ env['PYTEST_PLUGINS'] = ','.join(plugins)
+
+ if args.collect_only:
+ cmd.append('--collect-only')
+
+ if args.verbosity:
+ cmd.append('-' + ('v' * args.verbosity))
+
+ cmd.extend(paths)
+
+ display.info('Unit test %s with Python %s' % (test_context, python.version))
+
+ try:
+ cover_python(args, python, cmd, test_context, env)
+ except SubprocessError as ex:
+ # pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
+ if ex.status != 5:
+ raise
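+
+# Illustrative shape of the composed pytest invocation above for a controller
+# context on Python 3.10 (paths abbreviated; exact flags depend on the options
+# selected and whether ansible-core itself is being tested):
+#
+#   pytest --boxed -r a -n auto --color yes -p no:cacheprovider \
+#       -c .../pytest.ini --junit-xml .../python3.10-controller-units.xml \
+#       --durations=25 --strict-markers test/units/...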
+
+
+def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, str) -> str
+ """
+ Return a directory usable for PYTHONPATH, containing only the modules and module_utils portion of the ansible package.
+ The temporary directory created will be cached for the lifetime of the process and cleaned up at exit.
+ """
+ if test_context == TestContext.controller:
+ return get_ansible_python_path(args)
+
+ try:
+ cache = get_units_ansible_python_path.cache
+ except AttributeError:
+ cache = get_units_ansible_python_path.cache = {}
+
+ python_path = cache.get(test_context)
+
+ if python_path:
+ return python_path
+
+ python_path = create_temp_dir(prefix='ansible-test-')
+ ansible_path = os.path.join(python_path, 'ansible')
+ ansible_test_path = os.path.join(python_path, 'ansible_test')
+
+ write_text_file(os.path.join(ansible_path, '__init__.py'), '', True)
+ os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'module_utils'), os.path.join(ansible_path, 'module_utils'))
+
+ if data_context().content.collection:
+ # built-in runtime configuration for the collection loader
+ make_dirs(os.path.join(ansible_path, 'config'))
+ os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'config', 'ansible_builtin_runtime.yml'), os.path.join(ansible_path, 'config', 'ansible_builtin_runtime.yml'))
+
+ # current collection loader required by all python versions supported by the controller
+ write_text_file(os.path.join(ansible_path, 'utils', '__init__.py'), '', True)
+ os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'collection_loader'), os.path.join(ansible_path, 'utils', 'collection_loader'))
+
+ # legacy collection loader required by all python versions not supported by the controller
+ write_text_file(os.path.join(ansible_test_path, '__init__.py'), '', True)
+ write_text_file(os.path.join(ansible_test_path, '_internal', '__init__.py'), '', True)
+ os.symlink(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader'))
+ elif test_context == TestContext.modules:
+ # only non-collection ansible module tests should have access to ansible built-in modules
+ os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'modules'), os.path.join(ansible_path, 'modules'))
+
+ cache[test_context] = python_path
+
+ return python_path
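+
+# Illustrative layout (assumed) of the temporary directory returned above for a
+# collection test context; "ansible" exposes only module_utils plus the
+# collection loader, never the rest of the controller code:
+#
+#   <tmp>/ansible/__init__.py
+#   <tmp>/ansible/module_utils -> <ANSIBLE_LIB_ROOT>/module_utils
+#   <tmp>/ansible/config/ansible_builtin_runtime.yml -> ...
+#   <tmp>/ansible/utils/collection_loader -> ...
+#   <tmp>/ansible_test/_internal/legacy_collection_loader -> ...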
diff --git a/test/lib/ansible_test/_internal/compat/__init__.py b/test/lib/ansible_test/_internal/compat/__init__.py
new file mode 100644
index 00000000..e9cb6816
--- /dev/null
+++ b/test/lib/ansible_test/_internal/compat/__init__.py
@@ -0,0 +1,2 @@
+"""Nearly empty __init__.py to keep pylint happy."""
+from __future__ import annotations
diff --git a/test/lib/ansible_test/_internal/compat/packaging.py b/test/lib/ansible_test/_internal/compat/packaging.py
new file mode 100644
index 00000000..a38e1abc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/compat/packaging.py
@@ -0,0 +1,16 @@
+"""Packaging compatibility."""
+from __future__ import annotations
+
+try:
+ from packaging import (
+ specifiers,
+ version,
+ )
+
+ SpecifierSet = specifiers.SpecifierSet
+ Version = version.Version
+ PACKAGING_IMPORT_ERROR = None
+except ImportError as ex:
+ SpecifierSet = None # pylint: disable=invalid-name
+ Version = None # pylint: disable=invalid-name
+ PACKAGING_IMPORT_ERROR = ex
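+
+# Usage sketch (illustrative, error handling assumed): callers should check the
+# import error before relying on the names exported above, e.g.:
+#
+#   if PACKAGING_IMPORT_ERROR:
+#       raise Exception(f'packaging is not available: {PACKAGING_IMPORT_ERROR}')
+#
+#   assert Version('2.12.0') in SpecifierSet('>=2.12')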
diff --git a/test/lib/ansible_test/_internal/compat/yaml.py b/test/lib/ansible_test/_internal/compat/yaml.py
new file mode 100644
index 00000000..daa5ef0e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/compat/yaml.py
@@ -0,0 +1,20 @@
+"""PyYAML compatibility."""
+from __future__ import annotations
+
+from functools import (
+ partial,
+)
+
+try:
+ import yaml as _yaml
+ YAML_IMPORT_ERROR = None
+except ImportError as ex:
+ yaml_load = None # pylint: disable=invalid-name
+ YAML_IMPORT_ERROR = ex
+else:
+ try:
+ _SafeLoader = _yaml.CSafeLoader
+ except AttributeError:
+ _SafeLoader = _yaml.SafeLoader
+
+ yaml_load = partial(_yaml.load, Loader=_SafeLoader)
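+
+# Usage sketch (illustrative): yaml_load behaves like yaml.safe_load, preferring
+# the C-accelerated loader when libyaml is available, e.g.:
+#
+#   if YAML_IMPORT_ERROR:
+#       ...  # report that PyYAML is missing
+#   else:
+#       data = yaml_load('key: value')  # -> {'key': 'value'}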
diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py
new file mode 100644
index 00000000..25cc6367
--- /dev/null
+++ b/test/lib/ansible_test/_internal/completion.py
@@ -0,0 +1,226 @@
+"""Loading, parsing and storing of completion configurations."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import os
+import typing as t
+
+from .constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from .util import (
+ ANSIBLE_TEST_DATA_ROOT,
+ read_lines_without_comments,
+)
+
+from .data import (
+ data_context,
+)
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionConfig(metaclass=abc.ABCMeta):
+ """Base class for completion configuration."""
+ name: str
+
+ @property
+ @abc.abstractmethod
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
+ """Base class for completion configuration of POSIX environments."""
+ @property
+ @abc.abstractmethod
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+
+ @abc.abstractmethod
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+
+ def get_default_python(self, controller): # type: (bool) -> str
+ """Return the default Python version for a controller or target as specified."""
+ context_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+ version = [python for python in self.supported_pythons if python in context_pythons][0]
+ return version
+
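+ # Example (illustrative, assuming CONTROLLER_PYTHON_VERSIONS begins at 3.8 as
+ # in this release): with supported_pythons == ['2.7', '3.8', '3.9'],
+ # get_default_python(controller=True) returns '3.8' while
+ # get_default_python(controller=False) returns '2.7'.
+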
+ @property
+ def controller_supported(self): # type: () -> bool
+ """True if at least one Python version is provided which supports the controller, otherwise False."""
+ return any(version in CONTROLLER_PYTHON_VERSIONS for version in self.supported_pythons)
+
+
+@dataclasses.dataclass(frozen=True)
+class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
+ """Base class for completion configuration of Python environments."""
+ python: str = ''
+ python_dir: str = '/usr/bin'
+
+ @property
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+ versions = self.python.split(',') if self.python else []
+ versions = [version for version in versions if version in SUPPORTED_PYTHON_VERSIONS]
+ return versions
+
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+ return os.path.join(self.python_dir, f'python{version}')
+
+
+@dataclasses.dataclass(frozen=True)
+class RemoteCompletionConfig(CompletionConfig):
+ """Base class for completion configuration of remote environments provisioned through Ansible Core CI."""
+ provider: t.Optional[str] = None
+
+ @property
+ def platform(self):
+ """The name of the platform."""
+ return self.name.partition('/')[0]
+
+ @property
+ def version(self):
+ """The version of the platform."""
+ return self.name.partition('/')[2]
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return not self.version
+
+ def __post_init__(self):
+ if not self.provider:
+ raise Exception(f'Remote completion entry "{self.name}" must provide a "provider" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class InventoryCompletionConfig(CompletionConfig):
+ """Configuration for inventory files."""
+ def __init__(self): # type: () -> None
+ super().__init__(name='inventory')
+
+ @property
+ def is_default(self): # type: () -> bool
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixSshCompletionConfig(PythonCompletionConfig):
+ """Configuration for a POSIX host reachable over SSH."""
+ def __init__(self, user, host): # type: (str, str) -> None
+ super().__init__(
+ name=f'{user}@{host}',
+ python=','.join(SUPPORTED_PYTHON_VERSIONS),
+ )
+
+ @property
+ def is_default(self): # type: () -> bool
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+
+@dataclasses.dataclass(frozen=True)
+class DockerCompletionConfig(PythonCompletionConfig):
+ """Configuration for Docker containers."""
+ image: str = ''
+ seccomp: str = 'default'
+ placeholder: bool = False
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+ def __post_init__(self):
+ if not self.image:
+ raise Exception(f'Docker completion entry "{self.name}" must provide an "image" setting.')
+
+ if not self.supported_pythons and not self.placeholder:
+ raise Exception(f'Docker completion entry "{self.name}" must provide a "python" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class NetworkRemoteCompletionConfig(RemoteCompletionConfig):
+ """Configuration for remote network platforms."""
+ collection: str = ''
+ connection: str = ''
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixRemoteCompletionConfig(RemoteCompletionConfig, PythonCompletionConfig):
+ """Configuration for remote POSIX platforms."""
+ placeholder: bool = False
+
+ def __post_init__(self):
+ if not self.supported_pythons:
+ if self.version and not self.placeholder:
+ raise Exception(f'POSIX remote completion entry "{self.name}" must provide a "python" setting.')
+ else:
+ if not self.version:
+ raise Exception(f'POSIX remote completion entry "{self.name}" is a platform default and cannot provide a "python" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class WindowsRemoteCompletionConfig(RemoteCompletionConfig):
+ """Configuration for remote Windows platforms."""
+
+
+TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig)
+
+
+def load_completion(name, completion_type): # type: (str, t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]
+ """Load the named completion entries, returning them in dictionary form using the specified completion type."""
+ lines = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
+
+ if data_context().content.collection:
+ context = 'collection'
+ else:
+ context = 'ansible-core'
+
+ items = {name: data for name, data in [parse_completion_entry(line) for line in lines] if data.get('context', context) == context}
+
+ for item in items.values():
+ item.pop('context', None)
+ item.pop('placeholder', None)
+
+ completion = {name: completion_type(name=name, **data) for name, data in items.items()}
+
+ return completion
+
+
+def parse_completion_entry(value): # type: (str) -> t.Tuple[str, t.Dict[str, str]]
+ """Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
+ values = value.split()
+
+ name = values[0]
+ data = {kvp[0]: kvp[1] if len(kvp) > 1 else '' for kvp in [item.split('=', 1) for item in values[1:]]}
+
+ return name, data
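+
+# Example (illustrative):
+#   parse_completion_entry('rhel/8.4 python=3.6 provider=aws')
+# returns:
+#   ('rhel/8.4', {'python': '3.6', 'provider': 'aws'})
+# A bare key without '=' (such as 'placeholder') maps to an empty string.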
+
+
+def filter_completion(
+ completion, # type: t.Dict[str, TCompletionConfig]
+ controller_only=False, # type: bool
+ include_defaults=False, # type: bool
+): # type: (...) -> t.Dict[str, TCompletionConfig]
+ """Return a the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified."""
+ if controller_only:
+ completion = {name: config for name, config in completion.items() if config.controller_supported}
+
+ if not include_defaults:
+ completion = {name: config for name, config in completion.items() if not config.is_default}
+
+ return completion
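+
+# Example (illustrative): filter_completion(REMOTE_COMPLETION) drops version-less
+# platform defaults such as a bare 'rhel' entry, while
+# filter_completion(DOCKER_COMPLETION, controller_only=True) keeps only images
+# which provide at least one controller-supported Python version.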
+
+
+DOCKER_COMPLETION = load_completion('docker', DockerCompletionConfig)
+REMOTE_COMPLETION = load_completion('remote', PosixRemoteCompletionConfig)
+WINDOWS_COMPLETION = load_completion('windows', WindowsRemoteCompletionConfig)
+NETWORK_COMPLETION = load_completion('network', NetworkRemoteCompletionConfig)
diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py
index eb9c1739..5b276eb1 100644
--- a/test/lib/ansible_test/_internal/config.py
+++ b/test/lib/ansible_test/_internal/config.py
@@ -1,23 +1,18 @@
"""Configuration classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import enum
import os
import sys
-
-from . import types as t
+import typing as t
from .util import (
- find_python,
- generate_password,
- generate_pip_command,
- ApplicationError,
+ display,
+ verify_sys_executable,
+ version_to_str,
)
from .util_common import (
- docker_qualify_image,
- get_docker_completion,
- get_remote_completion,
CommonConfig,
)
@@ -29,11 +24,27 @@ from .data import (
data_context,
)
-try:
- # noinspection PyTypeChecker
- TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound='IntegrationConfig')
-except AttributeError:
- TIntegrationConfig = None # pylint: disable=invalid-name
+from .host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ HostConfig,
+ HostSettings,
+ OriginConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+
+
+class TerminateMode(enum.Enum):
+ """When to terminate instances."""
+ ALWAYS = enum.auto()
+ NEVER = enum.auto()
+ SUCCESS = enum.auto()
+
+ def __str__(self):
+ return self.name.lower()
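+
+
+# Example (illustrative): str(TerminateMode.ALWAYS) == 'always', giving the
+# lower-case form used when the mode is rendered on the command line.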
class ParsedRemote:
@@ -61,134 +72,133 @@ class ParsedRemote:
class EnvironmentConfig(CommonConfig):
"""Configuration common to all commands which execute in an environment."""
- def __init__(self, args, command):
- """
- :type args: any
- :type command: str
- """
- super(EnvironmentConfig, self).__init__(args, command)
+ def __init__(self, args, command): # type: (t.Any, str) -> None
+ super().__init__(args, command)
- self.pypi_endpoint = args.pypi_endpoint # type: str
+ self.host_settings = args.host_settings # type: HostSettings
+ self.host_path = args.host_path # type: t.Optional[str]
+ self.containers = args.containers # type: t.Optional[str]
self.pypi_proxy = args.pypi_proxy # type: bool
+ self.pypi_endpoint = args.pypi_endpoint # type: t.Optional[str]
- self.local = args.local is True
- self.venv = args.venv
- self.venv_system_site_packages = args.venv_system_site_packages
-
- self.python = args.python if 'python' in args else None # type: str
-
- self.docker = docker_qualify_image(args.docker) # type: str
- self.docker_raw = args.docker # type: str
- self.remote = args.remote # type: str
-
- if self.remote:
- self.parsed_remote = ParsedRemote.parse(self.remote)
+ # Set by check_controller_python once HostState has been created by prepare_profiles.
+ # This is here for convenience, to avoid needing to pass HostState to some functions which already have access to EnvironmentConfig.
+ self.controller_python = None # type: t.Optional[PythonConfig]
+ """
+ The Python interpreter used by the controller.
+ Only available after delegation has been performed or skipped (if delegation is not required).
+ """
- if not self.parsed_remote or not self.parsed_remote.platform or not self.parsed_remote.version:
- raise ApplicationError('Unrecognized remote "%s" syntax. Use "platform/version" or "arch/platform/version".' % self.remote)
+ if self.host_path:
+ self.delegate = False
else:
- self.parsed_remote = None
-
- self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False # type: bool
- self.docker_pull = args.docker_pull if 'docker_pull' in args else False # type: bool
- self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False # type: bool
- self.docker_seccomp = args.docker_seccomp if 'docker_seccomp' in args else None # type: str
- self.docker_memory = args.docker_memory if 'docker_memory' in args else None
- self.docker_terminate = args.docker_terminate if 'docker_terminate' in args else None # type: str
- self.docker_network = args.docker_network if 'docker_network' in args else None # type: str
+ self.delegate = (
+ not isinstance(self.controller, OriginConfig)
+ or isinstance(self.controller.python, VirtualPythonConfig)
+ or self.controller.python.version != version_to_str(sys.version_info[:2])
+ or verify_sys_executable(self.controller.python.path)
+ )
- if self.docker_seccomp is None:
- self.docker_seccomp = get_docker_completion().get(self.docker_raw, {}).get('seccomp', 'default')
+ self.docker_network = args.docker_network # type: t.Optional[str]
+ self.docker_terminate = args.docker_terminate # type: t.Optional[TerminateMode]
- self.remote_stage = args.remote_stage # type: str
- self.remote_provider = args.remote_provider # type: str
self.remote_endpoint = args.remote_endpoint # type: t.Optional[str]
- self.remote_terminate = args.remote_terminate # type: str
+ self.remote_stage = args.remote_stage # type: t.Optional[str]
+ self.remote_terminate = args.remote_terminate # type: t.Optional[TerminateMode]
- if self.remote_provider == 'default':
- self.remote_provider = None
+ self.prime_containers = args.prime_containers # type: bool
self.requirements = args.requirements # type: bool
- if self.python == 'default':
- self.python = None
+ self.delegate_args = [] # type: t.List[str]
- actual_major_minor = '.'.join(str(i) for i in sys.version_info[:2])
+ def host_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """Add the host files to the payload file list."""
+ config = self
- self.python_version = self.python or actual_major_minor
- self.python_interpreter = args.python_interpreter
+ if config.host_path:
+ settings_path = os.path.join(config.host_path, 'settings.dat')
+ state_path = os.path.join(config.host_path, 'state.dat')
- self.pip_check = args.pip_check
+ files.append((os.path.abspath(settings_path), settings_path))
+ files.append((os.path.abspath(state_path), state_path))
- self.delegate = self.docker or self.remote or self.venv
- self.delegate_args = [] # type: t.List[str]
+ data_context().register_payload_callback(host_callback)
- if self.delegate:
- self.requirements = True
+ if args.docker_no_pull:
+ display.warning('The --docker-no-pull option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
- self.inject_httptester = args.inject_httptester if 'inject_httptester' in args else False # type: bool
- self.httptester = docker_qualify_image(args.httptester if 'httptester' in args else '') # type: str
- krb5_password = args.httptester_krb5_password if 'httptester_krb5_password' in args else ''
- self.httptester_krb5_password = krb5_password or generate_password() # type: str
+ if args.no_pip_check:
+ display.warning('The --no-pip-check option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
- if self.get_delegated_completion().get('httptester', 'enabled') == 'disabled':
- self.httptester = False
+ @property
+ def controller(self): # type: () -> ControllerHostConfig
+ """Host configuration for the controller."""
+ return self.host_settings.controller
- if self.get_delegated_completion().get('pip-check', 'enabled') == 'disabled':
- self.pip_check = False
+ @property
+ def targets(self): # type: () -> t.List[HostConfig]
+ """Host configuration for the targets."""
+ return self.host_settings.targets
- if args.check_python and args.check_python != actual_major_minor:
- raise ApplicationError('Running under Python %s instead of Python %s as expected.' % (actual_major_minor, args.check_python))
+ def only_target(self, target_type): # type: (t.Type[THostConfig]) -> THostConfig
+ """
+ Return the host configuration for the target.
+ Requires that there is exactly one target of the specified type.
+ """
+ targets = list(self.targets)
- if self.docker_keep_git:
- def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
- """Add files from the content root .git directory to the payload file list."""
- for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
- paths = [os.path.join(dirpath, filename) for filename in filenames]
- files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
+ if len(targets) != 1:
+ raise Exception('There must be exactly one target.')
- data_context().register_payload_callback(git_callback)
+ target = targets.pop()
- @property
- def python_executable(self):
+ if not isinstance(target, target_type):
+ raise Exception(f'Target is {type(target)} instead of {target_type}.')
+
+ return target
+
+ def only_targets(self, target_type): # type: (t.Type[THostConfig]) -> t.List[THostConfig]
"""
- :rtype: str
+ Return a list of target host configurations.
+ Requires that there are one or more targets, all of the specified type.
"""
- return find_python(self.python_version)
+ if not self.targets:
+ raise Exception('There must be one or more targets.')
+
+ for target in self.targets:
+ if not isinstance(target, target_type):
+ raise Exception(f'Target is {type(target)} instead of {target_type}.')
+
+ return self.targets
@property
- def pip_command(self):
+ def target_type(self): # type: () -> t.Type[HostConfig]
"""
- :rtype: list[str]
+ The true type of the target(s).
+ If the target is the controller, the controller type is returned.
+ Requires at least one target, and all targets must be of the same type.
"""
- return generate_pip_command(self.python_executable)
+ target_types = set(type(target) for target in self.targets)
- def get_delegated_completion(self):
- """Returns a dictionary of settings specific to the selected delegation system, if any. Otherwise returns an empty dictionary.
- :rtype: dict[str, str]
- """
- if self.docker:
- return get_docker_completion().get(self.docker_raw, {})
+ if len(target_types) != 1:
+ raise Exception('There must be one or more targets, all of the same type.')
+
+ target_type = target_types.pop()
- if self.remote:
- return get_remote_completion().get(self.remote, {})
+ if issubclass(target_type, ControllerConfig):
+ target_type = type(self.controller)
- return {}
+ return target_type
class TestConfig(EnvironmentConfig):
"""Configuration common to all test commands."""
- def __init__(self, args, command):
- """
- :type args: any
- :type command: str
- """
- super(TestConfig, self).__init__(args, command)
+ def __init__(self, args, command): # type: (t.Any, str) -> None
+ super().__init__(args, command)
self.coverage = args.coverage # type: bool
- self.coverage_label = args.coverage_label # type: str
self.coverage_check = args.coverage_check # type: bool
- self.coverage_config_base_path = None # type: t.Optional[str]
self.include = args.include or [] # type: t.List[str]
self.exclude = args.exclude or [] # type: t.List[str]
self.require = args.require or [] # type: t.List[str]
@@ -203,9 +213,9 @@ class TestConfig(EnvironmentConfig):
self.changed_path = args.changed_path # type: t.List[str]
self.base_branch = args.base_branch # type: str
- self.lint = args.lint if 'lint' in args else False # type: bool
- self.junit = args.junit if 'junit' in args else False # type: bool
- self.failure_ok = args.failure_ok if 'failure_ok' in args else False # type: bool
+ self.lint = getattr(args, 'lint', False) # type: bool
+ self.junit = getattr(args, 'junit', False) # type: bool
+ self.failure_ok = getattr(args, 'failure_ok', False) # type: bool
self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
self.metadata_path = None
@@ -217,7 +227,7 @@ class TestConfig(EnvironmentConfig):
"""Add the metadata file to the payload file list."""
config = self
- if self.metadata_path:
+ if config.metadata_path:
files.append((os.path.abspath(config.metadata_path), config.metadata_path))
data_context().register_payload_callback(metadata_callback)
@@ -225,42 +235,41 @@ class TestConfig(EnvironmentConfig):
class ShellConfig(EnvironmentConfig):
"""Configuration for the shell command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(ShellConfig, self).__init__(args, 'shell')
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'shell')
self.raw = args.raw # type: bool
- if self.raw:
- self.httptester = False
-
class SanityConfig(TestConfig):
"""Configuration for the sanity command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(SanityConfig, self).__init__(args, 'sanity')
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'sanity')
self.test = args.test # type: t.List[str]
self.skip_test = args.skip_test # type: t.List[str]
self.list_tests = args.list_tests # type: bool
self.allow_disabled = args.allow_disabled # type: bool
self.enable_optional_errors = args.enable_optional_errors # type: bool
+ self.keep_git = args.keep_git # type: bool
+ self.prime_venvs = args.prime_venvs # type: bool
+
self.info_stderr = self.lint
+ if self.keep_git:
+ def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """Add files from the content root .git directory to the payload file list."""
+ for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
+ paths = [os.path.join(dirpath, filename) for filename in filenames]
+ files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
+
+ data_context().register_payload_callback(git_callback)
+
class IntegrationConfig(TestConfig):
"""Configuration for the integration command."""
- def __init__(self, args, command):
- """
- :type args: any
- :type command: str
- """
- super(IntegrationConfig, self).__init__(args, command)
+ def __init__(self, args, command): # type: (t.Any, str) -> None
+ super().__init__(args, command)
self.start_at = args.start_at # type: str
self.start_at_task = args.start_at_task # type: str
@@ -282,9 +291,6 @@ class IntegrationConfig(TestConfig):
self.no_temp_workdir = args.no_temp_workdir
self.no_temp_unicode = args.no_temp_unicode
- if self.get_delegated_completion().get('temp-unicode', 'enabled') == 'disabled':
- self.no_temp_unicode = True
-
if self.list_targets:
self.explain = True
self.info_stderr = True
@@ -296,65 +302,43 @@ class IntegrationConfig(TestConfig):
if not os.path.exists(ansible_config_path):
# use the default empty configuration unless one has been provided
- ansible_config_path = super(IntegrationConfig, self).get_ansible_config()
+ ansible_config_path = super().get_ansible_config()
return ansible_config_path
+TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig)
+
+
class PosixIntegrationConfig(IntegrationConfig):
"""Configuration for the posix integration command."""
-
- def __init__(self, args):
- """
- :type args: any
- """
- super(PosixIntegrationConfig, self).__init__(args, 'integration')
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
"""Configuration for the windows integration command."""
-
- def __init__(self, args):
- """
- :type args: any
- """
- super(WindowsIntegrationConfig, self).__init__(args, 'windows-integration')
-
- self.windows = args.windows # type: t.List[str]
- self.inventory = args.inventory # type: str
-
- if self.windows:
- self.allow_destructive = True
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'windows-integration')
class NetworkIntegrationConfig(IntegrationConfig):
"""Configuration for the network integration command."""
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'network-integration')
- def __init__(self, args):
- """
- :type args: any
- """
- super(NetworkIntegrationConfig, self).__init__(args, 'network-integration')
-
- self.platform = args.platform # type: t.List[str]
- self.platform_collection = dict(args.platform_collection or []) # type: t.Dict[str, str]
- self.platform_connection = dict(args.platform_connection or []) # type: t.Dict[str, str]
- self.inventory = args.inventory # type: str
self.testcase = args.testcase # type: str
class UnitsConfig(TestConfig):
"""Configuration for the units command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(UnitsConfig, self).__init__(args, 'units')
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'units')
self.collect_only = args.collect_only # type: bool
self.num_workers = args.num_workers # type: int
- self.requirements_mode = args.requirements_mode if 'requirements_mode' in args else ''
+ self.requirements_mode = getattr(args, 'requirements_mode', '') # type: str
if self.requirements_mode == 'only':
self.requirements = True
diff --git a/test/lib/ansible_test/_internal/connections.py b/test/lib/ansible_test/_internal/connections.py
new file mode 100644
index 00000000..ddf4e8df
--- /dev/null
+++ b/test/lib/ansible_test/_internal/connections.py
@@ -0,0 +1,246 @@
+"""Connection abstraction for interacting with test hosts."""
+from __future__ import annotations
+
+import abc
+import shlex
+import sys
+import tempfile
+import typing as t
+
+from .io import (
+ read_text_file,
+)
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ Display,
+ SubprocessError,
+ retry,
+)
+
+from .util_common import (
+ run_command,
+)
+
+from .docker_util import (
+ DockerInspect,
+ docker_exec,
+ docker_inspect,
+ docker_network_disconnect,
+)
+
+from .ssh import (
+ SshConnectionDetail,
+)
+
+from .become import (
+ Become,
+)
+
+
+class Connection(metaclass=abc.ABCMeta):
+ """Base class for connecting to a host."""
+ @abc.abstractmethod
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+
+ def extract_archive(self,
+ chdir, # type: str
+ src, # type: t.IO[bytes]
+ ):
+ """Extract the given archive file stream in the specified directory."""
+ # This will not work on AIX.
+ # However, AIX isn't supported as a controller, which is where this would be needed.
+ tar_cmd = ['tar', 'oxzf', '-', '-C', chdir]
+
+ retry(lambda: self.run(tar_cmd, stdin=src))
+
+ def create_archive(self,
+ chdir, # type: str
+ name, # type: str
+ dst, # type: t.IO[bytes]
+ exclude=None, # type: t.Optional[str]
+ ):
+ """Create the specified archive file stream from the specified directory, including the given name and optionally excluding the given name."""
+ tar_cmd = ['tar', 'cf', '-', '-C', chdir]
+ gzip_cmd = ['gzip']
+
+ if exclude:
+ # This will not work on AIX.
+ # However, AIX isn't supported as a controller, which is where this would be needed.
+ tar_cmd += ['--exclude', exclude]
+
+ tar_cmd.append(name)
+
+ # Using gzip to compress the archive allows this to work on all POSIX systems we support, including AIX.
+ commands = [tar_cmd, gzip_cmd]
+
+ sh_cmd = ['sh', '-c', ' | '.join(' '.join(shlex.quote(cmd) for cmd in command) for command in commands)]
+
+ retry(lambda: self.run(sh_cmd, stdout=dst))
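+
+ # Illustrative expansion of the composed command above for chdir='/src',
+ # name='ansible', exclude='.git':
+ #   sh -c 'tar cf - -C /src --exclude .git ansible | gzip'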
+
+
+class LocalConnection(Connection):
+ """Connect to localhost."""
+ def __init__(self, args): # type: (EnvironmentConfig) -> None
+ self.args = args
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ return run_command(
+ args=self.args,
+ cmd=command,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ )
+
+
+class SshConnection(Connection):
+ """Connect to a host using SSH."""
+ def __init__(self, args, settings, become=None): # type: (EnvironmentConfig, SshConnectionDetail, t.Optional[Become]) -> None
+ self.args = args
+ self.settings = settings
+ self.become = become
+
+ self.options = ['-i', settings.identity_file]
+
+ ssh_options = dict(
+ BatchMode='yes',
+ StrictHostKeyChecking='no',
+ UserKnownHostsFile='/dev/null',
+ ServerAliveInterval=15,
+ ServerAliveCountMax=4,
+ )
+
+ for ssh_option in sorted(ssh_options):
+ self.options.extend(['-o', f'{ssh_option}={ssh_options[ssh_option]}'])
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ options = list(self.options)
+
+ if self.become:
+ command = self.become.prepare_command(command)
+
+ options.append('-q')
+
+ if not data and not stdin and not stdout and sys.stdin.isatty():
+ options.append('-tt')
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-ssh-debug-', suffix='.log') as ssh_logfile:
+ options.extend(['-vvv', '-E', ssh_logfile.name])
+
+ if self.settings.port:
+ options.extend(['-p', str(self.settings.port)])
+
+ options.append(f'{self.settings.user}@{self.settings.host}')
+ options.append(' '.join(shlex.quote(cmd) for cmd in command))
+
+ def error_callback(ex): # type: (SubprocessError) -> None
+ """Error handler."""
+ self.capture_log_details(ssh_logfile.name, ex)
+
+ return run_command(
+ args=self.args,
+ cmd=['ssh'] + options,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ error_callback=error_callback,
+ )
+
+ @staticmethod
+ def capture_log_details(path, ex): # type: (str, SubprocessError) -> None
+ """Read the specified SSH debug log and add relevant details to the provided exception."""
+ if ex.status != 255:
+ return
+
+ markers = [
+ 'debug1: Connection Established',
+ 'debug1: Authentication successful',
+ 'debug1: Entering interactive session',
+ 'debug1: Sending command',
+ 'debug2: PTY allocation request accepted',
+ 'debug2: exec request accepted',
+ ]
+
+ file_contents = read_text_file(path)
+ messages = []
+
+ for line in reversed(file_contents.splitlines()):
+ messages.append(line)
+
+ if any(line.startswith(marker) for marker in markers):
+ break
+
+ message = '\n'.join(reversed(messages))
+
+ ex.message += '>>> SSH Debug Output\n'
+ ex.message += '%s%s\n' % (message.strip(), Display.clear)
+
+
+class DockerConnection(Connection):
+ """Connect to a host using Docker."""
+ def __init__(self, args, container_id, user=None): # type: (EnvironmentConfig, str, t.Optional[str]) -> None
+ self.args = args
+ self.container_id = container_id
+ self.user = user # type: t.Optional[str]
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ options = []
+
+ if self.user:
+ options.extend(['--user', self.user])
+
+ if not data and not stdin and not stdout and sys.stdin.isatty():
+ options.append('-it')
+
+ return docker_exec(
+ args=self.args,
+ container_id=self.container_id,
+ cmd=command,
+ options=options,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ )
+
+ def inspect(self): # type: () -> DockerInspect
+ """Inspect the container and return a DockerInspect instance with the results."""
+ return docker_inspect(self.args, self.container_id)
+
+ def disconnect_network(self, network): # type: (str) -> None
+ """Disconnect the container from the specified network."""
+ docker_network_disconnect(self.args, self.container_id, network)
diff --git a/test/lib/ansible_test/_internal/constants.py b/test/lib/ansible_test/_internal/constants.py
index f4307822..07455d8e 100644..120000
--- a/test/lib/ansible_test/_internal/constants.py
+++ b/test/lib/ansible_test/_internal/constants.py
@@ -1,10 +1 @@
-"""Constants used by ansible-test. Imports should not be used in this file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
-# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
-SOFT_RLIMIT_NOFILE = 1024
-
-# File used to track the ansible-test test execution timeout.
-TIMEOUT_PATH = '.ansible-test-timeout.json'
+../_util/target/common/constants.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py
new file mode 100644
index 00000000..97e84880
--- /dev/null
+++ b/test/lib/ansible_test/_internal/containers.py
@@ -0,0 +1,835 @@
+"""High level functions for working with containers."""
+from __future__ import annotations
+
+import atexit
+import contextlib
+import enum
+import json
+import random
+import time
+import uuid
+import threading
+import typing as t
+
+from .util import (
+ ApplicationError,
+ SubprocessError,
+ display,
+ get_host_ip,
+ sanitize_host_name,
+)
+
+from .util_common import (
+ named_temporary_file,
+)
+
+from .config import (
+ EnvironmentConfig,
+ IntegrationConfig,
+ SanityConfig,
+ ShellConfig,
+ UnitsConfig,
+ WindowsIntegrationConfig,
+)
+
+from .docker_util import (
+ ContainerNotFoundError,
+ DockerInspect,
+ docker_exec,
+ docker_inspect,
+ docker_pull,
+ docker_rm,
+ docker_run,
+ docker_start,
+ get_docker_container_id,
+ get_docker_host_ip,
+ require_docker,
+)
+
+from .ansible_util import (
+ run_playbook,
+)
+
+from .core_ci import (
+ SshKey,
+)
+
+from .target import (
+ IntegrationTarget,
+)
+
+from .ssh import (
+ SshConnectionDetail,
+ SshProcess,
+ create_ssh_port_forwards,
+ create_ssh_port_redirects,
+ generate_ssh_inventory,
+)
+
+from .host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ OriginConfig,
+ PosixSshConfig,
+ PythonConfig,
+ RemoteConfig,
+ WindowsInventoryConfig,
+)
+
+from .connections import (
+ SshConnection,
+)
+
+# information about support containers provisioned by the current ansible-test instance
+support_containers = {} # type: t.Dict[str, ContainerDescriptor]
+support_containers_mutex = threading.Lock()
+
+
+class HostType:
+ """Enum representing the types of hosts involved in running tests."""
+ origin = 'origin'
+ control = 'control'
+ managed = 'managed'
+
+
+class CleanupMode(enum.Enum):
+ """How container cleanup should be handled."""
+ YES = enum.auto()
+ NO = enum.auto()
+ INFO = enum.auto()
+
+
+def run_support_container(
+ args, # type: EnvironmentConfig
+ context, # type: str
+ image, # type: str
+ name, # type: str
+ ports, # type: t.List[int]
+ aliases=None, # type: t.Optional[t.List[str]]
+ start=True, # type: bool
+ allow_existing=False, # type: bool
+ cleanup=None, # type: t.Optional[CleanupMode]
+ cmd=None, # type: t.Optional[t.List[str]]
+ env=None, # type: t.Optional[t.Dict[str, str]]
+ options=None, # type: t.Optional[t.List[str]]
+ publish_ports=True, # type: bool
+): # type: (...) -> t.Optional[ContainerDescriptor]
+ """
+ Start a container used to support tests, but not to run them.
+ Containers created this way will be accessible from tests.
+ """
+ if args.prime_containers:
+ docker_pull(args, image)
+ return None
+
+ # SSH is required for publishing ports, as well as modifying the hosts file.
+ # Initializing the SSH key here makes sure it is available for use after delegation.
+ SshKey(args)
+
+ aliases = aliases or [sanitize_host_name(name)]
+
+ docker_command = require_docker().command
+ current_container_id = get_docker_container_id()
+
+ if docker_command == 'docker':
+ if isinstance(args.controller, DockerConfig) and all(isinstance(target, (ControllerConfig, DockerConfig)) for target in args.targets):
+ publish_ports = False # publishing ports is not needed when test hosts are on the docker network
+
+ if current_container_id:
+ publish_ports = False # publishing ports is pointless if already running in a docker container
+
+ options = (options or []) + ['--name', name]
+
+ if start:
+ options.append('-d')
+
+ if publish_ports:
+ for port in ports:
+ options.extend(['-p', str(port)])
+
+ if env:
+ for key, value in env.items():
+ options.extend(['--env', '%s=%s' % (key, value)])
+
+ support_container_id = None
+
+ if allow_existing:
+ try:
+ container = docker_inspect(args, name)
+ except ContainerNotFoundError:
+ container = None
+
+ if container:
+ support_container_id = container.id
+
+ if not container.running:
+ display.info('Ignoring existing "%s" container which is not running.' % name, verbosity=1)
+ support_container_id = None
+ elif not container.image:
+ display.info('Ignoring existing "%s" container which has the wrong image.' % name, verbosity=1)
+ support_container_id = None
+ elif publish_ports and not all(port and len(port) == 1 for port in [container.get_tcp_port(port) for port in ports]):
+ display.info('Ignoring existing "%s" container which does not have the required published ports.' % name, verbosity=1)
+ support_container_id = None
+
+ if not support_container_id:
+ docker_rm(args, name)
+
+ if support_container_id:
+ display.info('Using existing "%s" container.' % name)
+ running = True
+ existing = True
+ else:
+ display.info('Starting new "%s" container.' % name)
+ docker_pull(args, image)
+ support_container_id = docker_run(args, image, options, create_only=not start, cmd=cmd)
+ running = start
+ existing = False
+
+ if cleanup is None:
+ cleanup = CleanupMode.INFO if existing else CleanupMode.YES
+
+ descriptor = ContainerDescriptor(
+ image,
+ context,
+ name,
+ support_container_id,
+ ports,
+ aliases,
+ publish_ports,
+ running,
+ existing,
+ cleanup,
+ env,
+ )
+
+ with support_containers_mutex:
+ if name in support_containers:
+ raise Exception(f'Container already defined: {name}')
+
+ if not support_containers:
+ atexit.register(cleanup_containers, args)
+
+ support_containers[name] = descriptor
+
+ display.info(f'Adding "{name}" to container database.')
+
+ if start:
+ descriptor.register(args)
+
+ return descriptor
+
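+# Usage sketch (illustrative; the image and name are hypothetical): provisioning
+# a support container and reading back a published port might look like:
+#
+#   descriptor = run_support_container(
+#       args, context='support', image='quay.io/example/service:latest',
+#       name='example-service', ports=[8080],
+#   )
+#   if descriptor and descriptor.details:
+#       published = descriptor.details.published_ports.get(8080)
+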
+
+def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
+ """Return the current container database, creating it as needed, or returning the one provided on the command line through delegation."""
+ try:
+ return get_container_database.database
+ except AttributeError:
+ pass
+
+ if args.containers:
+ display.info('Parsing container database.', verbosity=1)
+ database = ContainerDatabase.from_dict(json.loads(args.containers))
+ else:
+ display.info('Creating container database.', verbosity=1)
+ database = create_container_database(args)
+
+ display.info('>>> Container Database\n%s' % json.dumps(database.to_dict(), indent=4, sort_keys=True), verbosity=3)
+
+ get_container_database.database = database
+
+ return get_container_database.database
+
+
+class ContainerAccess:
+ """Information needed for one test host to access a single container supporting tests."""
+ def __init__(self, host_ip, names, ports, forwards): # type: (str, t.List[str], t.Optional[t.List[int]], t.Optional[t.Dict[int, int]]) -> None
+ # if forwards is set
+ # this is where forwards are sent (it is the host that provides an indirect connection to the containers on alternate ports)
+ # /etc/hosts uses 127.0.0.1 (since port redirection will be used)
+ # else
+ # this is what goes into /etc/hosts (it is the container's direct IP)
+ self.host_ip = host_ip
+
+ # primary name + any aliases -- these go into the hosts file and reference the appropriate ip for the origin/control/managed host
+ self.names = names
+
+ # ports available (set if forwards is not set)
+ self.ports = ports
+
+ # port redirections to create through host_ip -- if not set, no port redirections will be used
+ self.forwards = forwards
+
+ def port_map(self): # type: () -> t.List[t.Tuple[int, int]]
+ """Return a port map for accessing this container."""
+ if self.forwards:
+ ports = list(self.forwards.items())
+ else:
+ ports = [(port, port) for port in self.ports]
+
+ return ports
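+
+ # Example (illustrative): forwards={80: 8080} yields [(80, 8080)], while
+ # ports=[80, 443] with no forwards yields [(80, 80), (443, 443)].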
+
+ @staticmethod
+ def from_dict(data): # type: (t.Dict[str, t.Any]) -> ContainerAccess
+ """Return a ContainerAccess instance from the given dict."""
+ forwards = data.get('forwards')
+
+ if forwards:
+ forwards = dict((int(key), value) for key, value in forwards.items())
+
+ return ContainerAccess(
+ host_ip=data['host_ip'],
+ names=data['names'],
+ ports=data.get('ports'),
+ forwards=forwards,
+ )
+
+ def to_dict(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dict of the current instance."""
+ value = dict(
+ host_ip=self.host_ip,
+ names=self.names,
+ )
+
+ if self.ports:
+ value.update(ports=self.ports)
+
+ if self.forwards:
+ value.update(forwards=self.forwards)
+
+ return value
+
+
+class ContainerDatabase:
+ """Database of running containers used to support tests."""
+ def __init__(self, data): # type: (t.Dict[str, t.Dict[str, t.Dict[str, ContainerAccess]]]) -> None
+ self.data = data
+
+ @staticmethod
+ def from_dict(data): # type: (t.Dict[str, t.Any]) -> ContainerDatabase
+ """Return a ContainerDatabase instance from the given dict."""
+ return ContainerDatabase(dict((access_name,
+ dict((context_name,
+ dict((container_name, ContainerAccess.from_dict(container))
+ for container_name, container in containers.items()))
+ for context_name, containers in contexts.items()))
+ for access_name, contexts in data.items()))
+
+ def to_dict(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dict of the current instance."""
+ return dict((access_name,
+ dict((context_name,
+ dict((container_name, container.to_dict())
+ for container_name, container in containers.items()))
+ for context_name, containers in contexts.items()))
+ for access_name, contexts in self.data.items())
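+
+# Illustrative shape of the serialized database (context and container names
+# assumed; the outer keys are the HostType values origin/control/managed):
+#
+#   {
+#       "control": {
+#           "probe": {
+#               "example-container": {"host_ip": "172.17.0.2", "names": ["example-container"], "ports": [8080]}
+#           }
+#       }
+#   }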
+
+
+def local_ssh(args, python): # type: (EnvironmentConfig, PythonConfig) -> SshConnectionDetail
+ """Return SSH connection details for localhost, connecting as root to the default SSH port."""
+ return SshConnectionDetail('localhost', 'localhost', None, 'root', SshKey(args).key, python.path)
+
+
+def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail
+ """Return the SSH connection details from the given SSH connection. If become was specified, the user will be changed to `root`."""
+ settings = ssh.settings.__dict__.copy()
+
+ if ssh.become:
+ settings.update(
+ user='root',
+ )
+
+ return SshConnectionDetail(**settings)
+
+
+def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
+ """Create and return a container database with information necessary for all test hosts to make use of relevant support containers."""
+ origin = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
+ control = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
+ managed = {} # type: t.Dict[str, t.Dict[str, ContainerAccess]]
+
+ for name, container in support_containers.items():
+ if container.details.published_ports:
+ published_access = ContainerAccess(
+ host_ip=get_docker_host_ip(),
+ names=container.aliases,
+ ports=None,
+ forwards=dict((port, published_port) for port, published_port in container.details.published_ports.items()),
+ )
+ else:
+ published_access = None # no published access without published ports (ports are only published if needed)
+
+ if container.details.container_ip:
+ # docker containers and rootful podman containers should have a container IP address
+ container_access = ContainerAccess(
+ host_ip=container.details.container_ip,
+ names=container.aliases,
+ ports=container.ports,
+ forwards=None,
+ )
+ elif require_docker().command == 'podman':
+ # published ports for rootless podman containers should be accessible from the host's IP
+ container_access = ContainerAccess(
+ host_ip=get_host_ip(),
+ names=container.aliases,
+ ports=None,
+ forwards=dict((port, published_port) for port, published_port in container.details.published_ports.items()),
+ )
+ else:
+ container_access = None # no container access without an IP address
+
+ if get_docker_container_id():
+ if not container_access:
+ raise Exception('Missing IP address for container: %s' % name)
+
+ origin_context = origin.setdefault(container.context, {})
+ origin_context[name] = container_access
+ elif not published_access:
+ pass # origin does not have network access to the containers
+ else:
+ origin_context = origin.setdefault(container.context, {})
+ origin_context[name] = published_access
+
+ if isinstance(args.controller, RemoteConfig):
+ pass # SSH forwarding required
+ elif '-controller-' in name:
+ pass # hack to avoid exposing the controller container to the controller
+ elif isinstance(args.controller, DockerConfig) or (isinstance(args.controller, OriginConfig) and get_docker_container_id()):
+ if container_access:
+ control_context = control.setdefault(container.context, {})
+ control_context[name] = container_access
+ else:
+ raise Exception('Missing IP address for container: %s' % name)
+ else:
+ if not published_access:
+ raise Exception('Missing published ports for container: %s' % name)
+
+ control_context = control.setdefault(container.context, {})
+ control_context[name] = published_access
+
+ if issubclass(args.target_type, (RemoteConfig, WindowsInventoryConfig, PosixSshConfig)):
+ pass # SSH forwarding required
+ elif '-controller-' in name or '-target-' in name:
+ pass # hack to avoid exposing the controller and target containers to the target
+ elif issubclass(args.target_type, DockerConfig) or (issubclass(args.target_type, OriginConfig) and get_docker_container_id()):
+ if container_access:
+ managed_context = managed.setdefault(container.context, {})
+ managed_context[name] = container_access
+ else:
+ raise Exception('Missing IP address for container: %s' % name)
+ else:
+ if not published_access:
+ raise Exception('Missing published ports for container: %s' % name)
+
+ managed_context = managed.setdefault(container.context, {})
+ managed_context[name] = published_access
+
+ data = {
+ HostType.origin: origin,
+ HostType.control: control,
+ HostType.managed: managed,
+ }
+
+ data = dict((key, value) for key, value in data.items() if value)
+
+ return ContainerDatabase(data)
+
+
+class SupportContainerContext:
+ """Context object for tracking information relating to access of support containers."""
+ def __init__(self, containers, process): # type: (ContainerDatabase, t.Optional[SshProcess]) -> None
+ self.containers = containers
+ self.process = process
+
+ def close(self): # type: () -> None
+ """Close the process maintaining the port forwards."""
+ if not self.process:
+ return # forwarding not in use
+
+ self.process.terminate()
+
+ display.info('Waiting for the session SSH port forwarding process to terminate.', verbosity=1)
+
+ self.process.wait()
+
+
+@contextlib.contextmanager
+def support_container_context(
+ args, # type: EnvironmentConfig
+ ssh, # type: t.Optional[SshConnectionDetail]
+): # type: (...) -> t.Optional[ContainerDatabase]
+ """Create a context manager for integration tests that use support containers."""
+ if not isinstance(args, (IntegrationConfig, UnitsConfig, SanityConfig, ShellConfig)):
+ yield None # containers are only needed for commands that have targets (hosts or pythons)
+ return
+
+ containers = get_container_database(args)
+
+ if not containers.data:
+ yield ContainerDatabase({}) # no containers are being used, return an empty database
+ return
+
+ context = create_support_container_context(args, ssh, containers)
+
+ try:
+ yield context.containers
+ finally:
+ context.close()
+
+
+def create_support_container_context(
+ args, # type: EnvironmentConfig
+ ssh, # type: t.Optional[SshConnectionDetail]
+ containers, # type: ContainerDatabase
+): # type: (...) -> SupportContainerContext
+ """Context manager that provides SSH port forwards. Returns updated container metadata."""
+ host_type = HostType.control
+
+ revised = ContainerDatabase(containers.data.copy())
+ source = revised.data.pop(HostType.origin, None)
+
+ container_map = {} # type: t.Dict[t.Tuple[str, int], t.Tuple[str, str, int]]
+
+ if host_type not in revised.data:
+ if not source:
+ raise Exception('Missing origin container details.')
+
+ for context_name, context in source.items():
+ for container_name, container in context.items():
+ if '-controller-' in container_name:
+ continue # hack to avoid exposing the controller container to the controller
+
+ for port, access_port in container.port_map():
+ container_map[(container.host_ip, access_port)] = (context_name, container_name, port)
+
+ if not container_map:
+ return SupportContainerContext(revised, None)
+
+ if not ssh:
+ raise Exception('The %s host was not pre-configured for container access and SSH forwarding is not available.' % host_type)
+
+ forwards = list(container_map.keys())
+ process = create_ssh_port_forwards(args, ssh, forwards)
+ result = SupportContainerContext(revised, process)
+
+ try:
+ port_forwards = process.collect_port_forwards()
+ contexts = {}
+
+ for forward, forwarded_port in port_forwards.items():
+ access_host, access_port = forward
+ context_name, container_name, container_port = container_map[(access_host, access_port)]
+ container = source[context_name][container_name]
+ context = contexts.setdefault(context_name, {})
+
+ forwarded_container = context.setdefault(container_name, ContainerAccess('127.0.0.1', container.names, None, {}))
+ forwarded_container.forwards[container_port] = forwarded_port
+
+ display.info('Container "%s" port %d available at %s:%d is forwarded over SSH as port %d.' % (
+ container_name, container_port, access_host, access_port, forwarded_port,
+ ), verbosity=1)
+
+ revised.data[host_type] = contexts
+
+ return result
+ except Exception:
+ result.close()
+ raise
+
+
+class ContainerDescriptor:
+ """Information about a support container."""
+ def __init__(self,
+ image, # type: str
+ context, # type: str
+ name, # type: str
+ container_id, # type: str
+ ports, # type: t.List[int]
+ aliases, # type: t.List[str]
+ publish_ports, # type: bool
+ running, # type: bool
+ existing, # type: bool
+ cleanup, # type: CleanupMode
+ env, # type: t.Optional[t.Dict[str, str]]
+ ): # type: (...) -> None
+ self.image = image
+ self.context = context
+ self.name = name
+ self.container_id = container_id
+ self.ports = ports
+ self.aliases = aliases
+ self.publish_ports = publish_ports
+ self.running = running
+ self.existing = existing
+ self.cleanup = cleanup
+ self.env = env
+ self.details = None # type: t.Optional[SupportContainer]
+
+ def start(self, args): # type: (EnvironmentConfig) -> None
+ """Start the container. Used for containers which are created, but not started."""
+ docker_start(args, self.name)
+
+ self.register(args)
+
+ def register(self, args): # type: (EnvironmentConfig) -> SupportContainer
+ """Record the container's runtime details. Must be used after the container has been started."""
+ if self.details:
+ raise Exception('Container already registered: %s' % self.name)
+
+ try:
+ container = docker_inspect(args, self.container_id)
+ except ContainerNotFoundError:
+ if not args.explain:
+ raise
+
+ # provide enough mock data to keep --explain working
+ container = DockerInspect(args, dict(
+ Id=self.container_id,
+ NetworkSettings=dict(
+ IPAddress='127.0.0.1',
+ Ports=dict(('%d/tcp' % port, [dict(HostPort=random.randint(30000, 40000) if self.publish_ports else port)]) for port in self.ports),
+ ),
+ Config=dict(
+ Env=['%s=%s' % (key, value) for key, value in self.env.items()] if self.env else [],
+ ),
+ ))
+
+ support_container_ip = container.get_ip_address()
+
+ if self.publish_ports:
+ # inspect the support container to locate the published ports
+ tcp_ports = dict((port, container.get_tcp_port(port)) for port in self.ports)
+
+ if any(not config or len(set(conf['HostPort'] for conf in config)) != 1 for config in tcp_ports.values()):
+ raise ApplicationError('Unexpected `docker inspect` results for published TCP ports:\n%s' % json.dumps(tcp_ports, indent=4, sort_keys=True))
+
+ published_ports = dict((port, int(config[0]['HostPort'])) for port, config in tcp_ports.items())
+ else:
+ published_ports = {}
+
+ self.details = SupportContainer(
+ container,
+ support_container_ip,
+ published_ports,
+ )
+
+ return self.details
+
+
+class SupportContainer:
+ """Information about a running support container available for use by tests."""
+ def __init__(self,
+ container, # type: DockerInspect
+ container_ip, # type: str
+ published_ports, # type: t.Dict[int, int]
+ ): # type: (...) -> None
+ self.container = container
+ self.container_ip = container_ip
+ self.published_ports = published_ports
+
+
+def wait_for_file(args, # type: EnvironmentConfig
+ container_name, # type: str
+ path, # type: str
+ sleep, # type: int
+ tries, # type: int
+ check=None, # type: t.Optional[t.Callable[[str], bool]]
+ ): # type: (...) -> str
+ """Wait for the specified file to become available in the requested container and return its contents."""
+ display.info('Waiting for container "%s" to provide file: %s' % (container_name, path))
+
+ for _iteration in range(1, tries):
+ if _iteration > 1:
+ time.sleep(sleep)
+
+ try:
+ stdout = docker_exec(args, container_name, ['dd', 'if=%s' % path], capture=True)[0]
+ except SubprocessError:
+ continue
+
+ if not check or check(stdout):
+ return stdout
+
+ raise ApplicationError('Timeout waiting for container "%s" to provide file: %s' % (container_name, path))
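A toy, filesystem-only version of the retry loop in wait_for_file() above may help clarify the control flow; the docker_exec call is replaced with a plain local read and all names are illustrative:

    import time

    def wait_for_local_file(path, sleep=1, tries=5, check=None):
        """Poll until the file can be read and the optional check passes."""
        for attempt in range(tries):
            if attempt:
                time.sleep(sleep)  # pause between attempts, but not before the first
            try:
                with open(path) as handle:
                    contents = handle.read()
            except OSError:
                continue  # file not available yet, try again
            if not check or check(contents):
                return contents
        raise TimeoutError('timeout waiting for file: %s' % path)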
+
+
+def cleanup_containers(args): # type: (EnvironmentConfig) -> None
+ """Clean up containers."""
+ for container in support_containers.values():
+ if container.cleanup == CleanupMode.YES:
+ docker_rm(args, container.container_id)
+ elif container.cleanup == CleanupMode.INFO:
+ display.notice('Remember to run `docker rm -f %s` when finished testing.' % container.name)
+
+
+def create_hosts_entries(context): # type: (t.Dict[str, ContainerAccess]) -> t.List[str]
+ """Return hosts entries for the specified context."""
+ entries = []
+ unique_id = uuid.uuid4()
+
+ for container in context.values():
+ # forwards require port redirection through localhost
+ if container.forwards:
+ host_ip = '127.0.0.1'
+ else:
+ host_ip = container.host_ip
+
+ entries.append('%s %s # ansible-test %s' % (host_ip, ' '.join(container.names), unique_id))
+
+ return entries
+
+
+def create_container_hooks(
+ args, # type: IntegrationConfig
+ control_connections, # type: t.List[SshConnectionDetail]
+ managed_connections, # type: t.Optional[t.List[SshConnectionDetail]]
+): # type: (...) -> t.Tuple[t.Optional[t.Callable[[IntegrationTarget], None]], t.Optional[t.Callable[[IntegrationTarget], None]]]
+ """Return pre and post target callbacks for enabling and disabling container access for each test target."""
+ containers = get_container_database(args)
+
+ control_contexts = containers.data.get(HostType.control)
+
+ if control_contexts:
+ managed_contexts = containers.data.get(HostType.managed)
+
+ if not managed_contexts:
+ managed_contexts = create_managed_contexts(control_contexts)
+
+ control_type = 'posix'
+
+ if isinstance(args, WindowsIntegrationConfig):
+ managed_type = 'windows'
+ else:
+ managed_type = 'posix'
+
+ control_state = {}
+ managed_state = {}
+
+ def pre_target(target):
+ """Configure hosts for SSH port forwarding required by the specified target."""
+ forward_ssh_ports(args, control_connections, '%s_hosts_prepare.yml' % control_type, control_state, target, HostType.control, control_contexts)
+ forward_ssh_ports(args, managed_connections, '%s_hosts_prepare.yml' % managed_type, managed_state, target, HostType.managed, managed_contexts)
+
+ def post_target(target):
+ """Clean up previously configured SSH port forwarding which was required by the specified target."""
+ cleanup_ssh_ports(args, control_connections, '%s_hosts_restore.yml' % control_type, control_state, target, HostType.control)
+ cleanup_ssh_ports(args, managed_connections, '%s_hosts_restore.yml' % managed_type, managed_state, target, HostType.managed)
+ else:
+ pre_target, post_target = None, None
+
+ return pre_target, post_target
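The callbacks returned above are invoked around each test target; a hypothetical driver loop showing how such a pre/post pair is typically consumed:

    def run_targets(targets, pre_target=None, post_target=None):
        """Run each target, bracketing it with the optional callbacks."""
        for target in targets:
            if pre_target:
                pre_target(target)  # e.g. set up SSH port forwarding
            try:
                print('running target:', target)  # stands in for the actual test run
            finally:
                if post_target:
                    post_target(target)  # e.g. tear down SSH port forwarding

    run_targets(['alpha', 'beta'])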
+
+
+def create_managed_contexts(control_contexts): # type: (t.Dict[str, t.Dict[str, ContainerAccess]]) -> t.Dict[str, t.Dict[str, ContainerAccess]]
+ """Create managed contexts from the given control contexts."""
+ managed_contexts = {}
+
+ for context_name, control_context in control_contexts.items():
+ managed_context = managed_contexts[context_name] = {}
+
+ for container_name, control_container in control_context.items():
+ managed_context[container_name] = ContainerAccess(control_container.host_ip, control_container.names, None, dict(control_container.port_map()))
+
+ return managed_contexts
+
+
+def forward_ssh_ports(
+ args, # type: IntegrationConfig
+ ssh_connections, # type: t.Optional[t.List[SshConnectionDetail]]
+ playbook, # type: str
+ target_state, # type: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]]
+ target, # type: IntegrationTarget
+ host_type, # type: str
+ contexts, # type: t.Dict[str, t.Dict[str, ContainerAccess]]
+): # type: (...) -> None
+ """Configure port forwarding using SSH and write hosts file entries."""
+ if ssh_connections is None:
+ return
+
+ test_context = None
+
+ for context_name, context in contexts.items():
+ context_alias = 'cloud/%s/' % context_name
+
+ if context_alias in target.aliases:
+ test_context = context
+ break
+
+ if not test_context:
+ return
+
+ if not ssh_connections:
+ if args.explain:
+ return
+
+ raise Exception('The %s host was not pre-configured for container access and SSH forwarding is not available.' % host_type)
+
+ redirects = [] # type: t.List[t.Tuple[int, str, int]]
+ messages = []
+
+ for container_name, container in test_context.items():
+ explain = []
+
+ for container_port, access_port in container.port_map():
+ if container.forwards:
+ redirects.append((container_port, container.host_ip, access_port))
+
+ explain.append('%d -> %s:%d' % (container_port, container.host_ip, access_port))
+ else:
+ explain.append('%s:%d' % (container.host_ip, container_port))
+
+ if explain:
+ if container.forwards:
+ message = 'Port forwards for the "%s" container have been established on the %s host' % (container_name, host_type)
+ else:
+ message = 'Ports for the "%s" container are available on the %s host as' % (container_name, host_type)
+
+ messages.append('%s:\n%s' % (message, '\n'.join(explain)))
+
+ hosts_entries = create_hosts_entries(test_context)
+ inventory = generate_ssh_inventory(ssh_connections)
+
+ with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path:
+ run_playbook(args, inventory_path, playbook, dict(hosts_entries=hosts_entries))
+
+ ssh_processes = [] # type: t.List[SshProcess]
+
+ if redirects:
+ for ssh in ssh_connections:
+ ssh_processes.append(create_ssh_port_redirects(args, ssh, redirects))
+
+ target_state[target.name] = (hosts_entries, ssh_processes)
+
+ for message in messages:
+ display.info(message, verbosity=1)
+
+
+def cleanup_ssh_ports(
+ args, # type: IntegrationConfig
+ ssh_connections, # type: t.List[SshConnectionDetail]
+ playbook, # type: str
+ target_state, # type: t.Dict[str, t.Tuple[t.List[str], t.List[SshProcess]]]
+ target, # type: IntegrationTarget
+ host_type, # type: str
+): # type: (...) -> None
+ """Stop previously configured SSH port forwarding and remove previously written hosts file entries."""
+ state = target_state.pop(target.name, None)
+
+ if not state:
+ return
+
+ (hosts_entries, ssh_processes) = state
+
+ inventory = generate_ssh_inventory(ssh_connections)
+
+ with named_temporary_file(args, 'ssh-inventory-', '.json', None, inventory) as inventory_path:
+ run_playbook(args, inventory_path, playbook, dict(hosts_entries=hosts_entries))
+
+ if ssh_processes:
+ for process in ssh_processes:
+ process.terminate()
+
+ display.info('Waiting for the %s host SSH port forwarding process(es) to terminate.' % host_type, verbosity=1)
+
+ for process in ssh_processes:
+ process.wait()
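For reference, a minimal sketch of the hosts-file entry format produced by create_hosts_entries() above; the address and aliases are hypothetical, and the trailing UUID comment is what allows the restore playbooks to remove exactly the lines written for a given target:

    import uuid

    host_ip = '10.88.0.3'  # hypothetical container address
    names = ['ansible.http.tests', 'sni1.ansible.http.tests']  # hypothetical aliases
    unique_id = uuid.uuid4()

    print('%s %s # ansible-test %s' % (host_ip, ' '.join(names), unique_id))
    # 10.88.0.3 ansible.http.tests sni1.ansible.http.tests # ansible-test <uuid>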
diff --git a/test/lib/ansible_test/_internal/content_config.py b/test/lib/ansible_test/_internal/content_config.py
new file mode 100644
index 00000000..10574cc0
--- /dev/null
+++ b/test/lib/ansible_test/_internal/content_config.py
@@ -0,0 +1,151 @@
+"""Content configuration."""
+from __future__ import annotations
+
+import os
+import typing as t
+
+from .constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from .compat.packaging import (
+ PACKAGING_IMPORT_ERROR,
+ SpecifierSet,
+ Version,
+)
+
+from .compat.yaml import (
+ YAML_IMPORT_ERROR,
+ yaml_load,
+)
+
+from .io import (
+ read_text_file,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+ str_to_version,
+ cache,
+)
+
+from .data import (
+ data_context,
+)
+
+
+MISSING = object()
+
+
+class BaseConfig:
+ """Base class for content configuration."""
+ def __init__(self, data): # type: (t.Any) -> None
+ if not isinstance(data, dict):
+ raise Exception('config must be of type `dict` not `%s`' % type(data))
+
+
+class ModulesConfig(BaseConfig):
+ """Configuration for modules."""
+ def __init__(self, data): # type: (t.Any) -> None
+ super().__init__(data)
+
+ python_requires = data.get('python_requires', MISSING)
+
+ if python_requires is MISSING:
+ raise KeyError('python_requires is required')
+
+ self.python_requires = python_requires
+ self.python_versions = parse_python_requires(python_requires)
+ self.controller_only = python_requires == 'controller'
+
+
+class ContentConfig(BaseConfig):
+ """Configuration for all content."""
+ def __init__(self, data): # type: (t.Any) -> None
+ super().__init__(data)
+
+ # Configuration specific to modules/module_utils.
+ self.modules = ModulesConfig(data.get('modules', {}))
+
+ # Python versions supported by the controller, combined with Python versions supported by modules/module_utils.
+ # Mainly used for display purposes and to limit the Python versions used for sanity tests.
+ self.python_versions = [version for version in SUPPORTED_PYTHON_VERSIONS
+ if version in CONTROLLER_PYTHON_VERSIONS or version in self.modules.python_versions]
+
+ # True if Python 2.x is supported.
+ self.py2_support = any(version for version in self.python_versions if str_to_version(version)[0] == 2)
+
+
+def load_config(path): # type: (str) -> t.Optional[ContentConfig]
+ """Load and parse the specified config file and return the result or None if loading/parsing failed."""
+ if YAML_IMPORT_ERROR:
+ raise ApplicationError('The "PyYAML" module is required to parse config: %s' % YAML_IMPORT_ERROR)
+
+ if PACKAGING_IMPORT_ERROR:
+ raise ApplicationError('The "packaging" module is required to parse config: %s' % PACKAGING_IMPORT_ERROR)
+
+ value = read_text_file(path)
+
+ try:
+ yaml_value = yaml_load(value)
+ except Exception as ex: # pylint: disable=broad-except
+ display.warning('Ignoring config "%s" due to a YAML parsing error: %s' % (path, ex))
+ return None
+
+ try:
+ config = ContentConfig(yaml_value)
+ except Exception as ex: # pylint: disable=broad-except
+ display.warning('Ignoring config "%s" due a config parsing error: %s' % (path, ex))
+ return None
+
+ display.info('Loaded configuration: %s' % path, verbosity=1)
+
+ return config
+
+
+@cache
+def get_content_config(): # type: () -> ContentConfig
+ """
+ Parse and return the content configuration (if any) for the current collection.
+ For ansible-core, a default configuration is used.
+ Results are cached.
+ """
+ collection_config_path = 'tests/config.yml'
+
+ config = None
+
+ if data_context().content.collection and os.path.exists(collection_config_path):
+ config = load_config(collection_config_path)
+
+ if not config:
+ config = ContentConfig(dict(
+ modules=dict(
+ python_requires='default',
+ ),
+ ))
+
+ if not config.modules.python_versions:
+ raise ApplicationError('This collection does not declare support for modules/module_utils on any known Python version.\n'
+ 'Ansible supports modules/module_utils on Python versions: %s\n'
+ 'This collection provides the Python requirement: %s' % (
+ ', '.join(SUPPORTED_PYTHON_VERSIONS), config.modules.python_requires))
+
+ return config
+
+
+def parse_python_requires(value): # type: (t.Any) -> t.List[str]
+ """Parse the given 'python_requires' version specifier and return the matching Python versions."""
+ if not isinstance(value, str):
+ raise ValueError('python_requires must be of type `str` not type `%s`' % type(value))
+
+ if value == 'default':
+ versions = list(SUPPORTED_PYTHON_VERSIONS)
+ elif value == 'controller':
+ versions = list(CONTROLLER_PYTHON_VERSIONS)
+ else:
+ specifier_set = SpecifierSet(value)
+ versions = [version for version in SUPPORTED_PYTHON_VERSIONS if specifier_set.contains(Version(version))]
+
+ return versions
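To illustrate parse_python_requires(), here is a runnable sketch using the 'packaging' library directly; the version tuple stands in for SUPPORTED_PYTHON_VERSIONS and the specifier for a hypothetical collection's tests/config.yml entry (modules -> python_requires):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    supported = ('2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10')  # illustrative only
    spec = SpecifierSet('>=3.6')  # e.g. tests/config.yml: modules: {python_requires: '>=3.6'}

    print([v for v in supported if spec.contains(Version(v))])
    # ['3.6', '3.7', '3.8', '3.9', '3.10']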
diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py
index a7e07066..023b5655 100644
--- a/test/lib/ansible_test/_internal/core_ci.py
+++ b/test/lib/ansible_test/_internal/core_ci.py
@@ -1,6 +1,5 @@
"""Access Ansible Core CI remote services."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
@@ -9,8 +8,7 @@ import traceback
import uuid
import errno
import time
-
-from . import types as t
+import typing as t
from .http import (
HttpClient,
@@ -28,7 +26,8 @@ from .io import (
from .util import (
ApplicationError,
display,
- ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
+ mutex,
)
from .util_common import (
@@ -41,7 +40,6 @@ from .config import (
)
from .ci import (
- AuthContext,
get_ci_provider,
)
@@ -54,59 +52,20 @@ class AnsibleCoreCI:
"""Client for Ansible Core CI services."""
DEFAULT_ENDPOINT = 'https://ansible-core-ci.testing.ansible.com'
- # Assign a default provider for each VM platform supported.
- # This is used to determine the provider from the platform when no provider is specified.
- # The keys here also serve as the list of providers which users can select from the command line.
- #
- # Entries can take one of two formats:
- # {platform}
- # {platform} arch={arch}
- #
- # Entries with an arch are only used as a default if the value for --remote-arch matches the {arch} specified.
- # This allows arch specific defaults to be distinct from the default used when no arch is specified.
-
- PROVIDERS = dict(
- aws=(
- 'freebsd',
- 'ios',
- 'rhel',
- 'vyos',
- 'windows',
- ),
- azure=(
- ),
- ibmps=(
- 'aix',
- 'ibmi',
- ),
- parallels=(
- 'macos',
- 'osx',
- ),
- )
-
- # Currently ansible-core-ci has no concept of arch selection. This effectively means each provider only supports one arch.
- # The list below identifies which platforms accept an arch, and which one. These platforms can only be used with the specified arch.
- PROVIDER_ARCHES = dict(
- )
-
- def __init__(self, args, platform, version, stage='prod', persist=True, load=True, provider=None, arch=None, internal=False):
- """
- :type args: EnvironmentConfig
- :type platform: str
- :type version: str
- :type stage: str
- :type persist: bool
- :type load: bool
- :type provider: str | None
- :type arch: str | None
- :type internal: bool
- """
+ def __init__(
+ self,
+ args, # type: EnvironmentConfig
+ platform, # type: str
+ version, # type: str
+ provider, # type: str
+ persist=True, # type: bool
+ load=True, # type: bool
+ suffix=None, # type: t.Optional[str]
+ ): # type: (...) -> None
self.args = args
- self.arch = arch
self.platform = platform
self.version = version
- self.stage = stage
+ self.stage = args.remote_stage
self.client = HttpClient(args)
self.connection = None
self.instance_id = None
@@ -114,51 +73,13 @@ class AnsibleCoreCI:
self.default_endpoint = args.remote_endpoint or self.DEFAULT_ENDPOINT
self.retries = 3
self.ci_provider = get_ci_provider()
- self.auth_context = AuthContext()
-
- if self.arch:
- self.name = '%s-%s-%s' % (self.arch, self.platform, self.version)
- else:
- self.name = '%s-%s' % (self.platform, self.version)
-
- if provider:
- # override default provider selection (not all combinations are valid)
- self.provider = provider
- else:
- self.provider = None
-
- for candidate in self.PROVIDERS:
- choices = [
- platform,
- '%s arch=%s' % (platform, arch),
- ]
-
- if any(choice in self.PROVIDERS[candidate] for choice in choices):
- # assign default provider based on platform
- self.provider = candidate
- break
-
- # If a provider has been selected, make sure the correct arch (or none) has been selected.
- if self.provider:
- required_arch = self.PROVIDER_ARCHES.get(self.provider)
-
- if self.arch != required_arch:
- if required_arch:
- if self.arch:
- raise ApplicationError('Provider "%s" requires the "%s" arch instead of "%s".' % (self.provider, required_arch, self.arch))
+ self.provider = provider
+ self.name = '%s-%s' % (self.platform, self.version)
- raise ApplicationError('Provider "%s" requires the "%s" arch.' % (self.provider, required_arch))
-
- raise ApplicationError('Provider "%s" does not support specification of an arch.' % self.provider)
+ if suffix:
+ self.name += '-' + suffix
self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s-%s' % (self.name, self.provider, self.stage))
-
- if self.provider not in self.PROVIDERS and not internal:
- if self.arch:
- raise ApplicationError('Provider not detected for platform "%s" on arch "%s".' % (self.platform, self.arch))
-
- raise ApplicationError('Provider not detected for platform "%s" with no arch specified.' % self.platform)
-
self.ssh_key = SshKey(args)
if persist and load and self._load():
@@ -200,7 +121,7 @@ class AnsibleCoreCI:
@property
def available(self):
"""Return True if Ansible Core CI is supported."""
- return self.ci_provider.supports_core_ci_auth(self.auth_context)
+ return self.ci_provider.supports_core_ci_auth()
def start(self):
"""Start instance."""
@@ -209,7 +130,7 @@ class AnsibleCoreCI:
verbosity=1)
return None
- return self._start(self.ci_provider.prepare_core_ci_auth(self.auth_context))
+ return self._start(self.ci_provider.prepare_core_ci_auth())
def stop(self):
"""Stop instance."""
@@ -234,14 +155,8 @@ class AnsibleCoreCI:
raise self._create_http_error(response)
- def get(self, tries=3, sleep=15, always_raise_on=None):
- """
- Get instance connection information.
- :type tries: int
- :type sleep: int
- :type always_raise_on: list[int] | None
- :rtype: InstanceConnection
- """
+ def get(self, tries=3, sleep=15, always_raise_on=None): # type: (int, int, t.Optional[t.List[int]]) -> t.Optional[InstanceConnection]
+ """Get instance connection information."""
if not self.started:
display.info('Skipping invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
verbosity=1)
@@ -273,7 +188,7 @@ class AnsibleCoreCI:
running=True,
hostname='cloud.example.com',
port=12345,
- username='username',
+ username='root',
password='password' if self.platform == 'windows' else None,
)
else:
@@ -326,7 +241,7 @@ class AnsibleCoreCI:
display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1)
if self.platform == 'windows':
- winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1'))
+ winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1'))
else:
winrm_config = None
@@ -358,12 +273,7 @@ class AnsibleCoreCI:
return response.json()
- def _start_endpoint(self, data, headers):
- """
- :type data: dict[str, any]
- :type headers: dict[str, str]
- :rtype: HttpResponse
- """
+ def _start_endpoint(self, data, headers): # type: (t.Dict[str, t.Any], t.Dict[str, str]) -> HttpResponse
tries = self.retries
sleep = 15
@@ -413,11 +323,8 @@ class AnsibleCoreCI:
return self.load(config)
- def load(self, config):
- """
- :type config: dict[str, str]
- :rtype: bool
- """
+ def load(self, config): # type: (t.Dict[str, str]) -> bool
+ """Load the instance from the provided dictionary."""
self.instance_id = str(config['instance_id'])
self.endpoint = config['endpoint']
self.started = True
@@ -426,7 +333,7 @@ class AnsibleCoreCI:
return True
- def _save(self):
+ def _save(self): # type: () -> None
"""Save instance information."""
if self.args.explain:
return
@@ -435,10 +342,8 @@ class AnsibleCoreCI:
write_json_file(self.path, config, create_directories=True)
- def save(self):
- """
- :rtype: dict[str, str]
- """
+ def save(self): # type: () -> t.Dict[str, str]
+ """Save instance details and return as a dictionary."""
return dict(
platform_version='%s/%s' % (self.platform, self.version),
instance_id=self.instance_id,
@@ -446,11 +351,8 @@ class AnsibleCoreCI:
)
@staticmethod
- def _create_http_error(response):
- """
- :type response: HttpResponse
- :rtype: ApplicationError
- """
+ def _create_http_error(response): # type: (HttpResponse) -> ApplicationError
+ """Return an exception created from the given HTTP resposne."""
response_json = response.json()
stack_trace = ''
@@ -476,13 +378,8 @@ class AnsibleCoreCI:
class CoreHttpError(HttpError):
"""HTTP response as an error."""
- def __init__(self, status, remote_message, remote_stack_trace):
- """
- :type status: int
- :type remote_message: str
- :type remote_stack_trace: str
- """
- super(CoreHttpError, self).__init__(status, '%s%s' % (remote_message, remote_stack_trace))
+ def __init__(self, status, remote_message, remote_stack_trace): # type: (int, str, str) -> None
+ super().__init__(status, '%s%s' % (remote_message, remote_stack_trace))
self.remote_message = remote_message
self.remote_stack_trace = remote_stack_trace
@@ -494,10 +391,8 @@ class SshKey:
KEY_NAME = 'id_%s' % KEY_TYPE
PUB_NAME = '%s.pub' % KEY_NAME
- def __init__(self, args):
- """
- :type args: EnvironmentConfig
- """
+ @mutex
+ def __init__(self, args): # type: (EnvironmentConfig) -> None
key_pair = self.get_key_pair()
if not key_pair:
@@ -525,6 +420,15 @@ class SshKey:
self.pub_contents = read_text_file(self.pub).strip()
self.key_contents = read_text_file(self.key).strip()
+ @staticmethod
+ def get_relative_in_tree_private_key_path(): # type: () -> str
+ """Return the ansible-test SSH private key path relative to the content tree."""
+ temp_dir = ResultType.TMP.relative_path
+
+ key = os.path.join(temp_dir, SshKey.KEY_NAME)
+
+ return key
+
def get_in_tree_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
"""Return the ansible-test SSH key pair paths from the content tree."""
temp_dir = ResultType.TMP.path
@@ -567,6 +471,9 @@ class SshKey:
if not os.path.isfile(key) or not os.path.isfile(pub):
run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', self.KEY_TYPE, '-N', '', '-f', key])
+ if args.explain:
+ return key, pub
+
# newer ssh-keygen PEM output (such as on RHEL 8.1) is not recognized by paramiko
key_contents = read_text_file(key)
key_contents = re.sub(r'(BEGIN|END) PRIVATE KEY', r'\1 RSA PRIVATE KEY', key_contents)
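The key_contents rewrite at the end of the hunk above converts the PKCS#8 PEM header emitted by newer ssh-keygen releases into the traditional RSA marker that paramiko recognizes; a standalone sketch with a placeholder key body:

    import re

    pem = '-----BEGIN PRIVATE KEY-----\nMIIB...\n-----END PRIVATE KEY-----'  # body elided
    fixed = re.sub(r'(BEGIN|END) PRIVATE KEY', r'\1 RSA PRIVATE KEY', pem)

    print(fixed.splitlines()[0])   # -----BEGIN RSA PRIVATE KEY-----
    print(fixed.splitlines()[-1])  # -----END RSA PRIVATE KEY-----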
diff --git a/test/lib/ansible_test/_internal/coverage/erase.py b/test/lib/ansible_test/_internal/coverage/erase.py
deleted file mode 100644
index 92d241c7..00000000
--- a/test/lib/ansible_test/_internal/coverage/erase.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""Erase code coverage files."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from ..util_common import (
- ResultType,
-)
-
-from . import (
- CoverageConfig,
-)
-
-
-def command_coverage_erase(args): # type: (CoverageConfig) -> None
- """Erase code coverage data files collected during test runs."""
- coverage_dir = ResultType.COVERAGE.path
-
- for name in os.listdir(coverage_dir):
- if not name.startswith('coverage') and '=coverage.' not in name:
- continue
-
- path = os.path.join(coverage_dir, name)
-
- if not args.explain:
- os.remove(path)
diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py
index e5434231..e705db76 100644
--- a/test/lib/ansible_test/_internal/coverage_util.py
+++ b/test/lib/ansible_test/_internal/coverage_util.py
@@ -1,10 +1,10 @@
"""Utility code for facilitating collection of code coverage when running tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import contextlib
+import atexit
import os
import tempfile
+import typing as t
from .config import (
IntegrationConfig,
@@ -14,49 +14,120 @@ from .config import (
from .io import (
write_text_file,
+ make_dirs,
)
from .util import (
COVERAGE_CONFIG_NAME,
remove_tree,
+ sanitize_host_name,
)
from .data import (
data_context,
)
+from .util_common import (
+ intercept_python,
+ ResultType,
+)
+
+from .host_configs import (
+ DockerConfig,
+ HostConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ PythonConfig,
+)
-@contextlib.contextmanager
-def coverage_context(args): # type: (TestConfig) -> None
- """Content to set up and clean up code coverage configuration for tests."""
- coverage_setup(args)
- try:
- yield
- finally:
- coverage_cleanup(args)
+def cover_python(
+ args, # type: TestConfig
+ python, # type: PythonConfig
+ cmd, # type: t.List[str]
+ target_name, # type: str
+ env, # type: t.Dict[str, str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run a command while collecting Python code coverage."""
+ if args.coverage:
+ env.update(get_coverage_environment(args, target_name, python.version))
+
+ return intercept_python(args, python, cmd, env, capture, data, cwd)
+
+
+def get_coverage_platform(config): # type: (HostConfig) -> str
+ """Return the platform label for the given host config."""
+ if isinstance(config, PosixRemoteConfig):
+ platform = f'remote-{sanitize_host_name(config.name)}'
+ elif isinstance(config, DockerConfig):
+ platform = f'docker-{sanitize_host_name(config.name)}'
+ elif isinstance(config, PosixSshConfig):
+ platform = f'ssh-{sanitize_host_name(config.host)}'
+ elif isinstance(config, OriginConfig):
+ platform = 'origin' # previous versions of ansible-test used "local-{python_version}"
+ else:
+ raise NotImplementedError(f'Coverage platform label not defined for type: {type(config)}')
+
+ return platform
+
+def get_coverage_environment(
+ args, # type: TestConfig
+ target_name, # type: str
+ version, # type: str
+): # type: (...) -> t.Dict[str, str]
+ """Return environment variables needed to collect code coverage."""
+ # unit tests, sanity tests and other special cases (localhost only)
+ # config is in a temporary directory
+ # results are in the source tree
+ config_file = get_coverage_config(args)
+ coverage_name = '='.join((args.command, target_name, get_coverage_platform(args.controller), f'python-{version}', 'coverage'))
+ coverage_dir = os.path.join(data_context().content.root, data_context().content.results_path, ResultType.COVERAGE.name)
+ coverage_file = os.path.join(coverage_dir, coverage_name)
-def coverage_setup(args): # type: (TestConfig) -> None
- """Set up code coverage configuration before running tests."""
- if not args.coverage:
- return
+ make_dirs(coverage_dir)
+
+ if args.coverage_check:
+ # cause the 'coverage' module to be found, but not imported or enabled
+ coverage_file = ''
+
+ # Enable code coverage collection on local Python programs (this does not include Ansible modules).
+ # Used by the injectors to support code coverage.
+ # Used by the pytest unit test plugin to support code coverage.
+ # The COVERAGE_FILE variable is also used directly by the 'coverage' module.
+ env = dict(
+ COVERAGE_CONF=config_file,
+ COVERAGE_FILE=coverage_file,
+ )
+
+ return env
+
+
+def get_coverage_config(args): # type: (TestConfig) -> str
+ """Return the path to the coverage config, creating the config if it does not already exist."""
+ try:
+ return get_coverage_config.path
+ except AttributeError:
+ pass
coverage_config = generate_coverage_config(args)
if args.explain:
- args.coverage_config_base_path = '/tmp/coverage-temp-dir'
+ temp_dir = '/tmp/coverage-temp-dir'
else:
- args.coverage_config_base_path = tempfile.mkdtemp()
+ temp_dir = tempfile.mkdtemp()
+ atexit.register(lambda: remove_tree(temp_dir))
- write_text_file(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), coverage_config)
+ path = get_coverage_config.path = os.path.join(temp_dir, COVERAGE_CONFIG_NAME)
+ if not args.explain:
+ write_text_file(path, coverage_config)
-def coverage_cleanup(args): # type: (TestConfig) -> None
- """Clean up code coverage configuration after tests have finished."""
- if args.coverage_config_base_path and not args.explain:
- remove_tree(args.coverage_config_base_path)
- args.coverage_config_base_path = None
+ return path
def generate_coverage_config(args): # type: (TestConfig) -> str
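A small sketch of the coverage data file name assembled by get_coverage_environment() above, with hypothetical values for the command, target, platform label and Python version:

    command, target_name, platform, version = 'integration', 'ping', 'docker-default', '3.9'

    coverage_name = '='.join((command, target_name, platform, f'python-{version}', 'coverage'))
    print(coverage_name)  # integration=ping=docker-default=python-3.9=coverage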
diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py
index 38ae6d21..c3b2187c 100644
--- a/test/lib/ansible_test/_internal/data.py
+++ b/test/lib/ansible_test/_internal/data.py
@@ -1,10 +1,9 @@
"""Context information for the current invocation of ansible-test."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import dataclasses
import os
-
-from . import types as t
+import typing as t
from .util import (
ApplicationError,
@@ -14,6 +13,7 @@ from .util import (
ANSIBLE_TEST_ROOT,
ANSIBLE_SOURCE_ROOT,
display,
+ cache,
)
from .provider import (
@@ -53,7 +53,7 @@ class DataContext:
self.__source_providers = source_providers
self.__ansible_source = None # type: t.Optional[t.Tuple[t.Tuple[str, str], ...]]
- self.payload_callbacks = [] # type: t.List[t.Callable[t.List[t.Tuple[str, str]], None]]
+ self.payload_callbacks = [] # type: t.List[t.Callable[[t.List[t.Tuple[str, str]]], None]]
if content_path:
content = self.__create_content_layout(layout_providers, source_providers, content_path, False)
@@ -157,12 +157,13 @@ class DataContext:
return self.__ansible_source
- def register_payload_callback(self, callback): # type: (t.Callable[t.List[t.Tuple[str, str]], None]) -> None
+ def register_payload_callback(self, callback): # type: (t.Callable[[t.List[t.Tuple[str, str]]], None]) -> None
"""Register the given payload callback."""
self.payload_callbacks.append(callback)
-def data_init(): # type: () -> DataContext
+@cache
+def data_context(): # type: () -> DataContext
"""Initialize provider plugins."""
provider_types = (
'layout',
@@ -191,10 +192,51 @@ Current working directory: %s''' % ('\n'.join(options), os.getcwd()))
return context
-def data_context(): # type: () -> DataContext
- """Return the current data context."""
- try:
- return data_context.instance
- except AttributeError:
- data_context.instance = data_init()
- return data_context.instance
+@dataclasses.dataclass(frozen=True)
+class PluginInfo:
+ """Information about an Ansible plugin."""
+ plugin_type: str
+ name: str
+ paths: t.List[str]
+
+
+@cache
+def content_plugins():
+ """
+ Analyze content.
+ The primary purpose of this analysis is to facilitate mapping of integration tests to the plugin(s) they are intended to test.
+ """
+ plugins = {} # type: t.Dict[str, t.Dict[str, PluginInfo]]
+
+ for plugin_type, plugin_directory in data_context().content.plugin_paths.items():
+ plugin_paths = sorted(data_context().content.walk_files(plugin_directory))
+ plugin_directory_offset = len(plugin_directory.split(os.path.sep))
+
+ plugin_files = {}
+
+ for plugin_path in plugin_paths:
+ plugin_filename = os.path.basename(plugin_path)
+ plugin_parts = plugin_path.split(os.path.sep)[plugin_directory_offset:-1]
+
+ if plugin_filename == '__init__.py':
+ if plugin_type != 'module_utils':
+ continue
+ else:
+ plugin_name = os.path.splitext(plugin_filename)[0]
+
+ if data_context().content.is_ansible and plugin_type == 'modules':
+ plugin_name = plugin_name.lstrip('_')
+
+ plugin_parts.append(plugin_name)
+
+ plugin_name = '.'.join(plugin_parts)
+
+ plugin_files.setdefault(plugin_name, []).append(plugin_filename)
+
+ plugins[plugin_type] = {plugin_name: PluginInfo(
+ plugin_type=plugin_type,
+ name=plugin_name,
+ paths=paths,
+ ) for plugin_name, paths in plugin_files.items()}
+
+ return plugins
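The path-to-name mapping performed by content_plugins() above can be shown in isolation; the plugin directory and file path below are hypothetical:

    import os

    plugin_directory = 'plugins/modules'
    plugin_path = 'plugins/modules/cloud/misc/foo.py'

    offset = len(plugin_directory.split(os.path.sep))
    parts = plugin_path.split(os.path.sep)[offset:-1]  # ['cloud', 'misc']
    parts.append(os.path.splitext(os.path.basename(plugin_path))[0])  # append 'foo'

    print('.'.join(parts))  # cloud.misc.foo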
diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py
index 250b9114..aaee0dfa 100644
--- a/test/lib/ansible_test/_internal/delegation.py
+++ b/test/lib/ansible_test/_internal/delegation.py
@@ -1,92 +1,41 @@
"""Delegate test execution to another environment."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import contextlib
+import json
import os
-import re
-import sys
import tempfile
-
-from . import types as t
+import typing as t
from .io import (
make_dirs,
- read_text_file,
-)
-
-from .executor import (
- SUPPORTED_PYTHON_VERSIONS,
- HTTPTESTER_HOSTS,
- create_shell_command,
- run_httptester,
- run_pypi_proxy,
- start_httptester,
- get_python_interpreter,
- get_python_version,
)
from .config import (
- TestConfig,
EnvironmentConfig,
IntegrationConfig,
- WindowsIntegrationConfig,
- NetworkIntegrationConfig,
- ShellConfig,
SanityConfig,
+ ShellConfig,
+ TestConfig,
UnitsConfig,
)
-from .core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from .manage_ci import (
- ManagePosixCI,
- ManageWindowsCI,
- get_ssh_key_setup,
-)
-
from .util import (
- ApplicationError,
- common_environment,
+ SubprocessError,
display,
+ filter_args,
ANSIBLE_BIN_PATH,
- ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
- tempdir,
)
from .util_common import (
- run_command,
ResultType,
- create_interpreter_wrapper,
- get_docker_completion,
- get_remote_completion,
-)
-
-from .docker_util import (
- docker_exec,
- docker_get,
- docker_pull,
- docker_put,
- docker_rm,
- docker_run,
- docker_available,
- docker_network_disconnect,
- get_docker_networks,
- get_docker_preferred_network_name,
- get_docker_hostname,
- is_docker_user_defined_network,
+ process_scoped_temporary_directory,
)
-from .cloud import (
- get_cloud_providers,
-)
-
-from .target import (
- IntegrationTarget,
+from .containers import (
+ support_container_context,
)
from .data import (
@@ -97,468 +46,204 @@ from .payload import (
create_payload,
)
-from .venv import (
- create_virtual_environment,
-)
-
from .ci import (
get_ci_provider,
)
+from .host_configs import (
+ OriginConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
-def check_delegation_args(args):
- """
- :type args: CommonConfig
- """
- if not isinstance(args, EnvironmentConfig):
- return
-
- if args.docker:
- get_python_version(args, get_docker_completion(), args.docker_raw)
- elif args.remote:
- get_python_version(args, get_remote_completion(), args.remote)
-
-
-def delegate(args, exclude, require, integration_targets):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :type integration_targets: tuple[IntegrationTarget]
- :rtype: bool
- """
- if isinstance(args, TestConfig):
- args.metadata.ci_provider = get_ci_provider().code
-
- make_dirs(ResultType.TMP.path)
-
- with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
- args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
- args.metadata.to_file(args.metadata_path)
-
- try:
- return delegate_command(args, exclude, require, integration_targets)
- finally:
- args.metadata_path = None
- else:
- return delegate_command(args, exclude, require, integration_targets)
-
-
-def delegate_command(args, exclude, require, integration_targets):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :type integration_targets: tuple[IntegrationTarget]
- :rtype: bool
- """
- if args.venv:
- delegate_venv(args, exclude, require, integration_targets)
- return True
-
- if args.docker:
- delegate_docker(args, exclude, require, integration_targets)
- return True
-
- if args.remote:
- delegate_remote(args, exclude, require, integration_targets)
- return True
-
- return False
-
-
-def delegate_venv(args, # type: EnvironmentConfig
- exclude, # type: t.List[str]
- require, # type: t.List[str]
- integration_targets, # type: t.Tuple[IntegrationTarget, ...]
- ): # type: (...) -> None
- """Delegate ansible-test execution to a virtual environment using venv or virtualenv."""
- if args.python:
- versions = (args.python_version,)
- else:
- versions = SUPPORTED_PYTHON_VERSIONS
-
- if args.httptester:
- needs_httptester = sorted(target.name for target in integration_targets if 'needs/httptester/' in target.aliases)
+from .connections import (
+ Connection,
+ DockerConnection,
+ SshConnection,
+ LocalConnection,
+)
- if needs_httptester:
- display.warning('Use --docker or --remote to enable httptester for tests marked "needs/httptester": %s' % ', '.join(needs_httptester))
+from .provisioning import (
+ HostState,
+)
- if args.venv_system_site_packages:
- suffix = '-ssp'
- else:
- suffix = ''
- venvs = dict((version, os.path.join(ResultType.TMP.path, 'delegation', 'python%s%s' % (version, suffix))) for version in versions)
- venvs = dict((version, path) for version, path in venvs.items() if create_virtual_environment(args, version, path, args.venv_system_site_packages))
+@contextlib.contextmanager
+def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> None
+ """Context manager for serialized host state during delegation."""
+ make_dirs(ResultType.TMP.path)
- if not venvs:
- raise ApplicationError('No usable virtual environment support found.')
+ # noinspection PyUnusedLocal
+ python = host_state.controller_profile.python # make sure the python interpreter has been initialized before serializing host state
+ del python
- options = {
- '--venv': 0,
- '--venv-system-site-packages': 0,
- }
+ with tempfile.TemporaryDirectory(prefix='host-', dir=ResultType.TMP.path) as host_dir:
+ args.host_settings.serialize(os.path.join(host_dir, 'settings.dat'))
+ host_state.serialize(os.path.join(host_dir, 'state.dat'))
- with tempdir() as inject_path:
- for version, path in venvs.items():
- create_interpreter_wrapper(os.path.join(path, 'bin', 'python'), os.path.join(inject_path, 'python%s' % version))
+ args.host_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(host_dir))
- python_interpreter = os.path.join(inject_path, 'python%s' % args.python_version)
+ try:
+ yield
+ finally:
+ args.host_path = None
- cmd = generate_command(args, python_interpreter, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)
+def delegate(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
+ """Delegate execution of ansible-test to another environment."""
+ with delegation_context(args, host_state):
if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- cmd += ['--coverage-label', 'venv']
-
- env = common_environment()
-
- with tempdir() as library_path:
- # expose ansible and ansible_test to the virtual environment (only required when running from an install)
- os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
- os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
-
- env.update(
- PATH=inject_path + os.path.pathsep + env['PATH'],
- PYTHONPATH=library_path,
- )
-
- run_command(args, cmd, env=env)
+ args.metadata.ci_provider = get_ci_provider().code
+ make_dirs(ResultType.TMP.path)
-def delegate_docker(args, exclude, require, integration_targets):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :type integration_targets: tuple[IntegrationTarget]
- """
- test_image = args.docker
- privileged = args.docker_privileged
-
- if isinstance(args, ShellConfig):
- use_httptester = args.httptester
- else:
- use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
-
- if use_httptester:
- docker_pull(args, args.httptester)
+ with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
+ args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
+ args.metadata.to_file(args.metadata_path)
- docker_pull(args, test_image)
+ try:
+ delegate_command(args, host_state, exclude, require)
+ finally:
+ args.metadata_path = None
+ else:
+ delegate_command(args, host_state, exclude, require)
- httptester_id = None
- test_id = None
- success = False
- options = {
- '--docker': 1,
- '--docker-privileged': 0,
- '--docker-util': 1,
- }
+def delegate_command(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
+ """Delegate execution based on the provided host state."""
+ con = host_state.controller_profile.get_origin_controller_connection()
+ working_directory = host_state.controller_profile.get_working_directory()
+ host_delegation = not isinstance(args.controller, OriginConfig)
- python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)
+ if host_delegation:
+ if data_context().content.collection:
+ content_root = os.path.join(working_directory, data_context().content.collection.directory)
+ else:
+ content_root = os.path.join(working_directory, 'ansible')
- pwd = '/root'
- ansible_root = os.path.join(pwd, 'ansible')
+ ansible_bin_path = os.path.join(working_directory, 'ansible', 'bin')
- if data_context().content.collection:
- content_root = os.path.join(pwd, data_context().content.collection.directory)
+ with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as payload_file:
+ create_payload(args, payload_file.name)
+ con.extract_archive(chdir=working_directory, src=payload_file)
else:
- content_root = ansible_root
+ content_root = working_directory
+ ansible_bin_path = ANSIBLE_BIN_PATH
- remote_results_root = os.path.join(content_root, data_context().content.results_path)
+ command = generate_command(args, host_state.controller_profile.python, ansible_bin_path, content_root, exclude, require)
- cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
+ if isinstance(con, SshConnection):
+ ssh = con.settings
+ else:
+ ssh = None
- if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- image_label = args.docker_raw
- image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
- cmd += ['--coverage-label', 'docker-%s' % image_label]
+ options = []
- if isinstance(args, IntegrationConfig):
+ if isinstance(args, IntegrationConfig) and args.controller.is_managed and all(target.is_managed for target in args.targets):
if not args.allow_destructive:
- cmd.append('--allow-destructive')
+ options.append('--allow-destructive')
- cmd_options = []
+ with support_container_context(args, ssh) as containers:
+ if containers:
+ options.extend(['--containers', json.dumps(containers.to_dict())])
- if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
- cmd_options.append('-it')
+ # Run unit tests unprivileged to prevent stray writes to the source tree.
+ # Also disconnect from the network once requirements have been installed.
+ if isinstance(args, UnitsConfig) and isinstance(con, DockerConnection):
+ pytest_user = 'pytest'
- pypi_proxy_id, pypi_proxy_endpoint = run_pypi_proxy(args)
-
- if pypi_proxy_endpoint:
- cmd += ['--pypi-endpoint', pypi_proxy_endpoint]
-
- with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
- try:
- create_payload(args, local_source_fd.name)
-
- if use_httptester:
- httptester_id = run_httptester(args)
- else:
- httptester_id = None
-
- test_options = [
- '--detach',
- '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
- '--privileged=%s' % str(privileged).lower(),
+ writable_dirs = [
+ os.path.join(content_root, ResultType.JUNIT.relative_path),
+ os.path.join(content_root, ResultType.COVERAGE.relative_path),
]
- if args.docker_memory:
- test_options.extend([
- '--memory=%d' % args.docker_memory,
- '--memory-swap=%d' % args.docker_memory,
- ])
-
- docker_socket = '/var/run/docker.sock'
-
- if args.docker_seccomp != 'default':
- test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]
+ con.run(['mkdir', '-p'] + writable_dirs)
+ con.run(['chmod', '777'] + writable_dirs)
+ con.run(['chmod', '755', working_directory])
+ con.run(['chmod', '644', os.path.join(content_root, args.metadata_path)])
+ con.run(['useradd', pytest_user, '--create-home'])
+ con.run(insert_options(command, options + ['--requirements-mode', 'only']))
- if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
- test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]
+ container = con.inspect()
+ networks = container.get_network_names()
- if httptester_id:
- test_options += ['--env', 'HTTPTESTER=1', '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password]
-
- network = get_docker_preferred_network_name(args)
-
- if not is_docker_user_defined_network(network):
- # legacy links are required when using the default bridge network instead of user-defined networks
- for host in HTTPTESTER_HOSTS:
- test_options += ['--link', '%s:%s' % (httptester_id, host)]
-
- if isinstance(args, IntegrationConfig):
- cloud_platforms = get_cloud_providers(args)
-
- for cloud_platform in cloud_platforms:
- test_options += cloud_platform.get_docker_run_options()
-
- test_id = docker_run(args, test_image, options=test_options)[0]
-
- if args.explain:
- test_id = 'test_id'
+ if networks is not None:
+ for network in networks:
+ con.disconnect_network(network)
else:
- test_id = test_id.strip()
-
- setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'))
-
- ssh_keys_sh = get_ssh_key_setup(SshKey(args))
+ display.warning('Network disconnection is not supported (this is normal under podman). '
+ 'Tests will not be isolated from the network. Network-related tests may misbehave.')
- setup_sh += ssh_keys_sh
- shell = setup_sh.splitlines()[0][2:]
+ options.extend(['--requirements-mode', 'skip'])
- docker_exec(args, test_id, [shell], data=setup_sh)
+ con.user = pytest_user
- # write temporary files to /root since /tmp isn't ready immediately on container start
- docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
- docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])
+ success = False
- # docker images are only expected to have a single python version available
- if isinstance(args, UnitsConfig) and not args.python:
- cmd += ['--python', 'default']
-
- # run unit tests unprivileged to prevent stray writes to the source tree
- # also disconnect from the network once requirements have been installed
- if isinstance(args, UnitsConfig):
- writable_dirs = [
- os.path.join(content_root, ResultType.JUNIT.relative_path),
- os.path.join(content_root, ResultType.COVERAGE.relative_path),
- ]
-
- docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
- docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
- docker_exec(args, test_id, ['chmod', '755', '/root'])
- docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
-
- docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])
-
- docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)
-
- networks = get_docker_networks(args, test_id)
-
- if networks is not None:
- for network in networks:
- docker_network_disconnect(args, test_id, network)
- else:
- display.warning('Network disconnection is not supported (this is normal under podman). '
- 'Tests will not be isolated from the network. Network-related tests may misbehave.')
-
- cmd += ['--requirements-mode', 'skip']
-
- cmd_options += ['--user', 'pytest']
-
- try:
- docker_exec(args, test_id, cmd, options=cmd_options)
- # docker_exec will throw SubprocessError if not successful
- # If we make it here, all the prep work earlier and the docker_exec line above were all successful.
- success = True
- finally:
- local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
-
- remote_test_root = os.path.dirname(remote_results_root)
- remote_results_name = os.path.basename(remote_results_root)
- remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
-
- make_dirs(local_test_root) # make sure directory exists for collections which have no tests
-
- with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
- docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '--exclude', ResultType.TMP.name, '-C', remote_test_root, remote_results_name])
- docker_get(args, test_id, remote_temp_file, local_result_fd.name)
- run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
+ try:
+ con.run(insert_options(command, options))
+ success = True
finally:
- if httptester_id:
- docker_rm(args, httptester_id)
-
- if pypi_proxy_id:
- docker_rm(args, pypi_proxy_id)
-
- if test_id:
- if args.docker_terminate == 'always' or (args.docker_terminate == 'success' and success):
- docker_rm(args, test_id)
-
+ if host_delegation:
+ download_results(args, con, content_root, success)
-def delegate_remote(args, exclude, require, integration_targets):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :type integration_targets: tuple[IntegrationTarget]
- """
- remote = args.parsed_remote
- core_ci = AnsibleCoreCI(args, remote.platform, remote.version, stage=args.remote_stage, provider=args.remote_provider, arch=remote.arch)
- success = False
- raw = False
+def insert_options(command, options):
+ """Insert addition command line options into the given command and return the result."""
+ result = []
- if isinstance(args, ShellConfig):
- use_httptester = args.httptester
- raw = args.raw
- else:
- use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in integration_targets)
-
- if use_httptester and not docker_available():
- display.warning('Assuming --disable-httptester since `docker` is not available.')
- use_httptester = False
-
- httptester_id = None
- ssh_options = []
- content_root = None
-
- try:
- core_ci.start()
-
- if use_httptester:
- httptester_id, ssh_options = start_httptester(args)
-
- core_ci.wait()
-
- python_version = get_python_version(args, get_remote_completion(), args.remote)
-
- if remote.platform == 'windows':
- # Windows doesn't need the ansible-test fluff, just run the SSH command
- manage = ManageWindowsCI(core_ci)
- manage.setup(python_version)
-
- cmd = ['powershell.exe']
- elif raw:
- manage = ManagePosixCI(core_ci)
- manage.setup(python_version)
-
- cmd = create_shell_command(['sh'])
- else:
- manage = ManagePosixCI(core_ci)
- pwd = manage.setup(python_version)
-
- options = {
- '--remote': 1,
- }
-
- python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)
-
- ansible_root = os.path.join(pwd, 'ansible')
-
- if data_context().content.collection:
- content_root = os.path.join(pwd, data_context().content.collection.directory)
- else:
- content_root = ansible_root
+ for arg in command:
+ if options and arg.startswith('--'):
+ result.extend(options)
+ options = None
- cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
+ result.append(arg)
- if httptester_id:
- cmd += ['--inject-httptester', '--httptester-krb5-password', args.httptester_krb5_password]
+ return result
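insert_options() splices the extra options in just before the first '--' argument of the delegated command; a quick runnable demonstration with hypothetical arguments:

    command = ['/usr/bin/python3.9', 'ansible-test', 'units', '--color', 'yes']
    options = ['--containers', '{}']

    result = []
    for arg in command:
        if options and arg.startswith('--'):
            result.extend(options)  # inject once, before the first option
            options = None
        result.append(arg)

    print(result)
    # ['/usr/bin/python3.9', 'ansible-test', 'units', '--containers', '{}', '--color', 'yes']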
- if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- cmd += ['--coverage-label', 'remote-%s-%s' % (remote.platform, remote.version)]
- if isinstance(args, IntegrationConfig):
- if not args.allow_destructive:
- cmd.append('--allow-destructive')
-
- # remote instances are only expected to have a single python version available
- if isinstance(args, UnitsConfig) and not args.python:
- cmd += ['--python', 'default']
+def download_results(args, con, content_root, success): # type: (EnvironmentConfig, Connection, str, bool) -> None
+ """Download results from a delegated controller."""
+ remote_results_root = os.path.join(content_root, data_context().content.results_path)
+ local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
- if isinstance(args, IntegrationConfig):
- cloud_platforms = get_cloud_providers(args)
+ remote_test_root = os.path.dirname(remote_results_root)
+ remote_results_name = os.path.basename(remote_results_root)
- for cloud_platform in cloud_platforms:
- ssh_options += cloud_platform.get_remote_ssh_options()
+ make_dirs(local_test_root) # make sure directory exists for collections which have no tests
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-result-', suffix='.tgz') as result_file:
try:
- manage.ssh(cmd, ssh_options)
- success = True
- finally:
- download = False
-
- if remote.platform != 'windows':
- download = True
-
- if isinstance(args, ShellConfig):
- if args.raw:
- download = False
-
- if download and content_root:
- local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
-
- remote_results_root = os.path.join(content_root, data_context().content.results_path)
- remote_results_name = os.path.basename(remote_results_root)
- remote_temp_path = os.path.join('/tmp', remote_results_name)
-
-                    # AIX cp and GNU cp provide different options; no common invocation
-                    # could be found that achieves the same goal on both
- cp_opts = '-hr' if remote.platform in ['aix', 'ibmi'] else '-a'
+ con.create_archive(chdir=remote_test_root, name=remote_results_name, dst=result_file, exclude=ResultType.TMP.name)
+ except SubprocessError as ex:
+ if success:
+ raise # download errors are fatal if tests succeeded
- manage.ssh('rm -rf {0} && mkdir {0} && cp {1} {2}/* {0}/ && chmod -R a+r {0}'.format(remote_temp_path, cp_opts, remote_results_root))
- manage.download(remote_temp_path, local_test_root)
- finally:
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- core_ci.stop()
+ # surface download failures as a warning here to avoid masking test failures
+ display.warning(f'Failed to download results while handling an exception: {ex}')
+ else:
+ result_file.seek(0)
- if httptester_id:
- docker_rm(args, httptester_id)
+ local_con = LocalConnection(args)
+ local_con.extract_archive(chdir=local_test_root, src=result_file)
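
A hedged restatement of the round trip above, with hypothetical paths (and assuming the connection helpers stream a tarball, as the .tgz temporary file suggests):

    # remote: con.create_archive(chdir=remote_test_root, name='output', dst=result_file, exclude=ResultType.TMP.name)
    # local:  LocalConnection(args).extract_archive(chdir=local_test_root, src=result_file)
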
-def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
- """
- :type args: EnvironmentConfig
- :type python_interpreter: str | None
- :type ansible_bin_path: str
- :type content_root: str
- :type options: dict[str, int]
- :type exclude: list[str]
- :type require: list[str]
- :rtype: list[str]
- """
- options['--color'] = 1
+def generate_command(
+ args, # type: EnvironmentConfig
+ python, # type: PythonConfig
+ ansible_bin_path, # type: str
+ content_root, # type: str
+ exclude, # type: t.List[str]
+ require, # type: t.List[str]
+): # type: (...) -> t.List[str]
+ """Generate the command necessary to delegate ansible-test."""
+ options = {
+ '--color': 1,
+ '--docker-no-pull': 0,
+ }
cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
-
- if python_interpreter:
- cmd = [python_interpreter] + cmd
+ cmd = [python.path] + cmd
# Force the encoding used during delegation.
# This is only needed because ansible-test relies on Python's file system encoding.
@@ -569,39 +254,53 @@ def generate_command(args, python_interpreter, ansible_bin_path, content_root, o
ANSIBLE_TEST_CONTENT_ROOT=content_root,
)
+ if isinstance(args.controller.python, VirtualPythonConfig):
+ # Expose the ansible and ansible_test library directories to the virtual environment.
+ # This is only required when running from an install.
+ library_path = process_scoped_temporary_directory(args)
+
+ os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
+ os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
+
+ env_vars.update(
+ PYTHONPATH=library_path,
+ )
+
+ # Propagate the TERM environment variable to the remote host when using the shell command.
+ if isinstance(args, ShellConfig):
+ term = os.environ.get('TERM')
+
+ if term is not None:
+ env_vars.update(TERM=term)
+
env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]
cmd = ['/usr/bin/env'] + env_args + cmd
- cmd += list(filter_options(args, sys.argv[1:], options, exclude, require))
+ cmd += list(filter_options(args, args.host_settings.filtered_args, options, exclude, require))
cmd += ['--color', 'yes' if args.color else 'no']
- if args.requirements:
- cmd += ['--requirements']
-
- if isinstance(args, ShellConfig):
- cmd = create_shell_command(cmd)
- elif isinstance(args, SanityConfig):
+ if isinstance(args, SanityConfig):
base_branch = args.base_branch or get_ci_provider().get_base_branch()
if base_branch:
cmd += ['--base-branch', base_branch]
+ cmd.extend(['--host-path', args.host_path])
+
return cmd
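
For orientation, a hedged sketch of the list generate_command() returns for an integration run (paths and the host-path token hypothetical; the forced-encoding environment variables are abbreviated):

    # ['/usr/bin/env',
    #  'ANSIBLE_TEST_CONTENT_ROOT=/root/ansible', ...,
    #  '/usr/bin/python3.9', '/root/ansible/bin/ansible-test', 'integration',
    #  '--color', 'yes',
    #  '--host-path', 'test/results/.tmp/host-abc123']
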
-def filter_options(args, argv, options, exclude, require):
- """
- :type args: EnvironmentConfig
- :type argv: list[str]
- :type options: dict[str, int]
- :type exclude: list[str]
- :type require: list[str]
- :rtype: collections.Iterable[str]
- """
+def filter_options(
+ args, # type: EnvironmentConfig
+ argv, # type: t.List[str]
+ options, # type: t.Dict[str, int]
+ exclude, # type: t.List[str]
+ require, # type: t.List[str]
+): # type: (...) -> t.Iterable[str]
+ """Return an iterable that filters out unwanted CLI options and injects new ones as requested."""
options = options.copy()
- options['--requirements'] = 0
options['--truncate'] = 1
options['--redact'] = 0
options['--no-redact'] = 0
@@ -628,30 +327,9 @@ def filter_options(args, argv, options, exclude, require):
if isinstance(args, IntegrationConfig):
options.update({
'--no-temp-unicode': 0,
- '--no-pip-check': 0,
- })
-
- if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
- options.update({
- '--inventory': 1,
})
- remaining = 0
-
- for arg in argv:
- if not arg.startswith('-') and remaining:
- remaining -= 1
- continue
-
- remaining = 0
-
- parts = arg.split('=', 1)
- key = parts[0]
-
- if key in options:
- remaining = options[key] - len(parts) + 1
- continue
-
+ for arg in filter_args(argv, options):
yield arg
for arg in args.delegate_args:
@@ -673,14 +351,9 @@ def filter_options(args, argv, options, exclude, require):
yield '--truncate'
yield '%d' % args.truncate
- if args.redact:
- yield '--redact'
- else:
+ if not args.redact:
yield '--no-redact'
if isinstance(args, IntegrationConfig):
if args.no_temp_unicode:
yield '--no-temp-unicode'
-
- if not args.pip_check:
- yield '--no-pip-check'
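
A sketch of the contract assumed by the rewrite: the options dictionary maps each flag to the number of values it consumes, and filter_args() drops matching flags together with their values before the replacement options are injected.

    # Hypothetical delegated argv.
    argv = ['integration', '--truncate', '80', '--redact', 'shippable/posix/']
    options = {'--truncate': 1, '--redact': 0}
    # filter_args(argv, options) yields: 'integration', 'shippable/posix/'
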
diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py
index 1e2038b9..a8e1c113 100644
--- a/test/lib/ansible_test/_internal/diff.py
+++ b/test/lib/ansible_test/_internal/diff.py
@@ -1,59 +1,42 @@
"""Diff parsing functions and classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import textwrap
import traceback
-
-from . import types as t
+import typing as t
from .util import (
ApplicationError,
)
-def parse_diff(lines):
- """
- :type lines: list[str]
- :rtype: list[FileDiff]
- """
+def parse_diff(lines): # type: (t.List[str]) -> t.List[FileDiff]
+ """Parse the given diff lines and return a list of FileDiff objects representing the changes of each file."""
return DiffParser(lines).files
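
A minimal usage sketch, assuming well-formed `git diff` output:

    lines = [
        'diff --git a/example.py b/example.py',
        'index 1111111..2222222 100644',
        '--- a/example.py',
        '+++ b/example.py',
        '@@ -1,1 +1,1 @@',
        '-old_line',
        '+new_line',
    ]
    files = parse_diff(lines)
    assert files[0].new.path == 'example.py' and files[0].is_complete
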
class FileDiff:
"""Parsed diff for a single file."""
- def __init__(self, old_path, new_path):
- """
- :type old_path: str
- :type new_path: str
- """
+ def __init__(self, old_path, new_path): # type: (str, str) -> None
self.old = DiffSide(old_path, new=False)
self.new = DiffSide(new_path, new=True)
self.headers = [] # type: t.List[str]
self.binary = False
- def append_header(self, line):
- """
- :type line: str
- """
+ def append_header(self, line): # type: (str) -> None
+ """Append the given line to the list of headers for this file."""
self.headers.append(line)
@property
- def is_complete(self):
- """
- :rtype: bool
- """
+ def is_complete(self): # type: () -> bool
+ """True if the diff is complete, otherwise False."""
return self.old.is_complete and self.new.is_complete
class DiffSide:
"""Parsed diff for a single 'side' of a single file."""
- def __init__(self, path, new):
- """
- :type path: str
- :type new: bool
- """
+ def __init__(self, path, new): # type: (str, bool) -> None
self.path = path
self.new = new
self.prefix = '+' if self.new else '-'
@@ -68,19 +51,14 @@ class DiffSide:
self._lines_remaining = 0
self._range_start = 0
- def set_start(self, line_start, line_count):
- """
- :type line_start: int
- :type line_count: int
- """
+ def set_start(self, line_start, line_count): # type: (int, int) -> None
+ """Set the starting line and line count."""
self._next_line_number = line_start
self._lines_remaining = line_count
self._range_start = 0
- def append(self, line):
- """
- :type line: str
- """
+ def append(self, line): # type: (str) -> None
+ """Append the given line."""
if self._lines_remaining <= 0:
raise Exception('Diff range overflow.')
@@ -115,17 +93,12 @@ class DiffSide:
self._next_line_number += 1
@property
- def is_complete(self):
- """
- :rtype: bool
- """
+ def is_complete(self): # type: () -> bool
+ """True if the diff is complete, otherwise False."""
return self._lines_remaining == 0
- def format_lines(self, context=True):
- """
- :type context: bool
- :rtype: list[str]
- """
+ def format_lines(self, context=True): # type: (bool) -> t.List[str]
+ """Format the diff and return a list of lines, optionally including context."""
if context:
lines = self.lines_and_context
else:
@@ -136,10 +109,7 @@ class DiffSide:
class DiffParser:
"""Parse diff lines."""
- def __init__(self, lines):
- """
- :type lines: list[str]
- """
+ def __init__(self, lines): # type: (t.List[str]) -> None
self.lines = lines
self.files = [] # type: t.List[FileDiff]
@@ -176,11 +146,11 @@ class DiffParser:
self.complete_file()
- def process_start(self):
+ def process_start(self): # type: () -> None
"""Process a diff start line."""
self.complete_file()
- match = re.search(r'^diff --git "?a/(?P<old_path>.*)"? "?b/(?P<new_path>.*)"?$', self.line)
+ match = re.search(r'^diff --git "?(?:a/)?(?P<old_path>.*)"? "?(?:b/)?(?P<new_path>.*)"?$', self.line)
if not match:
raise Exception('Unexpected diff start line.')
@@ -188,7 +158,7 @@ class DiffParser:
self.file = FileDiff(match.group('old_path'), match.group('new_path'))
self.action = self.process_continue
- def process_range(self):
+ def process_range(self): # type: () -> None
"""Process a diff range line."""
match = re.search(r'^@@ -((?P<old_start>[0-9]+),)?(?P<old_count>[0-9]+) \+((?P<new_start>[0-9]+),)?(?P<new_count>[0-9]+) @@', self.line)
@@ -199,7 +169,7 @@ class DiffParser:
self.file.new.set_start(int(match.group('new_start') or 1), int(match.group('new_count')))
self.action = self.process_content
- def process_continue(self):
+ def process_continue(self): # type: () -> None
"""Process a diff start, range or header line."""
if self.line.startswith('diff '):
self.process_start()
@@ -208,7 +178,7 @@ class DiffParser:
else:
self.process_header()
- def process_header(self):
+ def process_header(self): # type: () -> None
"""Process a diff header line."""
if self.line.startswith('Binary files '):
self.file.binary = True
@@ -219,7 +189,7 @@ class DiffParser:
else:
self.file.append_header(self.line)
- def process_content(self):
+ def process_content(self): # type: () -> None
"""Process a diff content line."""
if self.line == r'\ No newline at end of file':
if self.previous_line.startswith(' '):
@@ -248,7 +218,7 @@ class DiffParser:
else:
raise Exception('Unexpected diff content line.')
- def complete_file(self):
+ def complete_file(self): # type: () -> None
"""Complete processing of the current file, if any."""
if not self.file:
return
diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py
index ed8fb479..da113f02 100644
--- a/test/lib/ansible_test/_internal/docker_util.py
+++ b/test/lib/ansible_test/_internal/docker_util.py
@@ -1,15 +1,15 @@
"""Functions for accessing docker via the docker cli."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
+import random
+import socket
import time
-
-from . import types as t
+import urllib.parse
+import typing as t
from .io import (
- open_binary_file,
read_text_file,
)
@@ -19,42 +19,93 @@ from .util import (
display,
find_executable,
SubprocessError,
-)
-
-from .http import (
- urlparse,
+ cache,
)
from .util_common import (
run_command,
+ raw_command,
)
from .config import (
+ CommonConfig,
EnvironmentConfig,
)
-BUFFER_SIZE = 256 * 256
+DOCKER_COMMANDS = [
+ 'docker',
+ 'podman',
+]
-def docker_available():
- """
- :rtype: bool
- """
- return find_executable('docker', required=False)
+class DockerCommand:
+ """Details about the available docker command."""
+ def __init__(self, command, executable, version): # type: (str, str, str) -> None
+ self.command = command
+ self.executable = executable
+ self.version = version
+
+ @staticmethod
+ def detect(): # type: () -> t.Optional[DockerCommand]
+ """Detect and return the available docker command, or None."""
+ if os.environ.get('ANSIBLE_TEST_PREFER_PODMAN'):
+ commands = list(reversed(DOCKER_COMMANDS))
+ else:
+ commands = DOCKER_COMMANDS
+
+ for command in commands:
+ executable = find_executable(command, required=False)
+
+ if executable:
+ version = raw_command([command, '-v'], capture=True)[0].strip()
+
+ if command == 'docker' and 'podman' in version:
+ continue # avoid detecting podman as docker
+
+ display.info('Detected "%s" container runtime version: %s' % (command, version), verbosity=1)
+
+ return DockerCommand(command, executable, version)
+
+ return None
+
+
+def require_docker(): # type: () -> DockerCommand
+ """Return the docker command to invoke. Raises an exception if docker is not available."""
+ if command := get_docker_command():
+ return command
+
+ raise ApplicationError(f'No container runtime detected. Supported commands: {", ".join(DOCKER_COMMANDS)}')
+
+@cache
+def get_docker_command(): # type: () -> t.Optional[DockerCommand]
+ """Return the docker command to invoke, or None if docker is not available."""
+ return DockerCommand.detect()
+
+def docker_available(): # type: () -> bool
+ """Return True if docker is available, otherwise return False."""
+ return bool(get_docker_command())
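
A usage sketch of the detection helpers above; detection prefers docker unless ANSIBLE_TEST_PREFER_PODMAN is set, and the result is cached for the life of the process:

    if docker_available():
        runtime = require_docker()
        display.info('Using %s at %s: %s' % (runtime.command, runtime.executable, runtime.version))
    # require_docker() raises ApplicationError when neither docker nor podman is found.
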
+
+
+@cache
+def get_docker_host_ip(): # type: () -> str
+ """Return the IP of the Docker host."""
+ docker_host_ip = socket.gethostbyname(get_docker_hostname())
+
+ display.info('Detected docker host IP: %s' % docker_host_ip, verbosity=1)
+
+ return docker_host_ip
+
+
+@cache
def get_docker_hostname(): # type: () -> str
"""Return the hostname of the Docker service."""
- try:
- return get_docker_hostname.hostname
- except AttributeError:
- pass
-
docker_host = os.environ.get('DOCKER_HOST')
if docker_host and docker_host.startswith('tcp://'):
try:
- hostname = urlparse(docker_host)[1].split(':')[0]
+ hostname = urllib.parse.urlparse(docker_host)[1].split(':')[0]
display.info('Detected Docker host: %s' % hostname, verbosity=1)
except ValueError:
hostname = 'localhost'
@@ -63,20 +114,12 @@ def get_docker_hostname(): # type: () -> str
hostname = 'localhost'
display.info('Assuming Docker is available on localhost.', verbosity=1)
- get_docker_hostname.hostname = hostname
-
return hostname
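
Hedged examples of the DOCKER_HOST handling above (addresses hypothetical):

    # DOCKER_HOST=tcp://192.0.2.10:2376 -> '192.0.2.10'
    # DOCKER_HOST unset or not tcp://   -> 'localhost'
    # The @cache decorator memoizes the first answer for the process lifetime.
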
-def get_docker_container_id():
- """
- :rtype: str | None
- """
- try:
- return get_docker_container_id.container_id
- except AttributeError:
- pass
-
+@cache
+def get_docker_container_id(): # type: () -> t.Optional[str]
+ """Return the current container ID if running in a container, otherwise return None."""
path = '/proc/self/cpuset'
container_id = None
@@ -93,59 +136,12 @@ def get_docker_container_id():
if cgroup_path in ('/docker', '/azpl_job'):
container_id = cgroup_name
- get_docker_container_id.container_id = container_id
-
if container_id:
display.info('Detected execution in Docker container: %s' % container_id, verbosity=1)
return container_id
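
Hedged examples of the /proc/self/cpuset contents the detection above expects (IDs hypothetical):

    # '/'                       -> not in a container, returns None
    # '/docker/8cb34d9c2c8e'    -> returns '8cb34d9c2c8e'
    # '/azpl_job/0fd81234abcd'  -> returns '0fd81234abcd' (Azure Pipelines)
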
-def get_docker_container_ip(args, container_id):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :rtype: str
- """
- results = docker_inspect(args, container_id)
- network_settings = results[0]['NetworkSettings']
- networks = network_settings.get('Networks')
-
- if networks:
- network_name = get_docker_preferred_network_name(args)
-
- if not network_name:
- # Sort networks and use the first available.
- # This assumes all containers will have access to the same networks.
- network_name = sorted(networks.keys()).pop(0)
-
- ipaddress = networks[network_name]['IPAddress']
- else:
- # podman doesn't provide Networks, fall back to using IPAddress
- ipaddress = network_settings['IPAddress']
-
- if not ipaddress:
- raise ApplicationError('Cannot retrieve IP address for container: %s' % container_id)
-
- return ipaddress
-
-
-def get_docker_network_name(args, container_id): # type: (EnvironmentConfig, str) -> str
- """
- Return the network name of the specified container.
- Raises an exception if zero or more than one network is found.
- """
- networks = get_docker_networks(args, container_id)
-
- if not networks:
- raise ApplicationError('No network found for Docker container: %s.' % container_id)
-
- if len(networks) > 1:
- raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (container_id, ', '.join(networks)))
-
- return networks[0]
-
-
def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
"""
Return the preferred network name for use with Docker. The selection logic is:
@@ -153,6 +149,11 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
- the network of the currently running docker container (if any)
- the default docker network (returns None)
"""
+ try:
+ return get_docker_preferred_network_name.network
+ except AttributeError:
+ pass
+
network = None
if args.docker_network:
@@ -163,7 +164,10 @@ def get_docker_preferred_network_name(args): # type: (EnvironmentConfig) -> str
if current_container_id:
# Make sure any additional containers we launch use the same network as the current container we're running in.
# This is needed when ansible-test is running in a container that is not connected to Docker's default network.
- network = get_docker_network_name(args, current_container_id)
+ container = docker_inspect(args, current_container_id, always=True)
+ network = container.get_network_name()
+
+ get_docker_preferred_network_name.network = network
return network
@@ -173,31 +177,18 @@ def is_docker_user_defined_network(network): # type: (str) -> bool
return network and network != 'bridge'
-def get_docker_networks(args, container_id):
+def docker_pull(args, image): # type: (EnvironmentConfig, str) -> None
"""
- :param args: EnvironmentConfig
- :param container_id: str
- :rtype: list[str]
- """
- results = docker_inspect(args, container_id)
- # podman doesn't return Networks- just silently return None if it's missing...
- networks = results[0]['NetworkSettings'].get('Networks')
- if networks is None:
- return None
- return sorted(networks)
-
-
-def docker_pull(args, image):
+ Pull the specified image if it is not available.
+ Images without a tag or digest will not be pulled.
+    Attempts the pull up to 9 times before giving up.
"""
- :type args: EnvironmentConfig
- :type image: str
- """
- if ('@' in image or ':' in image) and docker_images(args, image):
- display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
+ if '@' not in image and ':' not in image:
+ display.info('Skipping pull of image without tag or digest: %s' % image, verbosity=2)
return
- if not args.docker_pull:
- display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
+ if docker_image_exists(args, image):
+ display.info('Skipping pull of existing image: %s' % image, verbosity=2)
return
for _iteration in range(1, 10):
@@ -211,41 +202,19 @@ def docker_pull(args, image):
raise ApplicationError('Failed to pull docker image "%s".' % image)
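
Hedged examples of the pull-skipping rules encoded above (image names hypothetical):

    docker_pull(args, 'quay.io/ansible/base-test-container')        # no tag or digest: skipped
    docker_pull(args, 'quay.io/ansible/base-test-container:1.1.0')  # pulled unless already present
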
-def docker_put(args, container_id, src, dst):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :type src: str
- :type dst: str
- """
- # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
- with open_binary_file(src) as src_fd:
- docker_exec(args, container_id, ['dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE],
- options=['-i'], stdin=src_fd, capture=True)
-
-
-def docker_get(args, container_id, src, dst):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :type src: str
- :type dst: str
- """
- # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
- with open_binary_file(dst, 'wb') as dst_fd:
- docker_exec(args, container_id, ['dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE],
- options=['-i'], stdout=dst_fd, capture=True)
+def docker_cp_to(args, container_id, src, dst): # type: (EnvironmentConfig, str, str, str) -> None
+ """Copy a file to the specified container."""
+ docker_command(args, ['cp', src, '%s:%s' % (container_id, dst)])
-def docker_run(args, image, options, cmd=None, create_only=False):
- """
- :type args: EnvironmentConfig
- :type image: str
- :type options: list[str] | None
- :type cmd: list[str] | None
-    :type create_only: bool
- :rtype: str | None, str | None
- """
+def docker_run(
+ args, # type: EnvironmentConfig
+ image, # type: str
+ options, # type: t.Optional[t.List[str]]
+ cmd=None, # type: t.Optional[t.List[str]]
+ create_only=False, # type: bool
+): # type: (...) -> str
+ """Run a container using the given docker image."""
if not options:
options = []
@@ -261,12 +230,16 @@ def docker_run(args, image, options, cmd=None, create_only=False):
if is_docker_user_defined_network(network):
# Only when the network is not the default bridge network.
- # Using this with the default bridge network results in an error when using --link: links are only supported for user-defined networks
options.extend(['--network', network])
for _iteration in range(1, 3):
try:
- return docker_command(args, [command] + options + [image] + cmd, capture=True)
+ stdout = docker_command(args, [command] + options + [image] + cmd, capture=True)[0]
+
+ if args.explain:
+ return ''.join(random.choice('0123456789abcdef') for _iteration in range(64))
+
+ return stdout.strip()
except SubprocessError as ex:
display.error(ex)
display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
@@ -275,7 +248,7 @@ def docker_run(args, image, options, cmd=None, create_only=False):
raise ApplicationError('Failed to run docker image "%s".' % image)
-def docker_start(args, container_id, options): # type: (EnvironmentConfig, str, t.List[str]) -> (t.Optional[str], t.Optional[str])
+def docker_start(args, container_id, options=None): # type: (EnvironmentConfig, str, t.Optional[t.List[str]]) -> (t.Optional[str], t.Optional[str])
"""
Start a docker container by name or ID
"""
@@ -293,154 +266,218 @@ def docker_start(args, container_id, options): # type: (EnvironmentConfig, str,
raise ApplicationError('Failed to run docker container "%s".' % container_id)
-def docker_images(args, image):
- """
- :param args: CommonConfig
- :param image: str
- :rtype: list[dict[str, any]]
- """
+def docker_rm(args, container_id): # type: (EnvironmentConfig, str) -> None
+ """Remove the specified container."""
try:
- stdout, _dummy = docker_command(args, ['images', image, '--format', '{{json .}}'], capture=True, always=True)
+ docker_command(args, ['rm', '-f', container_id], capture=True)
except SubprocessError as ex:
- if 'no such image' in ex.stderr:
- return [] # podman does not handle this gracefully, exits 125
-
- if 'function "json" not defined' in ex.stderr:
- # podman > 2 && < 2.2.0 breaks with --format {{json .}}, and requires --format json
- # So we try this as a fallback. If it fails again, we just raise the exception and bail.
- stdout, _dummy = docker_command(args, ['images', image, '--format', 'json'], capture=True, always=True)
+ if 'no such container' in ex.stderr:
+ pass # podman does not handle this gracefully, exits 1
else:
raise ex
- if stdout.startswith('['):
- # modern podman outputs a pretty-printed json list. Just load the whole thing.
- return json.loads(stdout)
- # docker outputs one json object per line (jsonl)
- return [json.loads(line) for line in stdout.splitlines()]
+class DockerError(Exception):
+ """General Docker error."""
-def docker_rm(args, container_id):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- """
- try:
- docker_command(args, ['rm', '-f', container_id], capture=True)
- except SubprocessError as ex:
- if 'no such container' in ex.stderr:
- pass # podman does not handle this gracefully, exits 1
+class ContainerNotFoundError(DockerError):
+ """The container identified by `identifier` was not found."""
+ def __init__(self, identifier):
+ super().__init__('The container "%s" was not found.' % identifier)
+
+ self.identifier = identifier
+
+
+class DockerInspect:
+ """The results of `docker inspect` for a single container."""
+ def __init__(self, args, inspection): # type: (EnvironmentConfig, t.Dict[str, t.Any]) -> None
+ self.args = args
+ self.inspection = inspection
+
+ # primary properties
+
+ @property
+ def id(self): # type: () -> str
+ """Return the ID of the container."""
+ return self.inspection['Id']
+
+ @property
+ def network_settings(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dictionary of the container network settings."""
+ return self.inspection['NetworkSettings']
+
+ @property
+ def state(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dictionary of the container state."""
+ return self.inspection['State']
+
+ @property
+ def config(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dictionary of the container configuration."""
+ return self.inspection['Config']
+
+ # nested properties
+
+ @property
+ def ports(self): # type: () -> t.Dict[str, t.List[t.Dict[str, str]]]
+ """Return a dictionary of ports the container has published."""
+ return self.network_settings['Ports']
+
+ @property
+ def networks(self): # type: () -> t.Optional[t.Dict[str, t.Dict[str, t.Any]]]
+ """Return a dictionary of the networks the container is attached to, or None if running under podman, which does not support networks."""
+ return self.network_settings.get('Networks')
+
+ @property
+ def running(self): # type: () -> bool
+ """Return True if the container is running, otherwise False."""
+ return self.state['Running']
+
+ @property
+ def env(self): # type: () -> t.List[str]
+ """Return a list of the environment variables used to create the container."""
+ return self.config['Env']
+
+ @property
+ def image(self): # type: () -> str
+ """Return the image used to create the container."""
+ return self.config['Image']
+
+ # functions
+
+ def env_dict(self): # type: () -> t.Dict[str, str]
+ """Return a dictionary of the environment variables used to create the container."""
+ return dict((item[0], item[1]) for item in [e.split('=', 1) for e in self.env])
+
+ def get_tcp_port(self, port): # type: (int) -> t.Optional[t.List[t.Dict[str, str]]]
+ """Return a list of the endpoints published by the container for the specified TCP port, or None if it is not published."""
+ return self.ports.get('%d/tcp' % port)
+
+ def get_network_names(self): # type: () -> t.Optional[t.List[str]]
+ """Return a list of the network names the container is attached to."""
+ if self.networks is None:
+ return None
+
+ return sorted(self.networks)
+
+ def get_network_name(self): # type: () -> str
+ """Return the network name the container is attached to. Raises an exception if no network, or more than one, is attached."""
+ networks = self.get_network_names()
+
+ if not networks:
+ raise ApplicationError('No network found for Docker container: %s.' % self.id)
+
+ if len(networks) > 1:
+ raise ApplicationError('Found multiple networks for Docker container %s instead of only one: %s' % (self.id, ', '.join(networks)))
+
+ return networks[0]
+
+ def get_ip_address(self): # type: () -> t.Optional[str]
+ """Return the IP address of the container for the preferred docker network."""
+ if self.networks:
+ network_name = get_docker_preferred_network_name(self.args)
+
+ if not network_name:
+ # Sort networks and use the first available.
+ # This assumes all containers will have access to the same networks.
+ network_name = sorted(self.networks.keys()).pop(0)
+
+ ipaddress = self.networks[network_name]['IPAddress']
else:
- raise ex
+ ipaddress = self.network_settings['IPAddress']
+
+ if not ipaddress:
+ return None
+ return ipaddress
-def docker_inspect(args, container_id):
+
+def docker_inspect(args, identifier, always=False): # type: (EnvironmentConfig, str, bool) -> DockerInspect
"""
- :type args: EnvironmentConfig
- :type container_id: str
- :rtype: list[dict]
+ Return the results of `docker inspect` for the specified container.
+ Raises a ContainerNotFoundError if the container was not found.
"""
- if args.explain:
- return []
-
try:
- stdout = docker_command(args, ['inspect', container_id], capture=True)[0]
- return json.loads(stdout)
+ stdout = docker_command(args, ['inspect', identifier], capture=True, always=always)[0]
except SubprocessError as ex:
- if 'no such image' in ex.stderr:
- return [] # podman does not handle this gracefully, exits 125
- try:
- return json.loads(ex.stdout)
- except Exception:
- raise ex
+ stdout = ex.stdout
+ if args.explain and not always:
+ items = []
+ else:
+ items = json.loads(stdout)
-def docker_network_disconnect(args, container_id, network):
- """
- :param args: EnvironmentConfig
- :param container_id: str
- :param network: str
- """
- docker_command(args, ['network', 'disconnect', network, container_id], capture=True)
+ if len(items) == 1:
+ return DockerInspect(args, items[0])
+ raise ContainerNotFoundError(identifier)
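
A minimal sketch of the new inspection API, assuming a container named 'ansible-test-target' exists and is attached to exactly one network:

    try:
        container = docker_inspect(args, 'ansible-test-target')
    except ContainerNotFoundError as ex:
        display.warning('Not running: %s' % ex.identifier)
    else:
        if container.running:
            display.info('%s %s' % (container.get_network_name(), container.get_ip_address()))
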
-def docker_network_inspect(args, network):
- """
- :type args: EnvironmentConfig
- :type network: str
- :rtype: list[dict]
- """
- if args.explain:
- return []
- try:
- stdout = docker_command(args, ['network', 'inspect', network], capture=True)[0]
- return json.loads(stdout)
- except SubprocessError as ex:
- try:
- return json.loads(ex.stdout)
- except Exception:
- raise ex
+def docker_network_disconnect(args, container_id, network): # type: (EnvironmentConfig, str, str) -> None
+ """Disconnect the specified docker container from the given network."""
+ docker_command(args, ['network', 'disconnect', network, container_id], capture=True)
-def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None, stdout=None, data=None):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :type cmd: list[str]
- :type options: list[str] | None
- :type capture: bool
- :type stdin: BinaryIO | None
- :type stdout: BinaryIO | None
- :type data: str | None
- :rtype: str | None, str | None
- """
+def docker_image_exists(args, image): # type: (EnvironmentConfig, str) -> bool
+ """Return True if the image exists, otherwise False."""
+ try:
+ docker_command(args, ['image', 'inspect', image], capture=True)
+ except SubprocessError:
+ return False
+
+ return True
+
+
+def docker_exec(
+ args, # type: EnvironmentConfig
+ container_id, # type: str
+ cmd, # type: t.List[str]
+ options=None, # type: t.Optional[t.List[str]]
+ capture=False, # type: bool
+ stdin=None, # type: t.Optional[t.BinaryIO]
+ stdout=None, # type: t.Optional[t.BinaryIO]
+ data=None, # type: t.Optional[str]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Execute the given command in the specified container."""
if not options:
options = []
- if data:
+ if data or stdin or stdout:
options.append('-i')
return docker_command(args, ['exec'] + options + [container_id] + cmd, capture=capture, stdin=stdin, stdout=stdout, data=data)
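
A usage sketch: run a command inside a container and capture its output. Note the '-i' option is now added whenever stdin or stdout is redirected, not only when data is supplied:

    stdout, _stderr = docker_exec(args, container_id, ['uname', '-a'], capture=True)
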
-def docker_info(args):
- """
- :param args: CommonConfig
- :rtype: dict[str, any]
- """
+def docker_info(args): # type: (CommonConfig) -> t.Dict[str, t.Any]
+ """Return a dictionary containing details from the `docker info` command."""
stdout, _dummy = docker_command(args, ['info', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
-def docker_version(args):
- """
- :param args: CommonConfig
- :rtype: dict[str, any]
- """
+def docker_version(args): # type: (CommonConfig) -> t.Dict[str, t.Any]
+ """Return a dictionary containing details from the `docker version` command."""
stdout, _dummy = docker_command(args, ['version', '--format', '{{json .}}'], capture=True, always=True)
return json.loads(stdout)
-def docker_command(args, cmd, capture=False, stdin=None, stdout=None, always=False, data=None):
- """
- :type args: CommonConfig
- :type cmd: list[str]
- :type capture: bool
- :type stdin: file | None
- :type stdout: file | None
- :type always: bool
- :type data: str | None
- :rtype: str | None, str | None
- """
+def docker_command(
+ args, # type: CommonConfig
+ cmd, # type: t.List[str]
+ capture=False, # type: bool
+ stdin=None, # type: t.Optional[t.BinaryIO]
+ stdout=None, # type: t.Optional[t.BinaryIO]
+ always=False, # type: bool
+ data=None, # type: t.Optional[str]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified docker command."""
env = docker_environment()
- return run_command(args, ['docker'] + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
+ command = require_docker().command
+ return run_command(args, [command] + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
-def docker_environment():
- """
- :rtype: dict[str, str]
- """
+def docker_environment(): # type: () -> t.Dict[str, str]
+ """Return a dictionary of docker related environment variables found in the current environment."""
env = common_environment()
env.update(dict((key, os.environ[key]) for key in os.environ if key.startswith('DOCKER_')))
return env
diff --git a/test/lib/ansible_test/_internal/encoding.py b/test/lib/ansible_test/_internal/encoding.py
index 8e014794..189b44c0 100644
--- a/test/lib/ansible_test/_internal/encoding.py
+++ b/test/lib/ansible_test/_internal/encoding.py
@@ -1,8 +1,7 @@
"""Functions for encoding and decoding strings."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-from . import types as t
+import typing as t
ENCODING = 'utf-8'
diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py
index 5500c1cc..2dd53de8 100644
--- a/test/lib/ansible_test/_internal/executor.py
+++ b/test/lib/ansible_test/_internal/executor.py
@@ -1,114 +1,15 @@
"""Execute Ansible tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import atexit
-import json
-import os
-import datetime
-import re
-import time
-import textwrap
-import functools
-import difflib
-import filecmp
-import random
-import string
-import shutil
-
-from . import types as t
-
-from .thread import (
- WrappedThread,
-)
-
-from .core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from .manage_ci import (
- ManageWindowsCI,
- ManageNetworkCI,
- get_network_settings,
-)
-
-from .cloud import (
- cloud_filter,
- cloud_init,
- get_cloud_environment,
- get_cloud_platforms,
- CloudEnvironmentConfig,
-)
+import typing as t
from .io import (
- make_dirs,
- open_text_file,
read_text_file,
- write_text_file,
)
from .util import (
ApplicationWarning,
- ApplicationError,
- SubprocessError,
display,
- remove_tree,
- find_executable,
- raw_command,
- get_available_port,
- generate_pip_command,
- find_python,
- cmd_quote,
- ANSIBLE_LIB_ROOT,
- ANSIBLE_TEST_DATA_ROOT,
- ANSIBLE_TEST_CONFIG_ROOT,
- get_ansible_version,
- tempdir,
- open_zipfile,
- SUPPORTED_PYTHON_VERSIONS,
- str_to_version,
- version_to_str,
- get_hash,
-)
-
-from .util_common import (
- get_docker_completion,
- get_remote_completion,
- get_python_path,
- intercept_command,
- named_temporary_file,
- run_command,
- write_json_test_results,
- ResultType,
- handle_layout_messages,
- CommonConfig,
-)
-
-from .docker_util import (
- docker_pull,
- docker_run,
- docker_available,
- docker_rm,
- get_docker_container_id,
- get_docker_container_ip,
- get_docker_hostname,
- get_docker_preferred_network_name,
- is_docker_user_defined_network,
-)
-
-from .ansible_util import (
- ansible_environment,
- check_pyyaml,
-)
-
-from .target import (
- IntegrationTarget,
- walk_internal_targets,
- walk_posix_integration_targets,
- walk_network_integration_targets,
- walk_windows_integration_targets,
- TIntegrationTarget,
)
from .ci import (
@@ -121,1654 +22,19 @@ from .classification import (
from .config import (
TestConfig,
- EnvironmentConfig,
- IntegrationConfig,
- NetworkIntegrationConfig,
- PosixIntegrationConfig,
- ShellConfig,
- WindowsIntegrationConfig,
- TIntegrationConfig,
- UnitsConfig,
- SanityConfig,
)
from .metadata import (
ChangeDescription,
)
-from .integration import (
- integration_test_environment,
- integration_test_config_file,
- setup_common_temp_dir,
- get_inventory_relative_path,
- check_inventory,
- delegate_inventory,
+from .provisioning import (
+ HostState,
)
-from .data import (
- data_context,
-)
-
-from .http import (
- urlparse,
-)
-
-HTTPTESTER_HOSTS = (
- 'ansible.http.tests',
- 'sni1.ansible.http.tests',
- 'fail.ansible.http.tests',
- 'self-signed.ansible.http.tests',
-)
-
-
-def check_startup():
- """Checks to perform at startup before running commands."""
- check_legacy_modules()
-
-
-def check_legacy_modules():
- """Detect conflicts with legacy core/extras module directories to avoid problems later."""
- for directory in 'core', 'extras':
- path = 'lib/ansible/modules/%s' % directory
-
- for root, _dir_names, file_names in os.walk(path):
- if file_names:
- # the directory shouldn't exist, but if it does, it must contain no files
- raise ApplicationError('Files prohibited in "%s". '
- 'These are most likely legacy modules from version 2.2 or earlier.' % root)
-
-
-def create_shell_command(command):
- """
- :type command: list[str]
- :rtype: list[str]
- """
- optional_vars = (
- 'TERM',
- )
-
- cmd = ['/usr/bin/env']
- cmd += ['%s=%s' % (var, os.environ[var]) for var in optional_vars if var in os.environ]
- cmd += command
-
- return cmd
-
-
-def get_openssl_version(args, python, python_version): # type: (EnvironmentConfig, str, str) -> t.Optional[t.Tuple[int, ...]]
- """Return the openssl version."""
- if not python_version.startswith('2.'):
- # OpenSSL version checking only works on Python 3.x.
- # This should be the most accurate, since it is the Python we will be using.
- version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version']
-
- if version:
- display.info('Detected OpenSSL version %s under Python %s.' % (version_to_str(version), python_version), verbosity=1)
-
- return tuple(version)
-
- # Fall back to detecting the OpenSSL version from the CLI.
- # This should provide an adequate solution on Python 2.x.
- openssl_path = find_executable('openssl', required=False)
-
- if openssl_path:
- try:
- result = raw_command([openssl_path, 'version'], capture=True)[0]
- except SubprocessError:
- result = ''
-
- match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)
-
- if match:
- version = str_to_version(match.group('version'))
-
- display.info('Detected OpenSSL version %s using the openssl CLI.' % version_to_str(version), verbosity=1)
-
- return version
-
- display.info('Unable to detect OpenSSL version.', verbosity=1)
-
- return None
-
-
-def get_setuptools_version(args, python): # type: (EnvironmentConfig, str) -> t.Tuple[int]
- """Return the setuptools version for the given python."""
- try:
- return str_to_version(raw_command([python, '-c', 'import setuptools; print(setuptools.__version__)'], capture=True)[0])
- except SubprocessError:
- if args.explain:
-            return tuple()  # ignore errors in explain mode in case setuptools is not already installed
-
- raise
-
-
-def install_cryptography(args, python, python_version, pip): # type: (EnvironmentConfig, str, str, t.List[str]) -> None
- """
- Install cryptography for the specified environment.
- """
- # make sure ansible-test's basic requirements are met before continuing
- # this is primarily to ensure that pip is new enough to facilitate further requirements installation
- install_ansible_test_requirements(args, pip)
-
- # make sure setuptools is available before trying to install cryptography
- # the installed version of setuptools affects the version of cryptography to install
- run_command(args, generate_pip_install(pip, '', packages=['setuptools']))
-
- # install the latest cryptography version that the current requirements can support
- # use a custom constraints file to avoid the normal constraints file overriding the chosen version of cryptography
- # if not installed here later install commands may try to install an unsupported version due to the presence of older setuptools
- # this is done instead of upgrading setuptools to allow tests to function with older distribution provided versions of setuptools
- run_command(args, generate_pip_install(pip, '',
- packages=[get_cryptography_requirement(args, python, python_version)],
- constraints=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'cryptography-constraints.txt')))
-
-
-def get_cryptography_requirement(args, python, python_version): # type: (EnvironmentConfig, str, str) -> str
- """
- Return the correct cryptography requirement for the given python version.
- The version of cryptography installed depends on the python version, setuptools version and openssl version.
- """
- setuptools_version = get_setuptools_version(args, python)
- openssl_version = get_openssl_version(args, python, python_version)
-
- if setuptools_version >= (18, 5):
- if python_version == '2.6':
- # cryptography 2.2+ requires python 2.7+
- # see https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#22---2018-03-19
- cryptography = 'cryptography < 2.2'
- elif openssl_version and openssl_version < (1, 1, 0):
- # cryptography 3.2 requires openssl 1.1.x or later
- # see https://cryptography.io/en/latest/changelog.html#v3-2
- cryptography = 'cryptography < 3.2'
- else:
- # cryptography 3.4+ fails to install on many systems
- # this is a temporary work-around until a more permanent solution is available
- cryptography = 'cryptography < 3.4'
- else:
- # cryptography 2.1+ requires setuptools 18.5+
- # see https://github.com/pyca/cryptography/blob/62287ae18383447585606b9d0765c0f1b8a9777c/setup.py#L26
- cryptography = 'cryptography < 2.1'
-
- return cryptography
-
-
-def install_command_requirements(args, python_version=None, context=None, enable_pyyaml_check=False):
- """
- :type args: EnvironmentConfig
- :type python_version: str | None
- :type context: str | None
- :type enable_pyyaml_check: bool
- """
- if not args.explain:
- make_dirs(ResultType.COVERAGE.path)
- make_dirs(ResultType.DATA.path)
-
- if isinstance(args, ShellConfig):
- if args.raw:
- return
-
- if not args.requirements:
- return
-
- if isinstance(args, ShellConfig):
- return
-
- packages = []
-
- if isinstance(args, TestConfig):
- if args.coverage:
- packages.append('coverage')
- if args.junit:
- packages.append('junit-xml')
-
- if not python_version:
- python_version = args.python_version
-
- python = find_python(python_version)
- pip = generate_pip_command(python)
-
-    # skip packages which have already been installed for python_version
-
- try:
- package_cache = install_command_requirements.package_cache
- except AttributeError:
- package_cache = install_command_requirements.package_cache = {}
-
- installed_packages = package_cache.setdefault(python_version, set())
- skip_packages = [package for package in packages if package in installed_packages]
-
- for package in skip_packages:
- packages.remove(package)
-
- installed_packages.update(packages)
-
- if args.command != 'sanity':
- install_cryptography(args, python, python_version, pip)
-
- commands = [generate_pip_install(pip, args.command, packages=packages, context=context)]
-
- if isinstance(args, IntegrationConfig):
- for cloud_platform in get_cloud_platforms(args):
- commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))
-
- commands = [cmd for cmd in commands if cmd]
-
- if not commands:
- return # no need to detect changes or run pip check since we are not making any changes
-
- # only look for changes when more than one requirements file is needed
- detect_pip_changes = len(commands) > 1
-
- # first pass to install requirements, changes expected unless environment is already set up
- install_ansible_test_requirements(args, pip)
- changes = run_pip_commands(args, pip, commands, detect_pip_changes)
-
- if changes:
- # second pass to check for conflicts in requirements, changes are not expected here
- changes = run_pip_commands(args, pip, commands, detect_pip_changes)
-
- if changes:
- raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
- '\n'.join((' '.join(cmd_quote(c) for c in cmd) for cmd in changes)))
-
- if args.pip_check:
- # ask pip to check for conflicts between installed packages
- try:
- run_command(args, pip + ['check', '--disable-pip-version-check'], capture=True)
- except SubprocessError as ex:
- if ex.stderr.strip() == 'ERROR: unknown command "check"':
- display.warning('Cannot check pip requirements for conflicts because "pip check" is not supported.')
- else:
- raise
-
- if enable_pyyaml_check:
- # pyyaml may have been one of the requirements that was installed, so perform an optional check for it
- check_pyyaml(args, python_version, required=False)
-
-
-def install_ansible_test_requirements(args, pip): # type: (EnvironmentConfig, t.List[str]) -> None
- """Install requirements for ansible-test for the given pip if not already installed."""
- try:
- installed = install_command_requirements.installed
- except AttributeError:
- installed = install_command_requirements.installed = set()
-
- if tuple(pip) in installed:
- return
-
- # make sure basic ansible-test requirements are met, including making sure that pip is recent enough to support constraints
- # virtualenvs created by older distributions may include very old pip versions, such as those created in the centos6 test container (pip 6.0.8)
- run_command(args, generate_pip_install(pip, 'ansible-test', use_constraints=False))
-
- installed.add(tuple(pip))
-
-
-def run_pip_commands(args, pip, commands, detect_pip_changes=False):
- """
- :type args: EnvironmentConfig
- :type pip: list[str]
- :type commands: list[list[str]]
- :type detect_pip_changes: bool
- :rtype: list[list[str]]
- """
- changes = []
-
- after_list = pip_list(args, pip) if detect_pip_changes else None
-
- for cmd in commands:
- if not cmd:
- continue
-
- before_list = after_list
-
- run_command(args, cmd)
-
- after_list = pip_list(args, pip) if detect_pip_changes else None
-
- if before_list != after_list:
- changes.append(cmd)
-
- return changes
-
-
-def pip_list(args, pip):
- """
- :type args: EnvironmentConfig
- :type pip: list[str]
- :rtype: str
- """
- stdout = run_command(args, pip + ['list'], capture=True)[0]
- return stdout
-
-
-def generate_pip_install(pip, command, packages=None, constraints=None, use_constraints=True, context=None):
- """
- :type pip: list[str]
- :type command: str
- :type packages: list[str] | None
- :type constraints: str | None
- :type use_constraints: bool
- :type context: str | None
- :rtype: list[str] | None
- """
- constraints = constraints or os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')
- requirements = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', '%s.txt' % ('%s.%s' % (command, context) if context else command))
- content_constraints = None
-
- options = []
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- if command == 'sanity' and data_context().content.is_ansible:
- requirements = os.path.join(data_context().content.sanity_path, 'code-smell', '%s.requirements.txt' % context)
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- if command == 'units':
- requirements = os.path.join(data_context().content.unit_path, 'requirements.txt')
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- content_constraints = os.path.join(data_context().content.unit_path, 'constraints.txt')
-
- if command in ('integration', 'windows-integration', 'network-integration'):
- requirements = os.path.join(data_context().content.integration_path, 'requirements.txt')
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- requirements = os.path.join(data_context().content.integration_path, '%s.requirements.txt' % command)
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')
-
- if command.startswith('integration.cloud.'):
- content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')
-
- if packages:
- options += packages
-
- if not options:
- return None
-
- if use_constraints:
- if content_constraints and os.path.exists(content_constraints) and os.path.getsize(content_constraints):
- # listing content constraints first gives them priority over constraints provided by ansible-test
- options.extend(['-c', content_constraints])
-
- options.extend(['-c', constraints])
-
- return pip + ['install', '--disable-pip-version-check'] + options
-
-
-def command_shell(args):
- """
- :type args: ShellConfig
- """
- if args.delegate:
- raise Delegate()
-
- install_command_requirements(args)
-
- if args.inject_httptester:
- inject_httptester(args)
-
- cmd = create_shell_command(['bash', '-i'])
- run_command(args, cmd)
-
-
-def command_posix_integration(args):
- """
- :type args: PosixIntegrationConfig
- """
- handle_layout_messages(data_context().content.integration_messages)
-
- inventory_relative_path = get_inventory_relative_path(args)
- inventory_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, os.path.basename(inventory_relative_path))
-
- all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets)
- command_integration_filtered(args, internal_targets, all_targets, inventory_path)
-
-
-def command_network_integration(args):
- """
- :type args: NetworkIntegrationConfig
- """
- handle_layout_messages(data_context().content.integration_messages)
-
- inventory_relative_path = get_inventory_relative_path(args)
- template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
-
- if args.inventory:
- inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
- else:
- inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
-
- if args.no_temp_workdir:
- # temporary solution to keep DCI tests working
- inventory_exists = os.path.exists(inventory_path)
- else:
- inventory_exists = os.path.isfile(inventory_path)
-
- if not args.explain and not args.platform and not inventory_exists:
- raise ApplicationError(
- 'Inventory not found: %s\n'
- 'Use --inventory to specify the inventory path.\n'
- 'Use --platform to provision resources and generate an inventory file.\n'
- 'See also inventory template: %s' % (inventory_path, template_path)
- )
-
- check_inventory(args, inventory_path)
- delegate_inventory(args, inventory_path)
-
- all_targets = tuple(walk_network_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
- instances = [] # type: t.List[WrappedThread]
-
- if args.platform:
- get_python_path(args, args.python_executable) # initialize before starting threads
-
- configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
-
- for platform_version in args.platform:
- platform, version = platform_version.split('/', 1)
- config = configs.get(platform_version)
-
- if not config:
- continue
-
- instance = WrappedThread(functools.partial(network_run, args, platform, version, config))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- remotes = [instance.wait_for_result() for instance in instances]
- inventory = network_inventory(args, remotes)
-
- display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
-
- if not args.explain:
- write_text_file(inventory_path, inventory)
-
- success = False
-
- try:
- command_integration_filtered(args, internal_targets, all_targets, inventory_path)
- success = True
- finally:
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- for instance in instances:
- instance.result.stop()
-
-
-def network_init(args, internal_targets): # type: (NetworkIntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
- """Initialize platforms for network integration tests."""
- if not args.platform:
- return
-
- if args.metadata.instance_config is not None:
- return
-
- platform_targets = set(a for target in internal_targets for a in target.aliases if a.startswith('network/'))
-
- instances = [] # type: t.List[WrappedThread]
-
- # generate an ssh key (if needed) up front once, instead of for each instance
- SshKey(args)
-
- for platform_version in args.platform:
- platform, version = platform_version.split('/', 1)
- platform_target = 'network/%s/' % platform
-
- if platform_target not in platform_targets:
- display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
- platform_version, platform))
- continue
-
- instance = WrappedThread(functools.partial(network_start, args, platform, version))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
-
-
-def network_start(args, platform, version):
- """
- :type args: NetworkIntegrationConfig
- :type platform: str
- :type version: str
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
- core_ci.start()
-
- return core_ci.save()
-
-
-def network_run(args, platform, version, config):
- """
- :type args: NetworkIntegrationConfig
- :type platform: str
- :type version: str
- :type config: dict[str, str]
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
- core_ci.load(config)
- core_ci.wait()
-
- manage = ManageNetworkCI(args, core_ci)
- manage.wait()
-
- return core_ci
-
-
-def network_inventory(args, remotes):
- """
- :type args: NetworkIntegrationConfig
- :type remotes: list[AnsibleCoreCI]
- :rtype: str
- """
- groups = dict([(remote.platform, []) for remote in remotes])
- net = []
-
- for remote in remotes:
- options = dict(
- ansible_host=remote.connection.hostname,
- ansible_user=remote.connection.username,
- ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
- )
-
- settings = get_network_settings(args, remote.platform, remote.version)
-
- options.update(settings.inventory_vars)
-
- groups[remote.platform].append(
- '%s %s' % (
- remote.name.replace('.', '-'),
- ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
- )
- )
-
- net.append(remote.platform)
-
- groups['net:children'] = net
-
- template = ''
-
- for group in groups:
- hosts = '\n'.join(groups[group])
-
- template += textwrap.dedent("""
- [%s]
- %s
- """) % (group, hosts)
-
- inventory = template
-
- return inventory
-
-
-def command_windows_integration(args):
- """
- :type args: WindowsIntegrationConfig
- """
- handle_layout_messages(data_context().content.integration_messages)
-
- inventory_relative_path = get_inventory_relative_path(args)
- template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
-
- if args.inventory:
- inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
- else:
- inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
-
- if not args.explain and not args.windows and not os.path.isfile(inventory_path):
- raise ApplicationError(
- 'Inventory not found: %s\n'
- 'Use --inventory to specify the inventory path.\n'
- 'Use --windows to provision resources and generate an inventory file.\n'
- 'See also inventory template: %s' % (inventory_path, template_path)
- )
-
- check_inventory(args, inventory_path)
- delegate_inventory(args, inventory_path)
-
- all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
- instances = [] # type: t.List[WrappedThread]
- pre_target = None
- post_target = None
- httptester_id = None
-
- if args.windows:
- get_python_path(args, args.python_executable) # initialize before starting threads
-
- configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
-
- for version in args.windows:
- config = configs['windows/%s' % version]
-
- instance = WrappedThread(functools.partial(windows_run, args, version, config))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- remotes = [instance.wait_for_result() for instance in instances]
- inventory = windows_inventory(remotes)
-
- display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
-
- if not args.explain:
- write_text_file(inventory_path, inventory)
-
- use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
- # if running under Docker delegation, the httptester may have already been started
- docker_httptester = bool(os.environ.get("HTTPTESTER", False))
-
- if use_httptester and not docker_available() and not docker_httptester:
- display.warning('Assuming --disable-httptester since `docker` is not available.')
- elif use_httptester:
- if docker_httptester:
-            # we are running in a Docker container that is linked to the httptester container; we just need to
-            # forward these requests to the linked hostname
- first_host = HTTPTESTER_HOSTS[0]
- ssh_options = [
- "-R", "8080:%s:80" % first_host,
- "-R", "8443:%s:443" % first_host,
- "-R", "8444:%s:444" % first_host
- ]
- else:
-            # we are running directly and need to start the httptester container ourselves, forward the ports
-            # from there manually, and ensure the HTTPTESTER env var is set during the run
- args.inject_httptester = True
- httptester_id, ssh_options = start_httptester(args)
-
-        # to get this SSH command to run in the background we request backgrounding (-f) and disable
-        # pty allocation (-T)
- ssh_options.insert(0, "-fT")
-
-        # create a script that will continue to run in the background until the script is deleted;
-        # deleting the script cleans up and closes the connection
- def forward_ssh_ports(target):
- """
- :type target: IntegrationTarget
- """
- if 'needs/httptester/' not in target.aliases:
- return
-
- for remote in [r for r in remotes if r.version != '2008']:
- manage = ManageWindowsCI(remote)
- manage.upload(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'windows-httptester.ps1'), watcher_path)
-
-                # We cannot pass an array of strings with -File, so we use a delimiter for multiple values
- script = "powershell.exe -NoProfile -ExecutionPolicy Bypass -File .\\%s -Hosts \"%s\"" \
- % (watcher_path, "|".join(HTTPTESTER_HOSTS))
- if args.verbosity > 3:
- script += " -Verbose"
- manage.ssh(script, options=ssh_options, force_pty=False)
-
- def cleanup_ssh_ports(target):
- """
- :type target: IntegrationTarget
- """
- if 'needs/httptester/' not in target.aliases:
- return
-
- for remote in [r for r in remotes if r.version != '2008']:
- # delete the tmp file that keeps the http-tester alive
- manage = ManageWindowsCI(remote)
- manage.ssh("cmd.exe /c \"del %s /F /Q\"" % watcher_path, force_pty=False)
-
- watcher_path = "ansible-test-http-watcher-%s.ps1" % time.time()
- pre_target = forward_ssh_ports
- post_target = cleanup_ssh_ports
-
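A hedged sketch of the ssh invocation assembled above: -f backgrounds the process, -T disables pty allocation, and each -R reverse-forwards a port on the Windows host back to the httptester container (the hostnames below are hypothetical):

first_host = 'ansible.http.tests'  # hypothetical first entry of HTTPTESTER_HOSTS
ssh_options = ['-fT']  # background (-f) without a pty (-T)

for remote_port, container_port in ((8080, 80), (8443, 443), (8444, 444)):
    ssh_options += ['-R', '%d:%s:%d' % (remote_port, first_host, container_port)]

print(' '.join(['ssh'] + ssh_options + ['windows-host']))  # 'windows-host' is a placeholder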
- def run_playbook(playbook, run_playbook_vars): # type: (str, t.Dict[str, t.Any]) -> None
- playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
- command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
- if args.verbosity:
- command.append('-%s' % ('v' * args.verbosity))
-
- env = ansible_environment(args)
- intercept_command(args, command, '', env, disable_coverage=True)
-
- remote_temp_path = None
-
- if args.coverage and not args.coverage_check:
-        # Create a remote directory that is writable by everyone. Use Ansible to talk to the remote host.
- remote_temp_path = 'C:\\ansible_test_coverage_%s' % time.time()
- playbook_vars = {'remote_temp_path': remote_temp_path}
- run_playbook('windows_coverage_setup.yml', playbook_vars)
-
- success = False
-
- try:
- command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target,
- post_target=post_target, remote_temp_path=remote_temp_path)
- success = True
- finally:
- if httptester_id:
- docker_rm(args, httptester_id)
-
- if remote_temp_path:
-        # Zip up the coverage files that were generated and fetch them back to localhost.
- with tempdir() as local_temp_path:
- playbook_vars = {'remote_temp_path': remote_temp_path, 'local_temp_path': local_temp_path}
- run_playbook('windows_coverage_teardown.yml', playbook_vars)
-
- for filename in os.listdir(local_temp_path):
- with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
- coverage_zip.extractall(ResultType.COVERAGE.path)
-
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- for instance in instances:
- instance.result.stop()
-
-
-# noinspection PyUnusedLocal
-def windows_init(args, internal_targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: WindowsIntegrationConfig
- :type internal_targets: tuple[IntegrationTarget]
- """
- if not args.windows:
- return
-
- if args.metadata.instance_config is not None:
- return
-
- instances = [] # type: t.List[WrappedThread]
-
- for version in args.windows:
- instance = WrappedThread(functools.partial(windows_start, args, version))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
-
-
-def windows_start(args, version):
- """
- :type args: WindowsIntegrationConfig
- :type version: str
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
- core_ci.start()
-
- return core_ci.save()
-
-
-def windows_run(args, version, config):
- """
- :type args: WindowsIntegrationConfig
- :type version: str
- :type config: dict[str, str]
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
- core_ci.load(config)
- core_ci.wait()
-
- manage = ManageWindowsCI(core_ci)
- manage.wait()
-
- return core_ci
-
-
-def windows_inventory(remotes):
- """
- :type remotes: list[AnsibleCoreCI]
- :rtype: str
- """
- hosts = []
-
- for remote in remotes:
- options = dict(
- ansible_host=remote.connection.hostname,
- ansible_user=remote.connection.username,
- ansible_password=remote.connection.password,
- ansible_port=remote.connection.port,
- )
-
- # used for the connection_windows_ssh test target
- if remote.ssh_key:
- options["ansible_ssh_private_key_file"] = os.path.abspath(remote.ssh_key.key)
-
- if remote.name == 'windows-2008':
- options.update(
- # force 2008 to use PSRP for the connection plugin
- ansible_connection='psrp',
- ansible_psrp_auth='basic',
- ansible_psrp_cert_validation='ignore',
- )
- elif remote.name == 'windows-2016':
- options.update(
- # force 2016 to use NTLM + HTTP message encryption
- ansible_connection='winrm',
- ansible_winrm_server_cert_validation='ignore',
- ansible_winrm_transport='ntlm',
- ansible_winrm_scheme='http',
- ansible_port='5985',
- )
- else:
- options.update(
- ansible_connection='winrm',
- ansible_winrm_server_cert_validation='ignore',
- )
-
- hosts.append(
- '%s %s' % (
- remote.name.replace('/', '_'),
- ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
- )
- )
-
- template = """
- [windows]
- %s
-
- # support winrm binary module tests (temporary solution)
- [testhost:children]
- windows
- """
-
- template = textwrap.dedent(template)
- inventory = template % ('\n'.join(hosts))
-
- return inventory
-
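A minimal sketch of the host line rendering above, with hypothetical connection values; real values come from the provisioned AnsibleCoreCI instances:

options = dict(
    ansible_host='203.0.113.20',
    ansible_user='Administrator',
    ansible_connection='winrm',
)

name = 'windows/2019'  # hypothetical remote name

host_line = '%s %s' % (
    name.replace('/', '_'),
    ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
)

print(host_line)  # -> windows_2019 ansible_connection="winrm" ansible_host="..." ansible_user="..."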
-
-def command_integration_filter(args, # type: TIntegrationConfig
- targets, # type: t.Iterable[TIntegrationTarget]
- init_callback=None, # type: t.Callable[[TIntegrationConfig, t.Tuple[TIntegrationTarget, ...]], None]
- ): # type: (...) -> t.Tuple[TIntegrationTarget, ...]
- """Filter the given integration test targets."""
- targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
- changes = get_changes_filter(args)
-
- # special behavior when the --changed-all-target target is selected based on changes
- if args.changed_all_target in changes:
- # act as though the --changed-all-target target was in the include list
- if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
- args.include.append(args.changed_all_target)
- args.delegate_args += ['--include', args.changed_all_target]
- # act as though the --changed-all-target target was in the exclude list
- elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
- args.exclude.append(args.changed_all_target)
-
- require = args.require + changes
- exclude = args.exclude
-
- internal_targets = walk_internal_targets(targets, args.include, exclude, require)
- environment_exclude = get_integration_filter(args, internal_targets)
-
- environment_exclude += cloud_filter(args, internal_targets)
-
- if environment_exclude:
- exclude += environment_exclude
- internal_targets = walk_internal_targets(targets, args.include, exclude, require)
-
- if not internal_targets:
- raise AllTargetsSkipped()
-
- if args.start_at and not any(target.name == args.start_at for target in internal_targets):
- raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
-
- if init_callback:
- init_callback(args, internal_targets)
-
- cloud_init(args, internal_targets)
-
- vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
-
- if os.path.exists(vars_file_src):
- def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
- """
- Add the integration config vars file to the payload file list.
- This will preserve the file during delegation even if the file is ignored by source control.
- """
- files.append((vars_file_src, data_context().content.integration_vars_path))
-
- data_context().register_payload_callback(integration_config_callback)
-
- if args.delegate:
- raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)
-
- install_command_requirements(args)
-
- return internal_targets
-
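A minimal sketch of alias-based include/exclude filtering in the spirit of the target selection above; the helper and data are hypothetical, not the real walk_internal_targets implementation:

def filter_targets(targets, include, exclude):
    """Keep targets matching an include alias (when given) and no exclude alias."""
    selected = []
    for name, aliases in targets:
        if include and not any(alias in aliases for alias in include):
            continue
        if any(alias in aliases for alias in exclude):
            continue
        selected.append(name)
    return selected

targets = [('ping', {'posix/', 'ping'}), ('vault', {'disabled/', 'vault'})]
print(filter_targets(targets, [], ['disabled/']))  # -> ['ping']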
-
-def command_integration_filtered(args, targets, all_targets, inventory_path, pre_target=None, post_target=None,
- remote_temp_path=None):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :type all_targets: tuple[IntegrationTarget]
- :type inventory_path: str
- :type pre_target: (IntegrationTarget) -> None | None
- :type post_target: (IntegrationTarget) -> None | None
- :type remote_temp_path: str | None
- """
- found = False
- passed = []
- failed = []
-
- targets_iter = iter(targets)
- all_targets_dict = dict((target.name, target) for target in all_targets)
-
- setup_errors = []
- setup_targets_executed = set()
-
- for target in all_targets:
- for setup_target in target.setup_once + target.setup_always:
- if setup_target not in all_targets_dict:
- setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))
-
- if setup_errors:
- raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))
-
- check_pyyaml(args, args.python_version)
-
- test_dir = os.path.join(ResultType.TMP.path, 'output_dir')
-
- if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
- max_tries = 20
- display.info('SSH service required for tests. Checking to make sure we can connect.')
- for i in range(1, max_tries + 1):
- try:
- run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
- display.info('SSH service responded.')
- break
- except SubprocessError:
- if i == max_tries:
- raise
- seconds = 3
- display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
- time.sleep(seconds)
-
-    # Windows is different, as Ansible execution is done locally but the host is remote
- if args.inject_httptester and not isinstance(args, WindowsIntegrationConfig):
- inject_httptester(args)
-
- start_at_task = args.start_at_task
-
- results = {}
-
- current_environment = None # type: t.Optional[EnvironmentDescription]
-
- # common temporary directory path that will be valid on both the controller and the remote
- # it must be common because it will be referenced in environment variables that are shared across multiple hosts
- common_temp_path = '/tmp/ansible-test-%s' % ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(8))
-
- setup_common_temp_dir(args, common_temp_path)
-
- try:
- for target in targets_iter:
- if args.start_at and not found:
- found = target.name == args.start_at
- if not found:
- continue
-
- if args.list_targets:
- print(target.name)
- continue
-
- tries = 2 if args.retry_on_error else 1
- verbosity = args.verbosity
-
- cloud_environment = get_cloud_environment(args, target)
-
- original_environment = current_environment if current_environment else EnvironmentDescription(args)
- current_environment = None
-
- display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)
-
- try:
- while tries:
- tries -= 1
-
- try:
- if cloud_environment:
- cloud_environment.setup_once()
-
- run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, False)
-
- start_time = time.time()
-
- run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, True)
-
- if not args.explain:
- # create a fresh test directory for each test target
- remove_tree(test_dir)
- make_dirs(test_dir)
-
- if pre_target:
- pre_target(target)
-
- try:
- if target.script_path:
- command_integration_script(args, target, test_dir, inventory_path, common_temp_path,
- remote_temp_path=remote_temp_path)
- else:
- command_integration_role(args, target, start_at_task, test_dir, inventory_path,
- common_temp_path, remote_temp_path=remote_temp_path)
- start_at_task = None
- finally:
- if post_target:
- post_target(target)
-
- end_time = time.time()
-
- results[target.name] = dict(
- name=target.name,
- type=target.type,
- aliases=target.aliases,
- modules=target.modules,
- run_time_seconds=int(end_time - start_time),
- setup_once=target.setup_once,
- setup_always=target.setup_always,
- coverage=args.coverage,
- coverage_label=args.coverage_label,
- python_version=args.python_version,
- )
-
- break
- except SubprocessError:
- if cloud_environment:
- cloud_environment.on_failure(target, tries)
-
- if not original_environment.validate(target.name, throw=False):
- raise
-
- if not tries:
- raise
-
- display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
- display.verbosity = args.verbosity = 6
-
- start_time = time.time()
- current_environment = EnvironmentDescription(args)
- end_time = time.time()
-
- EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)
-
- results[target.name]['validation_seconds'] = int(end_time - start_time)
-
- passed.append(target)
- except Exception as ex:
- failed.append(target)
-
- if args.continue_on_error:
- display.error(ex)
- continue
-
- display.notice('To resume at this test target, use the option: --start-at %s' % target.name)
-
- next_target = next(targets_iter, None)
-
- if next_target:
- display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)
-
- raise
- finally:
- display.verbosity = args.verbosity = verbosity
-
- finally:
- if not args.explain:
- if args.coverage:
- coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
- coverage_save_path = ResultType.COVERAGE.path
-
- for filename in os.listdir(coverage_temp_path):
- shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))
-
- remove_tree(common_temp_path)
-
- result_name = '%s-%s.json' % (
- args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
-
- data = dict(
- targets=results,
- )
-
- write_json_test_results(ResultType.DATA, result_name, data)
-
- if failed:
- raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
- len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
-
-
-def start_httptester(args):
- """
- :type args: EnvironmentConfig
- :rtype: str, list[str]
- """
-
- # map ports from remote -> localhost -> container
- # passing through localhost is only used when ansible-test is not already running inside a docker container
- ports = [
- dict(
- remote=8080,
- container=80,
- ),
- dict(
- remote=8088,
- container=88,
- ),
- dict(
- remote=8443,
- container=443,
- ),
- dict(
- remote=8444,
- container=444,
- ),
- dict(
- remote=8749,
- container=749,
- ),
- ]
-
- container_id = get_docker_container_id()
-
- if not container_id:
- for item in ports:
- item['localhost'] = get_available_port()
-
- docker_pull(args, args.httptester)
-
- httptester_id = run_httptester(args, dict((port['localhost'], port['container']) for port in ports if 'localhost' in port))
-
- if container_id:
- container_host = get_docker_container_ip(args, httptester_id)
- display.info('Found httptester container address: %s' % container_host, verbosity=1)
- else:
- container_host = get_docker_hostname()
-
- ssh_options = []
-
- for port in ports:
- ssh_options += ['-R', '%d:%s:%d' % (port['remote'], container_host, port.get('localhost', port['container']))]
-
- return httptester_id, ssh_options
-
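A minimal sketch of the remote -> localhost -> container port chain built above, with a hypothetical free-port picker standing in for get_available_port():

ports = [
    dict(remote=8080, container=80),
    dict(remote=8443, container=443),
]

for item in ports:
    item['localhost'] = 20000 + item['container']  # hypothetical free port

container_host = 'localhost'
ssh_options = []

for port in ports:
    ssh_options += ['-R', '%d:%s:%d' % (port['remote'], container_host, port.get('localhost', port['container']))]

print(ssh_options)  # -> ['-R', '8080:localhost:20080', '-R', '8443:localhost:20443']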
-
-def run_httptester(args, ports=None):
- """
- :type args: EnvironmentConfig
- :type ports: dict[int, int] | None
- :rtype: str
- """
- options = [
- '--detach',
- '--env', 'KRB5_PASSWORD=%s' % args.httptester_krb5_password,
- ]
-
- if ports:
- for localhost_port, container_port in ports.items():
- options += ['-p', '%d:%d' % (localhost_port, container_port)]
-
- network = get_docker_preferred_network_name(args)
-
- if is_docker_user_defined_network(network):
- # network-scoped aliases are only supported for containers in user defined networks
- for alias in HTTPTESTER_HOSTS:
- options.extend(['--network-alias', alias])
-
- httptester_id = docker_run(args, args.httptester, options=options)[0]
-
- if args.explain:
- httptester_id = 'httptester_id'
- else:
- httptester_id = httptester_id.strip()
-
- return httptester_id
-
-
-def inject_httptester(args):
- """
- :type args: CommonConfig
- """
- comment = ' # ansible-test httptester\n'
- append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]
- hosts_path = '/etc/hosts'
-
- original_lines = read_text_file(hosts_path).splitlines(True)
-
- if not any(line.endswith(comment) for line in original_lines):
- write_text_file(hosts_path, ''.join(original_lines + append_lines))
-
- # determine which forwarding mechanism to use
- pfctl = find_executable('pfctl', required=False)
- iptables = find_executable('iptables', required=False)
-
- if pfctl:
- kldload = find_executable('kldload', required=False)
-
- if kldload:
- try:
- run_command(args, ['kldload', 'pf'], capture=True)
- except SubprocessError:
- pass # already loaded
-
- rules = '''
-rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
-rdr pass inet proto tcp from any to any port 88 -> 127.0.0.1 port 8088
-rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
-rdr pass inet proto tcp from any to any port 444 -> 127.0.0.1 port 8444
-rdr pass inet proto tcp from any to any port 749 -> 127.0.0.1 port 8749
-'''
- cmd = ['pfctl', '-ef', '-']
-
- try:
- run_command(args, cmd, capture=True, data=rules)
- except SubprocessError:
- pass # non-zero exit status on success
-
- elif iptables:
- ports = [
- (80, 8080),
- (88, 8088),
- (443, 8443),
- (444, 8444),
- (749, 8749),
- ]
-
- for src, dst in ports:
- rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]
-
- try:
- # check for existing rule
- cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
- run_command(args, cmd, capture=True)
- except SubprocessError:
- # append rule when it does not exist
- cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
- run_command(args, cmd, capture=True)
- else:
- raise ApplicationError('No supported port forwarding mechanism detected.')
-
-
-def run_pypi_proxy(args): # type: (EnvironmentConfig) -> t.Tuple[t.Optional[str], t.Optional[str]]
- """Run a PyPI proxy container, returning the container ID and proxy endpoint."""
- use_proxy = False
-
- if args.docker_raw == 'centos6':
- use_proxy = True # python 2.6 is the only version available
-
- if args.docker_raw == 'default':
- if args.python == '2.6':
- use_proxy = True # python 2.6 requested
- elif not args.python and isinstance(args, (SanityConfig, UnitsConfig, ShellConfig)):
- use_proxy = True # multiple versions (including python 2.6) can be used
-
- if args.docker_raw and args.pypi_proxy:
- use_proxy = True # manual override to force proxy usage
-
- if not use_proxy:
- return None, None
-
- proxy_image = 'quay.io/ansible/pypi-test-container:1.0.0'
- port = 3141
-
- options = [
- '--detach',
- ]
-
- docker_pull(args, proxy_image)
-
- container_id = docker_run(args, proxy_image, options=options)[0]
-
- if args.explain:
- container_id = 'pypi_id'
- container_ip = '127.0.0.1'
- else:
- container_id = container_id.strip()
- container_ip = get_docker_container_ip(args, container_id)
-
- endpoint = 'http://%s:%d/root/pypi/+simple/' % (container_ip, port)
-
- return container_id, endpoint
-
-
-def configure_pypi_proxy(args): # type: (CommonConfig) -> None
- """Configure the environment to use a PyPI proxy, if present."""
- if not isinstance(args, EnvironmentConfig):
- return
-
- if args.pypi_endpoint:
- configure_pypi_block_access()
- configure_pypi_proxy_pip(args)
- configure_pypi_proxy_easy_install(args)
-
-
-def configure_pypi_block_access(): # type: () -> None
- """Block direct access to PyPI to ensure proxy configurations are always used."""
- if os.getuid() != 0:
- display.warning('Skipping custom hosts block for PyPI for non-root user.')
- return
-
- hosts_path = '/etc/hosts'
- hosts_block = '''
-127.0.0.1 pypi.org pypi.python.org files.pythonhosted.org
-'''
-
- def hosts_cleanup():
- display.info('Removing custom PyPI hosts entries: %s' % hosts_path, verbosity=1)
-
- with open(hosts_path) as hosts_file_read:
- content = hosts_file_read.read()
-
- content = content.replace(hosts_block, '')
-
- with open(hosts_path, 'w') as hosts_file_write:
- hosts_file_write.write(content)
-
- display.info('Injecting custom PyPI hosts entries: %s' % hosts_path, verbosity=1)
- display.info('Config: %s\n%s' % (hosts_path, hosts_block), verbosity=3)
-
- with open(hosts_path, 'a') as hosts_file:
- hosts_file.write(hosts_block)
-
- atexit.register(hosts_cleanup)
-
-
-def configure_pypi_proxy_pip(args): # type: (EnvironmentConfig) -> None
- """Configure a custom index for pip based installs."""
- pypi_hostname = urlparse(args.pypi_endpoint)[1].split(':')[0]
-
- pip_conf_path = os.path.expanduser('~/.pip/pip.conf')
- pip_conf = '''
-[global]
-index-url = {0}
-trusted-host = {1}
-'''.format(args.pypi_endpoint, pypi_hostname).strip()
-
- def pip_conf_cleanup():
- display.info('Removing custom PyPI config: %s' % pip_conf_path, verbosity=1)
- os.remove(pip_conf_path)
-
- if os.path.exists(pip_conf_path):
- raise ApplicationError('Refusing to overwrite existing file: %s' % pip_conf_path)
-
- display.info('Injecting custom PyPI config: %s' % pip_conf_path, verbosity=1)
- display.info('Config: %s\n%s' % (pip_conf_path, pip_conf), verbosity=3)
-
- write_text_file(pip_conf_path, pip_conf, True)
- atexit.register(pip_conf_cleanup)
-
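What the rendered pip.conf body looks like for a hypothetical proxy endpoint, using the same hostname extraction as above:

from urllib.parse import urlparse

pypi_endpoint = 'http://192.0.2.10:3141/root/pypi/+simple/'  # hypothetical endpoint
pypi_hostname = urlparse(pypi_endpoint)[1].split(':')[0]

pip_conf = '''
[global]
index-url = {0}
trusted-host = {1}
'''.format(pypi_endpoint, pypi_hostname).strip()

print(pip_conf)  # index-url is the proxy; trusted-host allows the plain-http endpoint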
-
-def configure_pypi_proxy_easy_install(args): # type: (EnvironmentConfig) -> None
- """Configure a custom index for easy_install based installs."""
- pydistutils_cfg_path = os.path.expanduser('~/.pydistutils.cfg')
- pydistutils_cfg = '''
-[easy_install]
-index_url = {0}
-'''.format(args.pypi_endpoint).strip()
-
- if os.path.exists(pydistutils_cfg_path):
- raise ApplicationError('Refusing to overwrite existing file: %s' % pydistutils_cfg_path)
-
- def pydistutils_cfg_cleanup():
- display.info('Removing custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
- os.remove(pydistutils_cfg_path)
-
- display.info('Injecting custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
- display.info('Config: %s\n%s' % (pydistutils_cfg_path, pydistutils_cfg), verbosity=3)
-
- write_text_file(pydistutils_cfg_path, pydistutils_cfg, True)
- atexit.register(pydistutils_cfg_cleanup)
-
-
-def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, inventory_path, temp_path, always):
- """
- :type args: IntegrationConfig
- :type test_dir: str
- :type target_names: list[str]
- :type targets_dict: dict[str, IntegrationTarget]
- :type targets_executed: set[str]
- :type inventory_path: str
- :type temp_path: str
- :type always: bool
- """
- for target_name in target_names:
- if not always and target_name in targets_executed:
- continue
-
- target = targets_dict[target_name]
-
- if not args.explain:
- # create a fresh test directory for each test target
- remove_tree(test_dir)
- make_dirs(test_dir)
-
- if target.script_path:
- command_integration_script(args, target, test_dir, inventory_path, temp_path)
- else:
- command_integration_role(args, target, None, test_dir, inventory_path, temp_path)
-
- targets_executed.add(target_name)
-
-
-def integration_environment(args, target, test_dir, inventory_path, ansible_config, env_config):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type test_dir: str
- :type inventory_path: str
- :type ansible_config: str | None
- :type env_config: CloudEnvironmentConfig | None
- :rtype: dict[str, str]
- """
- env = ansible_environment(args, ansible_config=ansible_config)
-
- if args.inject_httptester:
- env.update(dict(
- HTTPTESTER='1',
- KRB5_PASSWORD=args.httptester_krb5_password,
- ))
-
- callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])
-
- integration = dict(
- JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
- ANSIBLE_CALLBACKS_ENABLED=','.join(sorted(set(callback_plugins))),
- ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code,
- ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),
- OUTPUT_DIR=test_dir,
- INVENTORY_PATH=os.path.abspath(inventory_path),
- )
-
- if args.debug_strategy:
- env.update(dict(ANSIBLE_STRATEGY='debug'))
-
- if 'non_local/' in target.aliases:
- if args.coverage:
- display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)
-
- env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))
-
- env.update(integration)
-
- return env
-
-
-def command_integration_script(args, target, test_dir, inventory_path, temp_path, remote_temp_path=None):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type test_dir: str
- :type inventory_path: str
- :type temp_path: str
- :type remote_temp_path: str | None
- """
- display.info('Running %s integration test script' % target.name)
-
- env_config = None
-
- if isinstance(args, PosixIntegrationConfig):
- cloud_environment = get_cloud_environment(args, target)
-
- if cloud_environment:
- env_config = cloud_environment.get_environment_config()
-
- with integration_test_environment(args, target, inventory_path) as test_env:
- cmd = ['./%s' % os.path.basename(target.script_path)]
-
- if args.verbosity:
- cmd.append('-' + ('v' * args.verbosity))
-
- env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
- cwd = os.path.join(test_env.targets_dir, target.relative_path)
-
- env.update(dict(
- # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
- ANSIBLE_PLAYBOOK_DIR=cwd,
- ))
-
- if env_config and env_config.env_vars:
- env.update(env_config.env_vars)
-
- with integration_test_config_file(args, env_config, test_env.integration_dir) as config_path:
- if config_path:
- cmd += ['-e', '@%s' % config_path]
-
- module_coverage = 'non_local/' not in target.aliases
- intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
- remote_temp_path=remote_temp_path, module_coverage=module_coverage)
-
-
-def command_integration_role(args, target, start_at_task, test_dir, inventory_path, temp_path, remote_temp_path=None):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type start_at_task: str | None
- :type test_dir: str
- :type inventory_path: str
- :type temp_path: str
- :type remote_temp_path: str | None
- """
- display.info('Running %s integration test role' % target.name)
-
- env_config = None
-
- vars_files = []
- variables = dict(
- output_dir=test_dir,
- )
-
- if isinstance(args, WindowsIntegrationConfig):
- hosts = 'windows'
- gather_facts = False
- variables.update(dict(
- win_output_dir=r'C:\ansible_testing',
- ))
- elif isinstance(args, NetworkIntegrationConfig):
- hosts = target.network_platform
- gather_facts = False
- else:
- hosts = 'testhost'
- gather_facts = True
-
- cloud_environment = get_cloud_environment(args, target)
-
- if cloud_environment:
- env_config = cloud_environment.get_environment_config()
-
- with integration_test_environment(args, target, inventory_path) as test_env:
- if os.path.exists(test_env.vars_file):
- vars_files.append(os.path.relpath(test_env.vars_file, test_env.integration_dir))
-
- play = dict(
- hosts=hosts,
- gather_facts=gather_facts,
- vars_files=vars_files,
- vars=variables,
- roles=[
- target.name,
- ],
- )
-
- if env_config:
- if env_config.ansible_vars:
- variables.update(env_config.ansible_vars)
-
- play.update(dict(
- environment=env_config.env_vars,
- module_defaults=env_config.module_defaults,
- ))
-
- playbook = json.dumps([play], indent=4, sort_keys=True)
-
- with named_temporary_file(args=args, directory=test_env.integration_dir, prefix='%s-' % target.name, suffix='.yml', content=playbook) as playbook_path:
- filename = os.path.basename(playbook_path)
-
- display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)
-
- cmd = ['ansible-playbook', filename, '-i', os.path.relpath(test_env.inventory_path, test_env.integration_dir)]
-
- if start_at_task:
- cmd += ['--start-at-task', start_at_task]
-
- if args.tags:
- cmd += ['--tags', args.tags]
-
- if args.skip_tags:
- cmd += ['--skip-tags', args.skip_tags]
-
- if args.diff:
- cmd += ['--diff']
-
- if isinstance(args, NetworkIntegrationConfig):
- if args.testcase:
- cmd += ['-e', 'testcase=%s' % args.testcase]
-
- if args.verbosity:
- cmd.append('-' + ('v' * args.verbosity))
-
- env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config)
- cwd = test_env.integration_dir
-
- env.update(dict(
- # support use of adhoc ansible commands in collections without specifying the fully qualified collection name
- ANSIBLE_PLAYBOOK_DIR=cwd,
- ))
-
- env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir
-
- module_coverage = 'non_local/' not in target.aliases
- intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
- remote_temp_path=remote_temp_path, module_coverage=module_coverage)
-
-
-def get_changes_filter(args):
- """
- :type args: TestConfig
- :rtype: list[str]
- """
+def get_changes_filter(args): # type: (TestConfig) -> t.List[str]
+ """Return a list of targets which should be tested based on the changes made."""
paths = detect_changes(args)
if not args.metadata.change_description:
@@ -1791,11 +57,8 @@ def get_changes_filter(args):
return args.metadata.change_description.targets
-def detect_changes(args):
- """
- :type args: TestConfig
- :rtype: list[str] | None
- """
+def detect_changes(args): # type: (TestConfig) -> t.Optional[t.List[str]]
+ """Return a list of changed paths."""
if args.changed:
paths = get_ci_provider().detect_changes(args)
elif args.changed_from or args.changed_path:
@@ -1816,481 +79,37 @@ def detect_changes(args):
return paths
-def get_integration_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- if args.docker:
- return get_integration_docker_filter(args, targets)
-
- if args.remote:
- return get_integration_remote_filter(args, targets)
-
- return get_integration_local_filter(args, targets)
-
-
-def common_integration_filter(args, targets, exclude):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :type exclude: list[str]
- """
- override_disabled = set(target for target in args.include if target.startswith('disabled/'))
-
- if not args.allow_disabled:
- skip = 'disabled/'
- override = [target.name for target in targets if override_disabled & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))
-
- if not args.allow_unsupported:
- skip = 'unsupported/'
- override = [target.name for target in targets if override_unsupported & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_unstable = set(target for target in args.include if target.startswith('unstable/'))
-
- if args.allow_unstable_changed:
- override_unstable |= set(args.metadata.change_description.focused_targets or [])
-
- if not args.allow_unstable:
- skip = 'unstable/'
- override = [target.name for target in targets if override_unstable & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- # only skip a Windows test if using --windows and all the --windows versions are defined in the aliases as skip/windows/%s
- if isinstance(args, WindowsIntegrationConfig) and args.windows:
- all_skipped = []
- not_skipped = []
-
- for target in targets:
- if "skip/windows/" not in target.aliases:
- continue
-
- skip_valid = []
- skip_missing = []
- for version in args.windows:
- if "skip/windows/%s/" % version in target.aliases:
- skip_valid.append(version)
- else:
- skip_missing.append(version)
-
- if skip_missing and skip_valid:
- not_skipped.append((target.name, skip_valid, skip_missing))
- elif skip_valid:
- all_skipped.append(target.name)
-
- if all_skipped:
- exclude.extend(all_skipped)
- skip_aliases = ["skip/windows/%s/" % w for w in args.windows]
- display.warning('Excluding tests marked "%s" which are set to skip with --windows %s: %s'
- % ('", "'.join(skip_aliases), ', '.join(args.windows), ', '.join(all_skipped)))
-
- if not_skipped:
- for target, skip_valid, skip_missing in not_skipped:
- # warn when failing to skip due to lack of support for skipping only some versions
- display.warning('Including test "%s" which was marked to skip for --windows %s but not %s.'
- % (target, ', '.join(skip_valid), ', '.join(skip_missing)))
-
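A minimal sketch of the "override by explicit include" rule applied above: a target marked disabled/ is skipped unless an include entry prefixed with disabled/ matches one of its aliases (the data is hypothetical):

include = ['disabled/broken_test']
targets = [('broken_test', {'disabled/', 'disabled/broken_test'})]

override_disabled = set(target for target in include if target.startswith('disabled/'))
override = [name for name, aliases in targets if override_disabled & aliases]
skipped = [name for name, aliases in targets if 'disabled/' in aliases and name not in override]

print(skipped)  # -> [] because the explicit include overrides the disabled marker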
-
-def get_integration_local_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- if not args.allow_root and os.getuid() != 0:
- skip = 'needs/root/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_destructive = set(target for target in args.include if target.startswith('destructive/'))
-
- if not args.allow_destructive:
- skip = 'destructive/'
- override = [target.name for target in targets if override_destructive & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- exclude_targets_by_python_version(targets, args.python_version, exclude)
-
- return exclude
-
-
-def get_integration_docker_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- skip = 'skip/docker/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- if not args.docker_privileged:
- skip = 'needs/privileged/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- python_version = get_python_version(args, get_docker_completion(), args.docker_raw)
-
- exclude_targets_by_python_version(targets, python_version, exclude)
-
- return exclude
-
-
-def get_integration_remote_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- remote = args.parsed_remote
-
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- skips = {
- 'skip/%s' % remote.platform: remote.platform,
- 'skip/%s/%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),
- 'skip/%s%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version), # legacy syntax, use above format
- }
-
- if remote.arch:
- skips.update({
- 'skip/%s/%s' % (remote.arch, remote.platform): '%s on %s' % (remote.platform, remote.arch),
- 'skip/%s/%s/%s' % (remote.arch, remote.platform, remote.version): '%s %s on %s' % (remote.platform, remote.version, remote.arch),
- })
-
- for skip, description in skips.items():
- skipped = [target.name for target in targets if skip in target.skips]
- if skipped:
- exclude.append(skip + '/')
- display.warning('Excluding tests marked "%s" which are not supported on %s: %s' % (skip, description, ', '.join(skipped)))
-
- python_version = get_python_version(args, get_remote_completion(), args.remote)
-
- exclude_targets_by_python_version(targets, python_version, exclude)
-
- return exclude
-
-
-def exclude_targets_by_python_version(targets, python_version, exclude):
- """
- :type targets: tuple[IntegrationTarget]
- :type python_version: str
- :type exclude: list[str]
- """
- if not python_version:
- display.warning('Python version unknown. Unable to skip tests based on Python version.')
- return
-
- python_major_version = python_version.split('.')[0]
-
- skip = 'skip/python%s/' % python_version
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
- % (skip.rstrip('/'), python_version, ', '.join(skipped)))
-
- skip = 'skip/python%s/' % python_major_version
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
- % (skip.rstrip('/'), python_version, ', '.join(skipped)))
-
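The two alias forms checked above, spelled out for a hypothetical Python version:

python_version = '3.8'  # hypothetical
python_major_version = python_version.split('.')[0]

print('skip/python%s/' % python_version)        # -> skip/python3.8/
print('skip/python%s/' % python_major_version)  # -> skip/python3/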
-
-def get_python_version(args, configs, name):
- """
- :type args: EnvironmentConfig
- :type configs: dict[str, dict[str, str]]
- :type name: str
- """
- config = configs.get(name, {})
- config_python = config.get('python')
-
- if not config or not config_python:
- if args.python:
- return args.python
-
- display.warning('No Python version specified. '
- 'Use completion config or the --python option to specify one.', unique=True)
-
- return '' # failure to provide a version may result in failures or reduced functionality later
-
- supported_python_versions = config_python.split(',')
- default_python_version = supported_python_versions[0]
-
- if args.python and args.python not in supported_python_versions:
- raise ApplicationError('Python %s is not supported by %s. Supported Python version(s) are: %s' % (
- args.python, name, ', '.join(sorted(supported_python_versions))))
-
- python_version = args.python or default_python_version
-
- return python_version
-
-
-def get_python_interpreter(args, configs, name):
- """
- :type args: EnvironmentConfig
- :type configs: dict[str, dict[str, str]]
- :type name: str
- """
- if args.python_interpreter:
- return args.python_interpreter
-
- config = configs.get(name, {})
-
- if not config:
- if args.python:
- guess = 'python%s' % args.python
- else:
- guess = 'python'
-
- display.warning('Using "%s" as the Python interpreter. '
- 'Use completion config or the --python-interpreter option to specify the path.' % guess, unique=True)
-
- return guess
-
- python_version = get_python_version(args, configs, name)
-
- python_dir = config.get('python_dir', '/usr/bin')
- python_interpreter = os.path.join(python_dir, 'python%s' % python_version)
- python_interpreter = config.get('python%s' % python_version, python_interpreter)
-
- return python_interpreter
-
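A minimal sketch of the interpreter-path resolution above, with a hypothetical completion entry; an explicit 'pythonX.Y' key in the entry overrides the constructed path:

import os

config = {'python': '3.8,2.7', 'python_dir': '/usr/local/bin'}  # hypothetical entry

python_version = config['python'].split(',')[0]  # the first supported version is the default
python_dir = config.get('python_dir', '/usr/bin')
python_interpreter = os.path.join(python_dir, 'python%s' % python_version)
python_interpreter = config.get('python%s' % python_version, python_interpreter)

print(python_interpreter)  # -> /usr/local/bin/python3.8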
-
-class EnvironmentDescription:
- """Description of current running environment."""
- def __init__(self, args):
- """Initialize snapshot of environment configuration.
- :type args: IntegrationConfig
- """
- self.args = args
-
- if self.args.explain:
- self.data = {}
- return
-
- warnings = []
-
- versions = ['']
- versions += SUPPORTED_PYTHON_VERSIONS
- versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))
-
- version_check = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'versions.py')
- python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
- pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
- program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
- pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
- known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts'))
-
- for version in sorted(versions):
- self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
-
- for warning in warnings:
- display.warning(warning, unique=True)
-
- self.data = dict(
- python_paths=python_paths,
- pip_paths=pip_paths,
- program_versions=program_versions,
- pip_interpreters=pip_interpreters,
- known_hosts_hash=known_hosts_hash,
- warnings=warnings,
- )
-
- @staticmethod
- def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
- """
- :type version: str
-        :type python_paths: dict[str, str]
-        :type pip_paths: dict[str, str]
-        :type pip_interpreters: dict[str, str]
-        :type warnings: list[str]
- """
- python_label = 'Python%s' % (' %s' % version if version else '')
-
- pip_path = pip_paths.get(version)
- python_path = python_paths.get(version)
-
- if not python_path or not pip_path:
- # skip checks when either python or pip are missing for this version
- return
-
- pip_shebang = pip_interpreters.get(version)
-
- match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)
-
- if not match:
- warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
- return
-
- pip_interpreter = os.path.realpath(match.group('command'))
- python_interpreter = os.path.realpath(python_path)
-
- if pip_interpreter == python_interpreter:
- return
-
- try:
- identical = filecmp.cmp(pip_interpreter, python_interpreter)
- except OSError:
- identical = False
-
- if identical:
- return
-
- warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
- python_label, pip_path, pip_interpreter, python_interpreter))
-
- def __str__(self):
- """
- :rtype: str
- """
- return json.dumps(self.data, sort_keys=True, indent=4)
-
- def validate(self, target_name, throw):
- """
- :type target_name: str
- :type throw: bool
- :rtype: bool
- """
- current = EnvironmentDescription(self.args)
-
- return self.check(self, current, target_name, throw)
-
- @staticmethod
- def check(original, current, target_name, throw):
- """
- :type original: EnvironmentDescription
- :type current: EnvironmentDescription
- :type target_name: str
- :type throw: bool
- :rtype: bool
- """
- original_json = str(original)
- current_json = str(current)
-
- if original_json == current_json:
- return True
-
- unified_diff = '\n'.join(difflib.unified_diff(
- a=original_json.splitlines(),
- b=current_json.splitlines(),
- fromfile='original.json',
- tofile='current.json',
- lineterm='',
- ))
-
- message = ('Test target "%s" has changed the test environment!\n'
- 'If these changes are necessary, they must be reverted before the test finishes.\n'
- '>>> Original Environment\n'
- '%s\n'
- '>>> Current Environment\n'
- '%s\n'
- '>>> Environment Diff\n'
- '%s'
- % (target_name, original_json, current_json, unified_diff))
-
- if throw:
- raise ApplicationError(message)
-
- display.error(message)
-
- return False
-
- @staticmethod
- def get_version(command, warnings):
- """
- :type command: list[str]
-        :type warnings: list[str]
-        :rtype: list[str] | None
- """
- try:
- stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
- except SubprocessError as ex:
- warnings.append(u'%s' % ex)
-            return None # all failures are equal; we don't care why it failed, only that it did
-
- return [line.strip() for line in ((stdout or '').strip() + (stderr or '').strip()).splitlines()]
-
- @staticmethod
- def get_shebang(path):
- """
- :type path: str
- :rtype: str
- """
- with open_text_file(path) as script_fd:
- return script_fd.readline().strip()
-
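A minimal sketch of the shebang association check above, with a hypothetical pip script header:

import re

pip_shebang = '#!/usr/bin/python3.8'  # hypothetical first line of a pip script
match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)

print(match.group('command') if match else None)  # -> /usr/bin/python3.8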
-
class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found."""
def __init__(self):
- super(NoChangesDetected, self).__init__('No changes detected.')
+ super().__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
"""Exception when changes detected, but no tests trigger as a result."""
def __init__(self):
- super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
+ super().__init__('No tests found for detected changes.')
class Delegate(Exception):
"""Trigger command delegation."""
- def __init__(self, exclude=None, require=None, integration_targets=None):
- """
- :type exclude: list[str] | None
- :type require: list[str] | None
- :type integration_targets: tuple[IntegrationTarget] | None
- """
- super(Delegate, self).__init__()
+ def __init__(self, host_state, exclude=None, require=None): # type: (HostState, t.List[str], t.List[str]) -> None
+ super().__init__()
+ self.host_state = host_state
self.exclude = exclude or []
self.require = require or []
- self.integration_targets = integration_targets or tuple()
+
+
+class ListTargets(Exception):
+ """List integration test targets instead of executing them."""
+ def __init__(self, target_names): # type: (t.List[str]) -> None
+ super().__init__()
+
+ self.target_names = target_names
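A minimal sketch of the exception-as-signal pattern shared by Delegate and ListTargets above; the demo class and handler are hypothetical:

class ListTargetsDemo(Exception):
    """Carry a payload up to a top-level handler (hypothetical demo class)."""
    def __init__(self, target_names):
        super().__init__()
        self.target_names = target_names

try:
    raise ListTargetsDemo(['ping', 'copy'])
except ListTargetsDemo as ex:
    print('\n'.join(ex.target_names))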
class AllTargetsSkipped(ApplicationWarning):
"""All targets skipped."""
def __init__(self):
- super(AllTargetsSkipped, self).__init__('All targets skipped.')
+ super().__init__('All targets skipped.')
diff --git a/test/lib/ansible_test/_internal/git.py b/test/lib/ansible_test/_internal/git.py
index acc39f3f..038f3988 100644
--- a/test/lib/ansible_test/_internal/git.py
+++ b/test/lib/ansible_test/_internal/git.py
@@ -1,10 +1,8 @@
"""Wrapper around git command-line tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
-
-from . import types as t
+import typing as t
from .util import (
SubprocessError,
@@ -18,22 +16,15 @@ class Git:
self.git = 'git'
self.root = root
- def get_diff(self, args, git_options=None):
- """
- :type args: list[str]
- :type git_options: list[str] | None
- :rtype: list[str]
- """
+ def get_diff(self, args, git_options=None): # type: (t.List[str], t.Optional[t.List[str]]) -> t.List[str]
+ """Run `git diff` and return the result as a list."""
cmd = ['diff'] + args
if git_options is None:
git_options = ['-c', 'core.quotePath=']
return self.run_git_split(git_options + cmd, '\n', str_errors='replace')
- def get_diff_names(self, args):
- """
- :type args: list[str]
- :rtype: list[str]
- """
+ def get_diff_names(self, args): # type: (t.List[str]) -> t.List[str]
+ """Return a list of file names from the `git diff` command."""
cmd = ['diff', '--name-only', '--no-renames', '-z'] + args
return self.run_git_split(cmd, '\0')
@@ -54,34 +45,23 @@ class Git:
return submodule_paths
- def get_file_names(self, args):
- """
- :type args: list[str]
- :rtype: list[str]
- """
+ def get_file_names(self, args): # type: (t.List[str]) -> t.List[str]
+ """Return a list of file names from the `git ls-files` command."""
cmd = ['ls-files', '-z'] + args
return self.run_git_split(cmd, '\0')
- def get_branches(self):
- """
- :rtype: list[str]
- """
+ def get_branches(self): # type: () -> t.List[str]
+ """Return the list of branches."""
cmd = ['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)']
return self.run_git_split(cmd)
- def get_branch(self):
- """
- :rtype: str
- """
+ def get_branch(self): # type: () -> str
+ """Return the current branch name."""
cmd = ['symbolic-ref', '--short', 'HEAD']
return self.run_git(cmd).strip()
- def get_rev_list(self, commits=None, max_count=None):
- """
- :type commits: list[str] | None
- :type max_count: int | None
- :rtype: list[str]
- """
+ def get_rev_list(self, commits=None, max_count=None): # type: (t.Optional[t.List[str]], t.Optional[int]) -> t.List[str]
+ """Return the list of results from the `git rev-list` command."""
cmd = ['rev-list']
if commits:
@@ -94,19 +74,13 @@ class Git:
return self.run_git_split(cmd)
- def get_branch_fork_point(self, branch):
- """
- :type branch: str
- :rtype: str
- """
+ def get_branch_fork_point(self, branch): # type: (str) -> str
+ """Return a reference to the point at which the given branch was forked."""
cmd = ['merge-base', '--fork-point', branch]
return self.run_git(cmd).strip()
- def is_valid_ref(self, ref):
- """
- :type ref: str
- :rtype: bool
- """
+ def is_valid_ref(self, ref): # type: (str) -> bool
+ """Return True if the given reference is valid, otherwise return False."""
cmd = ['show', ref]
try:
self.run_git(cmd, str_errors='replace')
@@ -114,13 +88,8 @@ class Git:
except SubprocessError:
return False
- def run_git_split(self, cmd, separator=None, str_errors='strict'):
- """
- :type cmd: list[str]
- :type separator: str | None
- :type str_errors: str
- :rtype: list[str]
- """
+ def run_git_split(self, cmd, separator=None, str_errors='strict'): # type: (t.List[str], t.Optional[str], str) -> t.List[str]
+ """Run the given `git` command and return the results as a list."""
output = self.run_git(cmd, str_errors=str_errors).strip(separator)
if not output:
@@ -128,10 +97,6 @@ class Git:
return output.split(separator)
- def run_git(self, cmd, str_errors='strict'):
- """
- :type cmd: list[str]
- :type str_errors: str
- :rtype: str
- """
+ def run_git(self, cmd, str_errors='strict'): # type: (t.List[str], str) -> str
+ """Run the given `git` command and return the results as a string."""
return raw_command([self.git] + cmd, cwd=self.root, capture=True, str_errors=str_errors)[0]
diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py
new file mode 100644
index 00000000..a819652e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/host_configs.py
@@ -0,0 +1,491 @@
+"""Configuration for the test hosts requested by the user."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import enum
+import os
+import pickle
+import sys
+import typing as t
+
+from .constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from .io import (
+ open_binary_file,
+)
+
+from .completion import (
+ CompletionConfig,
+ DOCKER_COMPLETION,
+ DockerCompletionConfig,
+ InventoryCompletionConfig,
+ NETWORK_COMPLETION,
+ NetworkRemoteCompletionConfig,
+ PosixCompletionConfig,
+ PosixRemoteCompletionConfig,
+ PosixSshCompletionConfig,
+ REMOTE_COMPLETION,
+ RemoteCompletionConfig,
+ WINDOWS_COMPLETION,
+ WindowsRemoteCompletionConfig,
+ filter_completion,
+)
+
+from .util import (
+ find_python,
+ get_available_python_versions,
+ str_to_version,
+ version_to_str,
+)
+
+
+@dataclasses.dataclass(frozen=True)
+class OriginCompletionConfig(PosixCompletionConfig):
+ """Pseudo completion config for the origin."""
+ def __init__(self):
+ super().__init__(name='origin')
+
+ @property
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+ current_version = version_to_str(sys.version_info[:2])
+ versions = [version for version in SUPPORTED_PYTHON_VERSIONS if version == current_version] + \
+ [version for version in SUPPORTED_PYTHON_VERSIONS if version != current_version]
+ return versions
+
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+ version = find_python(version)
+ return version
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
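A minimal sketch of the "current version first" ordering used by supported_pythons above; the version list is hypothetical:

import sys

SUPPORTED = ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']  # hypothetical
current_version = '%d.%d' % sys.version_info[:2]

versions = [v for v in SUPPORTED if v == current_version] + \
           [v for v in SUPPORTED if v != current_version]

print(versions)  # the interpreter running the sketch sorts first, if it is in the list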
+
+@dataclasses.dataclass(frozen=True)
+class HostContext:
+ """Context used when getting and applying defaults for host configurations."""
+ controller_config: t.Optional['PosixConfig']
+
+ @property
+ def controller(self): # type: () -> bool
+ """True if the context is for the controller, otherwise False."""
+ return not self.controller_config
+
+
+@dataclasses.dataclass
+class HostConfig(metaclass=abc.ABCMeta):
+ """Base class for host configuration."""
+ @abc.abstractmethod
+ def get_defaults(self, context): # type: (HostContext) -> CompletionConfig
+ """Return the default settings."""
+
+ @abc.abstractmethod
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
+ """Apply default settings."""
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if the host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return False
+
+
+@dataclasses.dataclass
+class PythonConfig(metaclass=abc.ABCMeta):
+ """Configuration for Python."""
+ version: t.Optional[str] = None
+ path: t.Optional[str] = None
+
+ @property
+ def tuple(self): # type: () -> t.Tuple[int, ...]
+ """Return the Python version as a tuple."""
+ return str_to_version(self.version)
+
+ @property
+ def major_version(self): # type: () -> int
+ """Return the Python major version."""
+ return self.tuple[0]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ if self.version in (None, 'default'):
+ self.version = defaults.get_default_python(context.controller)
+
+ if self.path:
+ if self.path.endswith('/'):
+ self.path = os.path.join(self.path, f'python{self.version}')
+
+ # FUTURE: If the host is origin, the python path could be validated here.
+ else:
+ self.path = defaults.get_python_path(self.version)
+
+ @property
+ @abc.abstractmethod
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+
+
+@dataclasses.dataclass
+class NativePythonConfig(PythonConfig):
+ """Configuration for native Python."""
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+ return False
+
+
+@dataclasses.dataclass
+class VirtualPythonConfig(PythonConfig):
+ """Configuration for Python in a virtual environment."""
+ system_site_packages: t.Optional[bool] = None
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ if self.system_site_packages is None:
+ self.system_site_packages = False
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+ return True
+
+
+@dataclasses.dataclass
+class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for POSIX host configuration."""
+ python: t.Optional[PythonConfig] = None
+
+ @property
+ @abc.abstractmethod
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+
+ @abc.abstractmethod
+ def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
+ """Return the default settings."""
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.python = self.python or NativePythonConfig()
+ self.python.apply_defaults(context, defaults)
+
+
+@dataclasses.dataclass
+class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
+ """Base class for host configurations which support the controller."""
+ @abc.abstractmethod
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+
+
+@dataclasses.dataclass
+class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for remote host configuration."""
+ name: t.Optional[str] = None
+ provider: t.Optional[str] = None
+
+ @property
+ def platform(self): # type: () -> str
+ """The name of the platform."""
+ return self.name.partition('/')[0]
+
+ @property
+ def version(self): # type: () -> str
+ """The version of the platform."""
+ return self.name.partition('/')[2]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, RemoteCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ if self.provider == 'default':
+ self.provider = None
+
+ self.provider = self.provider or defaults.provider or 'aws'
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return True
+
+
+@dataclasses.dataclass
+class PosixSshConfig(PosixConfig):
+ """Configuration for a POSIX SSH host."""
+ user: t.Optional[str] = None
+ host: t.Optional[str] = None
+ port: t.Optional[int] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixSshCompletionConfig
+ """Return the default settings."""
+ return PosixSshCompletionConfig(
+ user=self.user,
+ host=self.host,
+ )
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return self.user == 'root'
+
+
+@dataclasses.dataclass
+class InventoryConfig(HostConfig):
+ """Configuration using inventory."""
+ path: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> InventoryCompletionConfig
+ """Return the default settings."""
+ return InventoryCompletionConfig()
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, InventoryCompletionConfig) -> None
+ """Apply default settings."""
+
+
+@dataclasses.dataclass
+class DockerConfig(ControllerHostConfig, PosixConfig):
+ """Configuration for a docker host."""
+ name: t.Optional[str] = None
+ image: t.Optional[str] = None
+ memory: t.Optional[int] = None
+ privileged: t.Optional[bool] = None
+ seccomp: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> DockerCompletionConfig
+ """Return the default settings."""
+ return filter_completion(DOCKER_COMPLETION).get(self.name) or DockerCompletionConfig(
+ name=self.name,
+ image=self.name,
+ placeholder=True,
+ )
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ if self.name in filter_completion(DOCKER_COMPLETION):
+ defaults = self.get_defaults(context)
+ pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons}
+ else:
+ pythons = {context.controller_config.python.version: context.controller_config.python.path}
+
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, DockerCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.name = defaults.name
+ self.image = defaults.image
+
+ if self.seccomp is None:
+ self.seccomp = defaults.seccomp
+
+ if self.privileged is None:
+ self.privileged = False
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return True
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return True
+
+
+@dataclasses.dataclass
+class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
+ """Configuration for a POSIX remote host."""
+ arch: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(REMOTE_COMPLETION).get(self.name) or REMOTE_COMPLETION.get(self.platform) or PosixRemoteCompletionConfig(
+ name=self.name,
+ placeholder=True,
+ )
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ if self.name in filter_completion(REMOTE_COMPLETION):
+ defaults = self.get_defaults(context)
+ pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons}
+ else:
+ pythons = {context.controller_config.python.version: context.controller_config.python.path}
+
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return True
+
+
+@dataclasses.dataclass
+class WindowsConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for Windows host configuration."""
+
+
+@dataclasses.dataclass
+class WindowsRemoteConfig(RemoteConfig, WindowsConfig):
+ """Configuration for a remoe Windows host."""
+ def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(WINDOWS_COMPLETION).get(self.name) or WindowsRemoteCompletionConfig(
+ name=self.name,
+ )
+
+
+@dataclasses.dataclass
+class WindowsInventoryConfig(InventoryConfig, WindowsConfig):
+ """Configuration for Windows hosts using inventory."""
+
+
+@dataclasses.dataclass
+class NetworkConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for network host configuration."""
+
+
+@dataclasses.dataclass
+class NetworkRemoteConfig(RemoteConfig, NetworkConfig):
+ """Configuration for a remoe network host."""
+ collection: t.Optional[str] = None
+ connection: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> NetworkRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(NETWORK_COMPLETION).get(self.name) or NetworkRemoteCompletionConfig(
+ name=self.name,
+ )
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, NetworkRemoteCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.collection = self.collection or defaults.collection
+ self.connection = self.connection or defaults.connection
+
+
+@dataclasses.dataclass
+class NetworkInventoryConfig(InventoryConfig, NetworkConfig):
+ """Configuration for network hosts using inventory."""
+
+
+@dataclasses.dataclass
+class OriginConfig(ControllerHostConfig, PosixConfig):
+ """Configuration for the origin host."""
+ def get_defaults(self, context): # type: (HostContext) -> OriginCompletionConfig
+ """Return the default settings."""
+ return OriginCompletionConfig()
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in get_available_python_versions().items()]
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return os.getuid() == 0
+
+
+@dataclasses.dataclass
+class ControllerConfig(PosixConfig):
+ """Configuration for the controller host."""
+ controller: t.Optional[PosixConfig] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
+ """Return the default settings."""
+ return context.controller_config.get_defaults(context)
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ self.controller = context.controller_config
+
+ if not self.python and not defaults.supported_pythons:
+ # The user did not specify a target Python and supported Pythons are unknown, so use the controller Python specified by the user instead.
+ self.python = context.controller_config.python
+
+ super().apply_defaults(context, defaults)
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if the host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return self.controller.is_managed
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return self.controller.have_root
+
+
+class FallbackReason(enum.Enum):
+ """Reason fallback was peformed."""
+ ENVIRONMENT = enum.auto()
+ PYTHON = enum.auto()
+
+
+@dataclasses.dataclass(frozen=True)
+class FallbackDetail:
+ """Details about controller fallback behavior."""
+ reason: FallbackReason
+ message: str
+
+
+@dataclasses.dataclass(frozen=True)
+class HostSettings:
+ """Host settings for the controller and targets."""
+ controller: ControllerHostConfig
+ targets: t.List[HostConfig]
+ skipped_python_versions: t.List[str]
+ filtered_args: t.List[str]
+ controller_fallback: t.Optional[FallbackDetail]
+
+ def serialize(self, path): # type: (str) -> None
+ """Serialize the host settings to the given path."""
+ with open_binary_file(path, 'wb') as settings_file:
+ pickle.dump(self, settings_file)
+
+ @staticmethod
+ def deserialize(path): # type: (str) -> HostSettings
+ """Deserialize host settings from the path."""
+ with open_binary_file(path) as settings_file:
+ return pickle.load(settings_file)
+
+ def apply_defaults(self):
+ """Apply defaults to the host settings."""
+ context = HostContext(controller_config=None)
+ self.controller.apply_defaults(context, self.controller.get_defaults(context))
+
+ for target in self.targets:
+ context = HostContext(controller_config=self.controller)
+ target.apply_defaults(context, target.get_defaults(context))
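HostSettings.serialize/deserialize above carry the fully resolved host configuration across the delegation boundary via pickle. A minimal standalone sketch of the same round-trip pattern (plain dataclass and a temporary file, not the ansible-test API itself):

    import dataclasses
    import pickle
    import tempfile
    import typing as t

    @dataclasses.dataclass(frozen=True)
    class Settings:
        # stand-in for HostSettings; frozen dataclasses pickle by value
        controller: str
        targets: t.List[str]

    settings = Settings(controller='origin', targets=['docker/default'])

    with tempfile.NamedTemporaryFile() as settings_file:
        with open(settings_file.name, 'wb') as file:  # serialize
            pickle.dump(settings, file)
        with open(settings_file.name, 'rb') as file:  # deserialize
            restored = pickle.load(file)

    assert restored == settings  # value equality survives the round-trip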
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py
new file mode 100644
index 00000000..0a08d68f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/host_profiles.py
@@ -0,0 +1,761 @@
+"""Profiles to represent individual test hosts or a user-provided inventory file."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import os
+import tempfile
+import time
+import typing as t
+
+from .io import (
+ write_text_file,
+)
+
+from .config import (
+ CommonConfig,
+ EnvironmentConfig,
+ IntegrationConfig,
+ TerminateMode,
+)
+
+from .host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ DockerConfig,
+ HostConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ PythonConfig,
+ RemoteConfig,
+ VirtualPythonConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from .core_ci import (
+ AnsibleCoreCI,
+ SshKey,
+)
+
+from .util import (
+ ApplicationError,
+ SubprocessError,
+ cache,
+ display,
+ get_type_map,
+ sanitize_host_name,
+ sorted_versions,
+)
+
+from .util_common import (
+ intercept_python,
+)
+
+from .docker_util import (
+ docker_exec,
+ docker_rm,
+ get_docker_hostname,
+)
+
+from .bootstrap import (
+ BootstrapDocker,
+ BootstrapRemote,
+)
+
+from .venv import (
+ get_virtual_python,
+)
+
+from .ssh import (
+ SshConnectionDetail,
+)
+
+from .ansible_util import (
+ ansible_environment,
+ get_hosts,
+ parse_inventory,
+)
+
+from .containers import (
+ CleanupMode,
+ HostType,
+ get_container_database,
+ run_support_container,
+)
+
+from .connections import (
+ Connection,
+ DockerConnection,
+ LocalConnection,
+ SshConnection,
+)
+
+from .become import (
+ Su,
+ Sudo,
+)
+
+TControllerHostConfig = t.TypeVar('TControllerHostConfig', bound=ControllerHostConfig)
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+TPosixConfig = t.TypeVar('TPosixConfig', bound=PosixConfig)
+TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
+
+
+@dataclasses.dataclass(frozen=True)
+class Inventory:
+ """Simple representation of an Ansible inventory."""
+ host_groups: t.Dict[str, t.Dict[str, t.Dict[str, str]]]
+ extra_groups: t.Optional[t.Dict[str, t.List[str]]] = None
+
+ @staticmethod
+ def create_single_host(name, variables): # type: (str, t.Dict[str, str]) -> Inventory
+ """Return an inventory instance created from the given hostname and variables."""
+ return Inventory(host_groups=dict(all={name: variables}))
+
+ def write(self, args, path): # type: (CommonConfig, str) -> None
+ """Write the given inventory to the specified path on disk."""
+
+ # NOTE: Switching the inventory generation to write JSON would be nice, but is currently not possible due to the use of hard-coded inventory filenames.
+ # The name `inventory` works for the POSIX integration tests, but `inventory.winrm` and `inventory.networking` will only parse in INI format.
+ # If tests are updated to use the `INVENTORY_PATH` environment variable, then this could be changed.
+ # Also, some tests detect the test type by inspecting the suffix on the inventory filename, which would break if it were changed.
+
+ inventory_text = ''
+
+ for group, hosts in self.host_groups.items():
+ inventory_text += f'[{group}]\n'
+
+ for host, variables in hosts.items():
+ kvp = ' '.join(f'{key}="{value}"' for key, value in variables.items())
+ inventory_text += f'{host} {kvp}\n'
+
+ inventory_text += '\n'
+
+ for group, children in (self.extra_groups or {}).items():
+ inventory_text += f'[{group}]\n'
+
+ for child in children:
+ inventory_text += f'{child}\n'
+
+ inventory_text += '\n'
+
+ inventory_text = inventory_text.strip()
+
+ if not args.explain:
+ write_text_file(path, inventory_text)
+
+ display.info(f'>>> Inventory\n{inventory_text}', verbosity=3)
+
+
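Given the write() logic above, a single-host inventory renders to INI text like the following (hypothetical host and variables, shown only to illustrate the key="value" layout):

    inventory = Inventory.create_single_host('test1', dict(ansible_host='10.0.0.1', ansible_user='root'))
    # inventory.write(args, path) would produce:
    #
    # [all]
    # test1 ansible_host="10.0.0.1" ansible_user="root"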
+class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for host profiles."""
+ def __init__(self,
+ *,
+ args, # type: EnvironmentConfig
+ config, # type: THostConfig
+ targets, # type: t.Optional[t.List[HostConfig]]
+ ): # type: (...) -> None
+ self.args = args
+ self.config = config
+ self.controller = bool(targets)
+ self.targets = targets or []
+
+ self.state = {} # type: t.Dict[str, t.Any]
+ """State that must be persisted across delegation."""
+ self.cache = {} # type: t.Dict[str, t.Any]
+ """Cache that must not be persisted across delegation."""
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+
+ def setup(self): # type: () -> None
+ """Perform out-of-band setup before delegation."""
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+
+ def configure(self): # type: () -> None
+ """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
+
+ def __getstate__(self):
+ return {key: value for key, value in self.__dict__.items() if key not in ('args', 'cache')}
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+
+ # args will be populated after the instances are restored
+ self.cache = {}
+
+
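The __getstate__/__setstate__ pair above is what lets a profile survive pickling for delegation: state persists, while args and cache are dropped and then reattached or rebuilt afterwards. A reduced sketch of that behavior (simplified class, not the real profile):

    import pickle

    class Profile:
        def __init__(self):
            self.args = 'not persisted'
            self.state = {'python': '3.10'}  # persisted across delegation
            self.cache = {'pwd': '/root'}    # rebuilt after delegation

        def __getstate__(self):
            return {key: value for key, value in self.__dict__.items() if key not in ('args', 'cache')}

        def __setstate__(self, state):
            self.__dict__.update(state)
            self.cache = {}  # args is repopulated separately after restore

    restored = pickle.loads(pickle.dumps(Profile()))
    assert restored.state == {'python': '3.10'}
    assert restored.cache == {} and not hasattr(restored, 'args')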
+class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
+ """Base class for POSIX host profiles."""
+ @property
+ def python(self): # type: () -> PythonConfig
+ """
+ The Python to use for this profile.
+ If it is a virtual python, it will be created the first time it is requested.
+ """
+ python = self.state.get('python')
+
+ if not python:
+ python = self.config.python
+
+ if isinstance(python, VirtualPythonConfig):
+ python = VirtualPythonConfig(
+ version=python.version,
+ system_site_packages=python.system_site_packages,
+ path=os.path.join(get_virtual_python(self.args, python), 'bin', 'python'),
+ )
+
+ self.state['python'] = python
+
+ return python
+
+
+class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.ABCMeta):
+ """Base class for profiles usable as a controller."""
+ @abc.abstractmethod
+ def get_origin_controller_connection(self): # type: () -> Connection
+ """Return a connection for accessing the host as a controller from the origin."""
+
+ @abc.abstractmethod
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+
+
+class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for profiles offering SSH connectivity."""
+ @abc.abstractmethod
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+
+
+class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
+ """Base class for remote instance profiles."""
+ @property
+ def core_ci_state(self): # type: () -> t.Optional[t.Dict[str, str]]
+ """The saved Ansible Core CI state."""
+ return self.state.get('core_ci')
+
+ @core_ci_state.setter
+ def core_ci_state(self, value): # type: (t.Dict[str, str]) -> None
+ """The saved Ansible Core CI state."""
+ self.state['core_ci'] = value
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+ self.core_ci = self.create_core_ci(load=True)
+ self.core_ci.start()
+
+ self.core_ci_state = self.core_ci.save()
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+ if self.args.remote_terminate == TerminateMode.ALWAYS or (self.args.remote_terminate == TerminateMode.SUCCESS and self.args.success):
+ self.delete_instance()
+
+ @property
+ def core_ci(self): # type: () -> t.Optional[AnsibleCoreCI]
+ """Return the cached AnsibleCoreCI instance, if any, otherwise None."""
+ return self.cache.get('core_ci')
+
+ @core_ci.setter
+ def core_ci(self, value): # type: (AnsibleCoreCI) -> None
+ """Cache the given AnsibleCoreCI instance."""
+ self.cache['core_ci'] = value
+
+ def get_instance(self): # type: () -> t.Optional[AnsibleCoreCI]
+ """Return the current AnsibleCoreCI instance, loading it if not already loaded."""
+ if not self.core_ci and self.core_ci_state:
+ self.core_ci = self.create_core_ci(load=False)
+ self.core_ci.load(self.core_ci_state)
+
+ return self.core_ci
+
+ def delete_instance(self):
+ """Delete the AnsibleCoreCI VM instance."""
+ core_ci = self.get_instance()
+
+ if not core_ci:
+ return # instance has not been provisioned
+
+ core_ci.stop()
+
+ def wait_for_instance(self): # type: () -> AnsibleCoreCI
+ """Wait for an AnsibleCoreCI VM instance to become ready."""
+ core_ci = self.get_instance()
+ core_ci.wait()
+
+ return core_ci
+
+ def create_core_ci(self, load): # type: (bool) -> AnsibleCoreCI
+ """Create and return an AnsibleCoreCI instance."""
+ return AnsibleCoreCI(
+ args=self.args,
+ platform=self.config.platform,
+ version=self.config.version,
+ provider=self.config.provider,
+ suffix='controller' if self.controller else 'target',
+ load=load,
+ )
+
+
+class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]):
+ """Host profile for the controller as a target."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ settings = SshConnectionDetail(
+ name='localhost',
+ host='localhost',
+ port=None,
+ user='root',
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.args.controller_python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[DockerConfig]):
+ """Host profile for a docker instance."""
+ @property
+ def container_name(self): # type: () -> t.Optional[str]
+ """Return the stored container name, if any, otherwise None."""
+ return self.state.get('container_name')
+
+ @container_name.setter
+ def container_name(self, value): # type: (str) -> None
+ """Store the given container name."""
+ self.state['container_name'] = value
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+ container = run_support_container(
+ args=self.args,
+ context='__test_hosts__',
+ image=self.config.image,
+ name=f'ansible-test-{"controller" if self.controller else "target"}-{self.args.session_name}',
+ ports=[22],
+ publish_ports=not self.controller, # connections to the controller over SSH are not required
+ options=self.get_docker_run_options(),
+ cleanup=CleanupMode.NO,
+ )
+
+ if not container:
+ return
+
+ self.container_name = container.name
+
+ def setup(self): # type: () -> None
+ """Perform out-of-band setup before delegation."""
+ bootstrapper = BootstrapDocker(
+ controller=self.controller,
+ python_versions=[self.python.version],
+ ssh_key=SshKey(self.args),
+ )
+
+ setup_sh = bootstrapper.get_script()
+ shell = setup_sh.splitlines()[0][2:]
+
+ docker_exec(self.args, self.container_name, [shell], data=setup_sh)
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+ if not self.container_name:
+ return # provision was never called or did not succeed, so there is no container to remove
+
+ if self.args.docker_terminate == TerminateMode.ALWAYS or (self.args.docker_terminate == TerminateMode.SUCCESS and self.args.success):
+ docker_rm(self.args, self.container_name)
+
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ if not self.controller:
+ con = self.get_controller_target_connections()[0]
+
+ for dummy in range(1, 60):
+ try:
+ con.run(['id'], capture=True)
+ except SubprocessError as ex:
+ if 'Permission denied' in ex.message:
+ raise
+
+ time.sleep(1)
+ else:
+ return
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ containers = get_container_database(self.args)
+ access = containers.data[HostType.control]['__test_hosts__'][self.container_name]
+
+ host = access.host_ip
+ port = dict(access.port_map())[22]
+
+ settings = SshConnectionDetail(
+ name=self.config.name,
+ user='root',
+ host=host,
+ port=port,
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+ def get_origin_controller_connection(self): # type: () -> DockerConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return DockerConnection(self.args, self.container_name)
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ return '/root'
+
+ def get_docker_run_options(self): # type: () -> t.List[str]
+ """Return a list of options needed to run the container."""
+ options = [
+ '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
+ f'--privileged={str(self.config.privileged).lower()}',
+ ]
+
+ if self.config.memory:
+ options.extend([
+ f'--memory={self.config.memory}',
+ f'--memory-swap={self.config.memory}',
+ ])
+
+ if self.config.seccomp != 'default':
+ options.extend(['--security-opt', f'seccomp={self.config.seccomp}'])
+
+ docker_socket = '/var/run/docker.sock'
+
+ if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
+ options.extend(['--volume', f'{docker_socket}:{docker_socket}'])
+
+ return options
+
+
+class NetworkInventoryProfile(HostProfile[NetworkInventoryConfig]):
+ """Host profile for a network inventory."""
+
+
+class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
+ """Host profile for a network remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def get_inventory_variables(self):
+ """Return inventory variables for accessing this host."""
+ core_ci = self.wait_for_instance()
+ connection = core_ci.connection
+
+ variables = dict(
+ ansible_connection=self.config.connection,
+ ansible_pipelining='yes',
+ ansible_host=connection.hostname,
+ ansible_port=connection.port,
+ ansible_user=connection.username,
+ ansible_ssh_private_key_file=core_ci.ssh_key.key,
+ ansible_network_os=f'{self.config.collection}.{self.config.platform}' if self.config.collection else self.config.platform,
+ )
+
+ return variables
+
+ def wait_until_ready(self): # type: () -> None
+ """Wait for the host to respond to an Ansible module request."""
+ core_ci = self.wait_for_instance()
+
+ if not isinstance(self.args, IntegrationConfig):
+ return # skip extended checks unless we're running integration tests
+
+ inventory = Inventory.create_single_host(sanitize_host_name(self.config.name), self.get_inventory_variables())
+ env = ansible_environment(self.args)
+ module_name = f'{self.config.collection + "." if self.config.collection else ""}{self.config.platform}_command'
+
+ with tempfile.NamedTemporaryFile() as inventory_file:
+ inventory.write(self.args, inventory_file.name)
+
+ cmd = ['ansible', '-m', module_name, '-a', 'commands=?', '-i', inventory_file.name, 'all']
+
+ for dummy in range(1, 90):
+ try:
+ intercept_python(self.args, self.args.controller_python, cmd, env)
+ except SubprocessError:
+ time.sleep(10)
+ else:
+ return
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ host=core_ci.connection.hostname,
+ port=core_ci.connection.port,
+ user=core_ci.connection.username,
+ identity_file=core_ci.ssh_key.key,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class OriginProfile(ControllerHostProfile[OriginConfig]):
+ """Host profile for origin."""
+ def get_origin_controller_connection(self): # type: () -> LocalConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return LocalConnection(self.args)
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ return os.getcwd()
+
+
+class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile[PosixRemoteConfig]):
+ """Host profile for a POSIX remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def configure(self): # type: () -> None
+ """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
+ # a target uses a single python version, but a controller may include additional versions for targets running on the controller
+ python_versions = [self.python.version] + [target.python.version for target in self.targets if isinstance(target, ControllerConfig)]
+ python_versions = sorted_versions(list(set(python_versions)))
+
+ core_ci = self.wait_for_instance()
+ pwd = self.wait_until_ready()
+
+ display.info(f'Remote working directory: {pwd}', verbosity=1)
+
+ bootstrapper = BootstrapRemote(
+ controller=self.controller,
+ platform=self.config.platform,
+ platform_version=self.config.version,
+ python_versions=python_versions,
+ ssh_key=core_ci.ssh_key,
+ )
+
+ setup_sh = bootstrapper.get_script()
+ shell = setup_sh.splitlines()[0][2:]
+
+ ssh = self.get_origin_controller_connection()
+ ssh.run([shell], data=setup_sh)
+
+ def get_ssh_connection(self): # type: () -> SshConnection
+ """Return an SSH connection for accessing the host."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ user=core_ci.connection.username,
+ host=core_ci.connection.hostname,
+ port=core_ci.connection.port,
+ identity_file=core_ci.ssh_key.key,
+ python_interpreter=self.python.path,
+ )
+
+ if settings.user == 'root':
+ become = None
+ elif self.config.platform == 'freebsd':
+ become = Su()
+ elif self.config.platform == 'macos':
+ become = Sudo()
+ elif self.config.platform == 'rhel':
+ become = Sudo()
+ else:
+ raise NotImplementedError(f'Become support has not been implemented for platform "{self.config.platform}" and user "{settings.user}" is not root.')
+
+ return SshConnection(self.args, settings, become)
+
+ def wait_until_ready(self): # type: () -> str
+ """Wait for instance to respond to SSH, returning the current working directory once connected."""
+ core_ci = self.wait_for_instance()
+
+ for dummy in range(1, 90):
+ try:
+ return self.get_working_directory()
+ except SubprocessError as ex:
+ if 'Permission denied' in ex.message:
+ raise
+
+ time.sleep(10)
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ return [self.get_ssh_connection()]
+
+ def get_origin_controller_connection(self): # type: () -> SshConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return self.get_ssh_connection()
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ if not self.pwd:
+ ssh = self.get_origin_controller_connection()
+ stdout = ssh.run(['pwd'], capture=True)[0]
+
+ if self.args.explain:
+ return '/pwd'
+
+ pwd = stdout.strip().splitlines()[-1]
+
+ if not pwd.startswith('/'):
+ raise Exception(f'Unexpected current working directory "{pwd}" from "pwd" command output:\n{stdout.strip()}')
+
+ self.pwd = pwd
+
+ return self.pwd
+
+ @property
+ def pwd(self): # type: () -> t.Optional[str]
+ """Return the cached pwd, if any, otherwise None."""
+ return self.cache.get('pwd')
+
+ @pwd.setter
+ def pwd(self, value): # type: (str) -> None
+ """Cache the given pwd."""
+ self.cache['pwd'] = value
+
+
+class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]):
+ """Host profile for a POSIX SSH instance."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ settings = SshConnectionDetail(
+ name='target',
+ user=self.config.user,
+ host=self.config.host,
+ port=self.config.port,
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
+ """Host profile for a Windows inventory."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ inventory = parse_inventory(self.args, self.config.path)
+ hosts = get_hosts(inventory, 'windows')
+ identity_file = SshKey(self.args).key
+
+ settings = [SshConnectionDetail(
+ name=name,
+ host=config['ansible_host'],
+ port=22,
+ user=config['ansible_user'],
+ identity_file=identity_file,
+ shell_type='powershell',
+ ) for name, config in hosts.items()]
+
+ if settings:
+ details = '\n'.join(f'{ssh.name} {ssh.user}@{ssh.host}:{ssh.port}' for ssh in settings)
+ display.info(f'Generated SSH connection details from inventory:\n{details}', verbosity=1)
+
+ return [SshConnection(self.args, setting) for setting in settings]
+
+
+class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
+ """Host profile for a Windows remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def get_inventory_variables(self):
+ """Return inventory variables for accessing this host."""
+ core_ci = self.wait_for_instance()
+ connection = core_ci.connection
+
+ variables = dict(
+ ansible_connection='winrm',
+ ansible_pipelining='yes',
+ ansible_winrm_server_cert_validation='ignore',
+ ansible_host=connection.hostname,
+ ansible_port=connection.port,
+ ansible_user=connection.username,
+ ansible_password=connection.password,
+ ansible_ssh_private_key_file=core_ci.ssh_key.key,
+ )
+
+ # HACK: force 2016 to use NTLM + HTTP message encryption
+ if self.config.version == '2016':
+ variables.update(
+ ansible_winrm_transport='ntlm',
+ ansible_winrm_scheme='http',
+ ansible_port='5985',
+ )
+
+ return variables
+
+ def wait_until_ready(self): # type: () -> None
+ """Wait for the host to respond to an Ansible module request."""
+ core_ci = self.wait_for_instance()
+
+ if not isinstance(self.args, IntegrationConfig):
+ return # skip extended checks unless we're running integration tests
+
+ inventory = Inventory.create_single_host(sanitize_host_name(self.config.name), self.get_inventory_variables())
+ env = ansible_environment(self.args)
+ module_name = 'ansible.windows.win_ping'
+
+ with tempfile.NamedTemporaryFile() as inventory_file:
+ inventory.write(self.args, inventory_file.name)
+
+ cmd = ['ansible', '-m', module_name, '-i', inventory_file.name, 'all']
+
+ for dummy in range(1, 120):
+ try:
+ intercept_python(self.args, self.args.controller_python, cmd, env)
+ except SubprocessError:
+ time.sleep(10)
+ else:
+ return
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ host=core_ci.connection.hostname,
+ port=22,
+ user=core_ci.connection.username,
+ identity_file=core_ci.ssh_key.key,
+ shell_type='powershell',
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+@cache
+def get_config_profile_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]]
+ """Create and return a mapping of HostConfig types to HostProfile types."""
+ return get_type_map(HostProfile, HostConfig)
+
+
+def create_host_profile(
+ args, # type: EnvironmentConfig
+ config, # type: HostConfig
+ controller, # type: bool
+): # type: (...) -> HostProfile
+ """Create and return a host profile from the given host configuration."""
+ profile_type = get_config_profile_type_map()[type(config)]
+ profile = profile_type(args=args, config=config, targets=args.targets if controller else None)
+ return profile
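create_host_profile dispatches on the concrete HostConfig type using the mapping that get_type_map derives from the t.Generic parameters of each profile class. A hand-written sketch of the equivalent dispatch (the real mapping is built automatically; the classes here are bare stand-ins):

    import typing as t

    class HostConfig: ...
    class DockerConfig(HostConfig): ...
    class OriginConfig(HostConfig): ...

    class HostProfile:
        def __init__(self, config):  # type: (HostConfig) -> None
            self.config = config

    class DockerProfile(HostProfile): ...
    class OriginProfile(HostProfile): ...

    # hand-written stand-in for get_config_profile_type_map()
    config_profile_type_map = {
        DockerConfig: DockerProfile,
        OriginConfig: OriginProfile,
    }  # type: t.Dict[t.Type[HostConfig], t.Type[HostProfile]]

    def create_profile(config):  # type: (HostConfig) -> HostProfile
        return config_profile_type_map[type(config)](config)

    assert type(create_profile(DockerConfig())) is DockerProfile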
diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py
index 6607a10b..1375d23e 100644
--- a/test/lib/ansible_test/_internal/http.py
+++ b/test/lib/ansible_test/_internal/http.py
@@ -2,24 +2,11 @@
Primitive replacement for requests to avoid extra dependency.
Avoids use of urllib2 due to lack of SNI support.
"""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import time
-
-try:
- from urllib import urlencode
-except ImportError:
- # noinspection PyCompatibility, PyUnresolvedReferences
- from urllib.parse import urlencode # pylint: disable=locally-disabled, import-error, no-name-in-module
-
-try:
- # noinspection PyCompatibility
- from urlparse import urlparse, urlunparse, parse_qs
-except ImportError:
- # noinspection PyCompatibility, PyUnresolvedReferences
- from urllib.parse import urlparse, urlunparse, parse_qs # pylint: disable=locally-disabled, ungrouped-imports
+import typing as t
from .util import (
ApplicationError,
@@ -35,12 +22,7 @@ from .util_common import (
class HttpClient:
"""Make HTTP requests via curl."""
- def __init__(self, args, always=False, insecure=False, proxy=None):
- """
- :type args: CommonConfig
- :type always: bool
- :type insecure: bool
- """
+ def __init__(self, args, always=False, insecure=False, proxy=None): # type: (CommonConfig, bool, bool, t.Optional[str]) -> None
self.args = args
self.always = always
self.insecure = insecure
@@ -49,37 +31,20 @@ class HttpClient:
self.username = None
self.password = None
- def get(self, url):
- """
- :type url: str
- :rtype: HttpResponse
- """
+ def get(self, url): # type: (str) -> HttpResponse
+ """Perform an HTTP GET and return the response."""
return self.request('GET', url)
- def delete(self, url):
- """
- :type url: str
- :rtype: HttpResponse
- """
+ def delete(self, url): # type: (str) -> HttpResponse
+ """Perform an HTTP DELETE and return the response."""
return self.request('DELETE', url)
- def put(self, url, data=None, headers=None):
- """
- :type url: str
- :type data: str | None
- :type headers: dict[str, str] | None
- :rtype: HttpResponse
- """
+ def put(self, url, data=None, headers=None): # type: (str, t.Optional[str], t.Optional[t.Dict[str, str]]) -> HttpResponse
+ """Perform an HTTP PUT and return the response."""
return self.request('PUT', url, data, headers)
- def request(self, method, url, data=None, headers=None):
- """
- :type method: str
- :type url: str
- :type data: str | None
- :type headers: dict[str, str] | None
- :rtype: HttpResponse
- """
+ def request(self, method, url, data=None, headers=None): # type: (str, str, t.Optional[str], t.Optional[t.Dict[str, str]]) -> HttpResponse
+ """Perform an HTTP request and return the response."""
cmd = ['curl', '-s', '-S', '-i', '-X', method]
if self.insecure:
@@ -148,22 +113,14 @@ class HttpClient:
class HttpResponse:
"""HTTP response from curl."""
- def __init__(self, method, url, status_code, response):
- """
- :type method: str
- :type url: str
- :type status_code: int
- :type response: str
- """
+ def __init__(self, method, url, status_code, response): # type: (str, str, int, str) -> None
self.method = method
self.url = url
self.status_code = status_code
self.response = response
- def json(self):
- """
- :rtype: any
- """
+ def json(self): # type: () -> t.Any
+ """Return the response parsed as JSON, raising an exception if parsing fails."""
try:
return json.loads(self.response)
except ValueError:
@@ -172,10 +129,6 @@ class HttpResponse:
class HttpError(ApplicationError):
"""HTTP response as an error."""
- def __init__(self, status, message):
- """
- :type status: int
- :type message: str
- """
- super(HttpError, self).__init__('%s: %s' % (status, message))
+ def __init__(self, status, message): # type: (int, str) -> None
+ super().__init__('%s: %s' % (status, message))
self.status = status
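HttpClient shells out to curl with -s -S -i and parses the status line out of the captured headers. A standalone sketch of that core idea, without the retry, auth, and proxy handling the class layers on top:

    import subprocess

    def curl_request(method, url):  # type: (str, str) -> tuple
        """Perform an HTTP request via curl and return (status_code, body)."""
        stdout = subprocess.run(
            ['curl', '-s', '-S', '-i', '-X', method, url],
            check=True, capture_output=True, text=True,
        ).stdout

        # -i prepends the response headers; the first header line is the status line
        header_text, _, body = stdout.partition('\r\n\r\n')
        status_code = int(header_text.splitlines()[0].split(' ')[1])
        return status_code, body

    status_code, body = curl_request('GET', 'https://example.com/')
    print(status_code)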
diff --git a/test/lib/ansible_test/_internal/init.py b/test/lib/ansible_test/_internal/init.py
index 682e6b0c..863c2589 100644
--- a/test/lib/ansible_test/_internal/init.py
+++ b/test/lib/ansible_test/_internal/init.py
@@ -1,6 +1,5 @@
"""Early initialization for ansible-test before most other imports have been performed."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import resource
diff --git a/test/lib/ansible_test/_internal/integration/__init__.py b/test/lib/ansible_test/_internal/integration/__init__.py
deleted file mode 100644
index f7be34e7..00000000
--- a/test/lib/ansible_test/_internal/integration/__init__.py
+++ /dev/null
@@ -1,349 +0,0 @@
-"""Ansible integration test infrastructure."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import contextlib
-import json
-import os
-import shutil
-import tempfile
-
-from .. import types as t
-
-from ..encoding import (
- to_bytes,
-)
-
-from ..target import (
- analyze_integration_target_dependencies,
- walk_integration_targets,
-)
-
-from ..config import (
- IntegrationConfig,
- NetworkIntegrationConfig,
- PosixIntegrationConfig,
- WindowsIntegrationConfig,
-)
-
-from ..io import (
- make_dirs,
- write_text_file,
- read_text_file,
-)
-
-from ..util import (
- ApplicationError,
- display,
- COVERAGE_CONFIG_NAME,
- MODE_DIRECTORY,
- MODE_DIRECTORY_WRITE,
- MODE_FILE,
-)
-
-from ..util_common import (
- named_temporary_file,
- ResultType,
-)
-
-from ..coverage_util import (
- generate_coverage_config,
-)
-
-from ..cache import (
- CommonCache,
-)
-
-from ..cloud import (
- CloudEnvironmentConfig,
-)
-
-from ..data import (
- data_context,
-)
-
-
-def setup_common_temp_dir(args, path):
- """
- :type args: IntegrationConfig
- :type path: str
- """
- if args.explain:
- return
-
- os.mkdir(path)
- os.chmod(path, MODE_DIRECTORY)
-
- if args.coverage:
- coverage_config_path = os.path.join(path, COVERAGE_CONFIG_NAME)
-
- coverage_config = generate_coverage_config(args)
-
- write_text_file(coverage_config_path, coverage_config)
-
- os.chmod(coverage_config_path, MODE_FILE)
-
- coverage_output_path = os.path.join(path, ResultType.COVERAGE.name)
-
- os.mkdir(coverage_output_path)
- os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
-
-
-def generate_dependency_map(integration_targets):
- """
- :type integration_targets: list[IntegrationTarget]
- :rtype: dict[str, set[IntegrationTarget]]
- """
- targets_dict = dict((target.name, target) for target in integration_targets)
- target_dependencies = analyze_integration_target_dependencies(integration_targets)
- dependency_map = {}
-
- invalid_targets = set()
-
- for dependency, dependents in target_dependencies.items():
- dependency_target = targets_dict.get(dependency)
-
- if not dependency_target:
- invalid_targets.add(dependency)
- continue
-
- for dependent in dependents:
- if dependent not in dependency_map:
- dependency_map[dependent] = set()
-
- dependency_map[dependent].add(dependency_target)
-
- if invalid_targets:
- raise ApplicationError('Non-existent target dependencies: %s' % ', '.join(sorted(invalid_targets)))
-
- return dependency_map
-
-
-def get_files_needed(target_dependencies):
- """
- :type target_dependencies: list[IntegrationTarget]
- :rtype: list[str]
- """
- files_needed = []
-
- for target_dependency in target_dependencies:
- files_needed += target_dependency.needs_file
-
- files_needed = sorted(set(files_needed))
-
- invalid_paths = [path for path in files_needed if not os.path.isfile(path)]
-
- if invalid_paths:
- raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(invalid_paths))
-
- return files_needed
-
-
-def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None
- """Check the given inventory for issues."""
- if args.docker or args.remote:
- if os.path.exists(inventory_path):
- inventory = read_text_file(inventory_path)
-
- if 'ansible_ssh_private_key_file' in inventory:
- display.warning('Use of "ansible_ssh_private_key_file" in inventory with the --docker or --remote option is unsupported and will likely fail.')
-
-
-def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
- """Return the inventory path used for the given integration configuration relative to the content root."""
- inventory_names = {
- PosixIntegrationConfig: 'inventory',
- WindowsIntegrationConfig: 'inventory.winrm',
- NetworkIntegrationConfig: 'inventory.networking',
- } # type: t.Dict[t.Type[IntegrationConfig], str]
-
- return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
-
-
-def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None
- """Make the given inventory available during delegation."""
- if isinstance(args, PosixIntegrationConfig):
- return
-
- def inventory_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
- """
- Add the inventory file to the payload file list.
- This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
- """
- inventory_path = get_inventory_relative_path(args)
- inventory_tuple = inventory_path_src, inventory_path
-
- if os.path.isfile(inventory_path_src) and inventory_tuple not in files:
- originals = [item for item in files if item[1] == inventory_path]
-
- if originals:
- for original in originals:
- files.remove(original)
-
- display.warning('Overriding inventory file "%s" with "%s".' % (inventory_path, inventory_path_src))
- else:
- display.notice('Sourcing inventory file "%s" from "%s".' % (inventory_path, inventory_path_src))
-
- files.append(inventory_tuple)
-
- data_context().register_payload_callback(inventory_callback)
-
-
-@contextlib.contextmanager
-def integration_test_environment(args, target, inventory_path_src):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type inventory_path_src: str
- """
- ansible_config_src = args.get_ansible_config()
- ansible_config_relative = os.path.join(data_context().content.integration_path, '%s.cfg' % args.command)
-
- if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
- display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
-
- integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
- targets_dir = os.path.join(data_context().content.root, data_context().content.integration_targets_path)
- inventory_path = inventory_path_src
- ansible_config = ansible_config_src
- vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
-
- yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
- return
-
- # When testing a collection, the temporary directory must reside within the collection.
- # This is necessary to enable support for the default collection for non-collection content (playbooks and roles).
- root_temp_dir = os.path.join(ResultType.TMP.path, 'integration')
-
- prefix = '%s-' % target.name
- suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8'
-
- if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases:
- display.warning('Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
- suffix = '-ansible'
-
- if args.explain:
- temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix))
- else:
- make_dirs(root_temp_dir)
- temp_dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
-
- try:
- display.info('Preparing temporary directory: %s' % temp_dir, verbosity=2)
-
- inventory_relative_path = get_inventory_relative_path(args)
- inventory_path = os.path.join(temp_dir, inventory_relative_path)
-
- cache = IntegrationCache(args)
-
- target_dependencies = sorted([target] + list(cache.dependency_map.get(target.name, set())))
-
- files_needed = get_files_needed(target_dependencies)
-
- integration_dir = os.path.join(temp_dir, data_context().content.integration_path)
- targets_dir = os.path.join(temp_dir, data_context().content.integration_targets_path)
- ansible_config = os.path.join(temp_dir, ansible_config_relative)
-
- vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
- vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path)
-
- file_copies = [
- (ansible_config_src, ansible_config),
- (inventory_path_src, inventory_path),
- ]
-
- if os.path.exists(vars_file_src):
- file_copies.append((vars_file_src, vars_file))
-
- file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed]
-
- integration_targets_relative_path = data_context().content.integration_targets_path
-
- directory_copies = [
- (
- os.path.join(integration_targets_relative_path, target.relative_path),
- os.path.join(temp_dir, integration_targets_relative_path, target.relative_path)
- )
- for target in target_dependencies
- ]
-
- directory_copies = sorted(set(directory_copies))
- file_copies = sorted(set(file_copies))
-
- if not args.explain:
- make_dirs(integration_dir)
-
- for dir_src, dir_dst in directory_copies:
- display.info('Copying %s/ to %s/' % (dir_src, dir_dst), verbosity=2)
-
- if not args.explain:
- shutil.copytree(to_bytes(dir_src), to_bytes(dir_dst), symlinks=True)
-
- for file_src, file_dst in file_copies:
- display.info('Copying %s to %s' % (file_src, file_dst), verbosity=2)
-
- if not args.explain:
- make_dirs(os.path.dirname(file_dst))
- shutil.copy2(file_src, file_dst)
-
- yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
- finally:
- if not args.explain:
- shutil.rmtree(temp_dir)
-
-
-@contextlib.contextmanager
-def integration_test_config_file(args, env_config, integration_dir):
- """
- :type args: IntegrationConfig
- :type env_config: CloudEnvironmentConfig
- :type integration_dir: str
- """
- if not env_config:
- yield None
- return
-
- config_vars = (env_config.ansible_vars or {}).copy()
-
- config_vars.update(dict(
- ansible_test=dict(
- environment=env_config.env_vars,
- module_defaults=env_config.module_defaults,
- )
- ))
-
- config_file = json.dumps(config_vars, indent=4, sort_keys=True)
-
- with named_temporary_file(args, 'config-file-', '.json', integration_dir, config_file) as path:
- filename = os.path.relpath(path, integration_dir)
-
- display.info('>>> Config File: %s\n%s' % (filename, config_file), verbosity=3)
-
- yield path
-
-
-class IntegrationEnvironment:
- """Details about the integration environment."""
- def __init__(self, integration_dir, targets_dir, inventory_path, ansible_config, vars_file):
- self.integration_dir = integration_dir
- self.targets_dir = targets_dir
- self.inventory_path = inventory_path
- self.ansible_config = ansible_config
- self.vars_file = vars_file
-
-
-class IntegrationCache(CommonCache):
- """Integration cache."""
- @property
- def integration_targets(self):
- """
- :rtype: list[IntegrationTarget]
- """
- return self.get('integration_targets', lambda: list(walk_integration_targets()))
-
- @property
- def dependency_map(self):
- """
- :rtype: dict[str, set[IntegrationTarget]]
- """
- return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
diff --git a/test/lib/ansible_test/_internal/inventory.py b/test/lib/ansible_test/_internal/inventory.py
new file mode 100644
index 00000000..73a9ae9c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/inventory.py
@@ -0,0 +1,170 @@
+"""Inventory creation from host profiles."""
+from __future__ import annotations
+
+import shutil
+import typing as t
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ sanitize_host_name,
+ exclude_none_values,
+)
+
+from .host_profiles import (
+ ControllerHostProfile,
+ ControllerProfile,
+ HostProfile,
+ Inventory,
+ NetworkInventoryProfile,
+ NetworkRemoteProfile,
+ SshTargetHostProfile,
+ WindowsInventoryProfile,
+ WindowsRemoteProfile,
+)
+
+
+def create_controller_inventory(args, path, controller_host): # type: (EnvironmentConfig, str, ControllerHostProfile) -> None
+ """Create and return inventory for use in controller-only integration tests."""
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=dict(
+ ansible_connection='local',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=controller_host.python.path,
+ ),
+ ),
+ ),
+ )
+
+ inventory.write(args, path)
+
+
+def create_windows_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None
+ """Create and return inventory for use in target Windows integration tests."""
+ first = target_hosts[0]
+
+ if isinstance(first, WindowsInventoryProfile):
+ if args.explain:
+ return
+
+ try:
+ shutil.copyfile(first.config.path, path)
+ except shutil.SameFileError:
+ pass
+
+ return
+
+ target_hosts = t.cast(t.List[WindowsRemoteProfile], target_hosts)
+ hosts = [(target_host, target_host.wait_for_instance().connection) for target_host in target_hosts]
+ windows_hosts = {sanitize_host_name(host.config.name): host.get_inventory_variables() for host, connection in hosts}
+
+ inventory = Inventory(
+ host_groups=dict(
+ windows=windows_hosts,
+ ),
+ # The `testhost` group is needed to support the `binary_modules_winrm` integration test.
+ # The test should be updated to remove the need for this.
+ extra_groups={
+ 'testhost:children': [
+ 'windows',
+ ],
+ },
+ )
+
+ inventory.write(args, path)
+
+
+def create_network_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None
+ """Create and return inventory for use in target network integration tests."""
+ first = target_hosts[0]
+
+ if isinstance(first, NetworkInventoryProfile):
+ if args.explain:
+ return
+
+ try:
+ shutil.copyfile(first.config.path, path)
+ except shutil.SameFileError:
+ pass
+
+ return
+
+ target_hosts = t.cast(t.List[NetworkRemoteProfile], target_hosts)
+ host_groups = {target_host.config.platform: {} for target_host in target_hosts}
+
+ for target_host in target_hosts:
+ host_groups[target_host.config.platform][sanitize_host_name(target_host.config.name)] = target_host.get_inventory_variables()
+
+ inventory = Inventory(
+ host_groups=host_groups,
+ # The `net` group was added to support platform agnostic testing. It may no longer be needed.
+ # see: https://github.com/ansible/ansible/pull/34661
+ # see: https://github.com/ansible/ansible/pull/34707
+ extra_groups={
+ 'net:children': sorted(host_groups),
+ },
+ )
+
+ inventory.write(args, path)
+
+
+def create_posix_inventory(args, path, target_hosts, needs_ssh=False): # type: (EnvironmentConfig, str, t.List[HostProfile], bool) -> None
+ """Create and return inventory for use in POSIX integration tests."""
+ target_hosts = t.cast(t.List[SshTargetHostProfile], target_hosts)
+
+ if len(target_hosts) != 1:
+ raise Exception()
+
+ target_host = target_hosts[0]
+
+ if isinstance(target_host, ControllerProfile) and not needs_ssh:
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=dict(
+ ansible_connection='local',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=target_host.python.path,
+ ),
+ ),
+ ),
+ )
+ else:
+ connections = target_host.get_controller_target_connections()
+
+ if len(connections) != 1:
+ raise Exception()
+
+ ssh = connections[0]
+
+ testhost = dict(
+ ansible_connection='ssh',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=ssh.settings.python_interpreter,
+ ansible_host=ssh.settings.host,
+ ansible_port=ssh.settings.port,
+ ansible_user=ssh.settings.user,
+ ansible_ssh_private_key_file=ssh.settings.identity_file,
+ )
+
+ if ssh.become:
+ testhost.update(
+ ansible_become='yes',
+ ansible_become_method=ssh.become.method,
+ )
+
+ testhost = exclude_none_values(testhost)
+
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=testhost,
+ ),
+ ),
+ )
+
+ inventory.write(args, path)
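
The SSH branch above filters unset connection settings out of the host variables before writing the inventory. A minimal sketch of what a helper like exclude_none_values could look like (hypothetical; the real helper is imported from ansible-test's util module):

    def exclude_none_values(data):  # type: (t.Dict[str, t.Any]) -> t.Dict[str, t.Any]
        """Return a copy of the dictionary with all None values removed."""
        return {key: value for key, value in data.items() if value is not None}

    # Example: ansible_port is dropped when the SSH settings leave it unset.
    host_vars = dict(ansible_host='10.0.0.5', ansible_port=None)
    assert exclude_none_values(host_vars) == dict(ansible_host='10.0.0.5')

With that filtering, optional settings such as ansible_port or ansible_ssh_private_key_file simply disappear from the generated inventory instead of being written as empty values.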
diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py
index da69da40..9d3301a1 100644
--- a/test/lib/ansible_test/_internal/io.py
+++ b/test/lib/ansible_test/_internal/io.py
@@ -1,13 +1,11 @@
"""Functions for disk IO."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import errno
import io
import json
import os
-
-from . import types as t
+import typing as t
from .encoding import (
ENCODING,
@@ -42,11 +40,11 @@ def make_dirs(path): # type: (str) -> None
def write_json_file(path, # type: str
- content, # type: t.Union[t.List[t.Any], t.Dict[str, t.Any]]
+ content, # type: t.Any
create_directories=False, # type: bool
formatted=True, # type: bool
encoder=None, # type: t.Optional[t.Callable[[t.Any], t.Any]]
- ): # type: (...) -> None
+ ): # type: (...) -> str
"""Write the given json content to the specified path, optionally creating missing directories."""
text_content = json.dumps(content,
sort_keys=formatted,
@@ -57,6 +55,8 @@ def write_json_file(path, # type: str
write_text_file(path, text_content, create_directories=create_directories)
+ return text_content
+
def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None
"""Write the given text content to the specified path, optionally creating missing directories."""
@@ -73,7 +73,7 @@ def open_text_file(path, mode='r'): # type: (str, str) -> t.TextIO
raise Exception('mode cannot include "b" for text files: %s' % mode)
# noinspection PyTypeChecker
- return io.open(to_bytes(path), mode, encoding=ENCODING)
+ return io.open(to_bytes(path), mode, encoding=ENCODING) # pylint: disable=consider-using-with
def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
@@ -82,13 +82,14 @@ def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
raise Exception('mode must include "b" for binary files: %s' % mode)
# noinspection PyTypeChecker
- return io.open(to_bytes(path), mode)
+ return io.open(to_bytes(path), mode) # pylint: disable=consider-using-with
class SortedSetEncoder(json.JSONEncoder):
"""Encode sets as sorted lists."""
- def default(self, obj): # pylint: disable=method-hidden, arguments-differ
- if isinstance(obj, set):
- return sorted(obj)
+ def default(self, o):
+ """Return a serialized version of the `o` object."""
+ if isinstance(o, set):
+ return sorted(o)
- return json.JSONEncoder.default(self, obj)
+ return json.JSONEncoder.default(self, o)
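
The reworked default() above keeps JSON output deterministic by turning sets into sorted lists. A short usage sketch of SortedSetEncoder as defined in this file:

    import json

    print(json.dumps(dict(python_versions={'3.10', '3.8', '3.9'}), cls=SortedSetEncoder))
    # -> {"python_versions": ["3.10", "3.8", "3.9"]}

Without the custom encoder, json.dumps() would raise TypeError, since sets are not JSON serializable.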
diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py
new file mode 120000
index 00000000..bde5519b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/junit_xml.py
@@ -0,0 +1 @@
+../../../../lib/ansible/utils/_junit_xml.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_internal/manage_ci.py b/test/lib/ansible_test/_internal/manage_ci.py
deleted file mode 100644
index 8cb09ba2..00000000
--- a/test/lib/ansible_test/_internal/manage_ci.py
+++ /dev/null
@@ -1,401 +0,0 @@
-"""Access Ansible Core CI remote services."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import tempfile
-import time
-
-from . import types as t
-
-from .io import (
- read_text_file,
-)
-
-from .util import (
- SubprocessError,
- ApplicationError,
- cmd_quote,
- display,
- ANSIBLE_TEST_DATA_ROOT,
-)
-
-from .util_common import (
- intercept_command,
- get_network_completion,
- run_command,
- ShellScriptTemplate,
-)
-
-from .core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from .ansible_util import (
- ansible_environment,
-)
-
-from .config import (
- NetworkIntegrationConfig,
- ShellConfig,
-)
-
-from .payload import (
- create_payload,
-)
-
-
-class ManageWindowsCI:
- """Manage access to a Windows instance provided by Ansible Core CI."""
- def __init__(self, core_ci):
- """
- :type core_ci: AnsibleCoreCI
- """
- self.core_ci = core_ci
- self.ssh_args = ['-i', self.core_ci.ssh_key.key]
-
- ssh_options = dict(
- BatchMode='yes',
- StrictHostKeyChecking='no',
- UserKnownHostsFile='/dev/null',
- ServerAliveInterval=15,
- ServerAliveCountMax=4,
- )
-
- for ssh_option in sorted(ssh_options):
- self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]
-
- def setup(self, python_version):
- """Used in delegate_remote to setup the host, no action is required for Windows.
- :type python_version: str
- """
-
- def wait(self):
- """Wait for instance to respond to ansible ping."""
- extra_vars = [
- 'ansible_connection=winrm',
- 'ansible_host=%s' % self.core_ci.connection.hostname,
- 'ansible_user=%s' % self.core_ci.connection.username,
- 'ansible_password=%s' % self.core_ci.connection.password,
- 'ansible_port=%s' % self.core_ci.connection.port,
- 'ansible_winrm_server_cert_validation=ignore',
- ]
-
- name = 'windows_%s' % self.core_ci.version
-
- env = ansible_environment(self.core_ci.args)
- cmd = ['ansible', '-m', 'ansible.windows.win_ping', '-i', '%s,' % name, name, '-e', ' '.join(extra_vars)]
-
- for dummy in range(1, 120):
- try:
- intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
- def download(self, remote, local):
- """
- :type remote: str
- :type local: str
- """
- self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)
-
- def upload(self, local, remote):
- """
- :type local: str
- :type remote: str
- """
- self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))
-
- def ssh(self, command, options=None, force_pty=True):
- """
- :type command: str | list[str]
- :type options: list[str] | None
- :type force_pty: bool
- """
- if not options:
- options = []
- if force_pty:
- options.append('-tt')
-
- if isinstance(command, list):
- command = ' '.join(cmd_quote(c) for c in command)
-
- run_command(self.core_ci.args,
- ['ssh', '-q'] + self.ssh_args +
- options +
- ['-p', '22',
- '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
- [command])
-
- def scp(self, src, dst):
- """
- :type src: str
- :type dst: str
- """
- for dummy in range(1, 10):
- try:
- run_command(self.core_ci.args,
- ['scp'] + self.ssh_args +
- ['-P', '22', '-q', '-r', src, dst])
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
-
-
-class ManageNetworkCI:
- """Manage access to a network instance provided by Ansible Core CI."""
- def __init__(self, args, core_ci):
- """
- :type args: NetworkIntegrationConfig
- :type core_ci: AnsibleCoreCI
- """
- self.args = args
- self.core_ci = core_ci
-
- def wait(self):
- """Wait for instance to respond to ansible ping."""
- settings = get_network_settings(self.args, self.core_ci.platform, self.core_ci.version)
-
- extra_vars = [
- 'ansible_host=%s' % self.core_ci.connection.hostname,
- 'ansible_port=%s' % self.core_ci.connection.port,
- 'ansible_ssh_private_key_file=%s' % self.core_ci.ssh_key.key,
- ] + [
- '%s=%s' % (key, value) for key, value in settings.inventory_vars.items()
- ]
-
- name = '%s-%s' % (self.core_ci.platform, self.core_ci.version.replace('.', '-'))
-
- env = ansible_environment(self.core_ci.args)
- cmd = [
- 'ansible',
- '-m', '%s%s_command' % (settings.collection + '.' if settings.collection else '', self.core_ci.platform),
- '-a', 'commands=?',
- '-u', self.core_ci.connection.username,
- '-i', '%s,' % name,
- '-e', ' '.join(extra_vars),
- name,
- ]
-
- for dummy in range(1, 90):
- try:
- intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
-
-class ManagePosixCI:
- """Manage access to a POSIX instance provided by Ansible Core CI."""
- def __init__(self, core_ci):
- """
- :type core_ci: AnsibleCoreCI
- """
- self.core_ci = core_ci
- self.ssh_args = ['-i', self.core_ci.ssh_key.key]
-
- ssh_options = dict(
- BatchMode='yes',
- StrictHostKeyChecking='no',
- UserKnownHostsFile='/dev/null',
- ServerAliveInterval=15,
- ServerAliveCountMax=4,
- )
-
- for ssh_option in sorted(ssh_options):
- self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]
-
- self.become = None
-
- if self.core_ci.platform == 'freebsd':
- self.become = ['su', '-l', 'root', '-c']
- elif self.core_ci.platform == 'macos':
- self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH', 'sh', '-c']
- elif self.core_ci.platform == 'osx':
- self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']
- elif self.core_ci.platform == 'rhel':
- self.become = ['sudo', '-in', 'bash', '-c']
- elif self.core_ci.platform in ['aix', 'ibmi']:
- self.become = []
-
- if self.become is None:
- raise NotImplementedError('provider %s has not been implemented' % self.core_ci.provider)
-
- def setup(self, python_version):
- """Start instance and wait for it to become ready and respond to an ansible ping.
- :type python_version: str
- :rtype: str
- """
- pwd = self.wait()
-
- display.info('Remote working directory: %s' % pwd, verbosity=1)
-
- if isinstance(self.core_ci.args, ShellConfig):
- if self.core_ci.args.raw:
- return pwd
-
- self.configure(python_version)
- self.upload_source()
-
- return pwd
-
- def wait(self): # type: () -> str
- """Wait for instance to respond to SSH."""
- for dummy in range(1, 90):
- try:
- stdout = self.ssh('pwd', capture=True)[0]
-
- if self.core_ci.args.explain:
- return '/pwd'
-
- pwd = stdout.strip().splitlines()[-1]
-
- if not pwd.startswith('/'):
- raise Exception('Unexpected current working directory "%s" from "pwd" command output:\n%s' % (pwd, stdout))
-
- return pwd
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
- def configure(self, python_version):
- """Configure remote host for testing.
- :type python_version: str
- """
- template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'remote.sh')))
- setup_sh = template.substitute(
- platform=self.core_ci.platform,
- platform_version=self.core_ci.version,
- python_version=python_version,
- )
-
- ssh_keys_sh = get_ssh_key_setup(self.core_ci.ssh_key)
-
- setup_sh += ssh_keys_sh
- shell = setup_sh.splitlines()[0][2:]
-
- self.ssh(shell, data=setup_sh)
-
- def upload_source(self):
- """Upload and extract source."""
- with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
- remote_source_dir = '/tmp'
- remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))
-
- create_payload(self.core_ci.args, local_source_fd.name)
-
- self.upload(local_source_fd.name, remote_source_dir)
- # AIX does not provide the GNU tar version, leading to parameters
- # being different and -z not being recognized. This pattern works
- # with both versions of tar.
- self.ssh(
- 'rm -rf ~/ansible ~/ansible_collections && cd ~/ && gunzip --stdout %s | tar oxf - && rm %s' %
- (remote_source_path, remote_source_path)
- )
-
- def download(self, remote, local):
- """
- :type remote: str
- :type local: str
- """
- self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)
-
- def upload(self, local, remote):
- """
- :type local: str
- :type remote: str
- """
- self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))
-
- def ssh(self, command, options=None, capture=False, data=None):
- """
- :type command: str | list[str]
- :type options: list[str] | None
- :type capture: bool
- :type data: str | None
- :rtype: str | None, str | None
- """
- if not options:
- options = []
-
- if isinstance(command, list):
- command = ' '.join(cmd_quote(c) for c in command)
-
- command = cmd_quote(command) if self.become else command
-
- options.append('-q')
-
- if not data:
- options.append('-tt')
-
- return run_command(self.core_ci.args,
- ['ssh'] + self.ssh_args +
- options +
- ['-p', str(self.core_ci.connection.port),
- '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
- self.become + [command], capture=capture, data=data)
-
- def scp(self, src, dst):
- """
- :type src: str
- :type dst: str
- """
- for dummy in range(1, 10):
- try:
- run_command(self.core_ci.args,
- ['scp'] + self.ssh_args +
- ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
-
-
-def get_ssh_key_setup(ssh_key): # type: (SshKey) -> str
- """Generate and return a script to configure SSH keys on a host."""
- template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'ssh-keys.sh')))
-
- ssh_keys_sh = template.substitute(
- ssh_public_key=ssh_key.pub_contents,
- ssh_private_key=ssh_key.key_contents,
- ssh_key_type=ssh_key.KEY_TYPE,
- )
-
- return ssh_keys_sh
-
-
-def get_network_settings(args, platform, version): # type: (NetworkIntegrationConfig, str, str) -> NetworkPlatformSettings
- """Returns settings for the given network platform and version."""
- platform_version = '%s/%s' % (platform, version)
- completion = get_network_completion().get(platform_version, {})
- collection = args.platform_collection.get(platform, completion.get('collection'))
-
- settings = NetworkPlatformSettings(
- collection,
- dict(
- ansible_connection=args.platform_connection.get(platform, completion.get('connection')),
- ansible_network_os='%s.%s' % (collection, platform) if collection else platform,
- )
- )
-
- return settings
-
-
-class NetworkPlatformSettings:
- """Settings required for provisioning a network platform."""
- def __init__(self, collection, inventory_vars): # type: (str, t.Type[str, str]) -> None
- self.collection = collection
- self.inventory_vars = inventory_vars
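
For reference, both ManageWindowsCI and ManagePosixCI in the file deleted above assembled their SSH command line the same way. A sketch of what that loop produced (the key path is illustrative):

    ssh_args = ['-i', '/tmp/core-ci-key']  # hypothetical SSH key path

    ssh_options = dict(
        BatchMode='yes',
        StrictHostKeyChecking='no',
        UserKnownHostsFile='/dev/null',
        ServerAliveInterval=15,
        ServerAliveCountMax=4,
    )

    for ssh_option in sorted(ssh_options):
        ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]

    # ssh_args now contains, in sorted option order:
    # ['-i', '/tmp/core-ci-key',
    #  '-o', 'BatchMode=yes', '-o', 'ServerAliveCountMax=4', '-o', 'ServerAliveInterval=15',
    #  '-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null']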
diff --git a/test/lib/ansible_test/_internal/metadata.py b/test/lib/ansible_test/_internal/metadata.py
index 36575d0c..769ec834 100644
--- a/test/lib/ansible_test/_internal/metadata.py
+++ b/test/lib/ansible_test/_internal/metadata.py
@@ -1,8 +1,6 @@
"""Test metadata for passing data to delegated tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from . import types as t
+from __future__ import annotations
+import typing as t
from .util import (
display,
@@ -25,14 +23,11 @@ class Metadata:
"""Initialize metadata."""
self.changes = {} # type: t.Dict[str, t.Tuple[t.Tuple[int, int]]]
self.cloud_config = None # type: t.Optional[t.Dict[str, str]]
- self.instance_config = None # type: t.Optional[t.List[t.Dict[str, str]]]
self.change_description = None # type: t.Optional[ChangeDescription]
self.ci_provider = None # type: t.Optional[str]
- def populate_changes(self, diff):
- """
- :type diff: list[str] | None
- """
+ def populate_changes(self, diff): # type: (t.Optional[t.List[str]]) -> None
+ """Populate the changeset using the given diff."""
patches = parse_diff(diff)
patches = sorted(patches, key=lambda k: k.new.path) # type: t.List[FileDiff]
@@ -50,22 +45,17 @@ class Metadata:
# failed tests involving deleted files should be using line 0 since there is no content remaining
self.changes[path] = ((0, 0),)
- def to_dict(self):
- """
- :rtype: dict[str, any]
- """
+ def to_dict(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dictionary representation of the metadata."""
return dict(
changes=self.changes,
cloud_config=self.cloud_config,
- instance_config=self.instance_config,
ci_provider=self.ci_provider,
change_description=self.change_description.to_dict(),
)
- def to_file(self, path):
- """
- :type path: path
- """
+ def to_file(self, path): # type: (str) -> None
+ """Write the metadata to the specified file."""
data = self.to_dict()
display.info('>>> Metadata: %s\n%s' % (path, data), verbosity=3)
@@ -73,24 +63,17 @@ class Metadata:
write_json_file(path, data)
@staticmethod
- def from_file(path):
- """
- :type path: str
- :rtype: Metadata
- """
+ def from_file(path): # type: (str) -> Metadata
+ """Return metadata loaded from the specified file."""
data = read_json_file(path)
return Metadata.from_dict(data)
@staticmethod
- def from_dict(data):
- """
- :type data: dict[str, any]
- :rtype: Metadata
- """
+ def from_dict(data): # type: (t.Dict[str, t.Any]) -> Metadata
+ """Return metadata loaded from the specified dictionary."""
metadata = Metadata()
metadata.changes = data['changes']
metadata.cloud_config = data['cloud_config']
- metadata.instance_config = data['instance_config']
metadata.ci_provider = data['ci_provider']
metadata.change_description = ChangeDescription.from_dict(data['change_description'])
@@ -108,23 +91,17 @@ class ChangeDescription:
self.no_integration_paths = [] # type: t.List[str]
@property
- def targets(self):
- """
- :rtype: list[str] | None
- """
+ def targets(self): # type: () -> t.Optional[t.List[str]]
+ """Optional list of target names."""
return self.regular_command_targets.get(self.command)
@property
- def focused_targets(self):
- """
- :rtype: list[str] | None
- """
+ def focused_targets(self): # type: () -> t.Optional[t.List[str]]
+ """Optional list of focused target names."""
return self.focused_command_targets.get(self.command)
- def to_dict(self):
- """
- :rtype: dict[str, any]
- """
+ def to_dict(self): # type: () -> t.Dict[str, t.Any]
+ """Return a dictionary representation of the change description."""
return dict(
command=self.command,
changed_paths=self.changed_paths,
@@ -135,11 +112,8 @@ class ChangeDescription:
)
@staticmethod
- def from_dict(data):
- """
- :param data: dict[str, any]
- :rtype: ChangeDescription
- """
+ def from_dict(data): # type: (t.Dict[str, t.Any]) -> ChangeDescription
+ """Return a change description loaded from the given dictionary."""
changes = ChangeDescription()
changes.command = data['command']
changes.changed_paths = data['changed_paths']
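
The to_dict()/from_dict() pair above gives Metadata a simple JSON-friendly round trip, with the instance_config field now removed on both sides. A usage sketch based on the methods shown in this file:

    metadata = Metadata()
    metadata.changes = {'lib/ansible/cli/adhoc.py': ((12, 15),)}
    metadata.ci_provider = 'azp'
    metadata.change_description = ChangeDescription()

    data = metadata.to_dict()            # plain dict, suitable for write_json_file()
    restored = Metadata.from_dict(data)

    assert restored.changes == metadata.changes
    assert restored.ci_provider == 'azp'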
diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py
index 87d6ad81..d92f9f65 100644
--- a/test/lib/ansible_test/_internal/payload.py
+++ b/test/lib/ansible_test/_internal/payload.py
@@ -1,6 +1,5 @@
"""Payload management for sending Ansible files and test content to other systems (VMs, containers)."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import atexit
import os
@@ -8,8 +7,11 @@ import stat
import tarfile
import tempfile
import time
+import typing as t
-from . import types as t
+from .constants import (
+ ANSIBLE_BIN_SYMLINK_MAP,
+)
from .config import (
IntegrationConfig,
@@ -35,22 +37,6 @@ from .util_common import (
tarfile.pwd = None
tarfile.grp = None
-# this bin symlink map must exactly match the contents of the bin directory
-# it is necessary for payload creation to reconstruct the bin directory when running ansible-test from an installed version of ansible
-ANSIBLE_BIN_SYMLINK_MAP = {
- 'ansible': '../lib/ansible/cli/scripts/ansible_cli_stub.py',
- 'ansible-config': 'ansible',
- 'ansible-connection': '../lib/ansible/cli/scripts/ansible_connection_cli_stub.py',
- 'ansible-console': 'ansible',
- 'ansible-doc': 'ansible',
- 'ansible-galaxy': 'ansible',
- 'ansible-inventory': 'ansible',
- 'ansible-playbook': 'ansible',
- 'ansible-pull': 'ansible',
- 'ansible-test': '../test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py',
- 'ansible-vault': 'ansible',
-}
-
def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
"""Create a payload for delegation."""
diff --git a/test/lib/ansible_test/_internal/provider/__init__.py b/test/lib/ansible_test/_internal/provider/__init__.py
index a60d2a95..e8972ac8 100644
--- a/test/lib/ansible_test/_internal/provider/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/__init__.py
@@ -1,26 +1,16 @@
"""Provider (plugin) infrastructure for ansible-test."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import os
-
-from .. import types as t
+import typing as t
from ..util import (
- ABC,
ApplicationError,
get_subclasses,
)
-try:
- # noinspection PyTypeChecker
- TPathProvider = t.TypeVar('TPathProvider', bound='PathProvider')
-except AttributeError:
- TPathProvider = None # pylint: disable=invalid-name
-
-
def get_path_provider_classes(provider_type): # type: (t.Type[TPathProvider]) -> t.List[t.Type[TPathProvider]]
"""Return a list of path provider classes of the given type."""
return sorted(get_subclasses(provider_type), key=lambda c: (c.priority, c.__name__))
@@ -59,13 +49,13 @@ def find_path_provider(provider_type, # type: t.Type[TPathProvider],
class ProviderNotFoundForPath(ApplicationError):
"""Exception generated when a path based provider cannot be found for a given path."""
def __init__(self, provider_type, path): # type: (t.Type, str) -> None
- super(ProviderNotFoundForPath, self).__init__('No %s found for path: %s' % (provider_type.__name__, path))
+ super().__init__('No %s found for path: %s' % (provider_type.__name__, path))
self.provider_type = provider_type
self.path = path
-class PathProvider(ABC):
+class PathProvider(metaclass=abc.ABCMeta):
"""Base class for provider plugins that are path based."""
sequence = 500
priority = 500
@@ -77,3 +67,6 @@ class PathProvider(ABC):
@abc.abstractmethod
def is_content_root(path): # type: (str) -> bool
"""Return True if the given path is a content root for this provider."""
+
+
+TPathProvider = t.TypeVar('TPathProvider', bound=PathProvider)
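
Defining TPathProvider after the class replaces the old try/except shim that fell back to None when TypeVar was unavailable. Because the TypeVar is bound to PathProvider, functions such as get_path_provider_classes() preserve the caller's subclass type. A small illustration of the pattern, using hypothetical provider classes:

    import typing as t

    class PathProvider:
        priority = 500

    class SourceProvider(PathProvider):  # hypothetical subclass for illustration
        priority = 200

    TPathProvider = t.TypeVar('TPathProvider', bound=PathProvider)

    def pick_provider(candidates):  # type: (t.List[TPathProvider]) -> TPathProvider
        """Return the highest-priority candidate; the return type matches the element type passed in."""
        return sorted(candidates, key=lambda c: c.priority)[0]

    provider = pick_provider([SourceProvider()])  # type checkers infer SourceProvider, not PathProvider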
diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py
index 8065c64e..147fcbd5 100644
--- a/test/lib/ansible_test/_internal/provider/layout/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py
@@ -1,12 +1,10 @@
"""Code for finding content."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import collections
import os
-
-from ... import types as t
+import typing as t
from ...util import (
ANSIBLE_SOURCE_ROOT,
@@ -94,7 +92,7 @@ class ContentLayout(Layout):
unit_module_utils_path, # type: str
unit_messages, # type: t.Optional[LayoutMessages]
): # type: (...) -> None
- super(ContentLayout, self).__init__(root, paths)
+ super().__init__(root, paths)
self.plugin_paths = plugin_paths
self.collection = collection
diff --git a/test/lib/ansible_test/_internal/provider/layout/ansible.py b/test/lib/ansible_test/_internal/provider/layout/ansible.py
index 49ca482b..345faa7c 100644
--- a/test/lib/ansible_test/_internal/provider/layout/ansible.py
+++ b/test/lib/ansible_test/_internal/provider/layout/ansible.py
@@ -1,10 +1,8 @@
"""Layout provider for Ansible source."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
ContentLayout,
diff --git a/test/lib/ansible_test/_internal/provider/layout/collection.py b/test/lib/ansible_test/_internal/provider/layout/collection.py
index ffad29f2..5dca046f 100644
--- a/test/lib/ansible_test/_internal/provider/layout/collection.py
+++ b/test/lib/ansible_test/_internal/provider/layout/collection.py
@@ -1,10 +1,8 @@
"""Layout provider for Ansible collections."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
ContentLayout,
@@ -16,9 +14,6 @@ from . import (
class CollectionLayout(LayoutProvider):
"""Layout provider for Ansible collections."""
- __module_path = 'plugins/modules'
- __unit_path = 'test/unit'
-
@staticmethod
def is_content_root(path): # type: (str) -> bool
"""Return True if the given path is a content root for this provider."""
diff --git a/test/lib/ansible_test/_internal/provider/source/__init__.py b/test/lib/ansible_test/_internal/provider/source/__init__.py
index fab28b09..359c5d6a 100644
--- a/test/lib/ansible_test/_internal/provider/source/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/source/__init__.py
@@ -1,10 +1,8 @@
"""Common code for source providers."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
-
-from ... import types as t
+import typing as t
from .. import (
PathProvider,
diff --git a/test/lib/ansible_test/_internal/provider/source/git.py b/test/lib/ansible_test/_internal/provider/source/git.py
index 0bf81a1c..96f85dc7 100644
--- a/test/lib/ansible_test/_internal/provider/source/git.py
+++ b/test/lib/ansible_test/_internal/provider/source/git.py
@@ -1,10 +1,8 @@
"""Source provider for a content root managed by git version control."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from ...git import (
Git,
diff --git a/test/lib/ansible_test/_internal/provider/source/installed.py b/test/lib/ansible_test/_internal/provider/source/installed.py
index d24a6e3d..f4ed6f3c 100644
--- a/test/lib/ansible_test/_internal/provider/source/installed.py
+++ b/test/lib/ansible_test/_internal/provider/source/installed.py
@@ -1,10 +1,8 @@
"""Source provider for content which has been installed."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
SourceProvider,
diff --git a/test/lib/ansible_test/_internal/provider/source/unversioned.py b/test/lib/ansible_test/_internal/provider/source/unversioned.py
index cb35fe3d..a78060cf 100644
--- a/test/lib/ansible_test/_internal/provider/source/unversioned.py
+++ b/test/lib/ansible_test/_internal/provider/source/unversioned.py
@@ -1,10 +1,8 @@
"""Fallback source provider when no other provider matches the content root."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from ...constants import (
TIMEOUT_PATH,
diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py
new file mode 100644
index 00000000..a9536036
--- /dev/null
+++ b/test/lib/ansible_test/_internal/provisioning.py
@@ -0,0 +1,196 @@
+"""Provision hosts for running tests."""
+from __future__ import annotations
+
+import atexit
+import dataclasses
+import functools
+import itertools
+import os
+import pickle
+import sys
+import time
+import traceback
+import typing as t
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+ open_binary_file,
+ verify_sys_executable,
+ version_to_str,
+)
+
+from .thread import (
+ WrappedThread,
+)
+
+from .host_profiles import (
+ ControllerHostProfile,
+ DockerProfile,
+ HostProfile,
+ SshConnection,
+ SshTargetHostProfile,
+ create_host_profile,
+)
+
+from .pypi_proxy import (
+ run_pypi_proxy,
+)
+
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
+TEnvironmentConfig = t.TypeVar('TEnvironmentConfig', bound=EnvironmentConfig)
+
+
+class PrimeContainers(ApplicationError):
+ """Exception raised to end execution early after priming containers."""
+
+
+@dataclasses.dataclass(frozen=True)
+class HostState:
+ """State of hosts and profiles to be passed to ansible-test during delegation."""
+ controller_profile: ControllerHostProfile
+ target_profiles: t.List[HostProfile]
+
+ @property
+ def profiles(self): # type: () -> t.List[HostProfile]
+ """Return all the profiles as a list."""
+ return [t.cast(HostProfile, self.controller_profile)] + self.target_profiles
+
+ def serialize(self, path): # type: (str) -> None
+ """Serialize the host state to the given path."""
+ with open_binary_file(path, 'wb') as state_file:
+ pickle.dump(self, state_file)
+
+ @staticmethod
+ def deserialize(args, path): # type: (EnvironmentConfig, str) -> HostState
+ """Deserialize host state from the given args and path."""
+ with open_binary_file(path) as state_file:
+ host_state = pickle.load(state_file) # type: HostState
+
+ host_state.controller_profile.args = args
+
+ for target in host_state.target_profiles:
+ target.args = args
+
+ return host_state
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing all target hosts from the controller."""
+ return list(itertools.chain.from_iterable([target.get_controller_target_connections() for
+ target in self.target_profiles if isinstance(target, SshTargetHostProfile)]))
+
+ def targets(self, profile_type): # type: (t.Type[THostProfile]) -> t.List[THostProfile]
+ """The list of target(s), verified to be of the specified type."""
+ if not self.target_profiles:
+ raise Exception('No target profiles found.')
+
+ if not all(isinstance(target, profile_type) for target in self.target_profiles):
+ raise Exception(f'Target profile(s) are not of the required type: {profile_type}')
+
+ return self.target_profiles
+
+
+def prepare_profiles(
+ args, # type: TEnvironmentConfig
+ targets_use_pypi=False, # type: bool
+ skip_setup=False, # type: bool
+ requirements=None, # type: t.Optional[t.Callable[[TEnvironmentConfig, HostState], None]]
+): # type: (...) -> HostState
+ """
+ Create new profiles, or load existing ones, and return them.
+ If a requirements callback was provided, it will be used before configuring hosts when delegation has already been performed.
+ """
+ if args.host_path:
+ host_state = HostState.deserialize(args, os.path.join(args.host_path, 'state.dat'))
+ else:
+ run_pypi_proxy(args, targets_use_pypi)
+
+ host_state = HostState(
+ controller_profile=t.cast(ControllerHostProfile, create_host_profile(args, args.controller, True)),
+ target_profiles=[create_host_profile(args, target, False) for target in args.targets],
+ )
+
+ if args.prime_containers:
+ for host_profile in host_state.profiles:
+ if isinstance(host_profile, DockerProfile):
+ host_profile.provision()
+
+ raise PrimeContainers()
+
+ atexit.register(functools.partial(cleanup_profiles, host_state))
+
+ def provision(profile): # type: (HostProfile) -> None
+ """Provision the given profile."""
+ profile.provision()
+
+ if not skip_setup:
+ profile.setup()
+
+ dispatch_jobs([(profile, WrappedThread(functools.partial(provision, profile))) for profile in host_state.profiles])
+
+ host_state.controller_profile.configure()
+
+ if not args.delegate:
+ check_controller_python(args, host_state)
+
+ if requirements:
+ requirements(args, host_state)
+
+ def configure(profile): # type: (HostProfile) -> None
+ """Configure the given profile."""
+ profile.wait()
+
+ if not skip_setup:
+ profile.configure()
+
+ dispatch_jobs([(profile, WrappedThread(functools.partial(configure, profile))) for profile in host_state.target_profiles])
+
+ return host_state
+
+
+def check_controller_python(args, host_state): # type: (EnvironmentConfig, HostState) -> None
+ """Check the running environment to make sure it is what we expected."""
+ sys_version = version_to_str(sys.version_info[:2])
+ controller_python = host_state.controller_profile.python
+
+ if expected_executable := verify_sys_executable(controller_python.path):
+ raise ApplicationError(f'Running under Python interpreter "{sys.executable}" instead of "{expected_executable}".')
+
+ expected_version = controller_python.version
+
+ if expected_version != sys_version:
+ raise ApplicationError(f'Running under Python version {sys_version} instead of {expected_version}.')
+
+ args.controller_python = controller_python
+
+
+def cleanup_profiles(host_state): # type: (HostState) -> None
+ """Cleanup provisioned hosts when exiting."""
+ for profile in host_state.profiles:
+ profile.deprovision()
+
+
+def dispatch_jobs(jobs): # type: (t.List[t.Tuple[HostProfile, WrappedThread]]) -> None
+ """Run the given profile job threads and wait for them to complete."""
+ for profile, thread in jobs:
+ thread.daemon = True
+ thread.start()
+
+ while any(thread.is_alive() for profile, thread in jobs):
+ time.sleep(1)
+
+ failed = False
+
+ for profile, thread in jobs:
+ try:
+ thread.wait_for_result()
+ except Exception as ex: # pylint: disable=broad-except
+ display.error(f'Host {profile} job failed: {ex}\n{"".join(traceback.format_tb(ex.__traceback__))}')
+ failed = True
+
+ if failed:
+ raise ApplicationError('Host job(s) failed. See previous error(s) for details.')
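
dispatch_jobs() above depends on WrappedThread (imported from .thread) recording each job's outcome so wait_for_result() can re-raise failures on the main thread. A minimal sketch of that contract, assuming the real class behaves like a standard thread with result capture:

    import threading

    class WrappedThread(threading.Thread):
        """Thread that stores the wrapped function's result or exception for later retrieval."""
        def __init__(self, action):
            super().__init__()
            self.action = action
            self.result = None
            self.exception = None

        def run(self):
            try:
                self.result = self.action()
            except Exception as ex:  # deliberately broad; re-raised in wait_for_result()
                self.exception = ex

        def wait_for_result(self):
            """Block until the thread finishes, then return its result or re-raise its exception."""
            self.join()
            if self.exception is not None:
                raise self.exception
            return self.result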
diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py
new file mode 100644
index 00000000..968794fd
--- /dev/null
+++ b/test/lib/ansible_test/_internal/pypi_proxy.py
@@ -0,0 +1,178 @@
+"""PyPI proxy management."""
+from __future__ import annotations
+
+import atexit
+import os
+import urllib.parse
+
+from .io import (
+ write_text_file,
+)
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .host_configs import (
+ PosixConfig,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+)
+
+from .util_common import (
+ process_scoped_temporary_file,
+)
+
+from .docker_util import (
+ docker_available,
+)
+
+from .containers import (
+ HostType,
+ get_container_database,
+ run_support_container,
+)
+
+from .ansible_util import (
+ run_playbook,
+)
+
+from .host_profiles import (
+ HostProfile,
+)
+
+from .inventory import (
+ create_posix_inventory,
+)
+
+
+def run_pypi_proxy(args, targets_use_pypi): # type: (EnvironmentConfig, bool) -> None
+ """Run a PyPI proxy support container."""
+ if args.pypi_endpoint:
+ return # the user has overridden the proxy endpoint, so there is nothing to provision
+
+ posix_targets = [target for target in args.targets if isinstance(target, PosixConfig)]
+ need_proxy = targets_use_pypi and any(target.python.version == '2.6' for target in posix_targets)
+ use_proxy = args.pypi_proxy or need_proxy
+
+ if not use_proxy:
+ return
+
+ if not docker_available():
+ if args.pypi_proxy:
+ raise ApplicationError('Use of the PyPI proxy was requested, but Docker is not available.')
+
+ display.warning('Unable to use the PyPI proxy because Docker is not available. Installation of packages using `pip` may fail.')
+ return
+
+ image = 'quay.io/ansible/pypi-test-container:1.0.0'
+ port = 3141
+
+ run_support_container(
+ args=args,
+ context='__pypi_proxy__',
+ image=image,
+ name=f'pypi-test-container-{args.session_name}',
+ ports=[port],
+ )
+
+
+def configure_pypi_proxy(args, profile): # type: (EnvironmentConfig, HostProfile) -> None
+ """Configure the environment to use a PyPI proxy, if present."""
+ if args.pypi_endpoint:
+ pypi_endpoint = args.pypi_endpoint
+ else:
+ containers = get_container_database(args)
+ context = containers.data.get(HostType.control if profile.controller else HostType.managed, {}).get('__pypi_proxy__')
+
+ if not context:
+ return # proxy not configured
+
+ access = list(context.values())[0]
+
+ host = access.host_ip
+ port = dict(access.port_map())[3141]
+
+ pypi_endpoint = f'http://{host}:{port}/root/pypi/+simple/'
+
+ pypi_hostname = urllib.parse.urlparse(pypi_endpoint)[1].split(':')[0]
+
+ if profile.controller:
+ configure_controller_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname)
+ else:
+ configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname)
+
+
+def configure_controller_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure the controller environment to use a PyPI proxy."""
+ configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname)
+ configure_pypi_proxy_easy_install(args, profile, pypi_endpoint)
+
+
+def configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure the target environment to use a PyPI proxy."""
+ inventory_path = process_scoped_temporary_file(args)
+
+ create_posix_inventory(args, inventory_path, [profile])
+
+ def cleanup_pypi_proxy():
+ """Undo changes made to configure the PyPI proxy."""
+ run_playbook(args, inventory_path, 'pypi_proxy_restore.yml', capture=True)
+
+ force = 'yes' if profile.config.is_managed else 'no'
+
+ run_playbook(args, inventory_path, 'pypi_proxy_prepare.yml', dict(pypi_endpoint=pypi_endpoint, pypi_hostname=pypi_hostname, force=force), capture=True)
+
+ atexit.register(cleanup_pypi_proxy)
+
+
+def configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure a custom index for pip based installs."""
+ pip_conf_path = os.path.expanduser('~/.pip/pip.conf')
+ pip_conf = '''
+[global]
+index-url = {0}
+trusted-host = {1}
+'''.format(pypi_endpoint, pypi_hostname).strip()
+
+ def pip_conf_cleanup(): # type: () -> None
+ """Remove custom pip PyPI config."""
+ display.info('Removing custom PyPI config: %s' % pip_conf_path, verbosity=1)
+ os.remove(pip_conf_path)
+
+ if os.path.exists(pip_conf_path) and not profile.config.is_managed:
+ raise ApplicationError('Refusing to overwrite existing file: %s' % pip_conf_path)
+
+ display.info('Injecting custom PyPI config: %s' % pip_conf_path, verbosity=1)
+ display.info('Config: %s\n%s' % (pip_conf_path, pip_conf), verbosity=3)
+
+ if not args.explain:
+ write_text_file(pip_conf_path, pip_conf, True)
+ atexit.register(pip_conf_cleanup)
+
+
+def configure_pypi_proxy_easy_install(args, profile, pypi_endpoint): # type: (EnvironmentConfig, HostProfile, str) -> None
+ """Configure a custom index for easy_install based installs."""
+ pydistutils_cfg_path = os.path.expanduser('~/.pydistutils.cfg')
+ pydistutils_cfg = '''
+[easy_install]
+index_url = {0}
+'''.format(pypi_endpoint).strip()
+
+ if os.path.exists(pydistutils_cfg_path) and not profile.config.is_managed:
+ raise ApplicationError('Refusing to overwrite existing file: %s' % pydistutils_cfg_path)
+
+ def pydistutils_cfg_cleanup(): # type: () -> None
+ """Remove custom PyPI config."""
+ display.info('Removing custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
+ os.remove(pydistutils_cfg_path)
+
+ display.info('Injecting custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
+ display.info('Config: %s\n%s' % (pydistutils_cfg_path, pydistutils_cfg), verbosity=3)
+
+ if not args.explain:
+ write_text_file(pydistutils_cfg_path, pydistutils_cfg, True)
+ atexit.register(pydistutils_cfg_cleanup)
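
For a proxy container reachable at 172.17.0.2 (an illustrative address) with container port 3141 mapped straight through, the two generated config files would look like:

    # ~/.pip/pip.conf
    [global]
    index-url = http://172.17.0.2:3141/root/pypi/+simple/
    trusted-host = 172.17.0.2

    # ~/.pydistutils.cfg
    [easy_install]
    index_url = http://172.17.0.2:3141/root/pypi/+simple/

Both files are removed again by the atexit-registered cleanup functions, so a reused host is left unchanged.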
diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py
new file mode 100644
index 00000000..8fca7834
--- /dev/null
+++ b/test/lib/ansible_test/_internal/python_requirements.py
@@ -0,0 +1,482 @@
+"""Python requirements management"""
+from __future__ import annotations
+
+import base64
+import dataclasses
+import json
+import os
+import re
+import typing as t
+
+from .constants import (
+ COVERAGE_REQUIRED_VERSION,
+)
+
+from .encoding import (
+ to_text,
+ to_bytes,
+)
+
+from .io import (
+ read_text_file,
+)
+
+from .util import (
+ ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
+ SubprocessError,
+ display,
+ find_executable,
+ raw_command,
+ str_to_version,
+ version_to_str,
+)
+
+from .util_common import (
+ check_pyyaml,
+ create_result_directories,
+)
+
+from .config import (
+ EnvironmentConfig,
+ IntegrationConfig,
+ UnitsConfig,
+)
+
+from .data import (
+ data_context,
+)
+
+from .host_configs import (
+ PosixConfig,
+ PythonConfig,
+)
+
+from .connections import (
+ LocalConnection,
+ Connection,
+)
+
+QUIET_PIP_SCRIPT_PATH = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'quiet_pip.py')
+REQUIREMENTS_SCRIPT_PATH = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'requirements.py')
+
+
+# Pip Abstraction
+
+
+@dataclasses.dataclass(frozen=True)
+class PipCommand:
+ """Base class for pip commands."""""
+
+ def serialize(self): # type: () -> t.Tuple[str, t.Dict[str, t.Any]]
+ """Return a serialized representation of this command."""
+ name = type(self).__name__[3:].lower()
+ return name, self.__dict__
+
+
+@dataclasses.dataclass(frozen=True)
+class PipInstall(PipCommand):
+ """Details required to perform a pip install."""
+ requirements: t.List[t.Tuple[str, str]]
+ constraints: t.List[t.Tuple[str, str]]
+ packages: t.List[str]
+
+ def has_package(self, name): # type: (str) -> bool
+ """Return True if the specified package will be installed, otherwise False."""
+ name = name.lower()
+
+ return (any(name in package.lower() for package in self.packages) or
+ any(name in contents.lower() for path, contents in self.requirements))
+
+
+@dataclasses.dataclass(frozen=True)
+class PipUninstall(PipCommand):
+ """Details required to perform a pip uninstall."""
+ packages: t.List[str]
+ ignore_errors: bool
+
+
+# Entry Points
+
+
+def install_requirements(
+ args, # type: EnvironmentConfig
+ python, # type: PythonConfig
+ ansible=False, # type: bool
+ command=False, # type: bool
+ coverage=False, # type: bool
+ virtualenv=False, # type: bool
+ connection=None, # type: t.Optional[Connection]
+): # type: (...) -> None
+ """Install requirements for the given Python using the specified arguments."""
+ create_result_directories(args)
+
+ controller = not connection
+
+ if not requirements_allowed(args, controller):
+ return
+
+ if command and isinstance(args, (UnitsConfig, IntegrationConfig)) and args.coverage:
+ coverage = True
+
+ cryptography = False
+
+ if ansible:
+ try:
+ ansible_cache = install_requirements.ansible_cache
+ except AttributeError:
+ ansible_cache = install_requirements.ansible_cache = {}
+
+ ansible_installed = ansible_cache.get(python.path)
+
+ if ansible_installed:
+ ansible = False
+ else:
+ ansible_cache[python.path] = True
+
+ # Install the latest cryptography version that the current requirements can support if it is not already available.
+ # This avoids downgrading cryptography when OS packages provide a newer version than we are able to install using pip.
+ # If not installed here, later install commands may try to install a version of cryptography which cannot be installed.
+ cryptography = not is_cryptography_available(python.path)
+
+ commands = collect_requirements(
+ python=python,
+ controller=controller,
+ ansible=ansible,
+ cryptography=cryptography,
+ command=args.command if command else None,
+ coverage=coverage,
+ virtualenv=virtualenv,
+ minimize=False,
+ sanity=None,
+ )
+
+ if not commands:
+ return
+
+ run_pip(args, python, commands, connection)
+
+ if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands):
+ check_pyyaml(python)
+
+
+def collect_requirements(
+ python, # type: PythonConfig
+ controller, # type: bool
+ ansible, # type: bool
+ cryptography, # type: bool
+ coverage, # type: bool
+ virtualenv, # type: bool
+ minimize, # type: bool
+ command, # type: t.Optional[str]
+ sanity, # type: t.Optional[str]
+): # type: (...) -> t.List[PipCommand]
+ """Collect requirements for the given Python using the specified arguments."""
+ commands = [] # type: t.List[PipCommand]
+
+ if virtualenv:
+ commands.extend(collect_package_install(packages=['virtualenv']))
+
+ if coverage:
+ commands.extend(collect_package_install(packages=[f'coverage=={COVERAGE_REQUIRED_VERSION}'], constraints=False))
+
+ if cryptography:
+ commands.extend(collect_package_install(packages=get_cryptography_requirements(python)))
+
+ if ansible or command:
+ commands.extend(collect_general_install(command, ansible))
+
+ if sanity:
+ commands.extend(collect_sanity_install(sanity))
+
+ if command == 'units':
+ commands.extend(collect_units_install())
+
+ if command in ('integration', 'windows-integration', 'network-integration'):
+ commands.extend(collect_integration_install(command, controller))
+
+ if minimize:
+ # In some environments pkg_resources is installed as a separate pip package which needs to be removed.
+ # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
+ # However, a venv is created with an additional pkg-resources package which is independent of setuptools.
+ # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
+ # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
+ # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
+ commands.extend(collect_uninstall(packages=['pkg-resources'], ignore_errors=True))
+ commands.extend(collect_uninstall(packages=['setuptools', 'pip']))
+
+ return commands
+
+
+def run_pip(
+ args, # type: EnvironmentConfig
+ python, # type: PythonConfig
+ commands, # type: t.List[PipCommand]
+ connection, # type: t.Optional[Connection]
+): # type: (...) -> None
+ """Run the specified pip commands for the given Python, and optionally the specified host."""
+ connection = connection or LocalConnection(args)
+ script = prepare_pip_script(commands)
+
+ if not args.explain:
+ connection.run([python.path], data=script)
+
+
+# Collect
+
+
+def collect_general_install(
+ command=None, # type: t.Optional[str]
+ ansible=False, # type: bool
+): # type: (...) -> t.List[PipInstall]
+ """Return details necessary for the specified general-purpose pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ if ansible:
+ path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'ansible.txt')
+ requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+ if command:
+ path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', f'{command}.txt')
+ requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_package_install(packages, constraints=True): # type: (t.List[str], bool) -> t.List[PipInstall]
+ """Return the details necessary to install the specified packages."""
+ return collect_install([], [], packages, constraints=constraints)
+
+
+def collect_sanity_install(sanity): # type: (str) -> t.List[PipInstall]
+ """Return the details necessary for the specified sanity pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', f'sanity.{sanity}.txt')
+ requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+ if data_context().content.is_ansible:
+ path = os.path.join(data_context().content.sanity_path, 'code-smell', f'{sanity}.requirements.txt')
+ requirements_paths.append((data_context().content.root, path))
+
+ return collect_install(requirements_paths, constraints_paths, constraints=False)
+
+
+def collect_units_install(): # type: () -> t.List[PipInstall]
+ """Return details necessary for the specified units pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ path = os.path.join(data_context().content.unit_path, 'requirements.txt')
+ requirements_paths.append((data_context().content.root, path))
+
+ path = os.path.join(data_context().content.unit_path, 'constraints.txt')
+ constraints_paths.append((data_context().content.root, path))
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_integration_install(command, controller): # type: (str, bool) -> t.List[PipInstall]
+ """Return details necessary for the specified integration pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ # Support for prefixed files was added to ansible-test in ansible-core 2.12 when split controller/target testing was implemented.
+ # Previous versions of ansible-test only recognize non-prefixed files.
+ # If a prefixed file exists (even if empty), it takes precedence over the non-prefixed file.
+ prefixes = ('controller.' if controller else 'target.', '')
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{prefix}requirements.txt')
+
+ if os.path.exists(path):
+ requirements_paths.append((data_context().content.root, path))
+ break
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{command}.{prefix}requirements.txt')
+
+ if os.path.exists(path):
+ requirements_paths.append((data_context().content.root, path))
+ break
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{prefix}constraints.txt')
+
+ if os.path.exists(path):
+ constraints_paths.append((data_context().content.root, path))
+ break
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_install(
+ requirements_paths, # type: t.List[t.Tuple[str, str]]
+ constraints_paths, # type: t.List[t.Tuple[str, str]]
+ packages=None, # type: t.Optional[t.List[str]]
+ constraints=True, # type: bool
+) -> t.List[PipInstall]:
+ """Build a pip install list from the given requirements, constraints and packages."""
+ # listing content constraints first gives them priority over constraints provided by ansible-test
+ constraints_paths = list(constraints_paths)
+
+ if constraints:
+ constraints_paths.append((ANSIBLE_TEST_DATA_ROOT, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')))
+
+ requirements = [(os.path.relpath(path, root), read_text_file(path)) for root, path in requirements_paths if usable_pip_file(path)]
+ constraints = [(os.path.relpath(path, root), read_text_file(path)) for root, path in constraints_paths if usable_pip_file(path)]
+ packages = packages or []
+
+ if requirements or packages:
+ installs = [PipInstall(
+ requirements=requirements,
+ constraints=constraints,
+ packages=packages,
+ )]
+ else:
+ installs = []
+
+ return installs
+
+
+def collect_uninstall(packages, ignore_errors=False): # type: (t.List[str], bool) -> t.List[PipUninstall]
+ """Return the details necessary for the specified pip uninstall."""
+ uninstall = PipUninstall(
+ packages=packages,
+ ignore_errors=ignore_errors,
+ )
+
+ return [uninstall]
+
+
+# Support
+
+
+def requirements_allowed(args, controller): # type: (EnvironmentConfig, bool) -> bool
+ """
+ Return True if requirements can be installed, otherwise return False.
+
+ Requirements are only allowed if one of the following conditions is met:
+
+ The user specified --requirements manually.
+ The install will occur on the controller and the controller or controller Python is managed by ansible-test.
+ The install will occur on the target and the target or target Python is managed by ansible-test.
+ """
+ if args.requirements:
+ return True
+
+ if controller:
+ return args.controller.is_managed or args.controller.python.is_managed
+
+ target = args.only_targets(PosixConfig)[0]
+
+ return target.is_managed or target.python.is_managed
+
+
+def prepare_pip_script(commands): # type: (t.List[PipCommand]) -> str
+ """Generate a Python script to perform the requested pip commands."""
+ data = [command.serialize() for command in commands]
+
+ display.info(f'>>> Requirements Commands\n{json.dumps(data, indent=4)}', verbosity=3)
+
+ args = dict(
+ script=read_text_file(QUIET_PIP_SCRIPT_PATH),
+ verbosity=display.verbosity,
+ commands=data,
+ )
+
+ payload = to_text(base64.b64encode(to_bytes(json.dumps(args))))
+ path = REQUIREMENTS_SCRIPT_PATH
+ template = read_text_file(path)
+ script = template.format(payload=payload)
+
+ display.info(f'>>> Python Script from Template ({path})\n{script.strip()}', verbosity=4)
+
+ return script
+
+
+def usable_pip_file(path): # type: (t.Optional[str]) -> bool
+ """Return True if the specified pip file is usable, otherwise False."""
+ return bool(path and os.path.exists(path) and os.path.getsize(path))
+
+
+# Cryptography
+
+
+def is_cryptography_available(python): # type: (str) -> bool
+ """Return True if cryptography is available for the given python."""
+ try:
+ raw_command([python, '-c', 'import cryptography'], capture=True)
+ except SubprocessError:
+ return False
+
+ return True
+
+
+def get_cryptography_requirements(python): # type: (PythonConfig) -> t.List[str]
+ """
+ Return the correct cryptography and pyopenssl requirements for the given python version.
+ The version of cryptography installed depends on the python version and openssl version.
+ """
+ openssl_version = get_openssl_version(python)
+
+ if openssl_version and openssl_version < (1, 1, 0):
+ # cryptography 3.2 requires openssl 1.1.x or later
+ # see https://cryptography.io/en/latest/changelog.html#v3-2
+ cryptography = 'cryptography < 3.2'
+ # pyopenssl 20.0.0 requires cryptography 3.2 or later
+ pyopenssl = 'pyopenssl < 20.0.0'
+ else:
+ # cryptography 3.4+ fails to install on many systems
+ # this is a temporary work-around until a more permanent solution is available
+ cryptography = 'cryptography < 3.4'
+ # no specific version of pyopenssl required, don't install it
+ pyopenssl = None
+
+ requirements = [
+ cryptography,
+ pyopenssl,
+ ]
+
+ requirements = [requirement for requirement in requirements if requirement]
+
+ return requirements
+
+
+def get_openssl_version(python): # type: (PythonConfig) -> t.Optional[t.Tuple[int, ...]]
+ """Return the openssl version."""
+ if not python.version.startswith('2.'):
+ # OpenSSL version checking only works on Python 3.x.
+ # This should be the most accurate, since it is the Python we will be using.
+ version = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'sslcheck.py')], capture=True)[0])['version']
+
+ if version:
+ display.info(f'Detected OpenSSL version {version_to_str(version)} under Python {python.version}.', verbosity=1)
+
+ return tuple(version)
+
+ # Fall back to detecting the OpenSSL version from the CLI.
+ # This should provide an adequate solution on Python 2.x.
+ openssl_path = find_executable('openssl', required=False)
+
+ if openssl_path:
+ try:
+ result = raw_command([openssl_path, 'version'], capture=True)[0]
+ except SubprocessError:
+ result = ''
+
+ match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)
+
+ if match:
+ version = str_to_version(match.group('version'))
+
+ display.info(f'Detected OpenSSL version {version_to_str(version)} using the openssl CLI.', verbosity=1)
+
+ return version
+
+ display.info('Unable to detect OpenSSL version.', verbosity=1)
+
+ return None
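
PipCommand.serialize() above derives the command name from the class name (PipInstall becomes 'install') and pairs it with the dataclass fields; prepare_pip_script() then embeds that data in the generated payload. A short sketch using the classes defined in this file:

    command = PipUninstall(packages=['setuptools', 'pip'], ignore_errors=False)

    name, options = command.serialize()
    # name == 'uninstall'
    # options == {'packages': ['setuptools', 'pip'], 'ignore_errors': False}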
diff --git a/test/lib/ansible_test/_internal/sanity/import.py b/test/lib/ansible_test/_internal/sanity/import.py
deleted file mode 100644
index 34730635..00000000
--- a/test/lib/ansible_test/_internal/sanity/import.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""Sanity test for proper import exception handling."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from .. import types as t
-
-from ..sanity import (
- SanityMultipleVersion,
- SanityMessage,
- SanityFailure,
- SanitySuccess,
- SanitySkipped,
- SANITY_ROOT,
-)
-
-from ..target import (
- TestTarget,
-)
-
-from ..util import (
- ANSIBLE_TEST_DATA_ROOT,
- SubprocessError,
- remove_tree,
- display,
- parse_to_list_of_dict,
- is_subdir,
- generate_pip_command,
- find_python,
- get_hash,
- REMOTE_ONLY_PYTHON_VERSIONS,
-)
-
-from ..util_common import (
- intercept_command,
- run_command,
- ResultType,
-)
-
-from ..ansible_util import (
- ansible_environment,
-)
-
-from ..executor import (
- generate_pip_install,
- install_cryptography,
-)
-
-from ..config import (
- SanityConfig,
-)
-
-from ..coverage_util import (
- coverage_context,
-)
-
-from ..venv import (
- create_virtual_environment,
-)
-
-from ..data import (
- data_context,
-)
-
-
-def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool]
- """Create a predicate which tests whether a path can be used by modules or not."""
- module_path = data_context().content.module_path
- module_utils_path = data_context().content.module_utils_path
- if module_restrictions:
- return lambda path: is_subdir(path, module_path) or is_subdir(path, module_utils_path)
- return lambda path: not (is_subdir(path, module_path) or is_subdir(path, module_utils_path))
-
-
-class ImportTest(SanityMultipleVersion):
- """Sanity test for proper import exception handling."""
- def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
- """Return the given list of test targets, filtered to include only those relevant for the test."""
- return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
- any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]
-
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- capture_pip = args.verbosity < 2
-
- python = find_python(python_version)
-
- if python_version.startswith('2.') and args.requirements:
- # hack to make sure that virtualenv is available under Python 2.x
- # on Python 3.x we can use the built-in venv
- pip = generate_pip_command(python)
- run_command(args, generate_pip_install(pip, '', packages=['virtualenv']), capture=capture_pip)
-
- settings = self.load_processor(args, python_version)
-
- paths = [target.path for target in targets.include]
-
- env = ansible_environment(args, color=False)
-
- temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
-
- messages = []
-
- for import_type, test, add_ansible_requirements in (
- ('module', _get_module_test(True), False),
- ('plugin', _get_module_test(False), True),
- ):
- if import_type == 'plugin' and python_version in REMOTE_ONLY_PYTHON_VERSIONS:
- continue
-
- data = '\n'.join([path for path in paths if test(path)])
- if not data:
- continue
-
- requirements_file = None
-
- # create a clean virtual environment to minimize the available imports beyond the python standard library
- virtual_environment_dirname = 'minimal-py%s' % python_version.replace('.', '')
- if add_ansible_requirements:
- requirements_file = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.import-plugins.txt')
- virtual_environment_dirname += '-requirements-%s' % get_hash(requirements_file)
- virtual_environment_path = os.path.join(temp_root, virtual_environment_dirname)
- virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
-
- remove_tree(virtual_environment_path)
-
- if not create_virtual_environment(args, python_version, virtual_environment_path):
- display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version))
- return SanitySkipped(self.name, python_version)
-
- # add the importer to our virtual environment so it can be accessed through the coverage injector
- importer_path = os.path.join(virtual_environment_bin, 'importer.py')
- yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
- if not args.explain:
- os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
- os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)
-
- # activate the virtual environment
- env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
-
- env.update(
- SANITY_TEMP_PATH=ResultType.TMP.path,
- SANITY_IMPORTER_TYPE=import_type,
- )
-
- if data_context().content.collection:
- env.update(
- SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
- SANITY_EXTERNAL_PYTHON=python,
- )
-
- virtualenv_python = os.path.join(virtual_environment_bin, 'python')
- virtualenv_pip = generate_pip_command(virtualenv_python)
-
- # make sure requirements are installed if needed
- if requirements_file:
- install_cryptography(args, virtualenv_python, python_version, virtualenv_pip)
- run_command(args, generate_pip_install(virtualenv_pip, 'sanity', context='import-plugins'), env=env, capture=capture_pip)
-
- # make sure coverage is available in the virtual environment if needed
- if args.coverage:
- run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip)
- run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip)
-
- try:
- # In some environments pkg_resources is installed as a separate pip package which needs to be removed.
- # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
- # However, a venv is created with an additional pkg-resources package which is independent of setuptools.
- # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
- # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
- # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip)
- except SubprocessError:
- pass
-
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip)
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip)
-
- display.info(import_type + ': ' + data, verbosity=4)
-
- cmd = ['importer.py']
-
- try:
- with coverage_context(args):
- stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version,
- virtualenv=virtualenv_python)
-
- if stdout or stderr:
- raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
- except SubprocessError as ex:
- if ex.status != 10 or ex.stderr or not ex.stdout:
- raise
-
- pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
-
- parsed = parse_to_list_of_dict(pattern, ex.stdout)
-
- relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep
-
- messages += [SanityMessage(
- message=r['message'],
- path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
- line=int(r['line']),
- column=int(r['column']),
- ) for r in parsed]
-
- results = settings.process_errors(messages, paths)
-
- if results:
- return SanityFailure(self.name, messages=results, python_version=python_version)
-
- return SanitySuccess(self.name, python_version=python_version)
diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py
new file mode 100644
index 00000000..21212dc1
--- /dev/null
+++ b/test/lib/ansible_test/_internal/ssh.py
@@ -0,0 +1,258 @@
+"""High level functions for working with SSH."""
+from __future__ import annotations
+
+import dataclasses
+import json
+import os
+import random
+import re
+import subprocess
+import shlex
+import typing as t
+
+from .encoding import (
+ to_bytes,
+ to_text,
+)
+
+from .util import (
+ ApplicationError,
+ common_environment,
+ display,
+ exclude_none_values,
+ sanitize_host_name,
+)
+
+from .config import (
+ EnvironmentConfig,
+)
+
+
+@dataclasses.dataclass
+class SshConnectionDetail:
+ """Information needed to establish an SSH connection to a host."""
+ name: str
+ host: str
+ port: t.Optional[int]
+ user: str
+ identity_file: str
+ python_interpreter: t.Optional[str] = None
+ shell_type: t.Optional[str] = None
+
+ def __post_init__(self):
+ self.name = sanitize_host_name(self.name)
+
+
+class SshProcess:
+ """Wrapper around an SSH process."""
+ def __init__(self, process): # type: (t.Optional[subprocess.Popen]) -> None
+ self._process = process
+ self.pending_forwards = None # type: t.Optional[t.Set[t.Tuple[str, int]]]
+
+ self.forwards = {} # type: t.Dict[t.Tuple[str, int], int]
+
+ def terminate(self): # type: () -> None
+ """Terminate the SSH process."""
+ if not self._process:
+ return # explain mode
+
+ # noinspection PyBroadException
+ try:
+ self._process.terminate()
+ except Exception: # pylint: disable=broad-except
+ pass
+
+ def wait(self): # type: () -> None
+ """Wait for the SSH process to terminate."""
+ if not self._process:
+ return # explain mode
+
+ self._process.wait()
+
+ def collect_port_forwards(self): # type: (SshProcess) -> t.Dict[t.Tuple[str, int], int]
+ """Collect port assignments for dynamic SSH port forwards."""
+ errors = []
+
+ display.info('Collecting %d SSH port forward(s).' % len(self.pending_forwards), verbosity=2)
+
+ while self.pending_forwards:
+ if self._process:
+ line_bytes = self._process.stderr.readline()
+
+ if not line_bytes:
+ if errors:
+ details = ':\n%s' % '\n'.join(errors)
+ else:
+ details = '.'
+
+ raise ApplicationError('SSH port forwarding failed%s' % details)
+
+ line = to_text(line_bytes).strip()
+
+ match = re.search(r'^Allocated port (?P<src_port>[0-9]+) for remote forward to (?P<dst_host>[^:]+):(?P<dst_port>[0-9]+)$', line)
+
+ if not match:
+ if re.search(r'^Warning: Permanently added .* to the list of known hosts\.$', line):
+ continue
+
+ display.warning('Unexpected SSH port forwarding output: %s' % line, verbosity=2)
+
+ errors.append(line)
+ continue
+
+ src_port = int(match.group('src_port'))
+ dst_host = str(match.group('dst_host'))
+ dst_port = int(match.group('dst_port'))
+
+ dst = (dst_host, dst_port)
+ else:
+ # explain mode
+ dst = list(self.pending_forwards)[0]
+ src_port = random.randint(40000, 50000)
+
+ self.pending_forwards.remove(dst)
+ self.forwards[dst] = src_port
+
+ display.info('Collected %d SSH port forward(s):\n%s' % (
+ len(self.forwards), '\n'.join('%s -> %s:%s' % (src_port, dst[0], dst[1]) for dst, src_port in sorted(self.forwards.items()))), verbosity=2)
+
+ return self.forwards
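
For reference, the stderr line matched by the regular expression above looks like the following (the port numbers are hypothetical; the message format is the one the regex expects from ssh):

    import re

    line = 'Allocated port 45678 for remote forward to localhost:8080'
    match = re.search(r'^Allocated port (?P<src_port>[0-9]+) for remote forward '
                      r'to (?P<dst_host>[^:]+):(?P<dst_port>[0-9]+)$', line)
    assert match and int(match.group('src_port')) == 45678
    assert (match.group('dst_host'), int(match.group('dst_port'))) == ('localhost', 8080)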
+
+
+def create_ssh_command(
+ ssh, # type: SshConnectionDetail
+ options=None, # type: t.Optional[t.Dict[str, t.Union[str, int]]]
+ cli_args=None, # type: t.List[str]
+ command=None, # type: t.Optional[str]
+): # type: (...) -> t.List[str]
+ """Create an SSH command using the specified options."""
+ cmd = [
+ 'ssh',
+ '-n', # prevent reading from stdin
+ '-i', ssh.identity_file, # file from which the identity for public key authentication is read
+ ]
+
+ if not command:
+ cmd.append('-N') # do not execute a remote command
+
+ if ssh.port:
+ cmd.extend(['-p', str(ssh.port)]) # port to connect to on the remote host
+
+ if ssh.user:
+ cmd.extend(['-l', ssh.user]) # user to log in as on the remote machine
+
+ ssh_options = dict(
+ BatchMode='yes',
+ ExitOnForwardFailure='yes',
+ LogLevel='ERROR',
+ ServerAliveCountMax=4,
+ ServerAliveInterval=15,
+ StrictHostKeyChecking='no',
+ UserKnownHostsFile='/dev/null',
+ )
+
+ ssh_options.update(options or {})
+
+ for key, value in sorted(ssh_options.items()):
+ cmd.extend(['-o', '='.join([key, str(value)])])
+
+ cmd.extend(cli_args or [])
+ cmd.append(ssh.host)
+
+ if command:
+ cmd.append(command)
+
+ return cmd
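
A usage sketch for the function above (the SshConnectionDetail values are hypothetical):

    detail = SshConnectionDetail(name='example', host='10.0.0.5', port=2222,
                                 user='root', identity_file='/tmp/id_rsa')
    cmd = create_ssh_command(detail)
    # cmd == ['ssh', '-n', '-i', '/tmp/id_rsa', '-N', '-p', '2222', '-l', 'root',
    #         '-o', 'BatchMode=yes', ..., '-o', 'UserKnownHostsFile=/dev/null', '10.0.0.5']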
+
+
+def run_ssh_command(
+ args, # type: EnvironmentConfig
+ ssh, # type: SshConnectionDetail
+ options=None, # type: t.Optional[t.Dict[str, t.Union[str, int]]]
+ cli_args=None, # type: t.List[str]
+ command=None, # type: t.Optional[str]
+): # type: (...) -> SshProcess
+ """Run the specified SSH command, returning the created SshProcess instance created."""
+ cmd = create_ssh_command(ssh, options, cli_args, command)
+ env = common_environment()
+
+ cmd_show = ' '.join([shlex.quote(c) for c in cmd])
+ display.info('Run background command: %s' % cmd_show, verbosity=1, truncate=True)
+
+ cmd_bytes = [to_bytes(c) for c in cmd]
+ env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items())
+
+ if args.explain:
+ process = SshProcess(None)
+ else:
+ process = SshProcess(subprocess.Popen(cmd_bytes, env=env_bytes, bufsize=-1, # pylint: disable=consider-using-with
+ stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
+
+ return process
+
+
+def create_ssh_port_forwards(
+ args, # type: EnvironmentConfig
+ ssh, # type: SshConnectionDetail
+ forwards, # type: t.List[t.Tuple[str, int]]
+): # type: (...) -> SshProcess
+ """
+ Create SSH port forwards using the provided list of tuples (target_host, target_port).
+ Port bindings will be automatically assigned by SSH and must be collected with a subsequent call to collect_port_forwards.
+ """
+ options = dict(
+ LogLevel='INFO', # info level required to get messages on stderr indicating the ports assigned to each forward
+ )
+
+ cli_args = []
+
+ for forward_host, forward_port in forwards:
+ cli_args.extend(['-R', ':'.join([str(0), forward_host, str(forward_port)])])
+
+ process = run_ssh_command(args, ssh, options, cli_args)
+ process.pending_forwards = forwards
+
+ return process
+
+
+def create_ssh_port_redirects(
+ args, # type: EnvironmentConfig
+ ssh, # type: SshConnectionDetail
+ redirects, # type: t.List[t.Tuple[int, str, int]]
+): # type: (...) -> SshProcess
+ """Create SSH port redirections using the provided list of tuples (bind_port, target_host, target_port)."""
+ options = {}
+ cli_args = []
+
+ for bind_port, target_host, target_port in redirects:
+ cli_args.extend(['-R', ':'.join([str(bind_port), target_host, str(target_port)])])
+
+ process = run_ssh_command(args, ssh, options, cli_args)
+
+ return process
+
+
+def generate_ssh_inventory(ssh_connections): # type: (t.List[SshConnectionDetail]) -> str
+ """Return an inventory file in JSON format, created from the provided SSH connection details."""
+ inventory = dict(
+ all=dict(
+ hosts=dict((ssh.name, exclude_none_values(dict(
+ ansible_host=ssh.host,
+ ansible_port=ssh.port,
+ ansible_user=ssh.user,
+ ansible_ssh_private_key_file=os.path.abspath(ssh.identity_file),
+ ansible_connection='ssh',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=ssh.python_interpreter,
+ ansible_shell_type=ssh.shell_type,
+ ansible_ssh_extra_args='-o UserKnownHostsFile=/dev/null', # avoid changing the test environment
+ ansible_ssh_host_key_checking='no',
+ ))) for ssh in ssh_connections),
+ ),
+ )
+
+ inventory_text = json.dumps(inventory, indent=4, sort_keys=True)
+
+ display.info('>>> SSH Inventory\n%s' % inventory_text, verbosity=3)
+
+ return inventory_text
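
Using the same hypothetical connection detail as above, the generated inventory has roughly this shape (None-valued fields such as ansible_python_interpreter are dropped by exclude_none_values, and keys are sorted):

    {
        "all": {
            "hosts": {
                "example": {
                    "ansible_connection": "ssh",
                    "ansible_host": "10.0.0.5",
                    "ansible_pipelining": "yes",
                    "ansible_port": 2222,
                    "ansible_ssh_extra_args": "-o UserKnownHostsFile=/dev/null",
                    "ansible_ssh_host_key_checking": "no",
                    "ansible_ssh_private_key_file": "/tmp/id_rsa",
                    "ansible_user": "root"
                }
            }
        }
    }
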
diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py
index ad6e5ad2..ced111f7 100644
--- a/test/lib/ansible_test/_internal/target.py
+++ b/test/lib/ansible_test/_internal/target.py
@@ -1,14 +1,13 @@
"""Test target identification, iteration and inclusion/exclusion."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
+import enum
import os
import re
import itertools
import abc
-
-from . import types as t
+import typing as t
from .encoding import (
to_bytes,
@@ -28,45 +27,24 @@ from .util import (
from .data import (
data_context,
+ content_plugins,
)
MODULE_EXTENSIONS = '.py', '.ps1'
-try:
- # noinspection PyTypeChecker
- TCompletionTarget = t.TypeVar('TCompletionTarget', bound='CompletionTarget')
-except AttributeError:
- TCompletionTarget = None # pylint: disable=invalid-name
-
-try:
- # noinspection PyTypeChecker
- TIntegrationTarget = t.TypeVar('TIntegrationTarget', bound='IntegrationTarget')
-except AttributeError:
- TIntegrationTarget = None # pylint: disable=invalid-name
-
-def find_target_completion(target_func, prefix):
- """
- :type target_func: () -> collections.Iterable[CompletionTarget]
- :type prefix: unicode
- :rtype: list[str]
- """
+def find_target_completion(target_func, prefix, short): # type: (t.Callable[[], t.Iterable[CompletionTarget]], str, bool) -> t.List[str]
+ """Return a list of targets from the given target function which match the given prefix."""
try:
targets = target_func()
- short = os.environ.get('COMP_TYPE') == '63' # double tab completion from bash
- matches = walk_completion_targets(targets, prefix, short)
+ matches = list(walk_completion_targets(targets, prefix, short))
return matches
except Exception as ex: # pylint: disable=locally-disabled, broad-except
return [u'%s' % ex]
-def walk_completion_targets(targets, prefix, short=False):
- """
- :type targets: collections.Iterable[CompletionTarget]
- :type prefix: str
- :type short: bool
- :rtype: tuple[str]
- """
+def walk_completion_targets(targets, prefix, short=False): # type: (t.Iterable[CompletionTarget], str, bool) -> t.Tuple[str, ...]
+ """Return a tuple of targets from the given target iterable which match the given prefix."""
aliases = set(alias for target in targets for alias in target.aliases)
if prefix.endswith('/') and prefix in aliases:
@@ -85,24 +63,23 @@ def walk_completion_targets(targets, prefix, short=False):
return tuple(sorted(matches))
-def walk_internal_targets(targets, includes=None, excludes=None, requires=None):
- """
- :type targets: collections.Iterable[T <= CompletionTarget]
- :type includes: list[str]
- :type excludes: list[str]
- :type requires: list[str]
- :rtype: tuple[T <= CompletionTarget]
- """
+def walk_internal_targets(
+ targets, # type: t.Iterable[TCompletionTarget]
+ includes=None, # type: t.Optional[t.List[str]]
+ excludes=None, # type: t.Optional[t.List[str]]
+ requires=None, # type: t.Optional[t.List[str]]
+): # type: (...) -> t.Tuple[TCompletionTarget, ...]
+ """Return a tuple of matching completion targets."""
targets = tuple(targets)
- include_targets = sorted(filter_targets(targets, includes, errors=True, directories=False), key=lambda include_target: include_target.name)
+ include_targets = sorted(filter_targets(targets, includes, directories=False), key=lambda include_target: include_target.name)
if requires:
- require_targets = set(filter_targets(targets, requires, errors=True, directories=False))
+ require_targets = set(filter_targets(targets, requires, directories=False))
include_targets = [require_target for require_target in include_targets if require_target in require_targets]
if excludes:
- list(filter_targets(targets, excludes, errors=True, include=False, directories=False))
+ list(filter_targets(targets, excludes, include=False, directories=False))
internal_targets = set(filter_targets(include_targets, excludes, errors=False, include=False, directories=False))
return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name))
@@ -173,69 +150,49 @@ def walk_module_targets():
yield target
-def walk_units_targets():
- """
- :rtype: collections.Iterable[TestTarget]
- """
+def walk_units_targets(): # type: () -> t.Iterable[TestTarget]
+ """Return an iterable of units targets."""
return walk_test_targets(path=data_context().content.unit_path, module_path=data_context().content.unit_module_path, extensions=('.py',), prefix='test_')
-def walk_compile_targets(include_symlinks=True):
- """
- :type include_symlinks: bool
- :rtype: collections.Iterable[TestTarget]
- """
+def walk_compile_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget]
+ """Return an iterable of compile targets."""
return walk_test_targets(module_path=data_context().content.module_path, extensions=('.py',), extra_dirs=('bin',), include_symlinks=include_symlinks)
-def walk_powershell_targets(include_symlinks=True):
- """
- :rtype: collections.Iterable[TestTarget]
- """
+def walk_powershell_targets(include_symlinks=True): # type: (bool) -> t.Iterable[TestTarget]
+ """Return an iterable of PowerShell targets."""
return walk_test_targets(module_path=data_context().content.module_path, extensions=('.ps1', '.psm1'), include_symlinks=include_symlinks)
-def walk_sanity_targets():
- """
- :rtype: collections.Iterable[TestTarget]
- """
+def walk_sanity_targets(): # type: () -> t.Iterable[TestTarget]
+ """Return an iterable of sanity targets."""
return walk_test_targets(module_path=data_context().content.module_path, include_symlinks=True, include_symlinked_directories=True)
-def walk_posix_integration_targets(include_hidden=False):
- """
- :type include_hidden: bool
- :rtype: collections.Iterable[IntegrationTarget]
- """
+def walk_posix_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget]
+ """Return an iterable of POSIX integration targets."""
for target in walk_integration_targets():
if 'posix/' in target.aliases or (include_hidden and 'hidden/posix/' in target.aliases):
yield target
-def walk_network_integration_targets(include_hidden=False):
- """
- :type include_hidden: bool
- :rtype: collections.Iterable[IntegrationTarget]
- """
+def walk_network_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget]
+ """Return an iterable of network integration targets."""
for target in walk_integration_targets():
if 'network/' in target.aliases or (include_hidden and 'hidden/network/' in target.aliases):
yield target
-def walk_windows_integration_targets(include_hidden=False):
- """
- :type include_hidden: bool
- :rtype: collections.Iterable[IntegrationTarget]
- """
+def walk_windows_integration_targets(include_hidden=False): # type: (bool) -> t.Iterable[IntegrationTarget]
+ """Return an iterable of windows integration targets."""
for target in walk_integration_targets():
if 'windows/' in target.aliases or (include_hidden and 'hidden/windows/' in target.aliases):
yield target
-def walk_integration_targets():
- """
- :rtype: collections.Iterable[IntegrationTarget]
- """
+def walk_integration_targets(): # type: () -> t.Iterable[IntegrationTarget]
+ """Return an iterable of integration targets."""
path = data_context().content.integration_targets_path
modules = frozenset(target.module for target in walk_module_targets())
paths = data_context().content.walk_files(path)
@@ -305,17 +262,16 @@ def load_integration_prefixes():
return prefixes
-def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None, extra_dirs=None, include_symlinks=False, include_symlinked_directories=False):
- """
- :type path: str | None
- :type module_path: str | None
- :type extensions: tuple[str] | None
- :type prefix: str | None
- :type extra_dirs: tuple[str] | None
- :type include_symlinks: bool
- :type include_symlinked_directories: bool
- :rtype: collections.Iterable[TestTarget]
- """
+def walk_test_targets(
+ path=None, # type: t.Optional[str]
+ module_path=None, # type: t.Optional[str]
+ extensions=None, # type: t.Optional[t.Tuple[str, ...]]
+ prefix=None, # type: t.Optional[str]
+ extra_dirs=None, # type: t.Optional[t.Tuple[str, ...]]
+ include_symlinks=False, # type: bool
+ include_symlinked_directories=False, # type: bool
+): # type: (...) -> t.Iterable[TestTarget]
+ """Iterate over available test targets."""
if path:
file_paths = data_context().content.walk_files(path, include_symlinked_directories=include_symlinked_directories)
else:
@@ -353,11 +309,8 @@ def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None,
yield TestTarget(file_path, module_path, prefix, path, symlink)
-def analyze_integration_target_dependencies(integration_targets):
- """
- :type integration_targets: list[IntegrationTarget]
- :rtype: dict[str,set[str]]
- """
+def analyze_integration_target_dependencies(integration_targets): # type: (t.List[IntegrationTarget]) -> t.Dict[str, t.Set[str]]
+ """Analyze the given list of integration test targets and return a dictionary expressing target names and the target names which depend on them."""
real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/'
role_targets = [target for target in integration_targets if target.type == 'role']
@@ -453,10 +406,8 @@ def analyze_integration_target_dependencies(integration_targets):
return dependencies
-class CompletionTarget:
+class CompletionTarget(metaclass=abc.ABCMeta):
"""Command-line argument completion target base class."""
- __metaclass__ = abc.ABCMeta
-
def __init__(self):
self.name = None
self.path = None
@@ -491,12 +442,8 @@ class CompletionTarget:
class DirectoryTarget(CompletionTarget):
"""Directory target."""
- def __init__(self, path, modules):
- """
- :type path: str
- :type modules: tuple[str]
- """
- super(DirectoryTarget, self).__init__()
+ def __init__(self, path, modules): # type: (str, t.Tuple[str, ...]) -> None
+ super().__init__()
self.name = path
self.path = path
@@ -505,15 +452,15 @@ class DirectoryTarget(CompletionTarget):
class TestTarget(CompletionTarget):
"""Generic test target."""
- def __init__(self, path, module_path, module_prefix, base_path, symlink=None):
- """
- :type path: str
- :type module_path: str | None
- :type module_prefix: str | None
- :type base_path: str
- :type symlink: bool | None
- """
- super(TestTarget, self).__init__()
+ def __init__(
+ self,
+ path, # type: str
+ module_path, # type: t.Optional[str]
+ module_prefix, # type: t.Optional[str]
+ base_path, # type: str
+ symlink=None, # type: t.Optional[bool]
+ ):
+ super().__init__()
if symlink is None:
symlink = os.path.islink(to_bytes(path.rstrip(os.path.sep)))
@@ -544,6 +491,67 @@ class TestTarget(CompletionTarget):
self.aliases = tuple(sorted(aliases))
+class IntegrationTargetType(enum.Enum):
+ """Type of integration test target."""
+ CONTROLLER = enum.auto()
+ TARGET = enum.auto()
+ UNKNOWN = enum.auto()
+ CONFLICT = enum.auto()
+
+
+def extract_plugin_references(name, aliases): # type: (str, t.List[str]) -> t.List[t.Tuple[str, str]]
+ """Return a list of plugin references found in the given integration test target name and aliases."""
+ plugins = content_plugins()
+ found = [] # type: t.List[t.Tuple[str, str]]
+
+ for alias in [name] + aliases:
+ plugin_type = 'modules'
+ plugin_name = alias
+
+ if plugin_name in plugins.get(plugin_type, {}):
+ found.append((plugin_type, plugin_name))
+
+ parts = alias.split('_')
+
+ for type_length in (1, 2):
+ if len(parts) > type_length:
+ plugin_type = '_'.join(parts[:type_length])
+ plugin_name = '_'.join(parts[type_length:])
+
+ if plugin_name in plugins.get(plugin_type, {}):
+ found.append((plugin_type, plugin_name))
+
+ return found
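
A walk-through of the splitting loop above, using a hypothetical alias:

    alias = 'module_utils_common_text'
    parts = alias.split('_')                     # ['module', 'utils', 'common', 'text']
    # type_length=1: plugin_type='module',       plugin_name='utils_common_text'
    # type_length=2: plugin_type='module_utils', plugin_name='common_text'
    # The whole alias is also checked as a module name first; only combinations
    # whose plugin_name exists under plugin_type in content_plugins() are recorded.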
+
+
+def categorize_integration_test(name, aliases, force_target): # type: (str, t.List[str], bool) -> t.Tuple[IntegrationTargetType, IntegrationTargetType]
+ """Return the integration test target types (used and actual) based on the given target name and aliases."""
+ context_controller = f'context/{IntegrationTargetType.CONTROLLER.name.lower()}' in aliases
+ context_target = f'context/{IntegrationTargetType.TARGET.name.lower()}' in aliases or force_target
+ actual_type = None
+ strict_mode = data_context().content.is_ansible
+
+ if context_controller and context_target:
+ target_type = IntegrationTargetType.CONFLICT
+ elif context_controller and not context_target:
+ target_type = IntegrationTargetType.CONTROLLER
+ elif context_target and not context_controller:
+ target_type = IntegrationTargetType.TARGET
+ else:
+ target_types = {IntegrationTargetType.TARGET if plugin_type in ('modules', 'module_utils') else IntegrationTargetType.CONTROLLER
+ for plugin_type, plugin_name in extract_plugin_references(name, aliases)}
+
+ if len(target_types) == 1:
+ target_type = target_types.pop()
+ elif not target_types:
+ actual_type = IntegrationTargetType.UNKNOWN
+ target_type = actual_type if strict_mode else IntegrationTargetType.TARGET
+ else:
+ target_type = IntegrationTargetType.CONFLICT
+
+ return target_type, actual_type or target_type
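
The possible outcomes of the categorization above, summarized as (target_type, actual_type):

    # 'context/controller' alias only                    -> (CONTROLLER, CONTROLLER)
    # 'context/target' alias (or force_target) only      -> (TARGET, TARGET)
    # both context aliases present                       -> (CONFLICT, CONFLICT)
    # no aliases; references only modules/module_utils   -> (TARGET, TARGET)
    # no aliases; mixed plugin references                -> (CONFLICT, CONFLICT)
    # no aliases; no plugin references, strict mode off  -> (TARGET, UNKNOWN)
    # no aliases; no plugin references, strict mode on   -> (UNKNOWN, UNKNOWN)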
+
+
class IntegrationTarget(CompletionTarget):
"""Integration test target."""
non_posix = frozenset((
@@ -558,13 +566,8 @@ class IntegrationTarget(CompletionTarget):
'skip',
)))
- def __init__(self, path, modules, prefixes):
- """
- :type path: str
- :type modules: frozenset[str]
- :type prefixes: dict[str, str]
- """
- super(IntegrationTarget, self).__init__()
+ def __init__(self, path, modules, prefixes): # type: (str, t.FrozenSet[str], t.Dict[str, str]) -> None
+ super().__init__()
self.relative_path = os.path.relpath(path, data_context().content.integration_targets_path)
self.name = self.relative_path.replace(os.path.sep, '.')
@@ -614,6 +617,9 @@ class IntegrationTarget(CompletionTarget):
if 'destructive' not in groups:
groups.append('non_destructive')
+ if 'needs/httptester' in groups:
+ groups.append('cloud/httptester') # backwards compatibility for when it was not a cloud plugin
+
if '_' in self.name:
prefix = self.name[:self.name.find('_')]
else:
@@ -662,6 +668,24 @@ class IntegrationTarget(CompletionTarget):
if not any(g in self.non_posix for g in groups):
groups.append('posix')
+ # target type
+
+ # targets which are non-posix test against the target, even if they also support posix
+ force_target = any(group in self.non_posix for group in groups)
+
+ target_type, actual_type = categorize_integration_test(self.name, list(static_aliases), force_target)
+
+ groups = self._remove_group(groups, 'context')
+
+ groups.extend(['context/', f'context/{target_type.name.lower()}'])
+
+ if target_type != actual_type:
+ # allow users to query for the actual type
+ groups.extend(['context/', f'context/{actual_type.name.lower()}'])
+
+ self.target_type = target_type
+ self.actual_type = actual_type
+
# aliases
aliases = [self.name] + \
@@ -679,13 +703,14 @@ class IntegrationTarget(CompletionTarget):
self.setup_always = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('setup/always/'))))
self.needs_target = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('needs/target/'))))
+ @staticmethod
+ def _remove_group(groups, group): # type: (t.List[str], str) -> t.List[str]
+ """Return a copy of the given groups with the specified group and its children removed."""
+ return [g for g in groups if g != group and not g.startswith('%s/' % group)]
+
class TargetPatternsNotMatched(ApplicationError):
"""One or more targets were not matched when a match was required."""
- def __init__(self, patterns):
- """
- :type patterns: set[str]
- """
+ def __init__(self, patterns): # type: (t.Set[str]) -> None
self.patterns = sorted(patterns)
if len(patterns) > 1:
@@ -693,4 +718,8 @@ class TargetPatternsNotMatched(ApplicationError):
else:
message = 'Target pattern not matched: %s' % self.patterns[0]
- super(TargetPatternsNotMatched, self).__init__(message)
+ super().__init__(message)
+
+
+TCompletionTarget = t.TypeVar('TCompletionTarget', bound=CompletionTarget)
+TIntegrationTarget = t.TypeVar('TIntegrationTarget', bound=IntegrationTarget)
diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py
index f0a0c83c..af21cbd6 100644
--- a/test/lib/ansible_test/_internal/test.py
+++ b/test/lib/ansible_test/_internal/test.py
@@ -1,11 +1,9 @@
"""Classes for storing and processing test results."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
import re
-
-from . import types as t
+import typing as t
from .util import (
display,
@@ -18,17 +16,19 @@ from .util_common import (
ResultType,
)
+from .metadata import (
+ Metadata,
+)
+
from .config import (
TestConfig,
)
+from . import junit_xml
-def calculate_best_confidence(choices, metadata):
- """
- :type choices: tuple[tuple[str, int]]
- :type metadata: Metadata
- :rtype: int
- """
+
+def calculate_best_confidence(choices, metadata): # type: (t.Tuple[t.Tuple[str, int], ...], Metadata) -> int
+ """Return the best confidence value available from the given choices and metadata."""
best_confidence = 0
for path, line in choices:
@@ -38,13 +38,8 @@ def calculate_best_confidence(choices, metadata):
return best_confidence
-def calculate_confidence(path, line, metadata):
- """
- :type path: str
- :type line: int
- :type metadata: Metadata
- :rtype: int
- """
+def calculate_confidence(path, line, metadata): # type: (str, int, Metadata) -> int
+ """Return the confidence level for a test result associated with the given file path and line number."""
ranges = metadata.changes.get(path)
# no changes were made to the file
@@ -65,12 +60,7 @@ def calculate_confidence(path, line, metadata):
class TestResult:
"""Base class for test results."""
- def __init__(self, command, test, python_version=None):
- """
- :type command: str
- :type test: str
- :type python_version: str
- """
+ def __init__(self, command, test, python_version=None): # type: (str, str, t.Optional[str]) -> None
self.command = command
self.test = test
self.python_version = python_version
@@ -79,17 +69,8 @@ class TestResult:
if self.python_version:
self.name += '-python-%s' % self.python_version
- try:
- import junit_xml
- except ImportError:
- junit_xml = None
-
- self.junit = junit_xml
-
- def write(self, args):
- """
- :type args: TestConfig
- """
+ def write(self, args): # type: (TestConfig) -> None
+ """Write the test results to various locations."""
self.write_console()
self.write_bot(args)
@@ -97,32 +78,22 @@ class TestResult:
self.write_lint()
if args.junit:
- if self.junit:
- self.write_junit(args)
- else:
- display.warning('Skipping junit xml output because the `junit-xml` python package was not found.', unique=True)
+ self.write_junit(args)
- def write_console(self):
+ def write_console(self): # type: () -> None
"""Write results to console."""
- def write_lint(self):
+ def write_lint(self): # type: () -> None
"""Write lint results to stdout."""
- def write_bot(self, args):
- """
- :type args: TestConfig
- """
+ def write_bot(self, args): # type: (TestConfig) -> None
+ """Write results to a file for ansibullbot to consume."""
- def write_junit(self, args):
- """
- :type args: TestConfig
- """
+ def write_junit(self, args): # type: (TestConfig) -> None
+ """Write results to a junit XML file."""
- def create_result_name(self, extension):
- """
- :type extension: str
- :rtype: str
- """
+ def create_result_name(self, extension): # type: (str) -> str
+ """Return the name of the result file using the given extension."""
name = 'ansible-test-%s' % self.command
if self.test:
@@ -135,32 +106,19 @@ class TestResult:
return name
- def save_junit(self, args, test_case, properties=None):
- """
- :type args: TestConfig
- :type test_case: junit_xml.TestCase
- :type properties: dict[str, str] | None
- :rtype: str | None
- """
- test_suites = [
- self.junit.TestSuite(
- name='ansible-test',
- test_cases=[test_case],
- timestamp=datetime.datetime.utcnow().replace(microsecond=0).isoformat(),
- properties=properties,
- ),
- ]
-
- # the junit_xml API is changing in version 2.0.0
- # TestSuite.to_xml_string is being replaced with to_xml_report_string
- # see: https://github.com/kyrus/python-junit-xml/blob/63db26da353790500642fd02cae1543eb41aab8b/junit_xml/__init__.py#L249-L261
- try:
- to_xml_string = self.junit.to_xml_report_string
- except AttributeError:
- # noinspection PyDeprecation
- to_xml_string = self.junit.TestSuite.to_xml_string
-
- report = to_xml_string(test_suites=test_suites, prettyprint=True, encoding='utf-8')
+ def save_junit(self, args, test_case): # type: (TestConfig, junit_xml.TestCase) -> None
+ """Save the given test case results to disk as JUnit XML."""
+ suites = junit_xml.TestSuites(
+ suites=[
+ junit_xml.TestSuite(
+ name='ansible-test',
+ cases=[test_case],
+ timestamp=datetime.datetime.utcnow(),
+ ),
+ ],
+ )
+
+ report = suites.to_pretty_xml()
if args.explain:
return
@@ -170,18 +128,13 @@ class TestResult:
class TestTimeout(TestResult):
"""Test timeout."""
- def __init__(self, timeout_duration):
- """
- :type timeout_duration: int
- """
- super(TestTimeout, self).__init__(command='timeout', test='')
+ def __init__(self, timeout_duration): # type: (int) -> None
+ super().__init__(command='timeout', test='')
self.timeout_duration = timeout_duration
- def write(self, args):
- """
- :type args: TestConfig
- """
+ def write(self, args): # type: (TestConfig) -> None
+ """Write the test results to various locations."""
message = 'Tests were aborted after exceeding the %d minute time limit.' % self.timeout_duration
# Include a leading newline to improve readability on Shippable "Tests" tab.
@@ -198,61 +151,78 @@ One or more of the following situations may be responsible:
output += '\n\nConsult the console log for additional details on where the timeout occurred.'
- timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
+ timestamp = datetime.datetime.utcnow()
+
+ suites = junit_xml.TestSuites(
+ suites=[
+ junit_xml.TestSuite(
+ name='ansible-test',
+ timestamp=timestamp,
+ cases=[
+ junit_xml.TestCase(
+ name='timeout',
+ classname='timeout',
+ errors=[
+ junit_xml.TestError(
+ message=message,
+ ),
+ ],
+ ),
+ ],
+ )
+ ],
+ )
- # hack to avoid requiring junit-xml, which may not be pre-installed outside our test containers
- xml = '''
-<?xml version="1.0" encoding="utf-8"?>
-<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
-\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="%s" url="None">
-\t\t<testcase classname="timeout" name="timeout">
-\t\t\t<error message="%s" type="error">%s</error>
-\t\t</testcase>
-\t</testsuite>
-</testsuites>
-''' % (timestamp, message, output)
+ report = suites.to_pretty_xml()
- write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), xml.lstrip())
+ write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), report)
class TestSuccess(TestResult):
"""Test success."""
- def write_junit(self, args):
- """
- :type args: TestConfig
- """
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
+ def write_junit(self, args): # type: (TestConfig) -> None
+ """Write results to a junit XML file."""
+ test_case = junit_xml.TestCase(classname=self.command, name=self.name)
self.save_junit(args, test_case)
class TestSkipped(TestResult):
"""Test skipped."""
- def write_console(self):
- """Write results to console."""
- display.info('No tests applicable.', verbosity=1)
+ def __init__(self, command, test, python_version=None): # type: (str, str, t.Optional[str]) -> None
+ super().__init__(command, test, python_version)
- def write_junit(self, args):
- """
- :type args: TestConfig
- """
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
- test_case.add_skipped_info('No tests applicable.')
+ self.reason = None # type: t.Optional[str]
+
+ def write_console(self): # type: () -> None
+ """Write results to console."""
+ if self.reason:
+ display.warning(self.reason)
+ else:
+ display.info('No tests applicable.', verbosity=1)
+
+ def write_junit(self, args): # type: (TestConfig) -> None
+ """Write results to a junit XML file."""
+ test_case = junit_xml.TestCase(
+ classname=self.command,
+ name=self.name,
+ skipped=self.reason or 'No tests applicable.',
+ )
self.save_junit(args, test_case)
class TestFailure(TestResult):
"""Test failure."""
- def __init__(self, command, test, python_version=None, messages=None, summary=None):
- """
- :type command: str
- :type test: str
- :type python_version: str | None
- :type messages: list[TestMessage] | None
- :type summary: unicode | None
- """
- super(TestFailure, self).__init__(command, test, python_version)
+ def __init__(
+ self,
+ command, # type: str
+ test, # type: str
+ python_version=None, # type: t.Optional[str]
+ messages=None, # type: t.Optional[t.List[TestMessage]]
+ summary=None, # type: t.Optional[str]
+ ):
+ super().__init__(command, test, python_version)
if messages:
messages = sorted(messages)
@@ -262,16 +232,14 @@ class TestFailure(TestResult):
self.messages = messages
self.summary = summary
- def write(self, args):
- """
- :type args: TestConfig
- """
+ def write(self, args): # type: (TestConfig) -> None
+ """Write the test results to various locations."""
if args.metadata.changes:
self.populate_confidence(args.metadata)
- super(TestFailure, self).write(args)
+ super().write(args)
- def write_console(self):
+ def write_console(self): # type: () -> None
"""Write results to console."""
if self.summary:
display.error(self.summary)
@@ -290,7 +258,7 @@ class TestFailure(TestResult):
if doc_url:
display.info('See documentation for help: %s' % doc_url)
- def write_lint(self):
+ def write_lint(self): # type: () -> None
"""Write lint results to stdout."""
if self.summary:
command = self.format_command()
@@ -302,25 +270,26 @@ class TestFailure(TestResult):
for message in self.messages:
print(message)
- def write_junit(self, args):
- """
- :type args: TestConfig
- """
+ def write_junit(self, args): # type: (TestConfig) -> None
+ """Write results to a junit XML file."""
title = self.format_title()
output = self.format_block()
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
-
- # Include a leading newline to improve readability on Shippable "Tests" tab.
- # Without this, the first line becomes indented.
- test_case.add_failure_info(message=title, output='\n%s' % output)
+ test_case = junit_xml.TestCase(
+ classname=self.command,
+ name=self.name,
+ failures=[
+ junit_xml.TestFailure(
+ message=title,
+ output=output,
+ ),
+ ],
+ )
self.save_junit(args, test_case)
- def write_bot(self, args):
- """
- :type args: TestConfig
- """
+ def write_bot(self, args): # type: (TestConfig) -> None
+ """Write results to a file for ansibullbot to consume."""
docs = self.find_docs()
message = self.format_title(help_link=docs)
output = self.format_block()
@@ -346,18 +315,14 @@ class TestFailure(TestResult):
write_json_test_results(ResultType.BOT, self.create_result_name('.json'), bot_data)
- def populate_confidence(self, metadata):
- """
- :type metadata: Metadata
- """
+ def populate_confidence(self, metadata): # type: (Metadata) -> None
+ """Populate test result confidence using the provided metadata."""
for message in self.messages:
if message.confidence is None:
message.confidence = calculate_confidence(message.path, message.line, metadata)
- def format_command(self):
- """
- :rtype: str
- """
+ def format_command(self): # type: () -> str
+ """Return a string representing the CLI command associated with the test failure."""
command = 'ansible-test %s' % self.command
if self.test:
@@ -391,11 +356,8 @@ class TestFailure(TestResult):
return url
- def format_title(self, help_link=None):
- """
- :type help_link: str | None
- :rtype: str
- """
+ def format_title(self, help_link=None): # type: (t.Optional[str]) -> str
+ """Return a string containing a title/heading for this test failure, including an optional help link to explain the test."""
command = self.format_command()
if self.summary:
@@ -412,10 +374,8 @@ class TestFailure(TestResult):
return title
- def format_block(self):
- """
- :rtype: str
- """
+ def format_block(self): # type: () -> str
+ """Format the test summary or messages as a block of text and return the result."""
if self.summary:
block = self.summary
else:
@@ -431,16 +391,16 @@ class TestFailure(TestResult):
class TestMessage:
"""Single test message for one file."""
- def __init__(self, message, path, line=0, column=0, level='error', code=None, confidence=None):
- """
- :type message: str
- :type path: str
- :type line: int
- :type column: int
- :type level: str
- :type code: str | None
- :type confidence: int | None
- """
+ def __init__(
+ self,
+ message, # type: str
+ path, # type: str
+ line=0, # type: int
+ column=0, # type: int
+ level='error', # type: str
+ code=None, # type: t.Optional[str]
+ confidence=None, # type: t.Optional[int]
+ ):
self.__path = path
self.__line = line
self.__column = column
@@ -509,11 +469,8 @@ class TestMessage:
def __str__(self):
return self.format()
- def format(self, show_confidence=False):
- """
- :type show_confidence: bool
- :rtype: str
- """
+ def format(self, show_confidence=False): # type: (bool) -> str
+ """Return a string representation of this message, optionally including the confidence level."""
if self.__code:
msg = '%s: %s' % (self.__code, self.__message)
else:
diff --git a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py
index 49fbc1ba..1b2fbec2 100644
--- a/test/lib/ansible_test/_internal/thread.py
+++ b/test/lib/ansible_test/_internal/thread.py
@@ -1,26 +1,20 @@
"""Python threading tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import threading
+import functools
import sys
+import threading
+import queue
+import typing as t
+
-try:
- # noinspection PyPep8Naming
- import Queue as queue
-except ImportError:
- # noinspection PyUnresolvedReferences
- import queue # pylint: disable=locally-disabled, import-error
+TCallable = t.TypeVar('TCallable', bound=t.Callable)
class WrappedThread(threading.Thread):
"""Wrapper around Thread which captures results and exceptions."""
- def __init__(self, action):
- """
- :type action: () -> any
- """
- # noinspection PyOldStyleClasses
- super(WrappedThread, self).__init__()
+ def __init__(self, action): # type: (t.Callable[[], t.Any]) -> None
+ super().__init__()
self._result = queue.Queue()
self.action = action
self.result = None
@@ -55,3 +49,16 @@ class WrappedThread(threading.Thread):
self.result = result
return result
+
+
+def mutex(func): # type: (TCallable) -> TCallable
+ """Enforce exclusive access on a decorated function."""
+ lock = threading.Lock()
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ """Wrapper around `func` which uses a lock to provide exclusive access to the function."""
+ with lock:
+ return func(*args, **kwargs)
+
+ return wrapper
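
A usage sketch for the decorator above (append_unique is a hypothetical function):

    @mutex
    def append_unique(items, value):  # type: (list, object) -> None
        """Only one thread at a time may execute this body."""
        if value not in items:
            items.append(value)
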
diff --git a/test/lib/ansible_test/_internal/timeout.py b/test/lib/ansible_test/_internal/timeout.py
new file mode 100644
index 00000000..c255f5ce
--- /dev/null
+++ b/test/lib/ansible_test/_internal/timeout.py
@@ -0,0 +1,93 @@
+"""Timeout management for tests."""
+from __future__ import annotations
+
+import datetime
+import functools
+import os
+import signal
+import time
+import typing as t
+
+from .io import (
+ read_json_file,
+)
+
+from .config import (
+ CommonConfig,
+ TestConfig,
+)
+
+from .util import (
+ display,
+ ApplicationError,
+)
+
+from .thread import (
+ WrappedThread,
+)
+
+from .constants import (
+ TIMEOUT_PATH,
+)
+
+from .test import (
+ TestTimeout,
+)
+
+
+def get_timeout(): # type: () -> t.Optional[t.Dict[str, t.Any]]
+ """Return details about the currently set timeout, if any, otherwise return None."""
+ if not os.path.exists(TIMEOUT_PATH):
+ return None
+
+ data = read_json_file(TIMEOUT_PATH)
+ data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
+
+ return data
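
The file at TIMEOUT_PATH is expected to look roughly like this (hypothetical values; the keys and timestamp format come from the code above and from configure_test_timeout below):

    import datetime, json

    raw = '{"duration": 45, "deadline": "2021-11-17T21:00:00Z"}'
    data = json.loads(raw)
    data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
    # data == {'duration': 45, 'deadline': datetime.datetime(2021, 11, 17, 21, 0)}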
+
+
+def configure_timeout(args): # type: (CommonConfig) -> None
+ """Configure the timeout."""
+ if isinstance(args, TestConfig):
+ configure_test_timeout(args) # only tests are subject to the timeout
+
+
+def configure_test_timeout(args): # type: (TestConfig) -> None
+ """Configure the test timeout."""
+ timeout = get_timeout()
+
+ if not timeout:
+ return
+
+ timeout_start = datetime.datetime.utcnow()
+ timeout_duration = timeout['duration']
+ timeout_deadline = timeout['deadline']
+ timeout_remaining = timeout_deadline - timeout_start
+
+ test_timeout = TestTimeout(timeout_duration)
+
+ if timeout_remaining <= datetime.timedelta():
+ test_timeout.write(args)
+
+ raise ApplicationError('The %d minute test timeout expired %s ago at %s.' % (
+ timeout_duration, timeout_remaining * -1, timeout_deadline))
+
+ display.info('The %d minute test timeout expires in %s at %s.' % (
+ timeout_duration, timeout_remaining, timeout_deadline), verbosity=1)
+
+ def timeout_handler(_dummy1, _dummy2):
+ """Runs when SIGUSR1 is received."""
+ test_timeout.write(args)
+
+ raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' % timeout_duration)
+
+ def timeout_waiter(timeout_seconds): # type: (int) -> None
+ """Background thread which will kill the current process if the timeout elapses."""
+ time.sleep(timeout_seconds)
+ os.kill(os.getpid(), signal.SIGUSR1)
+
+ signal.signal(signal.SIGUSR1, timeout_handler)
+
+ instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.seconds))
+ instance.daemon = True
+ instance.start()
diff --git a/test/lib/ansible_test/_internal/types.py b/test/lib/ansible_test/_internal/types.py
deleted file mode 100644
index 46ef7066..00000000
--- a/test/lib/ansible_test/_internal/types.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Import wrapper for type hints when available."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-TYPE_CHECKING = False
-
-try:
- from typing import (
- Any,
- AnyStr,
- BinaryIO,
- Callable,
- Dict,
- FrozenSet,
- Generator,
- IO,
- Iterable,
- Iterator,
- List,
- Optional,
- Pattern,
- Set,
- Text,
- TextIO,
- Tuple,
- Type,
- TYPE_CHECKING,
- TypeVar,
- Union,
- )
-except ImportError:
- pass
diff --git a/test/lib/ansible_test/_internal/units/__init__.py b/test/lib/ansible_test/_internal/units/__init__.py
deleted file mode 100644
index 22145431..00000000
--- a/test/lib/ansible_test/_internal/units/__init__.py
+++ /dev/null
@@ -1,159 +0,0 @@
-"""Execute unit tests using pytest."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-
-from ..util import (
- ANSIBLE_TEST_DATA_ROOT,
- display,
- get_available_python_versions,
- is_subdir,
- SubprocessError,
- REMOTE_ONLY_PYTHON_VERSIONS,
-)
-
-from ..util_common import (
- intercept_command,
- ResultType,
- handle_layout_messages,
-)
-
-from ..ansible_util import (
- ansible_environment,
- check_pyyaml,
-)
-
-from ..target import (
- walk_internal_targets,
- walk_units_targets,
-)
-
-from ..config import (
- UnitsConfig,
-)
-
-from ..coverage_util import (
- coverage_context,
-)
-
-from ..data import (
- data_context,
-)
-
-from ..executor import (
- AllTargetsSkipped,
- Delegate,
- get_changes_filter,
- install_command_requirements,
- SUPPORTED_PYTHON_VERSIONS,
-)
-
-
-def command_units(args):
- """
- :type args: UnitsConfig
- """
- handle_layout_messages(data_context().content.unit_messages)
-
- changes = get_changes_filter(args)
- require = args.require + changes
- include = walk_internal_targets(walk_units_targets(), args.include, args.exclude, require)
-
- paths = [target.path for target in include]
- remote_paths = [path for path in paths
- if is_subdir(path, data_context().content.unit_module_path)
- or is_subdir(path, data_context().content.unit_module_utils_path)]
-
- if not paths:
- raise AllTargetsSkipped()
-
- if args.python and args.python in REMOTE_ONLY_PYTHON_VERSIONS and not remote_paths:
- raise AllTargetsSkipped()
-
- if args.delegate:
- raise Delegate(require=changes, exclude=args.exclude)
-
- version_commands = []
-
- available_versions = sorted(get_available_python_versions(list(SUPPORTED_PYTHON_VERSIONS)).keys())
-
- for version in SUPPORTED_PYTHON_VERSIONS:
- # run all versions unless version given, in which case run only that version
- if args.python and version != args.python_version:
- continue
-
- if not args.python and version not in available_versions:
- display.warning("Skipping unit tests on Python %s due to missing interpreter." % version)
- continue
-
- if args.requirements_mode != 'skip':
- install_command_requirements(args, version)
-
- env = ansible_environment(args)
-
- cmd = [
- 'pytest',
- '--boxed',
- '-r', 'a',
- '-n', str(args.num_workers) if args.num_workers else 'auto',
- '--color',
- 'yes' if args.color else 'no',
- '-p', 'no:cacheprovider',
- '-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
- '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-units.xml' % version),
- ]
-
- if not data_context().content.collection:
- cmd.append('--durations=25')
-
- if version != '2.6':
- # added in pytest 4.5.0, which requires python 2.7+
- cmd.append('--strict-markers')
-
- plugins = []
-
- if args.coverage:
- plugins.append('ansible_pytest_coverage')
-
- if data_context().content.collection:
- plugins.append('ansible_pytest_collections')
-
- if plugins:
- env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest/plugins')
- env['PYTEST_PLUGINS'] = ','.join(plugins)
-
- if args.collect_only:
- cmd.append('--collect-only')
-
- if args.verbosity:
- cmd.append('-' + ('v' * args.verbosity))
-
- if version in REMOTE_ONLY_PYTHON_VERSIONS:
- test_paths = remote_paths
- else:
- test_paths = paths
-
- if not test_paths:
- continue
-
- cmd.extend(test_paths)
-
- version_commands.append((version, cmd, env))
-
- if args.requirements_mode == 'only':
- sys.exit()
-
- for version, command, env in version_commands:
- check_pyyaml(args, version)
-
- display.info('Unit test with Python %s' % version)
-
- try:
- with coverage_context(args):
- intercept_command(args, command, target_name='units', env=env, python_version=version)
- except SubprocessError as ex:
- # pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
- if ex.status != 5:
- raise
diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py
index ebc14783..fdd921e1 100644
--- a/test/lib/ansible_test/_internal/util.py
+++ b/test/lib/ansible_test/_internal/util.py
@@ -1,11 +1,8 @@
"""Miscellaneous utility functions and classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import contextlib
import errno
import fcntl
-import hashlib
import inspect
import os
import pkgutil
@@ -17,35 +14,14 @@ import stat
import string
import subprocess
import sys
-import tempfile
import time
-import zipfile
+import functools
+import shlex
+import typing as t
from struct import unpack, pack
from termios import TIOCGWINSZ
-try:
- from abc import ABC
-except ImportError:
- from abc import ABCMeta
- ABC = ABCMeta('ABC', (), {})
-
-try:
- # noinspection PyCompatibility
- from configparser import ConfigParser
-except ImportError:
- # noinspection PyCompatibility,PyUnresolvedReferences
- from ConfigParser import SafeConfigParser as ConfigParser
-
-try:
- # noinspection PyProtectedMember
- from shlex import quote as cmd_quote
-except ImportError:
- # noinspection PyProtectedMember
- from pipes import quote as cmd_quote
-
-from . import types as t
-
from .encoding import (
to_bytes,
to_optional_bytes,
@@ -54,15 +30,21 @@ from .encoding import (
from .io import (
open_binary_file,
- read_binary_file,
read_text_file,
)
-try:
- C = t.TypeVar('C')
-except AttributeError:
- C = None
+from .thread import (
+ mutex,
+)
+
+from .constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+C = t.TypeVar('C')
+TType = t.TypeVar('TType')
+TKey = t.TypeVar('TKey')
+TValue = t.TypeVar('TValue')
PYTHON_PATHS = {} # type: t.Dict[str, str]
@@ -90,8 +72,14 @@ if not os.path.exists(ANSIBLE_LIB_ROOT):
ANSIBLE_SOURCE_ROOT = ANSIBLE_ROOT
ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data')
+ANSIBLE_TEST_UTIL_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_util')
ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config')
+ANSIBLE_TEST_CONTROLLER_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'controller')
+ANSIBLE_TEST_TARGET_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'target')
+
+ANSIBLE_TEST_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'tools')
+
# Modes are set to allow all users the same level of access.
# This permits files to be used in tests that change users.
# The only exception is write access to directories for the user creating them.
@@ -106,27 +94,47 @@ MODE_FILE_WRITE = MODE_FILE | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
MODE_DIRECTORY = MODE_READ | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
MODE_DIRECTORY_WRITE = MODE_DIRECTORY | stat.S_IWGRP | stat.S_IWOTH
-REMOTE_ONLY_PYTHON_VERSIONS = (
- '2.6',
-)
-SUPPORTED_PYTHON_VERSIONS = (
- '2.6',
- '2.7',
- '3.5',
- '3.6',
- '3.7',
- '3.8',
- '3.9',
-)
+def cache(func): # type: (t.Callable[[], TValue]) -> t.Callable[[], TValue]
+ """Enforce exclusive access on a decorated function and cache the result."""
+ storage = {} # type: t.Dict[None, TValue]
+ sentinel = object()
+ @functools.wraps(func)
+ def cache_func():
+ """Cache the return value from func."""
+ if (value := storage.get(None, sentinel)) is sentinel:
+ value = storage[None] = func()
-def remove_file(path):
- """
- :type path: str
- """
- if os.path.isfile(path):
- os.remove(path)
+ return value
+
+ wrapper = mutex(cache_func)
+
+ return wrapper
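
A usage sketch for the decorator above (load_settings is hypothetical; note the decorated function must take no arguments):

    @cache
    def load_settings():  # type: () -> dict
        """Expensive lookup; the body runs once and the result is reused."""
        return {'verbosity': 1}

    assert load_settings() is load_settings()  # second call returns the cached object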
+
+
+def filter_args(args, filters): # type: (t.List[str], t.Dict[str, int]) -> t.List[str]
+ """Return a filtered version of the given command line arguments."""
+ remaining = 0
+ result = []
+
+ for arg in args:
+ if not arg.startswith('-') and remaining:
+ remaining -= 1
+ continue
+
+ remaining = 0
+
+ parts = arg.split('=', 1)
+ key = parts[0]
+
+ if key in filters:
+ remaining = filters[key] - len(parts) + 1
+ continue
+
+ result.append(arg)
+
+ return result
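
A walk-through of the filtering above (hypothetical arguments; filters maps each option to the number of values it consumes):

    args = ['--python', '3.8', '--metadata=meta.json', '-v', 'shell']
    filters = {'--python': 1, '--metadata': 1}
    # '--python' matches and consumes the following value '3.8'.
    # '--metadata=meta.json' matches with its value inline, so nothing more is consumed.
    assert filter_args(args, filters) == ['-v', 'shell']
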
def read_lines_without_comments(path, remove_blank_lines=False, optional=False): # type: (str, bool, bool) -> t.List[str]
@@ -148,13 +156,16 @@ def read_lines_without_comments(path, remove_blank_lines=False, optional=False):
return lines
-def find_executable(executable, cwd=None, path=None, required=True):
+def exclude_none_values(data): # type: (t.Dict[TKey, t.Optional[TValue]]) -> t.Dict[TKey, TValue]
+ """Return the provided dictionary with any None values excluded."""
+ return dict((key, value) for key, value in data.items() if value is not None)
+
+
+def find_executable(executable, cwd=None, path=None, required=True): # type: (str, t.Optional[str], t.Optional[str], t.Union[bool, str]) -> t.Optional[str]
"""
- :type executable: str
- :type cwd: str
- :type path: str
- :type required: bool | str
- :rtype: str | None
+ Find the specified executable and return the full path, or None if it could not be found.
+ If required is True, an exception will be raised if the executable is not found.
+ If required is set to 'warning', then a warning will be shown if the executable is not found.
"""
match = None
real_cwd = os.getcwd()
@@ -200,14 +211,13 @@ def find_executable(executable, cwd=None, path=None, required=True):
return match
-def find_python(version, path=None, required=True):
+def find_python(version, path=None, required=True): # type: (str, t.Optional[str], bool) -> t.Optional[str]
"""
- :type version: str
- :type path: str | None
- :type required: bool
- :rtype: str
+ Find and return the full path to the specified Python version.
+ If required, an exception will be raised if not found.
+ If not required, None will be returned if not found.
"""
- version_info = tuple(int(n) for n in version.split('.'))
+ version_info = str_to_version(version)
if not path and version_info == sys.version_info[:len(version_info)]:
python_bin = sys.executable
@@ -217,13 +227,9 @@ def find_python(version, path=None, required=True):
return python_bin
+@cache
def get_ansible_version(): # type: () -> str
"""Return the Ansible version."""
- try:
- return get_ansible_version.version
- except AttributeError:
- pass
-
# ansible may not be in our sys.path
# avoids a symlink to release.py since ansible placement relative to ansible-test may change during delegation
load_module(os.path.join(ANSIBLE_LIB_ROOT, 'release.py'), 'ansible_release')
@@ -231,47 +237,29 @@ def get_ansible_version(): # type: () -> str
# noinspection PyUnresolvedReferences
from ansible_release import __version__ as ansible_version # pylint: disable=import-error
- get_ansible_version.version = ansible_version
-
return ansible_version
-def get_available_python_versions(versions): # type: (t.List[str]) -> t.Dict[str, str]
- """Return a dictionary indicating which of the requested Python versions are available."""
- try:
- return get_available_python_versions.result
- except AttributeError:
- pass
-
- get_available_python_versions.result = dict((version, path) for version, path in
- ((version, find_python(version, required=False)) for version in versions) if path)
-
- return get_available_python_versions.result
-
-
-def generate_pip_command(python):
- """
- :type python: str
- :rtype: list[str]
- """
- return [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'quiet_pip.py')]
-
-
-def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None,
- cmd_verbosity=1, str_errors='strict'):
- """
- :type cmd: collections.Iterable[str]
- :type capture: bool
- :type env: dict[str, str] | None
- :type data: str | None
- :type cwd: str | None
- :type explain: bool
- :type stdin: file | None
- :type stdout: file | None
- :type cmd_verbosity: int
- :type str_errors: str
- :rtype: str | None, str | None
- """
+@cache
+def get_available_python_versions(): # type: () -> t.Dict[str, str]
+ """Return a dictionary indicating which supported Python versions are available."""
+ return dict((version, path) for version, path in ((version, find_python(version, required=False)) for version in SUPPORTED_PYTHON_VERSIONS) if path)
+
+
+def raw_command(
+ cmd, # type: t.Iterable[str]
+ capture=False, # type: bool
+ env=None, # type: t.Optional[t.Dict[str, str]]
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+ explain=False, # type: bool
+ stdin=None, # type: t.Optional[t.BinaryIO]
+ stdout=None, # type: t.Optional[t.BinaryIO]
+ cmd_verbosity=1, # type: int
+ str_errors='strict', # type: str
+ error_callback=None, # type: t.Optional[t.Callable[[SubprocessError], None]]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return stdout and stderr as a tuple."""
if not cwd:
cwd = os.getcwd()
@@ -280,7 +268,7 @@ def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False
cmd = list(cmd)
- escaped_cmd = ' '.join(cmd_quote(c) for c in cmd)
+ escaped_cmd = ' '.join(shlex.quote(c) for c in cmd)
display.info('Run command: %s' % escaped_cmd, verbosity=cmd_verbosity, truncate=True)
display.info('Working directory: %s' % cwd, verbosity=2)
@@ -322,7 +310,7 @@ def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False
try:
cmd_bytes = [to_bytes(c) for c in cmd]
env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items())
- process = subprocess.Popen(cmd_bytes, env=env_bytes, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
+ process = subprocess.Popen(cmd_bytes, env=env_bytes, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd) # pylint: disable=consider-using-with
except OSError as ex:
if ex.errno == errno.ENOENT:
raise ApplicationError('Required program "%s" not found.' % cmd[0])
@@ -350,7 +338,7 @@ def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False
if status == 0:
return stdout_text, stderr_text
- raise SubprocessError(cmd, status, stdout_text, stderr_text, runtime)
+ raise SubprocessError(cmd, status, stdout_text, stderr_text, runtime, error_callback)
def common_environment():
@@ -365,8 +353,6 @@ def common_environment():
)
optional = (
- 'HTTPTESTER',
- 'KRB5_PASSWORD',
'LD_LIBRARY_PATH',
'SSH_AUTH_SOCK',
# MacOS High Sierra Compatibility
@@ -401,12 +387,8 @@ def common_environment():
return env
-def pass_vars(required, optional):
- """
- :type required: collections.Iterable[str]
- :type optional: collections.Iterable[str]
- :rtype: dict[str, str]
- """
+def pass_vars(required, optional): # type: (t.Collection[str], t.Collection[str]) -> t.Dict[str, str]
+ """Return a filtered dictionary of environment variables based on the current environment."""
env = {}
for name in required:
@@ -422,31 +404,8 @@ def pass_vars(required, optional):
return env
-def deepest_path(path_a, path_b):
- """Return the deepest of two paths, or None if the paths are unrelated.
- :type path_a: str
- :type path_b: str
- :rtype: str | None
- """
- if path_a == '.':
- path_a = ''
-
- if path_b == '.':
- path_b = ''
-
- if path_a.startswith(path_b):
- return path_a or '.'
-
- if path_b.startswith(path_a):
- return path_b or '.'
-
- return None
-
-
-def remove_tree(path):
- """
- :type path: str
- """
+def remove_tree(path): # type: (str) -> None
+ """Remove the specified directory, siliently continuing if the directory does not exist."""
try:
shutil.rmtree(to_bytes(path))
except OSError as ex:
@@ -454,12 +413,9 @@ def remove_tree(path):
raise
-def is_binary_file(path):
- """
- :type path: str
- :rtype: bool
- """
- assume_text = set([
+def is_binary_file(path): # type: (str) -> bool
+ """Return True if the specified file is a binary file, otherwise return False."""
+ assume_text = {
'.cfg',
'.conf',
'.crt',
@@ -481,9 +437,9 @@ def is_binary_file(path):
'.xml',
'.yaml',
'.yml',
- ])
+ }
- assume_binary = set([
+ assume_binary = {
'.bin',
'.eot',
'.gz',
@@ -499,7 +455,7 @@ def is_binary_file(path):
'.woff',
'.woff2',
'.zip',
- ])
+ }
ext = os.path.splitext(path)[1]
@@ -514,10 +470,13 @@ def is_binary_file(path):
return b'\0' in path_fd.read(4096)
-def generate_password():
- """Generate a random password.
- :rtype: str
- """
+def generate_name(length=8): # type: (int) -> str
+ """Generate and return a random name."""
+ return ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(length))
+
+
+def generate_password(): # type: () -> str
+ """Generate and return random password."""
chars = [
string.ascii_letters,
string.digits,
@@ -565,13 +524,11 @@ class Display:
if os.isatty(0):
self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2]
- def __warning(self, message):
- """
- :type message: str
- """
+ def __warning(self, message): # type: (str) -> None
+ """Internal implementation for displaying a warning message."""
self.print_message('WARNING: %s' % message, color=self.purple, fd=sys.stderr)
- def review_warnings(self):
+ def review_warnings(self): # type: () -> None
"""Review all warnings which previously occurred."""
if not self.warnings:
return
@@ -581,12 +538,8 @@ class Display:
for warning in self.warnings:
self.__warning(warning)
- def warning(self, message, unique=False, verbosity=0):
- """
- :type message: str
- :type unique: bool
- :type verbosity: int
- """
+ def warning(self, message, unique=False, verbosity=0): # type: (str, bool, int) -> None
+ """Display a warning level message."""
if verbosity > self.verbosity:
return
@@ -599,35 +552,28 @@ class Display:
self.__warning(message)
self.warnings.append(message)
- def notice(self, message):
- """
- :type message: str
- """
+ def notice(self, message): # type: (str) -> None
+ """Display a notice level message."""
self.print_message('NOTICE: %s' % message, color=self.purple, fd=sys.stderr)
- def error(self, message):
- """
- :type message: str
- """
+ def error(self, message): # type: (str) -> None
+ """Display an error level message."""
self.print_message('ERROR: %s' % message, color=self.red, fd=sys.stderr)
- def info(self, message, verbosity=0, truncate=False):
- """
- :type message: str
- :type verbosity: int
- :type truncate: bool
- """
+ def info(self, message, verbosity=0, truncate=False): # type: (str, int, bool) -> None
+ """Display an info level message."""
if self.verbosity >= verbosity:
color = self.verbosity_colors.get(verbosity, self.yellow)
self.print_message(message, color=color, fd=sys.stderr if self.info_stderr else sys.stdout, truncate=truncate)
- def print_message(self, message, color=None, fd=sys.stdout, truncate=False): # pylint: disable=locally-disabled, invalid-name
- """
- :type message: str
- :type color: str | None
- :type fd: t.IO[str]
- :type truncate: bool
- """
+ def print_message( # pylint: disable=locally-disabled, invalid-name
+ self,
+ message, # type: str
+ color=None, # type: t.Optional[str]
+ fd=sys.stdout, # type: t.TextIO
+ truncate=False, # type: bool
+ ): # type: (...) -> None
+ """Display a message."""
if self.redact and self.sensitive:
for item in self.sensitive:
if not item:
@@ -661,15 +607,16 @@ class ApplicationWarning(Exception):
class SubprocessError(ApplicationError):
"""Error resulting from failed subprocess execution."""
- def __init__(self, cmd, status=0, stdout=None, stderr=None, runtime=None):
- """
- :type cmd: list[str]
- :type status: int
- :type stdout: str | None
- :type stderr: str | None
- :type runtime: float | None
- """
- message = 'Command "%s" returned exit status %s.\n' % (' '.join(cmd_quote(c) for c in cmd), status)
+ def __init__(
+ self,
+ cmd, # type: t.List[str]
+ status=0, # type: int
+ stdout=None, # type: t.Optional[str]
+ stderr=None, # type: t.Optional[str]
+ runtime=None, # type: t.Optional[float]
+ error_callback=None, # type: t.Optional[t.Callable[[SubprocessError], None]]
+ ): # type: (...) -> None
+ message = 'Command "%s" returned exit status %s.\n' % (' '.join(shlex.quote(c) for c in cmd), status)
if stderr:
message += '>>> Standard Error\n'
@@ -679,10 +626,6 @@ class SubprocessError(ApplicationError):
message += '>>> Standard Output\n'
message += '%s%s\n' % (stdout.strip(), Display.clear)
- message = message.strip()
-
- super(SubprocessError, self).__init__(message)
-
self.cmd = cmd
self.message = message
self.status = status
@@ -690,24 +633,35 @@ class SubprocessError(ApplicationError):
self.stderr = stderr
self.runtime = runtime
+ if error_callback:
+ error_callback(self)
+
+ self.message = self.message.strip()
+
+ super().__init__(self.message)
+
class MissingEnvironmentVariable(ApplicationError):
"""Error caused by missing environment variable."""
- def __init__(self, name):
- """
- :type name: str
- """
- super(MissingEnvironmentVariable, self).__init__('Missing environment variable: %s' % name)
+ def __init__(self, name): # type: (str) -> None
+ super().__init__('Missing environment variable: %s' % name)
self.name = name
-def parse_to_list_of_dict(pattern, value):
- """
- :type pattern: str
- :type value: str
- :return: list[dict[str, str]]
- """
+def retry(func, ex_type=SubprocessError, sleep=10, attempts=10):
+ """Retry the specified function on failure."""
+ for dummy in range(1, attempts):
+ try:
+ return func()
+ except ex_type:
+ time.sleep(sleep)
+
+ return func()
+
+
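A usage sketch for retry (the wrapped command is illustrative):

    # up to 10 attempts total, sleeping 10 seconds between them;
    # the exception from the final attempt propagates to the caller
    stdout, stderr = retry(lambda: raw_command(['git', 'fetch'], capture=True))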
+def parse_to_list_of_dict(pattern, value): # type: (str, str) -> t.List[t.Dict[str, str]]
+ """Parse lines from the given value using the specified pattern and return the extracted list of key/value pair dictionaries."""
matched = []
unmatched = []
@@ -725,20 +679,8 @@ def parse_to_list_of_dict(pattern, value):
return matched
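
A sketch of the expected behavior, assuming the named capture groups in the pattern become the keys of each dictionary:

    output = 'pylint 2.9.3\npyyaml 5.4.1'
    parsed = parse_to_list_of_dict(r'(?P<name>\S+) (?P<version>\S+)', output)
    assert parsed == [
        {'name': 'pylint', 'version': '2.9.3'},
        {'name': 'pyyaml', 'version': '5.4.1'},
    ]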
-def get_available_port():
- """
- :rtype: int
- """
- # this relies on the kernel not reusing previously assigned ports immediately
- socket_fd = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-
- with contextlib.closing(socket_fd):
- socket_fd.bind(('', 0))
- return socket_fd.getsockname()[1]
-
-
-def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]]
- """Returns the set of types that are concrete subclasses of the given type."""
+def get_subclasses(class_type): # type: (t.Type[C]) -> t.List[t.Type[C]]
+ """Returns a list of types that are concrete subclasses of the given type."""
subclasses = set() # type: t.Set[t.Type[C]]
queue = [class_type] # type: t.List[t.Type[C]]
@@ -751,7 +693,7 @@ def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]]
subclasses.add(child)
queue.append(child)
- return subclasses
+ return sorted(subclasses, key=lambda sc: sc.__name__)
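
The switch from returning a set to a name-sorted list makes iteration order deterministic across runs; a small sketch with hypothetical classes:

    class Provider: pass
    class Zeta(Provider): pass
    class Alpha(Zeta): pass

    # direct and indirect subclasses, sorted by class name
    assert [sc.__name__ for sc in get_subclasses(Provider)] == ['Alpha', 'Zeta']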
def is_subdir(candidate_path, path): # type: (str, str) -> bool
@@ -791,6 +733,11 @@ def version_to_str(version): # type: (t.Tuple[int, ...]) -> str
return '.'.join(str(n) for n in version)
+def sorted_versions(versions): # type: (t.List[str]) -> t.List[str]
+ """Return a sorted copy of the given list of versions."""
+ return [version_to_str(version) for version in sorted(str_to_version(version) for version in versions)]
+
+
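sorted_versions orders numerically rather than lexicographically, which matters once two-digit minor versions appear:

    assert sorted_versions(['3.10', '2.7', '3.9']) == ['2.7', '3.9', '3.10']
    assert sorted(['3.10', '2.7', '3.9']) == ['2.7', '3.10', '3.9']  # plain string sort misplaces '3.10'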
def import_plugins(directory, root=None): # type: (str, t.Optional[str]) -> None
"""
Import plugins from the given directory relative to the given root.
@@ -835,7 +782,7 @@ def load_module(path, name): # type: (str, str) -> None
sys.modules[name] = module
else:
# noinspection PyDeprecation
- import imp
+ import imp # pylint: disable=deprecated-module
# load_source (and thus load_module) require a file opened with `open` in text mode
with open(to_bytes(path)) as module_file:
@@ -843,35 +790,54 @@ def load_module(path, name): # type: (str, str) -> None
imp.load_module(name, module_file, path, ('.py', 'r', imp.PY_SOURCE))
-@contextlib.contextmanager
-def tempdir(): # type: () -> str
- """Creates a temporary directory that is deleted outside the context scope."""
- temp_path = tempfile.mkdtemp()
- yield temp_path
- shutil.rmtree(temp_path)
+def sanitize_host_name(name):
+ """Return a sanitized version of the given name, suitable for use as a hostname."""
+ return re.sub('[^A-Za-z0-9]+', '-', name)[:63].strip('-')
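
For example (the input name is illustrative):

    assert sanitize_host_name('ansible-test_controller.example') == 'ansible-test-controller-example'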
-@contextlib.contextmanager
-def open_zipfile(path, mode='r'):
- """Opens a zip file and closes the file automatically."""
- zib_obj = zipfile.ZipFile(path, mode=mode)
- yield zib_obj
- zib_obj.close()
+@cache
+def get_host_ip():
+ """Return the host's IP address."""
+ with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
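+ # connect() on a UDP socket sends no packets; it only asks the kernel to select
+ # the outbound interface, so getsockname() yields the local address used to reach that destination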
+ sock.connect(('10.255.255.255', 22))
+ host_ip = get_host_ip.ip = sock.getsockname()[0]
+ display.info('Detected host IP: %s' % host_ip, verbosity=1)
-def get_hash(path):
- """
- :type path: str
- :rtype: str | None
- """
- if not os.path.exists(path):
+ return host_ip
+
+
+def get_generic_type(base_type, generic_base_type): # type: (t.Type, t.Type[TType]) -> t.Optional[t.Type[TType]]
+ """Return the generic type arg derived from the generic_base_type type that is associated with the base_type type, if any, otherwise return None."""
+ # noinspection PyUnresolvedReferences
+ type_arg = t.get_args(base_type.__orig_bases__[0])[0]
+ return None if isinstance(type_arg, generic_base_type) else type_arg
+
+
+def get_type_associations(base_type, generic_base_type): # type: (t.Type[TType], t.Type[TValue]) -> t.List[t.Tuple[t.Type[TValue], t.Type[TType]]]
+ """Create and return a list of tuples associating generic_base_type derived types with a corresponding base_type derived type."""
+ return [item for item in [(get_generic_type(sc_type, generic_base_type), sc_type) for sc_type in get_subclasses(base_type)] if item[1]]
+
+
+def get_type_map(base_type, generic_base_type): # type: (t.Type[TType], t.Type[TValue]) -> t.Dict[t.Type[TValue], t.Type[TType]]
+ """Create and return a mapping of generic_base_type derived types to base_type derived types."""
+ return {item[0]: item[1] for item in get_type_associations(base_type, generic_base_type)}
+
+
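A sketch of what these helpers compute, with hypothetical config and profile classes (assuming the generic parameter is declared on the first base, as the use of __orig_bases__[0] implies):

    class Config: pass
    class PosixConfig(Config): pass

    class Profile(t.Generic[TType]): pass
    class PosixProfile(Profile[PosixConfig]): pass

    assert get_generic_type(PosixProfile, Config) is PosixConfig
    assert get_type_map(Profile, Config) == {PosixConfig: PosixProfile}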
+def verify_sys_executable(path): # type: (str) -> t.Optional[str]
+ """Verify that the given path references the current Python interpreter. If not, return the expected path, otherwise return None."""
+ if path == sys.executable:
return None
- file_hash = hashlib.sha256()
+ if os.path.realpath(path) == os.path.realpath(sys.executable):
+ return None
+
+ expected_executable = raw_command([path, '-c', 'import sys; print(sys.executable)'], capture=True)[0]
- file_hash.update(read_binary_file(path))
+ if expected_executable == sys.executable:
+ return None
- return file_hash.hexdigest()
+ return expected_executable
display = Display() # pylint: disable=locally-disabled, invalid-name
diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py
index 40dc68b0..b4d42420 100644
--- a/test/lib/ansible_test/_internal/util_common.py
+++ b/test/lib/ansible_test/_internal/util_common.py
@@ -1,39 +1,45 @@
"""Common utility code that depends on CommonConfig."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import atexit
import contextlib
+import json
import os
import re
-import shutil
+import shlex
import sys
import tempfile
import textwrap
+import typing as t
-from . import types as t
+from .constants import (
+ ANSIBLE_BIN_SYMLINK_MAP,
+)
from .encoding import (
to_bytes,
)
from .util import (
- common_environment,
- COVERAGE_CONFIG_NAME,
+ cache,
display,
- find_python,
remove_tree,
MODE_DIRECTORY,
MODE_FILE_EXECUTE,
+ MODE_FILE,
PYTHON_PATHS,
raw_command,
- read_lines_without_comments,
ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
ApplicationError,
- cmd_quote,
+ SubprocessError,
+ generate_name,
)
from .io import (
+ make_dirs,
+ read_text_file,
write_text_file,
write_json_file,
)
@@ -46,23 +52,34 @@ from .provider.layout import (
LayoutMessages,
)
-DOCKER_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
-REMOTE_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
-NETWORK_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
+from .host_configs import (
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+CHECK_YAML_VERSIONS = {}
class ShellScriptTemplate:
"""A simple substition template for shell scripts."""
- def __init__(self, template): # type: (str) -> None
+ def __init__(self, template): # type: (t.Text) -> None
self.template = template
- def substitute(self, **kwargs):
+ def substitute(self, **kwargs): # type: (t.Dict[str, t.Union[str, t.List[str]]]) -> str
"""Return a string templated with the given arguments."""
- kvp = dict((k, cmd_quote(v)) for k, v in kwargs.items())
+ kvp = dict((k, self.quote(v)) for k, v in kwargs.items())
pattern = re.compile(r'#{(?P<name>[^}]+)}')
value = pattern.sub(lambda match: kvp[match.group('name')], self.template)
return value
+ @staticmethod
+ def quote(value): # type: (t.Union[str, t.List[str]]) -> str
+ """Return a shell quoted version of the given value."""
+ if isinstance(value, list):
+ return shlex.quote(' '.join(value))
+
+ return shlex.quote(value)
+
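A sketch of the template class in use: #{name} placeholders are replaced with shell-quoted values, and list values are joined before quoting:

    template = ShellScriptTemplate('ansible-test units --python #{version} #{targets}')
    script = template.substitute(version='3.9', targets=['ping', 'copy'])
    assert script == "ansible-test units --python 3.9 'ping copy'"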
class ResultType:
"""Test result type."""
@@ -107,12 +124,9 @@ ResultType._populate() # pylint: disable=protected-access
class CommonConfig:
"""Configuration common to all commands."""
- def __init__(self, args, command):
- """
- :type args: any
- :type command: str
- """
+ def __init__(self, args, command): # type: (t.Any, str) -> None
self.command = command
+ self.success = None # type: t.Optional[bool]
self.color = args.color # type: bool
self.explain = args.explain # type: bool
@@ -123,6 +137,8 @@ class CommonConfig:
self.info_stderr = False # type: bool
+ self.session_name = generate_name()
+
self.cache = {}
def get_ansible_config(self): # type: () -> str
@@ -130,67 +146,13 @@ class CommonConfig:
return os.path.join(ANSIBLE_TEST_DATA_ROOT, 'ansible.cfg')
-def get_docker_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(DOCKER_COMPLETION, 'docker')
-
-
-def get_remote_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(REMOTE_COMPLETION, 'remote')
-
-
-def get_network_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(NETWORK_COMPLETION, 'network')
-
-
-def get_parameterized_completion(cache, name):
- """
- :type cache: dict[str, dict[str, str]]
- :type name: str
- :rtype: dict[str, dict[str, str]]
- """
- if not cache:
- if data_context().content.collection:
- context = 'collection'
- else:
- context = 'ansible-core'
-
- images = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
-
- cache.update(dict(kvp for kvp in [parse_parameterized_completion(i) for i in images] if kvp and kvp[1].get('context', context) == context))
-
- return cache
-
-
-def parse_parameterized_completion(value): # type: (str) -> t.Optional[t.Tuple[str, t.Dict[str, str]]]
- """Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
- values = value.split()
-
- if not values:
- return None
-
- name = values[0]
- data = dict((kvp[0], kvp[1] if len(kvp) > 1 else '') for kvp in [item.split('=', 1) for item in values[1:]])
-
- return name, data
-
-
-def docker_qualify_image(name):
- """
- :type name: str
- :rtype: str
- """
- config = get_docker_completion().get(name, {})
+def create_result_directories(args): # type: (CommonConfig) -> None
+ """Create result directories."""
+ if args.explain:
+ return
- return config.get('name', name)
+ make_dirs(ResultType.COVERAGE.path)
+ make_dirs(ResultType.DATA.path)
def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> None
@@ -208,6 +170,29 @@ def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> N
raise ApplicationError('\n'.join(messages.error))
+def process_scoped_temporary_file(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str
+ """Return the path to a temporary file that will be automatically removed when the process exits."""
+ if args.explain:
+ path = os.path.join(tempfile.gettempdir(), f'{prefix or tempfile.gettempprefix()}{generate_name()}{suffix or ""}')
+ else:
+ temp_fd, path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
+ os.close(temp_fd)
+ atexit.register(lambda: os.remove(path))
+
+ return path
+
+
+def process_scoped_temporary_directory(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str
+ """Return the path to a temporary directory that will be automatically removed when the process exits."""
+ if args.explain:
+ path = os.path.join(tempfile.gettempdir(), f'{prefix or tempfile.gettempprefix()}{generate_name()}{suffix or ""}')
+ else:
+ path = tempfile.mkdtemp(prefix=prefix, suffix=suffix)
+ atexit.register(lambda: remove_tree(path))
+
+ return path
+
+
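Typical use of the new process-scoped helpers, given any CommonConfig instance:

    inventory_path = process_scoped_temporary_file(args, suffix='.ini')
    work_dir = process_scoped_temporary_directory(args)
    # both are removed via atexit handlers when the process exits; under --explain
    # nothing is created on disk and only plausible paths are returned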
@contextlib.contextmanager
def named_temporary_file(args, prefix, suffix, directory, content):
"""
@@ -219,7 +204,7 @@ def named_temporary_file(args, prefix, suffix, directory, content):
:rtype: str
"""
if args.explain:
- yield os.path.join(directory, '%stemp%s' % (prefix, suffix))
+ yield os.path.join(directory or '/tmp', '%stemp%s' % (prefix, suffix))
else:
with tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix, dir=directory) as tempfile_fd:
tempfile_fd.write(to_bytes(content))
@@ -245,12 +230,74 @@ def write_text_test_results(category, name, content): # type: (ResultType, str,
write_text_file(path, content, create_directories=True)
-def get_python_path(args, interpreter):
- """
- :type args: TestConfig
- :type interpreter: str
- :rtype: str
- """
+@cache
+def get_injector_path(): # type: () -> str
+ """Return the path to a directory which contains a `python.py` executable and associated injector scripts."""
+ injector_path = tempfile.mkdtemp(prefix='ansible-test-', suffix='-injector', dir='/tmp')
+
+ display.info(f'Initializing "{injector_path}" as the temporary injector directory.', verbosity=1)
+
+ injector_names = sorted(list(ANSIBLE_BIN_SYMLINK_MAP) + [
+ 'importer.py',
+ 'pytest',
+ ])
+
+ scripts = (
+ ('python.py', '/usr/bin/env python', MODE_FILE_EXECUTE),
+ ('virtualenv.sh', '/usr/bin/env bash', MODE_FILE),
+ )
+
+ source_path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'injector')
+
+ for name in injector_names:
+ os.symlink('python.py', os.path.join(injector_path, name))
+
+ for name, shebang, mode in scripts:
+ src = os.path.join(source_path, name)
+ dst = os.path.join(injector_path, name)
+
+ script = read_text_file(src)
+ script = set_shebang(script, shebang)
+
+ write_text_file(dst, script)
+ os.chmod(dst, mode)
+
+ os.chmod(injector_path, MODE_DIRECTORY)
+
+ def cleanup_injector():
+ """Remove the temporary injector directory."""
+ remove_tree(injector_path)
+
+ atexit.register(cleanup_injector)
+
+ return injector_path
+
+
+def set_shebang(script, executable): # type: (str, str) -> str
+ """Return the given script with the specified executable used for the shebang."""
+ prefix = '#!'
+ shebang = prefix + executable
+
+ overwrite = (
+ prefix,
+ '# auto-shebang',
+ '# shellcheck shell=',
+ )
+
+ lines = script.splitlines()
+
+ if any(lines[0].startswith(value) for value in overwrite):
+ lines[0] = shebang
+ else:
+ lines.insert(0, shebang)
+
+ script = '\n'.join(lines)
+
+ return script
+
+
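For example, the marker comments listed in overwrite let a script opt in to having its first line replaced instead of prepended:

    script = '# auto-shebang\necho "$@"\n'
    assert set_shebang(script, '/usr/bin/env bash').splitlines()[0] == '#!/usr/bin/env bash'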
+def get_python_path(interpreter): # type: (str) -> str
+ """Return the path to a directory which contains a `python` executable that runs the specified interpreter."""
python_path = PYTHON_PATHS.get(interpreter)
if python_path:
@@ -261,9 +308,6 @@ def get_python_path(args, interpreter):
root_temp_dir = '/tmp'
- if args.explain:
- return os.path.join(root_temp_dir, ''.join((prefix, 'temp', suffix)))
-
python_path = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
injected_interpreter = os.path.join(python_path, 'python')
@@ -324,154 +368,93 @@ def cleanup_python_paths():
"""Clean up all temporary python directories."""
for path in sorted(PYTHON_PATHS.values()):
display.info('Cleaning up temporary python directory: %s' % path, verbosity=2)
- shutil.rmtree(path)
-
-
-def get_coverage_environment(args, target_name, version, temp_path, module_coverage, remote_temp_path=None):
+ remove_tree(path)
+
+
+def intercept_python(
+ args, # type: CommonConfig
+ python, # type: PythonConfig
+ cmd, # type: t.List[str]
+ env, # type: t.Dict[str, str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+ always=False, # type: bool
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
"""
- :type args: TestConfig
- :type target_name: str
- :type version: str
- :type temp_path: str
- :type module_coverage: bool
- :type remote_temp_path: str | None
- :rtype: dict[str, str]
+ Run a command while intercepting invocations of Python to control the version used.
+ If the specified Python is an ansible-test managed virtual environment, it will be added to PATH to activate it.
+ Otherwise a temporary directory will be created to ensure the correct Python can be found in PATH.
"""
- if temp_path:
- # integration tests (both localhost and the optional testhost)
- # config and results are in a temporary directory
- coverage_config_base_path = temp_path
- coverage_output_base_path = temp_path
- elif args.coverage_config_base_path:
- # unit tests, sanity tests and other special cases (localhost only)
- # config is in a temporary directory
- # results are in the source tree
- coverage_config_base_path = args.coverage_config_base_path
- coverage_output_base_path = os.path.join(data_context().content.root, data_context().content.results_path)
- else:
- raise Exception('No temp path and no coverage config base path. Check for missing coverage_context usage.')
-
- config_file = os.path.join(coverage_config_base_path, COVERAGE_CONFIG_NAME)
- coverage_file = os.path.join(coverage_output_base_path, ResultType.COVERAGE.name, '%s=%s=%s=%s=coverage' % (
- args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version))
-
- if not args.explain and not os.path.exists(config_file):
- raise Exception('Missing coverage config file: %s' % config_file)
-
- if args.coverage_check:
- # cause the 'coverage' module to be found, but not imported or enabled
- coverage_file = ''
-
- # Enable code coverage collection on local Python programs (this does not include Ansible modules).
- # Used by the injectors to support code coverage.
- # Used by the pytest unit test plugin to support code coverage.
- # The COVERAGE_FILE variable is also used directly by the 'coverage' module.
- env = dict(
- COVERAGE_CONF=config_file,
- COVERAGE_FILE=coverage_file,
- )
-
- if module_coverage:
- # Enable code coverage collection on Ansible modules (both local and remote).
- # Used by the AnsiballZ wrapper generator in lib/ansible/executor/module_common.py to support code coverage.
- env.update(dict(
- _ANSIBLE_COVERAGE_CONFIG=config_file,
- _ANSIBLE_COVERAGE_OUTPUT=coverage_file,
- ))
-
- if remote_temp_path:
- # Include the command, target and label so the remote host can create a filename with that info. The remote
- # is responsible for adding '={language version}=coverage.{hostname}.{pid}.{id}'
- env['_ANSIBLE_COVERAGE_REMOTE_OUTPUT'] = os.path.join(remote_temp_path, '%s=%s=%s' % (
- args.command, target_name, args.coverage_label or 'remote'))
- env['_ANSIBLE_COVERAGE_REMOTE_PATH_FILTER'] = os.path.join(data_context().content.root, '*')
-
- return env
-
-
-def intercept_command(args, cmd, target_name, env, capture=False, data=None, cwd=None, python_version=None, temp_path=None, module_coverage=True,
- virtualenv=None, disable_coverage=False, remote_temp_path=None):
- """
- :type args: TestConfig
- :type cmd: collections.Iterable[str]
- :type target_name: str
- :type env: dict[str, str]
- :type capture: bool
- :type data: str | None
- :type cwd: str | None
- :type python_version: str | None
- :type temp_path: str | None
- :type module_coverage: bool
- :type virtualenv: str | None
- :type disable_coverage: bool
- :type remote_temp_path: str | None
- :rtype: str | None, str | None
- """
- if not env:
- env = common_environment()
- else:
- env = env.copy()
-
+ env = env.copy()
cmd = list(cmd)
- version = python_version or args.python_version
- interpreter = virtualenv or find_python(version)
- inject_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector')
-
- if not virtualenv:
- # injection of python into the path is required when not activating a virtualenv
- # otherwise scripts may find the wrong interpreter or possibly no interpreter
- python_path = get_python_path(args, interpreter)
- inject_path = python_path + os.path.pathsep + inject_path
-
- env['PATH'] = inject_path + os.path.pathsep + env['PATH']
- env['ANSIBLE_TEST_PYTHON_VERSION'] = version
- env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter
-
- if args.coverage and not disable_coverage:
- # add the necessary environment variables to enable code coverage collection
- env.update(get_coverage_environment(args, target_name, version, temp_path, module_coverage,
- remote_temp_path=remote_temp_path))
-
- return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
+ inject_path = get_injector_path()
+ # make sure scripts (including injector.py) find the correct Python interpreter
+ if isinstance(python, VirtualPythonConfig):
+ python_path = os.path.dirname(python.path)
+ else:
+ python_path = get_python_path(python.path)
+
+ env['PATH'] = os.path.pathsep.join([inject_path, python_path, env['PATH']])
+ env['ANSIBLE_TEST_PYTHON_VERSION'] = python.version
+ env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = python.path
+
+ return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd, always=always)
+
+
+def run_command(
+ args, # type: CommonConfig
+ cmd, # type: t.Iterable[str]
+ capture=False, # type: bool
+ env=None, # type: t.Optional[t.Dict[str, str]]
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+ always=False, # type: bool
+ stdin=None, # type: t.Optional[t.BinaryIO]
+ stdout=None, # type: t.Optional[t.BinaryIO]
+ cmd_verbosity=1, # type: int
+ str_errors='strict', # type: str
+ error_callback=None, # type: t.Optional[t.Callable[[SubprocessError], None]]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return stdout and stderr as a tuple."""
+ explain = args.explain and not always
+ return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd, explain=explain, stdin=stdin, stdout=stdout,
+ cmd_verbosity=cmd_verbosity, str_errors=str_errors, error_callback=error_callback)
-def resolve_csharp_ps_util(import_name, path):
- """
- :type import_name: str
- :type path: str
- """
- if data_context().content.is_ansible or not import_name.startswith('.'):
- # We don't support relative paths for builtin utils, there's no point.
- return import_name
- packages = import_name.split('.')
- module_packages = path.split(os.path.sep)
+def yamlcheck(python):
+ """Return True if PyYAML has libyaml support, False if it does not and None if it was not found."""
+ result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0])
- for package in packages:
- if not module_packages or package:
- break
- del module_packages[-1]
+ if not result['yaml']:
+ return None
- return 'ansible_collections.%s%s' % (data_context().content.prefix,
- '.'.join(module_packages + [p for p in packages if p]))
+ return result['cloader']
-def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None,
- cmd_verbosity=1, str_errors='strict'):
+def check_pyyaml(python, required=True, quiet=False): # type: (PythonConfig, bool, bool) -> t.Optional[bool]
"""
- :type args: CommonConfig
- :type cmd: collections.Iterable[str]
- :type capture: bool
- :type env: dict[str, str] | None
- :type data: str | None
- :type cwd: str | None
- :type always: bool
- :type stdin: file | None
- :type stdout: file | None
- :type cmd_verbosity: int
- :type str_errors: str
- :rtype: str | None, str | None
+ Return True if PyYAML has libyaml support, False if it does not, and None if it was not found.
+ The result is cached when PyYAML is present or required.
"""
- explain = args.explain and not always
- return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd, explain=explain, stdin=stdin, stdout=stdout,
- cmd_verbosity=cmd_verbosity, str_errors=str_errors)
+ try:
+ return CHECK_YAML_VERSIONS[python.path]
+ except KeyError:
+ pass
+
+ state = yamlcheck(python)
+
+ if state is not None or required:
+ # results are cached only if pyyaml is required or present
+ # it is assumed that tests will not uninstall/re-install pyyaml -- if they do, those changes will go undetected
+ CHECK_YAML_VERSIONS[python.path] = state
+
+ if not quiet:
+ if state is None:
+ if required:
+ display.warning('PyYAML is not installed for interpreter: %s' % python.path)
+ elif not state:
+ display.warning('PyYAML will be slow due to installation without libyaml support for interpreter: %s' % python.path)
+
+ return state
diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py
index 37eef367..2cfd978d 100644
--- a/test/lib/ansible_test/_internal/venv.py
+++ b/test/lib/ansible_test/_internal/venv.py
@@ -1,12 +1,10 @@
"""Virtual environment management."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
import sys
-
-from . import types as t
+import typing as t
from .config import (
EnvironmentConfig,
@@ -16,69 +14,96 @@ from .util import (
find_python,
SubprocessError,
get_available_python_versions,
- SUPPORTED_PYTHON_VERSIONS,
- ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
display,
remove_tree,
+ ApplicationError,
+ str_to_version,
)
from .util_common import (
run_command,
+ ResultType,
+)
+
+from .host_configs import (
+ VirtualPythonConfig,
+ PythonConfig,
)
+def get_virtual_python(
+ args, # type: EnvironmentConfig
+ python, # type: VirtualPythonConfig
+):
+ """Create a virtual environment for the given Python and return the path to its root."""
+ if python.system_site_packages:
+ suffix = '-ssp'
+ else:
+ suffix = ''
+
+ virtual_environment_path = os.path.join(ResultType.TMP.path, 'delegation', f'python{python.version}{suffix}')
+
+ if not create_virtual_environment(args, python, virtual_environment_path, python.system_site_packages):
+ raise ApplicationError(f'Python {python.version} does not provide virtual environment support.')
+
+ return virtual_environment_path
+
+
def create_virtual_environment(args, # type: EnvironmentConfig
- version, # type: str
+ python, # type: PythonConfig
path, # type: str
system_site_packages=False, # type: bool
pip=True, # type: bool
): # type: (...) -> bool
"""Create a virtual environment using venv or virtualenv for the requested Python version."""
if os.path.isdir(path):
- display.info('Using existing Python %s virtual environment: %s' % (version, path), verbosity=1)
+ display.info('Using existing Python %s virtual environment: %s' % (python.version, path), verbosity=1)
return True
- python = find_python(version, required=False)
- python_version = tuple(int(v) for v in version.split('.'))
-
- if not python:
+ if not os.path.exists(python.path):
# the requested python version could not be found
return False
- if python_version >= (3, 0):
+ if str_to_version(python.version) >= (3, 0):
# use the built-in 'venv' module on Python 3.x
# creating a virtual environment using 'venv' when running in a virtual environment created by 'virtualenv' results
# in a copy of the original virtual environment instead of creation of a new one
# avoid this issue by only using "real" python interpreters to invoke 'venv'
- for real_python in iterate_real_pythons(args, version):
+ for real_python in iterate_real_pythons(args, python.version):
if run_venv(args, real_python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "venv": %s' % (version, path), verbosity=1)
+ display.info('Created Python %s virtual environment using "venv": %s' % (python.version, path), verbosity=1)
return True
# something went wrong, most likely the package maintainer for the Python installation removed ensurepip
# which will prevent creation of a virtual environment without installation of other OS packages
# use the installed 'virtualenv' module on the Python requested version
- if run_virtualenv(args, python, python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "virtualenv": %s' % (version, path), verbosity=1)
+ if run_virtualenv(args, python.path, python.path, system_site_packages, pip, path):
+ display.info('Created Python %s virtual environment using "virtualenv": %s' % (python.version, path), verbosity=1)
return True
- available_pythons = get_available_python_versions(SUPPORTED_PYTHON_VERSIONS)
+ available_pythons = get_available_python_versions()
for available_python_version, available_python_interpreter in sorted(available_pythons.items()):
+ if available_python_interpreter == python.path:
+ # already attempted to use this interpreter
+ continue
+
virtualenv_version = get_virtualenv_version(args, available_python_interpreter)
if not virtualenv_version:
# virtualenv not available for this Python or we were unable to detect the version
continue
- if python_version == (2, 6) and virtualenv_version >= (16, 0, 0):
+ if python.version == '2.6' and virtualenv_version >= (16, 0, 0):
# virtualenv 16.0.0 dropped python 2.6 support: https://virtualenv.pypa.io/en/latest/changes/#v16-0-0-2018-05-16
continue
# try using 'virtualenv' from another Python to setup the desired version
- if run_virtualenv(args, available_python_interpreter, python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "virtualenv" on Python %s: %s' % (version, available_python_version, path), verbosity=1)
+ if run_virtualenv(args, available_python_interpreter, python.path, system_site_packages, pip, path):
+ display.info('Created Python %s virtual environment using "virtualenv" on Python %s: %s' % (python.version, available_python_version, path),
+ verbosity=1)
return True
# no suitable 'virtualenv' available
@@ -90,7 +115,7 @@ def iterate_real_pythons(args, version): # type: (EnvironmentConfig, str) -> t.
Iterate through available real python interpreters of the requested version.
The current interpreter will be checked and then the path will be searched.
"""
- version_info = tuple(int(n) for n in version.split('.'))
+ version_info = str_to_version(version)
current_python = None
if version_info == sys.version_info[:len(version_info)]:
@@ -125,11 +150,11 @@ def iterate_real_pythons(args, version): # type: (EnvironmentConfig, str) -> t.
yield found_python
-def get_python_real_prefix(args, path): # type: (EnvironmentConfig, str) -> t.Optional[str]
+def get_python_real_prefix(args, python_path): # type: (EnvironmentConfig, str) -> t.Optional[str]
"""
Return the real prefix of the specified interpreter or None if the interpreter is not a virtual environment created by 'virtualenv'.
"""
- cmd = [path, os.path.join(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'virtualenvcheck.py'))]
+ cmd = [python_path, os.path.join(os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'virtualenvcheck.py'))]
check_result = json.loads(run_command(args, cmd, capture=True, always=True)[0])
real_prefix = check_result['real_prefix']
return real_prefix
@@ -173,7 +198,7 @@ def run_virtualenv(args, # type: EnvironmentConfig
path, # type: str
): # type: (...) -> bool
"""Create a virtual environment using the 'virtualenv' module."""
- # always specify --python to guarantee the desired interpreter is provided
+ # always specify which interpreter to use to guarantee the desired interpreter is provided
# otherwise virtualenv may select a different interpreter than the one running virtualenv
cmd = [run_python, '-m', 'virtualenv', '--python', env_python]
@@ -199,29 +224,32 @@ def run_virtualenv(args, # type: EnvironmentConfig
def get_virtualenv_version(args, python): # type: (EnvironmentConfig, str) -> t.Optional[t.Tuple[int, ...]]
- """Get the virtualenv version for the given python intepreter, if available."""
+ """Get the virtualenv version for the given python intepreter, if available, otherwise return None."""
try:
- return get_virtualenv_version.result
+ cache = get_virtualenv_version.cache
except AttributeError:
- pass
+ cache = get_virtualenv_version.cache = {}
- get_virtualenv_version.result = None
+ if python not in cache:
+ try:
+ stdout = run_command(args, [python, '-m', 'virtualenv', '--version'], capture=True)[0]
+ except SubprocessError as ex:
+ stdout = ''
- cmd = [python, '-m', 'virtualenv', '--version']
+ if args.verbosity > 1:
+ display.error(ex)
- try:
- stdout = run_command(args, cmd, capture=True)[0]
- except SubprocessError as ex:
- if args.verbosity > 1:
- display.error(ex)
+ version = None
- stdout = ''
+ if stdout:
+ # noinspection PyBroadException
+ try:
+ version = str_to_version(stdout.strip())
+ except Exception: # pylint: disable=broad-except
+ pass
- if stdout:
- # noinspection PyBroadException
- try:
- get_virtualenv_version.result = tuple(int(v) for v in stdout.strip().split('.'))
- except Exception: # pylint: disable=broad-except
- pass
+ cache[python] = version
+
+ version = cache[python]
- return get_virtualenv_version.result
+ return version
diff --git a/test/lib/ansible_test/_util/__init__.py b/test/lib/ansible_test/_util/__init__.py
new file mode 100644
index 00000000..d6fc0a86
--- /dev/null
+++ b/test/lib/ansible_test/_util/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json
index 12bbe0d1..12bbe0d1 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
index 65142e00..e19b4d98 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Test to verify action plugins have an associated module to provide documentation."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json
index 7d19f101..7d19f101 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/changelog.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
index 2ccfb24f..1875ab3a 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/changelog.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
index 000c29e4..000c29e4 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json
index 9835f9b6..9835f9b6 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
index 8bcd7f9e..806c0e6e 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json
index 6f1edb78..4ebce32c 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json
@@ -2,5 +2,6 @@
"extensions": [
".py"
],
+ "py2_compat": true,
"output": "path-message"
}
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
index 81081eed..cdad9655 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json
index db5c3c98..db5c3c98 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
index 1e4212d1..660b0fce 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json
index 6f1edb78..4ebce32c 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json
@@ -2,5 +2,6 @@
"extensions": [
".py"
],
+ "py2_compat": true,
"output": "path-message"
}
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
index 28d06f36..e3fba1f5 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json
index ccee80a2..ccee80a2 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
index 78561d96..d6d710ae 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -10,8 +9,8 @@ ASSERT_RE = re.compile(r'^\s*assert[^a-z0-9_:]')
def main():
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- with open(path, 'r') as f:
- for i, line in enumerate(f.readlines()):
+ with open(path, 'r') as file:
+ for i, line in enumerate(file.readlines()):
matches = ASSERT_RE.findall(line)
if matches:
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
index a35650ef..18a3f6d1 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
index e28b24f4..7dfd5b26 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
index 237ee5b1..8925e831 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
index 4bf92ea9..18134154 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
index c925f5b7..5a267ba0 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.json
index 6f13c86b..6f13c86b 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
index 99432ea1..421bbd62 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
# a script to check for illegal filenames on various Operating Systems. The
# main rules are derived from restrictions on Windows
# https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json
index ccee80a2..ccee80a2 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
index 74a36ecc..e5abd64d 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -9,8 +8,8 @@ MAIN_DISPLAY_IMPORT = 'from __main__ import display'
def main():
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- with open(path, 'r') as f:
- for i, line in enumerate(f.readlines()):
+ with open(path, 'r') as file:
+ for i, line in enumerate(file.readlines()):
if MAIN_DISPLAY_IMPORT in line:
lineno = i + 1
colno = line.index(MAIN_DISPLAY_IMPORT) + 1
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json
index 54d9fff5..54d9fff5 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
index e44005a5..8399a36e 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
index e2201ab1..bb8c8f01 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
index 88858aeb..88858aeb 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
index b2de1ba8..87575f51 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json
index 44003ec0..44003ec0 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
index 53ea00fc..cad82a55 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -7,7 +6,7 @@ import datetime
import os
import re
import sys
-from distutils.version import StrictVersion, LooseVersion
+
from functools import partial
import yaml
@@ -16,6 +15,7 @@ from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA
from voluptuous import Required, Schema, Invalid
from voluptuous.humanize import humanize_error
+from ansible.module_utils.compat.version import StrictVersion, LooseVersion
from ansible.module_utils.six import string_types
from ansible.utils.version import SemanticVersion
@@ -105,8 +105,7 @@ def get_collection_version():
"""Return current collection version, or None if it is not available"""
import importlib.util
- collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
- 'collection_detail.py')
+ collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tools', 'collection_detail.py')
collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path)
collection_detail = importlib.util.module_from_spec(collection_detail_spec)
sys.modules['collection_detail'] = collection_detail
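
The import swap above (distutils.version -> ansible.module_utils.compat.version) recurs throughout this commit: distutils is deprecated by PEP 632 and removed in Python 3.12, so ansible-core vendors StrictVersion/LooseVersion. A minimal sketch of the fallback pattern, assuming only that the compat module mirrors the distutils API:

    try:
        # Vendored copies that survive the removal of distutils (PEP 632).
        from ansible.module_utils.compat.version import StrictVersion, LooseVersion
    except ImportError:
        # Deprecated since Python 3.10, removed in 3.12.
        from distutils.version import StrictVersion, LooseVersion

    assert LooseVersion('2.10') < LooseVersion('2.12')
    assert StrictVersion('2.12.0') >= StrictVersion('2.12')
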
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json
index 5648429e..5648429e 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/shebang.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
index 7cf3cf72..401af1ae 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/shebang.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -69,8 +68,8 @@ def main():
is_module = True
elif re.search('^test/support/[^/]+/collections/ansible_collections/[^/]+/[^/]+/plugins/modules/', path):
is_module = True
- elif path.startswith('test/lib/ansible_test/_data/'):
- pass
+ elif path == 'test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py':
+ pass # ansible-test entry point must be executable and have a shebang
elif path.startswith('lib/') or path.startswith('test/lib/'):
if executable:
print('%s:%d:%d: should not be executable' % (path, 0, 0))
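
After this change the shebang test special-cases only the new ansible-test entry point; everything else under lib/ and test/lib/ must be non-executable and shebang-free. A toy version of the two properties it inspects, with hypothetical reporting (not the test's real output format):

    import os
    import stat
    import sys

    for path in sys.argv[1:]:
        executable = bool(os.stat(path).st_mode & stat.S_IXUSR)
        with open(path, 'rb') as file:
            has_shebang = file.readline().startswith(b'#!')
        print('%s: executable=%s shebang=%s' % (path, executable, has_shebang))
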
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json
index 6f13c86b..6f13c86b 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
index 0585c6b1..5603051a 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json
index 36103051..36103051 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
index 687136dc..68f380b0 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
index 776590b7..776590b7 100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
index 49cb76c5..a8f0b879 100755..100644
--- a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
index 74a45f00..74a45f00 100644
--- a/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py
+++ b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
diff --git a/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
index 659c7f59..659c7f59 100644
--- a/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt
+++ b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
diff --git a/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
index 21007db6..f9d11d9d 100755..100644
--- a/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
#Requires -Version 6
#Requires -Modules PSScriptAnalyzer, PSSA-PSCustomUseLiteralPath
diff --git a/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
index 7646ec35..7646ec35 100644
--- a/test/lib/ansible_test/_data/sanity/pslint/settings.psd1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
index 5e9b593c..30e40ba1 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
@@ -21,7 +21,7 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- unused-import, # pylint does not understand PEP 484 type hints
+ useless-return, # complains about returning None when the return type is optional
[BASIC]
@@ -43,5 +43,12 @@ good-names=
k,
Run,
-method-rgx=[a-z_][a-z0-9_]{2,40}$
-function-rgx=[a-z_][a-z0-9_]{2,40}$
+class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$
+attr-rgx=[a-z_][a-z0-9_]{1,40}$
+method-rgx=[a-z_][a-z0-9_]{1,40}$
+function-rgx=[a-z_][a-z0-9_]{1,40}$
+
+[IMPORTS]
+
+preferred-modules =
+ distutils.version:ansible.module_utils.compat.version,
diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
index 77f69b3e..3c60aa77 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
@@ -1,18 +1,14 @@
[MESSAGES CONTROL]
disable=
- consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
- consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
cyclic-import, # consistent results require running with --jobs 1 and testing all files
duplicate-code, # consistent results require running with --jobs 1 and testing all files
import-error, # inconsistent results which depend on the availability of imports
import-outside-toplevel, # common pattern in ansible related code
- missing-docstring,
no-name-in-module, # inconsistent results which depend on the availability of imports
+ no-self-use,
raise-missing-from, # Python 2.x does not support raise from
- super-with-arguments, # Python 2.x does not support super without arguments
too-few-public-methods,
- too-many-ancestors, # inconsistent results between python 3.6 and 3.7+
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
@@ -21,7 +17,7 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- unused-import, # pylint does not understand PEP 484 type hints
+ useless-return, # complains about returning None when the return type is optional
[BASIC]
@@ -37,14 +33,18 @@ bad-names=
good-names=
__metaclass__,
C,
- e,
ex,
- f,
i,
j,
k,
Run,
-module-rgx=[a-z_][a-z0-9_-]{2,40}$
-method-rgx=[a-z_][a-z0-9_]{2,40}$
-function-rgx=[a-z_][a-z0-9_]{2,40}$
+class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$
+attr-rgx=[a-z_][a-z0-9_]{1,40}$
+method-rgx=[a-z_][a-z0-9_]{1,40}$
+function-rgx=[a-z_][a-z0-9_]{1,40}$
+
+[IMPORTS]
+
+preferred-modules =
+ distutils.version:ansible.module_utils.compat.version,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
new file mode 100644
index 00000000..739d3757
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
@@ -0,0 +1,55 @@
+[MESSAGES CONTROL]
+
+disable=
+ cyclic-import, # consistent results require running with --jobs 1 and testing all files
+ duplicate-code, # consistent results require running with --jobs 1 and testing all files
+ import-error, # inconsistent results which depend on the availability of imports
+ import-outside-toplevel, # common pattern in ansible related code
+ no-name-in-module, # inconsistent results which depend on the availability of imports
+ no-self-use,
+ raise-missing-from, # Python 2.x does not support raise from
+ too-few-public-methods,
+ too-many-arguments,
+ too-many-branches,
+ too-many-instance-attributes,
+ too-many-lines,
+ too-many-locals,
+ too-many-nested-blocks,
+ too-many-return-statements,
+ too-many-statements,
+ useless-return, # complains about returning None when the return type is optional
+ # code-smell tests should be updated so the following rules can be enabled
+ # once that happens the pylint sanity test can be updated to no longer special-case the code-smell tests (use standard ansible-test config instead)
+ missing-module-docstring,
+ missing-function-docstring,
+
+[BASIC]
+
+bad-names=
+ _,
+ bar,
+ baz,
+ foo,
+ tata,
+ toto,
+ tutu,
+
+good-names=
+ __metaclass__,
+ C,
+ ex,
+ i,
+ j,
+ k,
+ Run,
+
+class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$
+attr-rgx=[a-z_][a-z0-9_]{1,40}$
+method-rgx=[a-z_][a-z0-9_]{1,40}$
+function-rgx=[a-z_][a-z0-9_]{1,40}$
+module-rgx=[a-z_][a-z0-9_-]{2,40}$
+
+[IMPORTS]
+
+preferred-modules =
+ distutils.version:ansible.module_utils.compat.version,
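
The loosened naming rules in these configs ({2,40} -> {1,40} after the leading character) admit two-character names that pylint previously rejected. A quick check of what the new function-rgx accepts:

    import re

    function_rgx = re.compile(r'[a-z_][a-z0-9_]{1,40}$')
    assert function_rgx.match('do')          # two characters: now allowed
    assert function_rgx.match('run_tests')
    assert not function_rgx.match('x')       # one character: still rejected
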
diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
index 27c5a8ca..31c140cc 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
@@ -18,11 +18,13 @@ disable=
consider-iterating-dictionary,
consider-merging-isinstance,
consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
+ consider-using-dict-items,
consider-using-enumerate,
consider-using-get,
consider-using-in,
consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
consider-using-ternary,
+ consider-using-with,
cyclic-import, # consistent results require running with --jobs 1 and testing all files
deprecated-lambda,
deprecated-method,
diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
index c1a08be5..a1275aa9 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
@@ -19,11 +19,13 @@ disable=
consider-iterating-dictionary,
consider-merging-isinstance,
consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
+ consider-using-dict-items,
consider-using-enumerate,
consider-using-get,
consider-using-in,
consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
consider-using-ternary,
+ consider-using-with,
cyclic-import, # consistent results require running with --jobs 1 and testing all files
deprecated-lambda,
deprecated-method,
@@ -139,3 +141,8 @@ good-names=
ignored-modules=
_MovedItems,
+
+[IMPORTS]
+
+preferred-modules =
+ distutils.version:ansible.module_utils.compat.version,
diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
index 337ccd75..234ec217 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
@@ -1,3 +1,4 @@
+"""Ansible specific plyint plugin for checking deprecations."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
@@ -7,14 +8,13 @@ __metaclass__ = type
import datetime
import re
-from distutils.version import LooseVersion
-
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.six import string_types
from ansible.release import __version__ as ansible_version_raw
from ansible.utils.version import SemanticVersion
@@ -107,6 +107,7 @@ def _get_expr_name(node):
def parse_isodate(value):
+ """Parse an ISO 8601 date string."""
msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
if not isinstance(value, string_types):
raise ValueError(msg)
@@ -147,10 +148,10 @@ class AnsibleDeprecatedChecker(BaseChecker):
def __init__(self, *args, **kwargs):
self.collection_version = None
self.collection_name = None
- super(AnsibleDeprecatedChecker, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def set_option(self, optname, value, action=None, optdict=None):
- super(AnsibleDeprecatedChecker, self).set_option(optname, value, action, optdict)
+ super().set_option(optname, value, action, optdict)
if optname == 'collection-version' and value is not None:
self.collection_version = SemanticVersion(self.config.collection_version)
if optname == 'collection-name' and value is not None:
@@ -203,6 +204,7 @@ class AnsibleDeprecatedChecker(BaseChecker):
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
+ """Visit a call node."""
version = None
date = None
collection_name = None
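
The parse_isodate helper that gains a docstring above is only partially visible in this hunk; a self-contained sketch of the behaviour implied by its error message (assumed, not the plugin's exact code):

    import datetime

    def parse_isodate(value):
        """Parse a strict ISO 8601 calendar date (YYYY-MM-DD)."""
        msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
        if not isinstance(value, str):
            raise ValueError(msg)
        try:
            return datetime.datetime.strptime(value, '%Y-%m-%d').date()
        except ValueError:
            raise ValueError(msg)

    assert parse_isodate('2021-11-17') == datetime.date(2021, 11, 17)
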
diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
index eafde73b..3b9a37e5 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
@@ -1,13 +1,10 @@
+"""Ansible specific pylint plugin for checking format string usage."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import sys
-
-import six
-
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
@@ -19,8 +16,6 @@ except ImportError:
# noinspection PyUnresolvedReferences
from pylint.checkers.strings import parse_format_method_string
-_PY3K = sys.version_info[:2] >= (3, 0)
-
MSGS = {
'E9305': ("Format string contains automatic field numbering "
"specification",
@@ -47,6 +42,7 @@ class AnsibleStringFormatChecker(BaseChecker):
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
+ """Visit a call node."""
func = utils.safe_infer(node.func)
if (isinstance(func, astroid.BoundMethod)
and isinstance(func.bound, astroid.Instance)
@@ -66,10 +62,10 @@ class AnsibleStringFormatChecker(BaseChecker):
if not isinstance(strnode, astroid.Const):
return
- if _PY3K and isinstance(strnode.value, six.binary_type):
+ if isinstance(strnode.value, bytes):
self.add_message('ansible-no-format-on-bytestring', node=node)
return
- if not isinstance(strnode.value, six.string_types):
+ if not isinstance(strnode.value, str):
return
if node.starargs or node.kwargs:
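
Dropping six here is safe because the checker now runs only on Python 3, where the distinction it guards against is built in: bytes objects have no .format() method at all. A two-line demonstration:

    try:
        b'path: {0}'.format('/tmp')  # always an AttributeError on Python 3
    except AttributeError as ex:
        print(ex)  # 'bytes' object has no attribute 'format'
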
diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
index 7012feaa..75a8b57f 100644
--- a/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
@@ -4,6 +4,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+import typing as t
import astroid
@@ -16,24 +17,20 @@ ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH']
class UnwantedEntry:
"""Defines an unwanted import."""
- def __init__(self, alternative, modules_only=False, names=None, ignore_paths=None):
- """
- :type alternative: str
- :type modules_only: bool
- :type names: tuple[str] | None
- :type ignore_paths: tuple[str] | None
- """
+ def __init__(
+ self,
+ alternative, # type: str
+ modules_only=False, # type: bool
+ names=None, # type: t.Optional[t.Tuple[str, ...]]
+ ignore_paths=None, # type: t.Optional[t.Tuple[str, ...]]
+ ): # type: (...) -> None
self.alternative = alternative
self.modules_only = modules_only
self.names = set(names) if names else set()
self.ignore_paths = ignore_paths
- def applies_to(self, path, name=None):
- """
- :type path: str
- :type name: str | None
- :rtype: bool
- """
+ def applies_to(self, path, name=None): # type: (str, t.Optional[str]) -> bool
+ """Return True if this entry applies to the given path, otherwise return False."""
if self.names:
if not name:
return False
@@ -50,11 +47,8 @@ class UnwantedEntry:
return True
-def is_module_path(path):
- """
- :type path: str
- :rtype: bool
- """
+def is_module_path(path): # type: (str) -> bool
+ """Return True if the given path is a module or module_utils path, otherwise return False."""
return path.startswith(ANSIBLE_TEST_MODULES_PATH) or path.startswith(ANSIBLE_TEST_MODULE_UTILS_PATH)
@@ -96,7 +90,7 @@ class AnsibleUnwantedChecker(BaseChecker):
'/lib/ansible/module_utils/urls.py',
)),
- # see https://docs.python.org/3.7/library/collections.abc.html
+ # see https://docs.python.org/3/library/collections.abc.html
collections=UnwantedEntry('ansible.module_utils.common._collections_compat',
ignore_paths=(
'/lib/ansible/module_utils/common/_collections_compat.py',
@@ -119,42 +113,34 @@ class AnsibleUnwantedChecker(BaseChecker):
)
unwanted_functions = {
- # see https://docs.python.org/2/library/tempfile.html#tempfile.mktemp
+ # see https://docs.python.org/3/library/tempfile.html#tempfile.mktemp
'tempfile.mktemp': UnwantedEntry('tempfile.mkstemp'),
'sys.exit': UnwantedEntry('exit_json or fail_json',
ignore_paths=(
'/lib/ansible/module_utils/basic.py',
'/lib/ansible/modules/async_wrapper.py',
- '/lib/ansible/module_utils/common/removed.py',
),
modules_only=True),
'builtins.print': UnwantedEntry('module.log or module.debug',
ignore_paths=(
'/lib/ansible/module_utils/basic.py',
- '/lib/ansible/module_utils/common/removed.py',
),
modules_only=True),
}
- def visit_import(self, node):
- """
- :type node: astroid.node_classes.Import
- """
+ def visit_import(self, node): # type: (astroid.node_classes.Import) -> None
+ """Visit an import node."""
for name in node.names:
self._check_import(node, name[0])
- def visit_importfrom(self, node):
- """
- :type node: astroid.node_classes.ImportFrom
- """
+ def visit_importfrom(self, node): # type: (astroid.node_classes.ImportFrom) -> None
+ """Visit an import from node."""
self._check_importfrom(node, node.modname, node.names)
- def visit_attribute(self, node):
- """
- :type node: astroid.node_classes.Attribute
- """
+ def visit_attribute(self, node): # type: (astroid.node_classes.Attribute) -> None
+ """Visit an attribute node."""
last_child = node.last_child()
# this is faster than using type inference and will catch the most common cases
@@ -169,10 +155,8 @@ class AnsibleUnwantedChecker(BaseChecker):
if entry.applies_to(self.linter.current_file, node.attrname):
self.add_message(self.BAD_IMPORT_FROM, args=(node.attrname, entry.alternative, module), node=node)
- def visit_call(self, node):
- """
- :type node: astroid.node_classes.Call
- """
+ def visit_call(self, node): # type: (astroid.node_classes.Call) -> None
+ """Visit a call node."""
try:
for i in node.func.inferred():
func = None
@@ -190,11 +174,8 @@ class AnsibleUnwantedChecker(BaseChecker):
except astroid.exceptions.InferenceError:
pass
- def _check_import(self, node, modname):
- """
- :type node: astroid.node_classes.Import
- :type modname: str
- """
+ def _check_import(self, node, modname): # type: (astroid.node_classes.Import, str) -> None
+ """Check the imports on the specified import node."""
self._check_module_import(node, modname)
entry = self.unwanted_imports.get(modname)
@@ -205,12 +186,8 @@ class AnsibleUnwantedChecker(BaseChecker):
if entry.applies_to(self.linter.current_file):
self.add_message(self.BAD_IMPORT, args=(entry.alternative, modname), node=node)
- def _check_importfrom(self, node, modname, names):
- """
- :type node: astroid.node_classes.ImportFrom
- :type modname: str
- :type names: list[str[
- """
+ def _check_importfrom(self, node, modname, names): # type: (astroid.node_classes.ImportFrom, str, t.List[str]) -> None
+ """Check the imports on the specified import from node."""
self._check_module_import(node, modname)
entry = self.unwanted_imports.get(modname)
@@ -222,11 +199,8 @@ class AnsibleUnwantedChecker(BaseChecker):
if entry.applies_to(self.linter.current_file, name[0]):
self.add_message(self.BAD_IMPORT_FROM, args=(name[0], entry.alternative, modname), node=node)
- def _check_module_import(self, node, modname):
- """
- :type node: astroid.node_classes.Import | astroid.node_classes.ImportFrom
- :type modname: str
- """
+ def _check_module_import(self, node, modname): # type: (t.Union[astroid.node_classes.Import, astroid.node_classes.ImportFrom], str) -> None
+ """Check the module import on the given import or import from node."""
if not is_module_path(self.linter.current_file):
return
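
The pattern above, repeated across these plugins, converts reST-style :type: docstrings into PEP 484 type comments, which stay syntactically valid on Python 2 while remaining visible to type checkers. A minimal illustration of the converted form:

    import typing as t

    def applies_to(path, name=None):  # type: (str, t.Optional[str]) -> bool
        """Types live in a comment; writing 'def applies_to(path: str, ...)'
        would be a SyntaxError on Python 2.x."""
        return bool(path) and (name is None or name != '')
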
diff --git a/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt b/test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt
index 29588ddd..29588ddd 100644
--- a/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt
+++ b/test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
index c1e2bdaa..e6749cdc 100755..100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules
index 11a5d8e1..11a5d8e1 120000
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
index d8ff2dc0..d8ff2dc0 100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
index 5b61f44b..9f4f5c47 100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
@@ -33,7 +33,7 @@ import traceback
from collections import OrderedDict
from contextlib import contextmanager
-from distutils.version import StrictVersion, LooseVersion
+from ansible.module_utils.compat.version import StrictVersion, LooseVersion
from fnmatch import fnmatch
import yaml
@@ -241,12 +241,14 @@ class Validator(with_metaclass(abc.ABCMeta, object)):
def __init__(self, reporter=None):
self.reporter = reporter
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def object_name(self):
"""Name of the object we validated"""
pass
- @abc.abstractproperty
+ @property
+ @abc.abstractmethod
def object_path(self):
"""Path of the object we validated"""
pass
@@ -260,7 +262,6 @@ class Validator(with_metaclass(abc.ABCMeta, object)):
class ModuleValidator(Validator):
REJECTLIST_PATTERNS = ('.git*', '*.pyc', '*.pyo', '.*', '*.md', '*.rst', '*.txt')
REJECTLIST_FILES = frozenset(('.git', '.gitignore', '.travis.yml',
- 'shippable.yml',
'.gitattributes', '.gitmodules', 'COPYING',
'__init__.py', 'VERSION', 'test-docs.sh'))
REJECTLIST = REJECTLIST_FILES.union(REJECTLIST['MODULE'])
@@ -545,7 +546,7 @@ class ModuleValidator(Validator):
**options['error']
)
- def _find_module_utils(self, main):
+ def _find_module_utils(self):
linenos = []
found_basic = False
for child in self.ast.body:
@@ -607,74 +608,6 @@ class ModuleValidator(Validator):
return min(linenos)
- def _find_main_call(self, look_for="main"):
- """ Ensure that the module ends with:
- if __name__ == '__main__':
- main()
- OR, in the case of modules that are in the docs-only deprecation phase
- if __name__ == '__main__':
- removed_module()
- """
- lineno = False
- if_bodies = []
- for child in self.ast.body:
- if isinstance(child, ast.If):
- try:
- if child.test.left.id == '__name__':
- if_bodies.extend(child.body)
- except AttributeError:
- pass
-
- bodies = self.ast.body
- bodies.extend(if_bodies)
-
- for child in bodies:
-
- # validate that the next to last line is 'if __name__ == "__main__"'
- if child.lineno == (self.length - 1):
-
- mainchecked = False
- try:
- if isinstance(child, ast.If) and \
- child.test.left.id == '__name__' and \
- len(child.test.ops) == 1 and \
- isinstance(child.test.ops[0], ast.Eq) and \
- child.test.comparators[0].s == '__main__':
- mainchecked = True
- except Exception:
- pass
-
- if not mainchecked:
- self.reporter.error(
- path=self.object_path,
- code='missing-if-name-main',
- msg='Next to last line should be: if __name__ == "__main__":',
- line=child.lineno
- )
-
- # validate that the final line is a call to main()
- if isinstance(child, ast.Expr):
- if isinstance(child.value, ast.Call):
- if (isinstance(child.value.func, ast.Name) and
- child.value.func.id == look_for):
- lineno = child.lineno
- if lineno < self.length - 1:
- self.reporter.error(
- path=self.object_path,
- code='last-line-main-call',
- msg=('Call to %s() not the last line' % look_for),
- line=lineno
- )
-
- if not lineno:
- self.reporter.error(
- path=self.object_path,
- code='missing-main-call',
- msg=('Did not find a call to %s()' % look_for)
- )
-
- return lineno or 0
-
def _find_has_import(self):
for child in self.ast.body:
found_try_except_import = False
@@ -1069,7 +1002,8 @@ class ModuleValidator(Validator):
else:
_doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'],
- self.name, 'EXAMPLES', load_all=True)
+ self.name, 'EXAMPLES', load_all=True,
+ ansible_loader=True)
for error in errors:
self.reporter.error(
path=self.object_path,
@@ -1122,7 +1056,7 @@ class ModuleValidator(Validator):
if not (filename_deprecated_or_removed or removed or deprecated or doc_deprecated):
mismatched_deprecation = False
else:
- if (filename_deprecated_or_removed and deprecated and doc_deprecated):
+ if (filename_deprecated_or_removed and doc_deprecated):
mismatched_deprecation = False
if (filename_deprecated_or_removed and removed and not (documentation_exists or examples_exist or returns_exist)):
mismatched_deprecation = False
@@ -1241,8 +1175,8 @@ class ModuleValidator(Validator):
)
return
- self._validate_docs_schema(kwargs, ansible_module_kwargs_schema(for_collection=bool(self.collection)),
- 'AnsibleModule', 'invalid-ansiblemodule-schema')
+ schema = ansible_module_kwargs_schema(self.object_name.split('.')[0], for_collection=bool(self.collection))
+ self._validate_docs_schema(kwargs, schema, 'AnsibleModule', 'invalid-ansiblemodule-schema')
self._validate_argument_spec(docs, spec, kwargs)
@@ -2208,10 +2142,26 @@ class ModuleValidator(Validator):
strict_ansible_version = self._create_strict_version(
'.'.join(ansible_version.split('.')[:2]), self.collection_name)
end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in
+
+ if end_of_deprecation_should_be_removed_only:
+ self.reporter.error(
+ path=self.object_path,
+ code='ansible-deprecated-module',
+ msg='Module is marked for removal in version %s of Ansible when the current version is %s' % (
+ version, ansible_version),
+ )
elif self.collection_version:
strict_ansible_version = self.collection_version
end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in
+ if end_of_deprecation_should_be_removed_only:
+ self.reporter.error(
+ path=self.object_path,
+ code='collection-deprecated-module',
+ msg='Module is marked for removal in version %s of this collection when the current version is %s' % (
+ version, self.collection_version_str),
+ )
+
# handle deprecation by date
if 'removed_at_date' in docs['deprecated']:
try:
@@ -2227,8 +2177,7 @@ class ModuleValidator(Validator):
self._validate_ansible_module_call(docs)
self._check_for_sys_exit()
self._find_rejectlist_imports()
- main = self._find_main_call()
- self._find_module_utils(main)
+ self._find_module_utils()
self._find_has_import()
first_callable = self._get_first_callable()
self._ensure_imports_below_docs(doc_info, first_callable)
@@ -2255,12 +2204,6 @@ class ModuleValidator(Validator):
self._check_type_instead_of_isinstance(
powershell=self._powershell_module()
)
- if end_of_deprecation_should_be_removed_only:
- # Ensure that `if __name__ == '__main__':` calls `removed_module()` which ensure that the module has no code in
- main = self._find_main_call('removed_module')
- # FIXME: Ensure that the version in the call to removed_module is less than +2.
- # Otherwise it's time to remove the file (This may need to be done in another test to
- # avoid breaking whenever the Ansible version bumps)
class PythonPackageValidator(Validator):
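
The new ansible-deprecated-module and collection-deprecated-module errors fire once the running release reaches a module's removed_in version, replacing the removed_module()/_find_main_call machinery deleted above. The comparison they perform, sketched with this commit's own version for concreteness (simplified: the real code goes through _create_strict_version):

    from ansible.module_utils.compat.version import StrictVersion

    ansible_version = '2.12.0'
    removed_in = StrictVersion('2.12')

    strict_ansible_version = StrictVersion('.'.join(ansible_version.split('.')[:2]))
    if strict_ansible_version >= removed_in:
        print('ansible-deprecated-module: removal in 2.12, current is 2.12.0')
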
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
index 8cd0e5e5..3846ee5d 100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
@@ -114,7 +114,7 @@ def get_ps_argument_spec(filename, collection):
ps_dep_finder.scan_module(b_module_data, fqn=fqc_name)
# For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util.
- ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False)
+ ps_dep_finder._add_module(name=b"Ansible.ModuleUtils.AddType", ext=".psm1", fqn=None, optional=False, wrapper=False)
util_manifest = json.dumps({
'module_path': to_text(module_path, errors='surrogiate_or_strict'),
@@ -123,7 +123,7 @@ def get_ps_argument_spec(filename, collection):
})
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
- proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ proc = subprocess.Popen(['pwsh', script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=False)
stdout, stderr = proc.communicate()
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
index 5ceb9d50..fb4a6174 100755..100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
#Requires -Version 6
Set-StrictMode -Version 2.0
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
index 359773e5..ed098cbc 100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
@@ -8,13 +8,14 @@ __metaclass__ = type
import re
-from distutils.version import StrictVersion
+from ansible.module_utils.compat.version import StrictVersion
from functools import partial
from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid
from ansible.module_utils.six import string_types
from ansible.module_utils.common.collections import is_iterable
from ansible.utils.version import SemanticVersion
+from ansible.release import __version__
from .utils import parse_isodate
@@ -59,6 +60,12 @@ def collection_name(v, error_code=None):
return v
+def deprecation_versions():
+ """Create a list of valid version for deprecation entries, current+4"""
+ major, minor = [int(version) for version in __version__.split('.')[0:2]]
+ return Any(*['{0}.{1}'.format(major, minor + increment) for increment in range(0, 5)])
+
+
def version(for_collection=False):
if for_collection:
# We do not accept floats for versions in collections
@@ -246,7 +253,7 @@ def argument_spec_schema(for_collection):
return Schema(schemas)
-def ansible_module_kwargs_schema(for_collection):
+def ansible_module_kwargs_schema(module_name, for_collection):
schema = {
'argument_spec': argument_spec_schema(for_collection),
'bypass_checks': bool,
@@ -255,6 +262,9 @@ def ansible_module_kwargs_schema(for_collection):
'add_file_common_args': bool,
'supports_check_mode': bool,
}
+ if module_name.endswith(('_info', '_facts')):
+ del schema['supports_check_mode']
+ schema[Required('supports_check_mode')] = True
schema.update(argument_spec_modifiers)
return Schema(schema)
@@ -446,12 +456,7 @@ def deprecation_schema(for_collection):
}
else:
version_schema = {
- # Only list branches that are deprecated or may have docs stubs in
- # Deprecation cycle changed at 2.4 (though not retroactively)
- # 2.3 -> removed_in: "2.5" + n for docs stub
- # 2.4 -> removed_in: "2.8" + n for docs stub
- Required('removed_in'): Any(
- "2.2", "2.3", "2.4", "2.5", "2.6", "2.8", "2.9", "2.10", "2.11", "2.12", "2.13", "2.14"),
+ Required('removed_in'): deprecation_versions(),
}
version_schema.update(main_fields)
@@ -505,7 +510,6 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False):
'options': Any(None, *list_dict_option_schema(for_collection)),
'extends_documentation_fragment': Any(list_string_types, *string_types),
'version_added_collection': collection_name,
- 'attributes': object,
}
if for_collection:
@@ -520,6 +524,45 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False):
}
doc_schema_dict.update(deprecation_required_scheme)
+
+ def add_default_attributes(more=None):
+ schema = {
+ 'description': Any(list_string_types, *string_types),
+ 'details': Any(list_string_types, *string_types),
+ 'support': any_string_types,
+ 'version_added_collection': any_string_types,
+ 'version_added': any_string_types,
+ }
+ if more:
+ schema.update(more)
+ return schema
+
+ doc_schema_dict['attributes'] = Schema(
+ All(
+ Schema({
+ any_string_types: {
+ Required('description'): Any(list_string_types, *string_types),
+ Required('support'): Any('full', 'partial', 'none', 'N/A'),
+ 'details': Any(list_string_types, *string_types),
+ 'version_added_collection': collection_name,
+ 'version_added': version(for_collection=for_collection),
+ },
+ }, extra=ALLOW_EXTRA),
+ partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False),
+ Schema({
+ any_string_types: add_default_attributes(),
+ 'action_group': add_default_attributes({
+ Required('membership'): list_string_types,
+ }),
+ 'forced_action_plugin': add_default_attributes({
+ Required('action_plugin'): any_string_types,
+ }),
+ 'platform': add_default_attributes({
+ Required('platforms'): Any(list_string_types, *string_types)
+ }),
+ }, extra=PREVENT_EXTRA),
+ )
+ )
return Schema(
All(
Schema(
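
For the deprecation_versions() helper added above, the accepted removed_in values are the current minor release plus the next four. Worked through with ansible.release.__version__ hard-coded to this commit's value for illustration:

    __version__ = '2.12.0'

    major, minor = [int(part) for part in __version__.split('.')[0:2]]
    valid = ['{0}.{1}'.format(major, minor + increment) for increment in range(0, 5)]
    print(valid)  # ['2.12', '2.13', '2.14', '2.15', '2.16']
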
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
index 939ae651..ac46f666 100644
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
@@ -31,7 +31,9 @@ import yaml.reader
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.common.yaml import SafeLoader
from ansible.module_utils.six import string_types
+from ansible.parsing.yaml.loader import AnsibleLoader
class AnsibleTextIOWrapper(TextIOWrapper):
@@ -133,18 +135,23 @@ def get_module_name_from_filename(filename, collection):
return name
-def parse_yaml(value, lineno, module, name, load_all=False):
+def parse_yaml(value, lineno, module, name, load_all=False, ansible_loader=False):
traces = []
errors = []
data = None
if load_all:
- loader = yaml.safe_load_all
+ yaml_load = yaml.load_all
else:
- loader = yaml.safe_load
+ yaml_load = yaml.load
+
+ if ansible_loader:
+ loader = AnsibleLoader
+ else:
+ loader = SafeLoader
try:
- data = loader(value)
+ data = yaml_load(value, Loader=loader)
if load_all:
data = list(data)
except yaml.MarkedYAMLError as e:
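
The rewrite above keeps the default behaviour identical while making the loader injectable: yaml.load(value, Loader=SafeLoader) is exactly yaml.safe_load(value), and passing AnsibleLoader instead lets EXAMPLES blocks that use Ansible-specific tags (such as !vault) parse. Demonstrated with stock PyYAML (the diff's SafeLoader comes from ansible.module_utils.common.yaml, which prefers the libyaml variant):

    import yaml
    from yaml import SafeLoader

    document = 'versions:\n  - "2.12"\n  - "2.13"\n'
    assert yaml.load(document, Loader=SafeLoader) == yaml.safe_load(document)
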
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml
index 45d8b7ad..45d8b7ad 100644
--- a/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml
index da7e6049..da7e6049 100644
--- a/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml
index 6d418137..6d418137 100644
--- a/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml
diff --git a/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
index b9fc73e5..42822111 100644
--- a/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -8,6 +7,7 @@ import json
import os
import re
import sys
+import typing as t
import yaml
from yaml.resolver import Resolver
@@ -29,9 +29,9 @@ def main():
class TestConstructor(SafeConstructor):
- """Yaml Safe Constructor that knows about Ansible tags"""
-
+ """Yaml Safe Constructor that knows about Ansible tags."""
def construct_yaml_unsafe(self, node):
+ """Construct an unsafe tag."""
try:
constructor = getattr(node, 'id', 'object')
if constructor is not None:
@@ -60,6 +60,7 @@ TestConstructor.add_constructor(
class TestLoader(CParser, TestConstructor, Resolver):
+ """Custom YAML loader that recognizes custom Ansible tags."""
def __init__(self, stream):
CParser.__init__(self, stream)
TestConstructor.__init__(self)
@@ -79,10 +80,8 @@ class YamlChecker:
print(json.dumps(report, indent=4, sort_keys=True))
- def check(self, paths):
- """
- :type paths: t.List[str]
- """
+ def check(self, paths): # type: (t.List[str]) -> None
+ """Check the specified paths."""
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')
yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml'))
@@ -92,8 +91,8 @@ class YamlChecker:
for path in paths:
extension = os.path.splitext(path)[1]
- with open(path) as f:
- contents = f.read()
+ with open(path) as file:
+ contents = file.read()
if extension in ('.yml', '.yaml'):
self.check_yaml(yaml_conf, path, contents)
@@ -107,21 +106,13 @@ class YamlChecker:
else:
raise Exception('unsupported extension: %s' % extension)
- def check_yaml(self, conf, path, contents):
- """
- :type conf: YamlLintConfig
- :type path: str
- :type contents: str
- """
+ def check_yaml(self, conf, path, contents): # type: (YamlLintConfig, str, str) -> None
+ """Check the given YAML."""
self.check_parsable(path, contents)
self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)]
- def check_module(self, conf, path, contents):
- """
- :type conf: YamlLintConfig
- :type path: str
- :type contents: str
- """
+ def check_module(self, conf, path, contents): # type: (YamlLintConfig, str, str) -> None
+ """Check the given module."""
docs = self.get_module_docs(path, contents)
for key, value in docs.items():
@@ -142,32 +133,22 @@ class YamlChecker:
self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]
- def check_parsable(self, path, contents, lineno=1):
- """
- :type path: str
- :type contents: str
- :type lineno: int
- """
+ def check_parsable(self, path, contents, lineno=1): # type: (str, str, int) -> None
+ """Check the given contents to verify they can be parsed as YAML."""
try:
yaml.load(contents, Loader=TestLoader)
- except MarkedYAMLError as e:
+ except MarkedYAMLError as ex:
self.messages += [{'code': 'unparsable-with-libyaml',
- 'message': '%s - %s' % (e.args[0], e.args[2]),
+ 'message': '%s - %s' % (ex.args[0], ex.args[2]),
'path': path,
- 'line': e.problem_mark.line + lineno,
- 'column': e.problem_mark.column + 1,
+ 'line': ex.problem_mark.line + lineno,
+ 'column': ex.problem_mark.column + 1,
'level': 'error',
}]
@staticmethod
- def result_to_message(result, path, line_offset=0, prefix=''):
- """
- :type result: any
- :type path: str
- :type line_offset: int
- :type prefix: str
- :rtype: dict[str, any]
- """
+ def result_to_message(result, path, line_offset=0, prefix=''): # type: (t.Any, str, int, str) -> t.Dict[str, t.Any]
+ """Convert the given result to a dictionary and return it."""
if prefix:
prefix = '%s: ' % prefix
@@ -180,12 +161,8 @@ class YamlChecker:
level=result.level,
)
- def get_module_docs(self, path, contents):
- """
- :type path: str
- :type contents: str
- :rtype: dict[str, any]
- """
+ def get_module_docs(self, path, contents): # type: (str, str) -> t.Dict[str, t.Any]
+ """Return the module documentation for the given module contents."""
module_doc_types = [
'DOCUMENTATION',
'EXAMPLES',
@@ -240,12 +217,8 @@ class YamlChecker:
return docs
- def parse_module(self, path, contents):
- """
- :type path: str
- :type contents: str
- :rtype: ast.Module | None
- """
+ def parse_module(self, path, contents): # type: (str, str) -> t.Optional[ast.Module]
+ """Parse the given contents and return a module if successful, otherwise return None."""
try:
return ast.parse(contents)
except SyntaxError as ex:
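
get_module_docs and parse_module, typed above, locate DOCUMENTATION/EXAMPLES/RETURN assignments by walking the module's AST. A simplified sketch of that lookup (illustrative, not the checker's full logic):

    import ast

    source = 'DOCUMENTATION = """\nmodule: ping\n"""\n'
    module = ast.parse(source)
    for node in module.body:
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == 'DOCUMENTATION':
                    print('DOCUMENTATION assigned at line', node.lineno)
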
diff --git a/test/lib/ansible_test/_data/collection_detail.py b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
index e7c883ca..e7c883ca 100644
--- a/test/lib/ansible_test/_data/collection_detail.py
+++ b/test/lib/ansible_test/_util/controller/tools/collection_detail.py
diff --git a/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1 b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
new file mode 100644
index 00000000..83c27ff7
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
@@ -0,0 +1,38 @@
+<#
+.SYNOPSIS
+Gets the lines to hit from a sourcefile for coverage stubs.
+#>
+[CmdletBinding()]
+param (
+ [Parameter(Mandatory, ValueFromRemainingArguments)]
+ [String[]]
+ $Path
+)
+
+$stubInfo = @(foreach ($sourcePath in $Path) {
+ # Default is to just no lines for missing files
+ [Collections.Generic.HashSet[int]]$lines = @()
+
+ if (Test-Path -LiteralPath $sourcePath) {
+ $code = [ScriptBlock]::Create([IO.File]::ReadAllText($sourcePath))
+
+ # We set our breakpoints with this predicate so our stubs should match
+ # that logic.
+ $predicate = {
+ $args[0] -is [System.Management.Automation.Language.CommandBaseAst]
+ }
+ $cmds = $code.Ast.FindAll($predicate, $true)
+
+    # We only care about unique lines, not multiple commands on one line.
+ $lines = @(foreach ($cmd in $cmds) {
+ $cmd.Extent.StartLineNumber
+ })
+ }
+
+ [PSCustomObject]@{
+ Path = $sourcePath
+ Lines = $lines
+ }
+})
+
+ConvertTo-Json -InputObject $stubInfo -Depth 2 -Compress
diff --git a/test/lib/ansible_test/_data/sslcheck.py b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
index 37b82279..115c5ed2 100755..100644
--- a/test/lib/ansible_test/_data/sslcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Show openssl version."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/virtualenvcheck.py b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
index 0c8f7680..90dfa394 100755..100644
--- a/test/lib/ansible_test/_data/virtualenvcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Detect the real python interpreter when running in a virtual environment created by the 'virtualenv' module."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_data/yamlcheck.py b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
index 591842f4..dfd08e58 100755..100644
--- a/test/lib/ansible_test/_data/yamlcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-"""Show python and pip versions."""
+"""Show availability of PyYAML and libyaml support."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/__init__.py b/test/lib/ansible_test/_util/target/__init__.py
new file mode 100644
index 00000000..d6fc0a86
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
new file mode 100755
index 00000000..dc31095a
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# PYTHON_ARGCOMPLETE_OK
+"""Command line entry point for ansible-test."""
+
+# NOTE: This file resides in the _util/target directory to ensure compatibility with all supported Python versions.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import sys
+
+
+def main():
+ """Main program entry point."""
+ ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ source_root = os.path.join(ansible_root, 'test', 'lib')
+
+ if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', '__init__.py')):
+ # running from source, use that version of ansible-test instead of any version that may already be installed
+ sys.path.insert(0, source_root)
+
+ # noinspection PyProtectedMember
+ from ansible_test._util.target.common.constants import CONTROLLER_PYTHON_VERSIONS
+
+ if version_to_str(sys.version_info[:2]) not in CONTROLLER_PYTHON_VERSIONS:
+ raise SystemExit('This version of ansible-test cannot be executed with Python version %s. Supported Python versions are: %s' % (
+ version_to_str(sys.version_info[:3]), ', '.join(CONTROLLER_PYTHON_VERSIONS)))
+
+ # noinspection PyProtectedMember
+ from ansible_test._internal import main as cli_main
+
+ cli_main()
+
+
+def version_to_str(version):
+ """Return a version string from a version tuple."""
+ return '.'.join(str(n) for n in version)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/lib/ansible_test/_util/target/common/__init__.py b/test/lib/ansible_test/_util/target/common/__init__.py
new file mode 100644
index 00000000..d6fc0a86
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/common/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py
new file mode 100644
index 00000000..3c02eb2e
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/common/constants.py
@@ -0,0 +1,62 @@
+"""Constants used by ansible-test. Imports should not be used in this file."""
+
+# NOTE: This file resides in the _util/target directory to ensure compatibility with all supported Python versions.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
+# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
+SOFT_RLIMIT_NOFILE = 1024
+
+# File used to track the ansible-test test execution timeout.
+TIMEOUT_PATH = '.ansible-test-timeout.json'
+
+REMOTE_ONLY_PYTHON_VERSIONS = (
+ '2.6',
+ '2.7',
+ '3.5',
+ '3.6',
+ '3.7',
+)
+
+CONTROLLER_PYTHON_VERSIONS = (
+ '3.8',
+ '3.9',
+ '3.10',
+)
+
+CONTROLLER_MIN_PYTHON_VERSION = CONTROLLER_PYTHON_VERSIONS[0]
+SUPPORTED_PYTHON_VERSIONS = REMOTE_ONLY_PYTHON_VERSIONS + CONTROLLER_PYTHON_VERSIONS
+
+COVERAGE_REQUIRED_VERSION = '4.5.4'
+
+REMOTE_PROVIDERS = [
+ 'default',
+ 'aws',
+ 'azure',
+ 'ibmps',
+ 'parallels',
+]
+
+SECCOMP_CHOICES = [
+ 'default',
+ 'unconfined',
+]
+
+# This bin symlink map must exactly match the contents of the bin directory.
+# It is necessary for payload creation to reconstruct the bin directory when running ansible-test from an installed version of ansible.
+# It is also used to construct the injector directory at runtime.
+ANSIBLE_BIN_SYMLINK_MAP = {
+ 'ansible': '../lib/ansible/cli/scripts/ansible_cli_stub.py',
+ 'ansible-config': 'ansible',
+ 'ansible-connection': '../lib/ansible/cli/scripts/ansible_connection_cli_stub.py',
+ 'ansible-console': 'ansible',
+ 'ansible-doc': 'ansible',
+ 'ansible-galaxy': 'ansible',
+ 'ansible-inventory': 'ansible',
+ 'ansible-playbook': 'ansible',
+ 'ansible-pull': 'ansible',
+ 'ansible-test': '../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py',
+ 'ansible-vault': 'ansible',
+}
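
ANSIBLE_BIN_SYMLINK_MAP mixes two target styles: relative script paths for the real entry points, and the bare name 'ansible' for links that point at it. A hedged sketch of how a payload-creation step could rebuild a bin directory from the map (recreate_bin_dir and bin_dir are hypothetical, not an ansible-test API):

    import os

    def recreate_bin_dir(bin_dir, symlink_map):
        """Recreate bin_dir so its contents exactly match the symlink map."""
        os.makedirs(bin_dir)  # assumes bin_dir does not exist yet
        for name, target in symlink_map.items():
            # targets are preserved verbatim, whether '../lib/...' or just 'ansible'
            os.symlink(target, os.path.join(bin_dir, name))
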
diff --git a/test/lib/ansible_test/_data/injector/python.py b/test/lib/ansible_test/_util/target/injector/python.py
index 290b995c..67516396 100755..100644
--- a/test/lib/ansible_test/_data/injector/python.py
+++ b/test/lib/ansible_test/_util/target/injector/python.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+# auto-shebang
"""Provides an entry point for python scripts and python modules on the controller with the current python interpreter and optional code coverage collection."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -27,7 +27,7 @@ def main():
found = bool(importlib.util.find_spec('coverage'))
else:
# noinspection PyDeprecation
- import imp
+ import imp # pylint: disable=deprecated-module
try:
# noinspection PyDeprecation
@@ -46,21 +46,24 @@ def main():
sys.exit('ERROR: Use `python -c` instead of `python.py -c` to avoid errors when code coverage is collected.')
elif name == 'pytest':
args += ['-m', 'pytest']
+ elif name == 'importer.py':
+ args += [find_program(name, False)]
else:
- args += [find_executable(name)]
+ args += [find_program(name, True)]
args += sys.argv[1:]
os.execv(args[0], args)
-def find_executable(name):
+def find_program(name, executable): # type: (str, bool) -> str
"""
- :type name: str
- :rtype: str
+ Find and return the full path to the named program, optionally requiring it to be executable.
+ Raises an exception if the program is not found.
"""
path = os.environ.get('PATH', os.path.defpath)
seen = set([os.path.abspath(__file__)])
+ mode = os.F_OK | os.X_OK if executable else os.F_OK
for base in path.split(os.path.pathsep):
candidate = os.path.abspath(os.path.join(base, name))
@@ -70,7 +73,7 @@ def find_executable(name):
seen.add(candidate)
- if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
+ if os.path.exists(candidate) and os.access(candidate, mode):
return candidate
raise Exception('Executable "%s" not found in path: %s' % (name, path))
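
Since the hunks above show find_program only in fragments, here is the patched helper assembled into one piece (a sketch: the unchanged context lines between hunks are inferred, not shown in the diff):

    import os

    def find_program(name, executable):  # type: (str, bool) -> str
        """Find the named program on PATH, optionally requiring the execute bit."""
        path = os.environ.get('PATH', os.path.defpath)
        seen = set([os.path.abspath(__file__)])
        mode = os.F_OK | os.X_OK if executable else os.F_OK
        for base in path.split(os.path.pathsep):
            candidate = os.path.abspath(os.path.join(base, name))
            if candidate in seen:  # inferred context: skip paths already visited
                continue
            seen.add(candidate)
            if os.path.exists(candidate) and os.access(candidate, mode):
                return candidate
        raise Exception('Executable "%s" not found in path: %s' % (name, path))
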
diff --git a/test/lib/ansible_test/_data/injector/virtualenv.sh b/test/lib/ansible_test/_util/target/injector/virtualenv.sh
index cb19a7ce..5dcbe0e0 100644
--- a/test/lib/ansible_test/_data/injector/virtualenv.sh
+++ b/test/lib/ansible_test/_util/target/injector/virtualenv.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+# shellcheck shell=bash
# Create and activate a fresh virtual environment with `source virtualenv.sh`.
rm -rf "${OUTPUT_DIR}/venv"
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py
new file mode 100644
index 00000000..21c49c47
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py
@@ -0,0 +1,31 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: There are two implementations of the collection loader.
+# They must be kept functionally identical, although their implementations may differ.
+#
+# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
+# It must function on all Python versions supported on the controller.
+# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
+# It must function on all Python versions supported on managed hosts which are not supported by the controller.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
+from ._collection_config import AnsibleCollectionConfig
+from ._collection_finder import AnsibleCollectionRef
+from ansible.module_utils.common.text.converters import to_text
+
+
+def resource_from_fqcr(ref):
+ """
+ Return the resource from a fully-qualified collection reference,
+ or from a simple resource name.
+ For fully-qualified collection references, this is equivalent to
+ ``AnsibleCollectionRef.from_fqcr(ref).resource``.
+ :param ref: collection reference to parse
+ :return: the resource as a unicode string
+ """
+ ref = to_text(ref, errors='strict')
+ return ref.split(u'.')[-1]
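
resource_from_fqcr does no validation; it simply takes the last dot-separated segment, so both input forms named in the docstring behave the same way (values illustrative):

    resource_from_fqcr(u'ns.coll.my_module')  # -> u'my_module'
    resource_from_fqcr(u'my_module')          # -> u'my_module'
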
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py
new file mode 100644
index 00000000..a2031931
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py
@@ -0,0 +1,107 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: There are two implementations of the collection loader.
+# They must be kept functionally identical, although their implementations may differ.
+#
+# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
+# It must function on all Python versions supported on the controller.
+# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
+# It must function on all Python versions supported on managed hosts which are not supported by the controller.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils.six import with_metaclass
+
+
+class _EventSource:
+ def __init__(self):
+ self._handlers = set()
+
+ def __iadd__(self, handler):
+ if not callable(handler):
+ raise ValueError('handler must be callable')
+ self._handlers.add(handler)
+ return self
+
+ def __isub__(self, handler):
+ try:
+ self._handlers.remove(handler)
+ except KeyError:
+ pass
+
+ return self
+
+ def _on_exception(self, handler, exc, *args, **kwargs):
+ # if we return True, we want the caller to re-raise
+ return True
+
+ def fire(self, *args, **kwargs):
+ for h in self._handlers:
+ try:
+ h(*args, **kwargs)
+ except Exception as ex:
+ if self._on_exception(h, ex, *args, **kwargs):
+ raise
+
+
+class _AnsibleCollectionConfig(type):
+ def __init__(cls, meta, name, bases):
+ cls._collection_finder = None
+ cls._default_collection = None
+ cls._on_collection_load = _EventSource()
+
+ @property
+ def collection_finder(cls):
+ return cls._collection_finder
+
+ @collection_finder.setter
+ def collection_finder(cls, value):
+ if cls._collection_finder:
+ raise ValueError('an AnsibleCollectionFinder has already been configured')
+
+ cls._collection_finder = value
+
+ @property
+ def collection_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_collection_paths]
+
+ @property
+ def default_collection(cls):
+ return cls._default_collection
+
+ @default_collection.setter
+ def default_collection(cls, value):
+
+ cls._default_collection = value
+
+ @property
+ def on_collection_load(cls):
+ return cls._on_collection_load
+
+ @on_collection_load.setter
+ def on_collection_load(cls, value):
+ if value is not cls._on_collection_load:
+ raise ValueError('on_collection_load is not directly settable (use +=)')
+
+ @property
+ def playbook_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_playbook_paths]
+
+ @playbook_paths.setter
+ def playbook_paths(cls, value):
+ cls._require_finder()
+ cls._collection_finder.set_playbook_paths(value)
+
+ def _require_finder(cls):
+ if not cls._collection_finder:
+ raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process')
+
+
+# concrete class of our metaclass type that defines the class properties we want
+class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)):
+ pass
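
The with_metaclass trick above is what makes properties like default_collection settable on the class itself rather than on instances. A minimal illustration of the same pattern, using the standard type.__init__ parameter names (Foo and value are placeholders, not part of the loader):

    from ansible.module_utils.six import with_metaclass

    class _Meta(type):
        def __init__(cls, name, bases, ns):
            cls._value = None

        @property
        def value(cls):
            return cls._value

        @value.setter
        def value(cls, v):
            cls._value = v

    class Foo(with_metaclass(_Meta)):
        pass

    Foo.value = 42    # routed through the metaclass setter
    print(Foo.value)  # 42; on Python 3 alone, `class Foo(metaclass=_Meta)` is equivalent
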
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py
new file mode 100644
index 00000000..8b4b1b98
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py
@@ -0,0 +1,1067 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: There are two implementations of the collection loader.
+# They must be kept functionally identical, although their implementations may differ.
+#
+# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
+# It must function on all Python versions supported on the controller.
+# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
+# It must function on all Python versions supported on managed hosts which are not supported by the controller.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pkgutil
+import re
+import sys
+from keyword import iskeyword
+from tokenize import Name as _VALID_IDENTIFIER_REGEX
+
+
+# DO NOT add new non-stdlib import deps here; this loader is used by external tools (eg ansible-test import sanity)
+# that only allow stdlib and module_utils
+from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
+from ansible.module_utils.six import string_types, PY3
+from ._collection_config import AnsibleCollectionConfig
+
+from contextlib import contextmanager
+from types import ModuleType
+
+try:
+ from importlib import import_module
+except ImportError:
+ def import_module(name):
+ __import__(name)
+ return sys.modules[name]
+
+try:
+ from importlib import reload as reload_module
+except ImportError:
+ # 2.7 has a global reload function instead...
+ reload_module = reload # pylint:disable=undefined-variable
+
+# NB: this supports import sanity test providing a different impl
+try:
+ from ._collection_meta import _meta_yml_to_dict
+except ImportError:
+ _meta_yml_to_dict = None
+
+
+if not hasattr(__builtins__, 'ModuleNotFoundError'):
+ # this was introduced in Python 3.6
+ ModuleNotFoundError = ImportError
+
+
+_VALID_IDENTIFIER_STRING_REGEX = re.compile(
+ ''.join((_VALID_IDENTIFIER_REGEX, r'\Z')),
+)
+
+
+try: # NOTE: py3/py2 compat
+ # py2 mypy can't deal with try/excepts
+ is_python_identifier = str.isidentifier # type: ignore[attr-defined]
+except AttributeError: # Python 2
+ def is_python_identifier(tested_str): # type: (str) -> bool
+ """Determine whether the given string is a Python identifier."""
+ # Ref: https://stackoverflow.com/a/55802320/595220
+ return bool(re.match(_VALID_IDENTIFIER_STRING_REGEX, tested_str))
+
+
+PB_EXTENSIONS = ('.yml', '.yaml')
+
+
+class _AnsibleCollectionFinder:
+ def __init__(self, paths=None, scan_sys_paths=True):
+ # TODO: accept metadata loader override
+ self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__)))
+
+ if isinstance(paths, string_types):
+ paths = [paths]
+ elif paths is None:
+ paths = []
+
+ # expand any placeholders in configured paths
+ paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths]
+
+ # add syspaths if needed
+ if scan_sys_paths:
+ paths.extend(sys.path)
+
+ good_paths = []
+ # normalize each path and keep only those that contain an ansible_collections directory
+ for p in paths:
+
+ # ensure we always have ansible_collections
+ if os.path.basename(p) == 'ansible_collections':
+ p = os.path.dirname(p)
+
+ if p not in good_paths and os.path.isdir(to_bytes(os.path.join(p, 'ansible_collections'), errors='surrogate_or_strict')):
+ good_paths.append(p)
+
+ self._n_configured_paths = good_paths
+ self._n_cached_collection_paths = None
+ self._n_cached_collection_qualified_paths = None
+
+ self._n_playbook_paths = []
+
+ @classmethod
+ def _remove(cls):
+ for mps in sys.meta_path:
+ if isinstance(mps, _AnsibleCollectionFinder):
+ sys.meta_path.remove(mps)
+
+ # remove any path hooks that look like ours
+ for ph in sys.path_hooks:
+ if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder):
+ sys.path_hooks.remove(ph)
+
+ # zap any path importer cache entries that might refer to us
+ sys.path_importer_cache.clear()
+
+ AnsibleCollectionConfig._collection_finder = None
+
+ # validate via the public property that we really killed it
+ if AnsibleCollectionConfig.collection_finder is not None:
+ raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder')
+
+ def _install(self):
+ self._remove()
+ sys.meta_path.insert(0, self)
+
+ sys.path_hooks.insert(0, self._ansible_collection_path_hook)
+
+ AnsibleCollectionConfig.collection_finder = self
+
+ def _ansible_collection_path_hook(self, path):
+ path = to_native(path)
+ interesting_paths = self._n_cached_collection_qualified_paths
+ if not interesting_paths:
+ interesting_paths = []
+ for p in self._n_collection_paths:
+ if os.path.basename(p) != 'ansible_collections':
+ p = os.path.join(p, 'ansible_collections')
+
+ if p not in interesting_paths:
+ interesting_paths.append(p)
+
+ interesting_paths.insert(0, self._ansible_pkg_path)
+ self._n_cached_collection_qualified_paths = interesting_paths
+
+ if any(path.startswith(p) for p in interesting_paths):
+ return _AnsiblePathHookFinder(self, path)
+
+ raise ImportError('not interested')
+
+ @property
+ def _n_collection_paths(self):
+ paths = self._n_cached_collection_paths
+ if not paths:
+ self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths
+ return paths
+
+ def set_playbook_paths(self, playbook_paths):
+ if isinstance(playbook_paths, string_types):
+ playbook_paths = [playbook_paths]
+
+ # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
+ added_paths = set()
+
+ # de-dupe
+ self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))]
+ self._n_cached_collection_paths = None
+ # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up.
+ # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init
+ # to prevent this from occurring
+ for pkg in ['ansible_collections', 'ansible_collections.ansible']:
+ self._reload_hack(pkg)
+
+ def _reload_hack(self, fullname):
+ m = sys.modules.get(fullname)
+ if not m:
+ return
+ reload_module(m)
+
+ def find_module(self, fullname, path=None):
+ # Figure out what's being asked for, and delegate to a special-purpose loader
+
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+ module_to_find = split_name[-1]
+ part_count = len(split_name)
+
+ if toplevel_pkg not in ['ansible', 'ansible_collections']:
+ # not interested in anything other than ansible_collections (and limited cases under ansible)
+ return None
+
+ # sanity check what we're getting from import, canonicalize path values
+ if part_count == 1:
+ if path:
+ raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname))
+ else:
+ # seed the path to the configured collection roots
+ path = self._n_collection_paths
+
+ if part_count > 1 and path is None:
+ raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname))
+
+ # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found
+ try:
+ if toplevel_pkg == 'ansible':
+ # something under the ansible package, delegate to our internal loader in case of redirections
+ return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path)
+ if part_count == 1:
+ return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path)
+ if part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens
+ return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path)
+ elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll
+ return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path)
+ # anything below the collection
+ return _AnsibleCollectionLoader(fullname=fullname, path_list=path)
+ except ImportError:
+ # TODO: log attempt to load context
+ return None
+
+
+# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually
+# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except
+# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet.
+class _AnsiblePathHookFinder:
+ def __init__(self, collection_finder, pathctx):
+ # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context
+ self._pathctx = to_native(pathctx)
+ self._collection_finder = collection_finder
+ if PY3:
+ # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests)
+ self._file_finder = None
+
+ # class init is fun: this method has a self arg that won't get used
+ def _get_filefinder_path_hook(self=None):
+ _file_finder_hook = None
+ if PY3:
+ # try to find the FileFinder hook to call for fallback path-based imports in Py3
+ _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
+ if len(_file_finder_hook) != 1:
+ raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook)))
+ _file_finder_hook = _file_finder_hook[0]
+
+ return _file_finder_hook
+
+ _filefinder_path_hook = _get_filefinder_path_hook()
+
+ def find_module(self, fullname, path=None):
+ # we ignore the passed in path here- use what we got from the path hook init
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+
+ if toplevel_pkg == 'ansible_collections':
+ # collections content? delegate to the collection finder
+ return self._collection_finder.find_module(fullname, path=[self._pathctx])
+ else:
+ # Something else; we'd normally restrict this to `ansible` descendant modules so that any weird loader
+ # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
+ # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
+ # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
+ # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
+ # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
+ # built-in FS caching and byte-compilation for most things.
+ if PY3:
+ # create or consult our cached file finder for this path
+ if not self._file_finder:
+ try:
+ self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)
+ except ImportError:
+ # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but
+ # might not be in some other situation...
+ return None
+
+ spec = self._file_finder.find_spec(fullname)
+ if not spec:
+ return None
+ return spec.loader
+ else:
+ # call py2's internal loader
+ # noinspection PyDeprecation
+ return pkgutil.ImpImporter(self._pathctx).find_module(fullname) # pylint: disable=deprecated-class
+
+ def iter_modules(self, prefix):
+ # NB: this currently represents only what's on disk, and does not handle package redirection
+ return _iter_modules_impl([self._pathctx], prefix)
+
+ def __repr__(self):
+ return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx)
+
+
+class _AnsibleCollectionPkgLoaderBase:
+ _allows_package_code = False
+
+ def __init__(self, fullname, path_list=None):
+ self._fullname = fullname
+ self._redirect_module = None
+ self._split_name = fullname.split('.')
+ self._rpart_name = fullname.rpartition('.')
+ self._parent_package_name = self._rpart_name[0] # eg ansible_collections for ansible_collections.somens, '' for toplevel
+ self._package_to_load = self._rpart_name[2] # eg somens for ansible_collections.somens
+
+ self._source_code_path = None
+ self._decoded_source = None
+ self._compiled_code = None
+
+ self._validate_args()
+
+ self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list])
+ self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths)
+
+ self._validate_final()
+
+ # allow subclasses to validate args and sniff split values before we start digging around
+ def _validate_args(self):
+ if self._split_name[0] != 'ansible_collections':
+ raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname))
+
+ # allow subclasses to customize candidate path filtering
+ def _get_candidate_paths(self, path_list):
+ return [os.path.join(p, self._package_to_load) for p in path_list]
+
+ # allow subclasses to customize finding paths
+ def _get_subpackage_search_paths(self, candidate_paths):
+ # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
+ return [p for p in candidate_paths if os.path.isdir(to_bytes(p))]
+
+ # allow subclasses to customize state validation/manipulation before we return the loader instance
+ def _validate_final(self):
+ return
+
+ @staticmethod
+ @contextmanager
+ def _new_or_existing_module(name, **kwargs):
+ # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior
+ created_module = False
+ module = sys.modules.get(name)
+ try:
+ if not module:
+ module = ModuleType(name)
+ created_module = True
+ sys.modules[name] = module
+ # always override the values passed, except name (allow reference aliasing)
+ for attr, value in kwargs.items():
+ setattr(module, attr, value)
+ yield module
+ except Exception:
+ if created_module:
+ if sys.modules.get(name):
+ sys.modules.pop(name)
+ raise
+
+ # basic module/package location support
+ # NB: this does not support distributed packages!
+ @staticmethod
+ def _module_file_from_path(leaf_name, path):
+ has_code = True
+ package_path = os.path.join(to_native(path), to_native(leaf_name))
+ module_path = None
+
+ # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
+ if os.path.isdir(to_bytes(package_path)):
+ # is there a package init?
+ module_path = os.path.join(package_path, '__init__.py')
+ if not os.path.isfile(to_bytes(module_path)):
+ module_path = os.path.join(package_path, '__synthetic__')
+ has_code = False
+ else:
+ module_path = package_path + '.py'
+ package_path = None
+ if not os.path.isfile(to_bytes(module_path)):
+ raise ImportError('{0} not found at {1}'.format(leaf_name, path))
+
+ return module_path, has_code, package_path
+
+ def load_module(self, fullname):
+ # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
+ if self._redirect_module:
+ sys.modules[self._fullname] = self._redirect_module
+ return self._redirect_module
+
+ # we're actually loading a module/package
+ module_attrs = dict(
+ __loader__=self,
+ __file__=self.get_filename(fullname),
+ __package__=self._parent_package_name # sane default for non-packages
+ )
+
+ # eg, I am a package
+ if self._subpackage_search_paths is not None: # empty is legal
+ module_attrs['__path__'] = self._subpackage_search_paths
+ module_attrs['__package__'] = fullname # per PEP366
+
+ with self._new_or_existing_module(fullname, **module_attrs) as module:
+ # execute the module's code in its namespace
+ code_obj = self.get_code(fullname)
+ if code_obj is not None: # things like NS packages that can't have code on disk will return None
+ exec(code_obj, module.__dict__)
+
+ return module
+
+ def is_package(self, fullname):
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname))
+ return self._subpackage_search_paths is not None
+
+ def get_source(self, fullname):
+ if self._decoded_source:
+ return self._decoded_source
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname))
+ if not self._source_code_path:
+ return None
+ # FIXME: what do we want encoding/newline requirements to be?
+ self._decoded_source = self.get_data(self._source_code_path)
+ return self._decoded_source
+
+ def get_data(self, path):
+ if not path:
+ raise ValueError('a path must be specified')
+
+ # TODO: ensure we're being asked for a path below something we own
+ # TODO: try to handle redirects internally?
+
+ if not path[0] == '/':
+ # relative to current package, search package paths if possible (this may not be necessary)
+ # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
+ raise ValueError('relative resource paths not supported')
+ else:
+ candidate_paths = [path]
+
+ for p in candidate_paths:
+ b_path = to_bytes(p)
+ if os.path.isfile(b_path):
+ with open(b_path, 'rb') as fd:
+ return fd.read()
+ # HACK: if the caller asks for __init__.py and the parent dir exists, return an empty string (this keeps consistency
+ # with "collection subpackages don't require __init__.py" working everywhere with get_data)
+ elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)):
+ return ''
+
+ return None
+
+ def _synthetic_filename(self, fullname):
+ return '<ansible_synthetic_collection_package>'
+
+ def get_filename(self, fullname):
+ if fullname != self._fullname:
+ raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname))
+
+ filename = self._source_code_path
+
+ if not filename and self.is_package(fullname):
+ if len(self._subpackage_search_paths) == 1:
+ filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__')
+ else:
+ filename = self._synthetic_filename(fullname)
+
+ return filename
+
+ def get_code(self, fullname):
+ if self._compiled_code:
+ return self._compiled_code
+
+ # this may or may not be an actual filename, but it's the value we'll use for __file__
+ filename = self.get_filename(fullname)
+ if not filename:
+ filename = '<string>'
+
+ source_code = self.get_source(fullname)
+
+ # for things like synthetic modules that really have no source on disk, don't return a code object at all
+ # vs things like an empty package init (which has an empty string source on disk)
+ if source_code is None:
+ return None
+
+ self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True)
+
+ return self._compiled_code
+
+ def iter_modules(self, prefix):
+ return _iter_modules_impl(self._subpackage_search_paths, prefix)
+
+ def __repr__(self):
+ return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path)
+
+
+class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionRootPkgLoader, self)._validate_args()
+ if len(self._split_name) != 1:
+ raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname))
+
+
+# Implements Ansible's custom namespace package support.
+# The ansible_collections package and one level down (collections namespaces) are Python namespace packages
+# that search across all configured collection roots. The collection package (two levels down) is the first one found
+# on the configured collection root path, and Python namespace package aggregation is not allowed at or below
+# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored
+# by this loader.
+class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionNSPkgLoader, self)._validate_args()
+ if len(self._split_name) != 2:
+ raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname))
+
+ def _validate_final(self):
+ # special-case the `ansible` namespace, since `ansible.builtin` is magical
+ if not self._subpackage_search_paths and self._package_to_load != 'ansible':
+ raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
+
+
+# handles locating the actual collection package and associated metadata
+class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
+ def _validate_args(self):
+ super(_AnsibleCollectionPkgLoader, self)._validate_args()
+ if len(self._split_name) != 3:
+ raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname))
+
+ def _validate_final(self):
+ if self._split_name[1:3] == ['ansible', 'builtin']:
+ # we don't want to allow this one to have on-disk search capability
+ self._subpackage_search_paths = []
+ elif not self._subpackage_search_paths:
+ raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
+ else:
+ # only search within the first collection we found
+ self._subpackage_search_paths = [self._subpackage_search_paths[0]]
+
+ def load_module(self, fullname):
+ if not _meta_yml_to_dict:
+ raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set')
+
+ module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)
+
+ module._collection_meta = {}
+ # TODO: load collection metadata, cache in __loader__ state
+
+ collection_name = '.'.join(self._split_name[1:3])
+
+ if collection_name == 'ansible.builtin':
+ # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
+ ansible_pkg_path = os.path.dirname(import_module('ansible').__file__)
+ metadata_path = os.path.join(ansible_pkg_path, 'config/ansible_builtin_runtime.yml')
+ with open(to_bytes(metadata_path), 'rb') as fd:
+ raw_routing = fd.read()
+ else:
+ b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
+ if os.path.isfile(b_routing_meta_path):
+ with open(b_routing_meta_path, 'rb') as fd:
+ raw_routing = fd.read()
+ else:
+ raw_routing = ''
+ try:
+ if raw_routing:
+ routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
+ module._collection_meta = self._canonicalize_meta(routing_dict)
+ except Exception as ex:
+ raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))
+
+ AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))
+
+ return module
+
+ def _canonicalize_meta(self, meta_dict):
+ # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection)
+ # OR we could do it all on the fly?
+ # if not meta_dict:
+ # return {}
+ #
+ # ns_name = '.'.join(self._split_name[0:2])
+ # collection_name = '.'.join(self._split_name[0:3])
+ #
+ # #
+ # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})):
+ # for plugin_key, plugin_dict in iteritems(routing_type_dict):
+ # redirect = plugin_dict.get('redirect', '')
+ # if redirect.startswith('..'):
+ # redirect = redirect[2:]
+
+ return meta_dict
+
+
+# loads everything under a collection, including handling redirections defined by the collection
+class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase):
+ # HACK: stash this in a better place
+ _redirected_package_map = {}
+ _allows_package_code = True
+
+ def _validate_args(self):
+ super(_AnsibleCollectionLoader, self)._validate_args()
+ if len(self._split_name) < 4:
+ raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname))
+
+ def _get_candidate_paths(self, path_list):
+ if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']:
+ raise ValueError('this loader requires exactly one path to search')
+
+ return path_list
+
+ def _get_subpackage_search_paths(self, candidate_paths):
+ collection_name = '.'.join(self._split_name[1:3])
+ collection_meta = _get_collection_metadata(collection_name)
+
+ # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!)
+ redirect = None
+ explicit_redirect = False
+
+ routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname])
+ if routing_entry:
+ redirect = routing_entry.get('redirect')
+
+ if redirect:
+ explicit_redirect = True
+ else:
+ redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname)
+
+ # NB: package level redirection requires hooking all future imports beneath the redirected source package
+ # in order to ensure sanity on future relative imports. We always import everything under its "real" name,
+ # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported
+ # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module
+ # (one for each name), and relative imports that ascend above the redirected package would break (since they'd
+ # see the redirected ancestor package contents instead of the package where they actually live).
+ if redirect:
+ # FIXME: wrap this so we can be explicit about a failed redirection
+ self._redirect_module = import_module(redirect)
+ if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__:
+ # if the import target looks like a package, store its name so we can rewrite future descendent loads
+ self._redirected_package_map[self._fullname] = redirect
+
+ # if we redirected, don't do any further custom package logic
+ return None
+
+ # we're not doing a redirect- try to find what we need to actually load a module/package
+
+ # this will raise ImportError if we can't find the requested module/package at all
+ if not candidate_paths:
+ # no place to look, just ImportError
+ raise ImportError('package has no paths')
+
+ found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0])
+
+ # still here? we found something to load...
+ if has_code:
+ self._source_code_path = found_path
+
+ if package_path:
+ return [package_path] # always needs to be a list
+
+ return None
+
+
+# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later
+# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur)
+class _AnsibleInternalRedirectLoader:
+ def __init__(self, fullname, path_list):
+ self._redirect = None
+
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+ module_to_load = split_name[-1]
+
+ if toplevel_pkg != 'ansible':
+ raise ImportError('not interested')
+
+ builtin_meta = _get_collection_metadata('ansible.builtin')
+
+ routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname])
+ if routing_entry:
+ self._redirect = routing_entry.get('redirect')
+
+ if not self._redirect:
+ raise ImportError('not redirected, go ask path_hook')
+
+ def load_module(self, fullname):
+ # since we're delegating to other loaders, this should only be called for internal redirects where we answered
+ # find_module with this loader, in which case we'll just directly import the redirection target, insert it into
+ # sys.modules under the name it was requested by, and return the original module.
+
+ # should never see this
+ if not self._redirect:
+ raise ValueError('no redirect found for {0}'.format(fullname))
+
+ # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect
+ mod = import_module(self._redirect)
+ sys.modules[fullname] = mod
+ return mod
+
+
+class AnsibleCollectionRef:
+ # FUTURE: introspect plugin loaders to get these dynamically?
+ VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
+ 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
+ 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
+ 'terminal', 'test', 'vars', 'playbook'])
+
+ # FIXME: tighten this up to match Python identifier reqs, etc
+ VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
+ VALID_FQCR_RE = re.compile(to_text(r'^\w+(\.\w+){2,}$')) # can have 0-N included subdirs as well
+
+ def __init__(self, collection_name, subdirs, resource, ref_type):
+ """
+ Create an AnsibleCollectionRef from components
+ :param collection_name: a collection name of the form 'namespace.collectionname'
+ :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
+ :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ """
+ collection_name = to_text(collection_name, errors='strict')
+ if subdirs is not None:
+ subdirs = to_text(subdirs, errors='strict')
+ resource = to_text(resource, errors='strict')
+ ref_type = to_text(ref_type, errors='strict')
+
+ if not self.is_valid_collection_name(collection_name):
+ raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
+
+ if ref_type not in self.VALID_REF_TYPES:
+ raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
+
+ self.collection = collection_name
+ if subdirs:
+ if not re.match(self.VALID_SUBDIRS_RE, subdirs):
+ raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
+ self.subdirs = subdirs
+ else:
+ self.subdirs = u''
+
+ self.resource = resource
+ self.ref_type = ref_type
+
+ package_components = [u'ansible_collections', self.collection]
+ fqcr_components = [self.collection]
+
+ self.n_python_collection_package_name = to_native('.'.join(package_components))
+
+ if self.ref_type == u'role':
+ package_components.append(u'roles')
+ elif self.ref_type == u'playbook':
+ package_components.append(u'playbooks')
+ else:
+ # we assume it's a plugin
+ package_components += [u'plugins', self.ref_type]
+
+ if self.subdirs:
+ package_components.append(self.subdirs)
+ fqcr_components.append(self.subdirs)
+
+ if self.ref_type in (u'role', u'playbook'):
+ # playbooks and roles are their own resource
+ package_components.append(self.resource)
+
+ fqcr_components.append(self.resource)
+
+ self.n_python_package_name = to_native('.'.join(package_components))
+ self._fqcr = u'.'.join(fqcr_components)
+
+ def __repr__(self):
+ return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource)
+
+ @property
+ def fqcr(self):
+ return self._fqcr
+
+ @staticmethod
+ def from_fqcr(ref, ref_type):
+ """
+ Parse a string as a fully-qualified collection reference; raises ValueError if invalid
+ :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ :return: a populated AnsibleCollectionRef object
+ """
+ # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
+ # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
+ # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
+ # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
+ # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
+ # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
+ if not AnsibleCollectionRef.is_valid_fqcr(ref):
+ raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
+
+ ref = to_text(ref, errors='strict')
+ ref_type = to_text(ref_type, errors='strict')
+ ext = ''
+
+ if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS):
+ resource_splitname = ref.rsplit(u'.', 2)
+ package_remnant = resource_splitname[0]
+ resource = resource_splitname[1]
+ ext = '.' + resource_splitname[2]
+ else:
+ resource_splitname = ref.rsplit(u'.', 1)
+ package_remnant = resource_splitname[0]
+ resource = resource_splitname[1]
+
+ # split the left two components of the collection package name off, anything remaining is plugin-type
+ # specific subdirs to be added back on below the plugin type
+ package_splitname = package_remnant.split(u'.', 2)
+ if len(package_splitname) == 3:
+ subdirs = package_splitname[2]
+ else:
+ subdirs = u''
+
+ collection_name = u'.'.join(package_splitname[0:2])
+
+ return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type)
+
+ @staticmethod
+ def try_parse_fqcr(ref, ref_type):
+ """
+ Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
+ :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
+ :return: a populated AnsibleCollectionRef object on successful parsing, else None
+ """
+ try:
+ return AnsibleCollectionRef.from_fqcr(ref, ref_type)
+ except ValueError:
+ pass
+
+ @staticmethod
+ def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
+ """
+ Utility method to convert from a PluginLoader dir name to a plugin ref_type
+ :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
+ :return: the corresponding plugin ref_type (eg, 'action', 'role')
+ """
+ legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
+
+ plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
+
+ if plugin_type == u'library':
+ plugin_type = u'modules'
+
+ if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
+ raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
+
+ return plugin_type
+
+ @staticmethod
+ def is_valid_fqcr(ref, ref_type=None):
+ """
+ Validates whether a string is a well-formed fully-qualified collection reference (does not look up the collection itself)
+ :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
+ :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
+ :return: True if the collection ref passed is well-formed, False otherwise
+ """
+
+ ref = to_text(ref)
+
+ if not ref_type:
+ return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
+
+ return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
+
+ @staticmethod
+ def is_valid_collection_name(collection_name):
+ """
+ Validates if the given string is a well-formed collection name (does not look up the collection itself)
+ :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
+ :return: True if the collection name passed is well-formed, False otherwise
+ """
+
+ collection_name = to_text(collection_name)
+
+ if collection_name.count(u'.') != 1:
+ return False
+
+ return all(
+ # NOTE: keywords and identifiers are different in different Pythons
+ not iskeyword(ns_or_name) and is_python_identifier(ns_or_name)
+ for ns_or_name in collection_name.split(u'.')
+ )
+
+
+def _get_collection_playbook_path(playbook):
+
+ acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook')
+ if acr:
+ try:
+ # get_collection_path
+ pkg = import_module(acr.n_python_collection_package_name)
+ except (IOError, ModuleNotFoundError) as e:
+ # leaving e as debug target, even though not used in normal code
+ pkg = None
+
+ if pkg:
+ cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks'))
+
+ if acr.subdirs:
+ paths = [to_native(x) for x in acr.subdirs.split(u'.')]
+ paths.insert(0, cpath)
+ cpath = os.path.join(*paths)
+
+ path = os.path.join(cpath, to_native(acr.resource))
+ if os.path.exists(to_bytes(path)):
+ return acr.resource, path, acr.collection
+ elif not acr.resource.endswith(PB_EXTENSIONS):
+ for ext in PB_EXTENSIONS:
+ path = os.path.join(cpath, to_native(acr.resource + ext))
+ if os.path.exists(to_bytes(path)):
+ return acr.resource, path, acr.collection
+ return None
+
+
+def _get_collection_role_path(role_name, collection_list=None):
+ return _get_collection_resource_path(role_name, u'role', collection_list)
+
+
+def _get_collection_resource_path(name, ref_type, collection_list=None):
+
+ if ref_type == u'playbook':
+ # they are handled a bit differently due to 'extension variance' and the absence of a collection_list
+ return _get_collection_playbook_path(name)
+
+ acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type)
+ if acr:
+ # looks like a valid qualified collection ref; skip the collection_list
+ collection_list = [acr.collection]
+ subdirs = acr.subdirs
+ resource = acr.resource
+ elif not collection_list:
+ return None # not a FQ and no collection search list spec'd, nothing to do
+ else:
+ resource = name # treat as unqualified, loop through the collection search list to try and resolve
+ subdirs = ''
+
+ for collection_name in collection_list:
+ try:
+ acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type)
+ # FIXME: error handling/logging; need to catch any import failures and move along
+ pkg = import_module(acr.n_python_package_name)
+
+ if pkg is not None:
+ # the package is now loaded, get the collection's package and ask where it lives
+ path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
+ return resource, to_text(path, errors='surrogate_or_strict'), collection_name
+
+ except (IOError, ModuleNotFoundError) as e:
+ continue
+ except Exception as ex:
+ # FIXME: pick out typical import errors first, then error logging
+ continue
+
+ return None
+
+
+def _get_collection_name_from_path(path):
+ """
+ Return the containing collection name for a given path, or None if the path is not below a configured collection, or
+ the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
+ collection roots).
+ :param path: path to evaluate for collection containment
+ :return: collection name or None
+ """
+
+ # ensure we compare full paths since pkg path will be abspath
+ path = to_native(os.path.abspath(to_bytes(path)))
+
+ path_parts = path.split('/')
+ if path_parts.count('ansible_collections') != 1:
+ return None
+
+ ac_pos = path_parts.index('ansible_collections')
+
+ # make sure it's followed by at least a namespace and collection name
+ if len(path_parts) < ac_pos + 3:
+ return None
+
+ candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3])
+
+ try:
+ # we've got a name for it, now see if the path prefix matches what the loader sees
+ imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.' + candidate_collection_name).__file__)))
+ except ImportError:
+ return None
+
+ # reassemble the original path prefix up to the collection name; it should match what we just imported. If not,
+ # this is probably a collection root that's not configured.
+
+ original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3])
+
+ imported_pkg_path = to_native(os.path.abspath(to_bytes(imported_pkg_path)))
+ if original_path_prefix != imported_pkg_path:
+ return None
+
+ return candidate_collection_name
+
+
+def _get_import_redirect(collection_meta_dict, fullname):
+ if not collection_meta_dict:
+ return None
+
+ return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect'])
+
+
+def _get_ancestor_redirect(redirected_package_map, fullname):
+ # walk the requested module's ancestor packages to see if any have been previously redirected
+ cur_pkg = fullname
+ while cur_pkg:
+ cur_pkg = cur_pkg.rpartition('.')[0]
+ ancestor_redirect = redirected_package_map.get(cur_pkg)
+ if ancestor_redirect:
+ # rewrite the prefix on fullname so we import the target first, then alias it
+ redirect = ancestor_redirect + fullname[len(cur_pkg):]
+ return redirect
+ return None
+
+
+def _nested_dict_get(root_dict, key_list):
+ cur_value = root_dict
+ for key in key_list:
+ cur_value = cur_value.get(key)
+ if not cur_value:
+ return None
+
+ return cur_value
+
+
+def _iter_modules_impl(paths, prefix=''):
+ # NB: this currently only iterates what's on disk; redirected modules are not considered
+ if not prefix:
+ prefix = ''
+ else:
+ prefix = to_native(prefix)
+ # yield (module_loader, name, ispkg) for each module/pkg under path
+ # TODO: implement ignore/silent catch for unreadable?
+ for b_path in map(to_bytes, paths):
+ if not os.path.isdir(b_path):
+ continue
+ for b_basename in sorted(os.listdir(b_path)):
+ b_candidate_module_path = os.path.join(b_path, b_basename)
+ if os.path.isdir(b_candidate_module_path):
+ # exclude things that obviously aren't Python package dirs
+ # FIXME: this dir is adjustable in py3.8+, check for it
+ if b'.' in b_basename or b_basename == b'__pycache__':
+ continue
+
+ # TODO: proper string handling?
+ yield prefix + to_native(b_basename), True
+ else:
+ # FIXME: match builtin ordering for package/dir/file, support compiled?
+ if b_basename.endswith(b'.py') and b_basename != b'__init__.py':
+ yield prefix + to_native(os.path.splitext(b_basename)[0]), False
+
+
+def _get_collection_metadata(collection_name):
+ collection_name = to_native(collection_name)
+ if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2:
+ raise ValueError('collection_name must be a non-empty string of the form namespace.collection')
+
+ try:
+ collection_pkg = import_module('ansible_collections.' + collection_name)
+ except ImportError:
+ raise ValueError('unable to locate collection {0}'.format(collection_name))
+
+ _collection_meta = getattr(collection_pkg, '_collection_meta', None)
+
+ if _collection_meta is None:
+ raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name))
+
+ return _collection_meta
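
To make the FQCR-to-package mapping above concrete, a usage sketch for AnsibleCollectionRef (names illustrative; expected values follow from the constructor logic):

    ref = AnsibleCollectionRef.from_fqcr('ns.coll.subdir1.my_module', 'modules')
    ref.collection              # -> u'ns.coll'
    ref.subdirs                 # -> u'subdir1'
    ref.resource                # -> u'my_module'
    ref.fqcr                    # -> u'ns.coll.subdir1.my_module'
    ref.n_python_package_name   # -> 'ansible_collections.ns.coll.plugins.modules.subdir1'
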
diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py
new file mode 100644
index 00000000..3a971978
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py
@@ -0,0 +1,37 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# CAUTION: There are two implementations of the collection loader.
+# They must be kept functionally identical, although their implementations may differ.
+#
+# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
+# It must function on all Python versions supported on the controller.
+# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory.
+# It must function on all Python versions supported on managed hosts which are not supported by the controller.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+try:
+ from collections.abc import Mapping # pylint: disable=ansible-bad-import-from
+except ImportError:
+ from collections import Mapping # pylint: disable=ansible-bad-import-from,deprecated-class
+
+from ansible.module_utils.common.yaml import yaml_load
+
+
+def _meta_yml_to_dict(yaml_string_data, content_id):
+ """
+ Converts a YAML dictionary string to a Python dictionary. This function may be monkeypatched to another implementation
+ by some tools (eg the import sanity test).
+ :param yaml_string_data: a bytes-ish YAML dictionary
+ :param content_id: a unique ID representing the content to allow other implementations to cache the output
+ :return: a Python dictionary representing the YAML dictionary content
+ """
+ # NB: content_id is passed in, but not used by this implementation
+ routing_dict = yaml_load(yaml_string_data)
+ if not routing_dict:
+ routing_dict = {}
+ if not isinstance(routing_dict, Mapping):
+ raise ValueError('collection metadata must be an instance of Python Mapping')
+ return routing_dict
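
A usage sketch for _meta_yml_to_dict (input illustrative; content_id matters only to caching implementations that monkeypatch this function):

    routing = _meta_yml_to_dict(b'plugin_routing:\n  modules: {}\n', ('ns.coll', 'runtime.yml'))
    # routing == {'plugin_routing': {'modules': {}}}
    # empty input yields {}; non-mapping YAML (e.g. a list) raises ValueError
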
diff --git a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
index 67c69f15..c26971fc 100644
--- a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py
+++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
@@ -3,10 +3,14 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+import sys
# set by ansible-test to a single directory, rather than a list of directories as supported by Ansible itself
ANSIBLE_COLLECTIONS_PATH = os.path.join(os.environ['ANSIBLE_COLLECTIONS_PATH'], 'ansible_collections')
+# set by ansible-test to the minimum python version supported on the controller
+ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION = tuple(int(x) for x in os.environ['ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION'].split('.'))
+
# this monkeypatch to _pytest.pathlib.resolve_package_path fixes PEP420 resolution for collections in pytest >= 6.0.0
# NB: this code should never run under py2
@@ -37,8 +41,12 @@ def pytest_configure():
except AttributeError:
pytest_configure.executed = True
- # noinspection PyProtectedMember
- from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
+ if sys.version_info >= ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION:
+ # noinspection PyProtectedMember
+ from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
+ else:
+ # noinspection PyProtectedMember
+ from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder
# allow unit tests to import code from collections
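
The gate added above relies on Python's lexicographic tuple comparison, so a full interpreter version compares cleanly against a shorter minimum. A standalone sketch of the same check (the env-var value here is illustrative):

    import os
    import sys

    os.environ.setdefault('ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION', '3.8')  # illustrative
    minimum = tuple(int(x) for x in os.environ['ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION'].split('.'))

    # (3, 9, 7) >= (3, 8) is True: comparison is element-wise, left to right
    use_controller_loader = sys.version_info >= minimum
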
diff --git a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py
index b05298ab..b05298ab 100644
--- a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py
+++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py
diff --git a/test/lib/ansible_test/_util/target/sanity/compile/compile.py b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
new file mode 100644
index 00000000..e2302fc0
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
@@ -0,0 +1,47 @@
+"""Python syntax checker with lint friendly output."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+
+ENCODING = 'utf-8'
+ERRORS = 'replace'
+Text = type(u'')
+
+
+def main():
+ """Main program entry point."""
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ with open(path, 'rb') as source_fd:
+ source = source_fd.read()
+
+ try:
+ compile(source, path, 'exec', dont_inherit=True)
+ except SyntaxError as ex:
+ extype, message, lineno, offset = type(ex), ex.text, ex.lineno, ex.offset
+ except BaseException as ex: # pylint: disable=broad-except
+ extype, message, lineno, offset = type(ex), str(ex), 0, 0
+ else:
+ continue
+
+ result = "%s:%d:%d: %s: %s" % (path, lineno, offset, extype.__name__, safe_message(message))
+
+ if sys.version_info <= (3,):
+ result = result.encode(ENCODING, ERRORS)
+
+ print(result)
+
+
+def safe_message(value):
+ """Given an input value as text or bytes, return the first non-empty line as text, ensuring it can be round-tripped as UTF-8."""
+ if isinstance(value, Text):
+ value = value.encode(ENCODING, ERRORS)
+
+ value = value.decode(ENCODING, ERRORS)
+ value = value.strip().splitlines()[0].strip()
+
+ return value
+
+
+if __name__ == '__main__':
+ main()
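
Note that for a SyntaxError the checker reports ex.text, i.e. the offending source line, in the message slot, with line/column taken from ex.lineno/ex.offset. Illustrative output for a hypothetical bad.py (the exact column varies across Python versions):

    # paths come from argv, or from stdin when no args are given:
    #   printf 'bad.py\n' | python compile.py
    # output, one line per failing file:
    #   bad.py:1:7: SyntaxError: def f(:
    # format: path:line:column: exception-name: first line of the message
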
diff --git a/test/lib/ansible_test/_data/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py
index 5fae766c..778643bb 100755..100644
--- a/test/lib/ansible_test/_data/sanity/import/importer.py
+++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Import the given python module(s) and report error(s) encountered."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -80,7 +79,8 @@ def main():
try:
cmd = [external_python, yaml_to_json_path]
- proc = subprocess.Popen([to_bytes(c) for c in cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ proc = subprocess.Popen([to_bytes(c) for c in cmd], # pylint: disable=consider-using-with
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml))
if proc.returncode != 0:
@@ -144,7 +144,7 @@ def main():
if not self.restrict_to_module_paths:
return None # for non-modules, everything in the ansible namespace is allowed
- if fullname in ('ansible.module_utils.basic', 'ansible.module_utils.common.removed'):
+ if fullname in ('ansible.module_utils.basic',):
return self # intercept loading so we can modify the result
if is_name_in_namepace(fullname, ['ansible.module_utils', self.name]):
@@ -187,14 +187,6 @@ def main():
return module
- if fullname == 'ansible.module_utils.common.removed':
- module = self.__load_module(fullname)
-
- # no-op for removed_module since it is called in place of AnsibleModule instantiation
- module.removed_module = lambda *args, **kwargs: None
-
- return module
-
raise ImportError('import of "%s" is not allowed in this context' % fullname)
def __load_module(self, fullname):
@@ -238,6 +230,8 @@ def main():
capture_normal = Capture()
capture_main = Capture()
+ run_module_ok = False
+
try:
with monitor_sys_modules(path, messages):
with restrict_imports(path, name, messages, restrict_to_module_paths):
@@ -245,13 +239,19 @@ def main():
import_module(name)
if run_main:
+ run_module_ok = is_ansible_module
+
with monitor_sys_modules(path, messages):
with restrict_imports(path, name, messages, restrict_to_module_paths):
with capture_output(capture_main):
runpy.run_module(name, run_name='__main__', alter_sys=True)
except ImporterAnsibleModuleException:
# module instantiated AnsibleModule without raising an exception
- pass
+ if not run_module_ok:
+ if is_ansible_module:
+ report_message(path, 0, 0, 'module-guard', "AnsibleModule instantiation not guarded by `if __name__ == '__main__'`", messages)
+ else:
+ report_message(path, 0, 0, 'non-module', "AnsibleModule instantiated by import of non-module", messages)
except BaseException as ex: # pylint: disable=locally-disabled, broad-except
# intentionally catch all exceptions, including calls to sys.exit
exc_type, _exc, exc_tb = sys.exc_info()
@@ -472,6 +472,7 @@ def main():
with warnings.catch_warnings():
warnings.simplefilter('error')
+
if sys.version_info[0] == 2:
warnings.filterwarnings(
"ignore",
@@ -481,6 +482,7 @@ def main():
"ignore",
"Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography,"
" and will be removed in the next release.")
+
if sys.version_info[:2] == (3, 5):
warnings.filterwarnings(
"ignore",
@@ -488,10 +490,42 @@ def main():
warnings.filterwarnings(
"ignore",
"Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.")
- warnings.filterwarnings(
- "ignore",
- "The _yaml extension module is now located at yaml._yaml and its location is subject to change. To use the "
- "LibYAML-based parser and emitter, import from `yaml`: `from yaml import CLoader as Loader, CDumper as Dumper`.")
+
+ if sys.version_info >= (3, 10):
+ # Temporary solution for Python 3.10 until find_spec is implemented in RestrictedModuleLoader.
+ # That implementation is dependent on find_spec being added to the controller's collection loader first.
+ # The warning text is: main.<locals>.RestrictedModuleLoader.find_spec() not found; falling back to find_module()
+ warnings.filterwarnings(
+ "ignore",
+ r"main\.<locals>\.RestrictedModuleLoader\.find_spec\(\) not found; falling back to find_module\(\)",
+ )
+ # Temporary solution for Python 3.10 until exec_module is implemented in RestrictedModuleLoader.
+ # That implementation is dependent on exec_module being added to the controller's collection loader first.
+ # The warning text is: main.<locals>.RestrictedModuleLoader.exec_module() not found; falling back to load_module()
+ warnings.filterwarnings(
+ "ignore",
+ r"main\.<locals>\.RestrictedModuleLoader\.exec_module\(\) not found; falling back to load_module\(\)",
+ )
+
+ # Temporary solution for Python 3.10 until find_spec is implemented in the controller's collection loader.
+ warnings.filterwarnings(
+ "ignore",
+ r"_Ansible.*Finder\.find_spec\(\) not found; falling back to find_module\(\)",
+ )
+ # Temporary solution for Python 3.10 until exec_module is implemented in the controller's collection loader.
+ warnings.filterwarnings(
+ "ignore",
+ r"_Ansible.*Loader\.exec_module\(\) not found; falling back to load_module\(\)",
+ )
+
+ # Temporary solution until there is a vendored copy of distutils.version in module_utils.
+ # Some of our dependencies, such as packaging.tags, also import distutils, which we have no control over.
+ # The warning text is: The distutils package is deprecated and slated for removal in Python 3.12.
+ # Use setuptools or check PEP 632 for potential alternatives
+ warnings.filterwarnings(
+ "ignore",
+ r"The distutils package is deprecated and slated for removal in Python 3\.12\. .*",
+ )
try:
yield
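
The new module-guard and non-module messages enforce the standard module layout; a hedged sketch of the pattern the check expects (the argument_spec contents here are illustrative):

    from ansible.module_utils.basic import AnsibleModule

    def main():
        # AnsibleModule is only instantiated when run as a module, so the
        # import-only analysis above never triggers it
        module = AnsibleModule(argument_spec=dict())
        module.exit_json(changed=False)

    if __name__ == '__main__':
        main()
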
diff --git a/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
index 09be9576..1164168e 100644
--- a/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py
+++ b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
@@ -18,6 +18,7 @@ ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:'
def default(value):
+ """Custom default serializer which supports datetime.date types."""
if isinstance(value, datetime.date):
return '%s%s' % (ISO_DATE_MARKER, value.isoformat())
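
A minimal sketch of how that serializer plugs into json.dumps, reusing the ISO_DATE_MARKER defined above; the TypeError fallback follows the standard default-hook contract:

    import datetime
    import json

    ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:'

    def default(value):
        """Custom default serializer which supports datetime.date types."""
        if isinstance(value, datetime.date):
            return '%s%s' % (ISO_DATE_MARKER, value.isoformat())
        raise TypeError('cannot serialize type: %s' % type(value))

    print(json.dumps(dict(released=datetime.date(2021, 11, 17)), default=default))
    # {"released": "isodate:f23983df-f3df-453c-9904-bcd08af468cc:2021-11-17"}
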
diff --git a/test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
index 7e039bb4..7e039bb4 100644
--- a/test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1
+++ b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
new file mode 100644
index 00000000..2d31945e
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
@@ -0,0 +1,323 @@
+# shellcheck shell=sh
+
+set -eu
+
+install_ssh_keys()
+{
+ if [ ! -f "${ssh_private_key_path}" ]; then
+ # write public/private ssh key pair
+ public_key_path="${ssh_private_key_path}.pub"
+
+ # shellcheck disable=SC2174
+ mkdir -m 0700 -p "${ssh_path}"
+ touch "${public_key_path}" "${ssh_private_key_path}"
+ chmod 0600 "${public_key_path}" "${ssh_private_key_path}"
+ echo "${ssh_public_key}" > "${public_key_path}"
+ echo "${ssh_private_key}" > "${ssh_private_key_path}"
+
+ # add public key to authorized_keys
+ authorized_keys_path="${HOME}/.ssh/authorized_keys"
+
+ # the existing file is overwritten to avoid conflicts (e.g. RHEL on EC2 blocks root login)
+ cat "${public_key_path}" > "${authorized_keys_path}"
+ chmod 0600 "${authorized_keys_path}"
+
+ # add localhost's server keys to known_hosts
+ known_hosts_path="${HOME}/.ssh/known_hosts"
+
+ for key in /etc/ssh/ssh_host_*_key.pub; do
+ echo "localhost $(cat "${key}")" >> "${known_hosts_path}"
+ done
+ fi
+}
+
+customize_bashrc()
+{
+ true > ~/.bashrc
+
+ # Show color `ls` results when available.
+ if ls --color > /dev/null 2>&1; then
+ echo "alias ls='ls --color'" >> ~/.bashrc
+ elif ls -G > /dev/null 2>&1; then
+ echo "alias ls='ls -G'" >> ~/.bashrc
+ fi
+
+ # Improve shell prompts for interactive use.
+ echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
+}
+
+install_pip() {
+ if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then
+ case "${python_version}" in
+ *)
+ pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py"
+ ;;
+ esac
+
+ while true; do
+ curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py && \
+ "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet && \
+ rm /tmp/get-pip.py \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+ fi
+}
+
+pip_install() {
+ pip_packages="$1"
+
+ while true; do
+ # shellcheck disable=SC2086
+ "${python_interpreter}" -m pip install --disable-pip-version-check ${pip_packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_aix()
+{
+ chfs -a size=1G /
+ chfs -a size=4G /usr
+ chfs -a size=1G /var
+ chfs -a size=1G /tmp
+ chfs -a size=2G /opt
+
+ if [ "${python_version}" = "2.7" ]; then
+ python_package_version=""
+ else
+ python_package_version="3"
+ fi
+
+ packages="
+ gcc
+ python${python_package_version}
+ python${python_package_version}-devel
+ python${python_package_version}-pip
+ "
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_freebsd()
+{
+ if [ "${python_version}" = "2.7" ]; then
+ # on Python 2.7 our only option is to use virtualenv
+ virtualenv_pkg="py27-virtualenv"
+ else
+ # on Python 3.x we'll use the built-in venv instead
+ virtualenv_pkg=""
+ fi
+
+ packages="
+ python${python_package_version}
+ ${virtualenv_pkg}
+ bash
+ curl
+ gtar
+ sudo
+ "
+
+ if [ "${controller}" ]; then
+ # Declare platform/python version combinations which do not have supporting OS packages available.
+ # For these combinations ansible-test will use pip to install the requirements instead.
+ case "${platform_version}/${python_version}" in
+ "11.4/3.8")
+ have_os_packages=""
+ ;;
+ "12.2/3.8")
+ have_os_packages=""
+ ;;
+ "13.0/3.8")
+ have_os_packages=""
+ ;;
+ "13.0/3.9")
+ have_os_packages=""
+ ;;
+ *)
+ have_os_packages="yes"
+ ;;
+ esac
+
+ # PyYAML is never installed with an OS package since it does not include libyaml support.
+ # Instead, ansible-test will install it using pip.
+ if [ "${have_os_packages}" ]; then
+ jinja2_pkg="py${python_package_version}-Jinja2"
+ cryptography_pkg="py${python_package_version}-cryptography"
+ else
+ jinja2_pkg=""
+ cryptography_pkg=""
+ fi
+
+ packages="
+ ${packages}
+ libyaml
+ ${jinja2_pkg}
+ ${cryptography_pkg}
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ env ASSUME_ALWAYS_YES=YES pkg bootstrap && \
+ pkg install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+
+ install_pip
+
+ if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then
+ sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config
+ service sshd restart
+ fi
+}
+
+bootstrap_remote_macos()
+{
+ # Silence macOS deprecation warning for bash.
+ echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc
+
+ # Make sure ~/ansible/ is the starting directory for interactive shells on the control node.
+ # The root home directory is under a symlink. Without this the real path will be displayed instead.
+ if [ "${controller}" ]; then
+ echo "cd ~/ansible/" >> ~/.bashrc
+ fi
+
+ # Make sure commands like 'brew' can be found.
+ # This affects users with the 'zsh' shell, as well as 'root' accessed using 'sudo' from a user with 'zsh' for a shell.
+ # shellcheck disable=SC2016
+ echo 'PATH="/usr/local/bin:$PATH"' > /etc/zshenv
+}
+
+bootstrap_remote_rhel_7()
+{
+ packages="
+ gcc
+ python-devel
+ python-virtualenv
+ "
+
+ if [ "${controller}" ]; then
+ packages="
+ ${packages}
+ python2-cryptography
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+
+ install_pip
+}
+
+bootstrap_remote_rhel_8()
+{
+ if [ "${python_version}" = "3.6" ]; then
+ py_pkg_prefix="python3"
+ else
+ py_pkg_prefix="python${python_package_version}"
+ fi
+
+ packages="
+ gcc
+ ${py_pkg_prefix}-devel
+ "
+
+ if [ "${controller}" ]; then
+ packages="
+ ${packages}
+ ${py_pkg_prefix}-jinja2
+ ${py_pkg_prefix}-cryptography
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum module install -q -y "python${python_package_version}" && \
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_rhel()
+{
+ case "${platform_version}" in
+ 7.*) bootstrap_remote_rhel_7 ;;
+ 8.*) bootstrap_remote_rhel_8 ;;
+ esac
+
+ # pin packaging and pyparsing to match the downstream vendored versions
+ pip_packages="
+ packaging==20.4
+ pyparsing==2.4.7
+ "
+
+ pip_install "${pip_packages}"
+}
+
+bootstrap_docker()
+{
+ # Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04.
+ rm -f /usr/sbin/policy-rc.d
+}
+
+bootstrap_remote()
+{
+ for python_version in ${python_versions}; do
+ echo "Bootstrapping Python ${python_version}"
+
+ python_interpreter="python${python_version}"
+ python_package_version="$(echo "${python_version}" | tr -d '.')"
+
+ case "${platform}" in
+ "aix") bootstrap_remote_aix ;;
+ "freebsd") bootstrap_remote_freebsd ;;
+ "macos") bootstrap_remote_macos ;;
+ "rhel") bootstrap_remote_rhel ;;
+ esac
+ done
+}
+
+bootstrap()
+{
+ ssh_path="${HOME}/.ssh"
+ ssh_private_key_path="${ssh_path}/id_${ssh_key_type}"
+
+ install_ssh_keys
+ customize_bashrc
+
+ case "${bootstrap_type}" in
+ "docker") bootstrap_docker ;;
+ "remote") bootstrap_remote ;;
+ esac
+}
+
+# These variables will be templated before sending the script to the host.
+# They are at the end of the script to maintain line numbers for debugging purposes.
+bootstrap_type=#{bootstrap_type}
+controller=#{controller}
+platform=#{platform}
+platform_version=#{platform_version}
+python_versions=#{python_versions}
+ssh_key_type=#{ssh_key_type}
+ssh_private_key=#{ssh_private_key}
+ssh_public_key=#{ssh_public_key}
+
+bootstrap
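
The #{name} placeholders above are replaced by ansible-test before the script is sent to the host. A rough sketch of that step; the plain string substitution shown here is an assumption based on the placeholder syntax, and all values are illustrative:

    template = open('bootstrap.sh').read()  # the script above

    values = dict(
        bootstrap_type='remote',
        controller='yes',
        platform='freebsd',
        platform_version='13.0',
        python_versions='3.8',
        ssh_key_type='rsa',
        ssh_private_key='<private key>',  # illustrative
        ssh_public_key='<public key>',    # illustrative
    )

    script = template
    for name, value in values.items():
        script = script.replace('#{%s}' % name, value)
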
diff --git a/test/lib/ansible_test/_data/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
index e1bb8246..83d4576b 100644
--- a/test/lib/ansible_test/_data/quiet_pip.py
+++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
@@ -11,6 +11,7 @@ BUILTIN_FILTERER_FILTER = logging.Filterer.filter
LOGGING_MESSAGE_FILTER = re.compile("^("
".*Running pip install with root privileges is generally not a good idea.*|" # custom Fedora patch [1]
+ ".*Running pip as the 'root' user can result in broken permissions .*|" # pip 21.1
"DEPRECATION: Python 2.7 will reach the end of its life .*|" # pip 19.2.3
"Ignoring .*: markers .* don't match your environment|"
"Looking in indexes: .*|" # pypi-test-container
diff --git a/test/lib/ansible_test/_util/target/setup/requirements.py b/test/lib/ansible_test/_util/target/setup/requirements.py
new file mode 100644
index 00000000..0e3b1e63
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/requirements.py
@@ -0,0 +1,252 @@
+"""A tool for installing test requirements on the controller and target host."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# pylint: disable=wrong-import-position
+
+import resource
+
+# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
+# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
+SOFT_RLIMIT_NOFILE = 1024
+
+CURRENT_RLIMIT_NOFILE = resource.getrlimit(resource.RLIMIT_NOFILE)
+DESIRED_RLIMIT_NOFILE = (SOFT_RLIMIT_NOFILE, CURRENT_RLIMIT_NOFILE[1])
+
+if DESIRED_RLIMIT_NOFILE < CURRENT_RLIMIT_NOFILE:
+ resource.setrlimit(resource.RLIMIT_NOFILE, DESIRED_RLIMIT_NOFILE)
+ CURRENT_RLIMIT_NOFILE = DESIRED_RLIMIT_NOFILE
+
+import base64
+import errno
+import io
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+try:
+ import typing as t
+except ImportError:
+ t = None
+
+try:
+ from shlex import quote as cmd_quote
+except ImportError:
+ # noinspection PyProtectedMember
+ from pipes import quote as cmd_quote
+
+ENCODING = 'utf-8'
+PAYLOAD = b'{payload}' # base-64 encoded JSON payload which will be populated before this script is executed
+
+Text = type(u'')
+
+VERBOSITY = 0
+CONSOLE = sys.stderr
+
+
+def main(): # type: () -> None
+ """Main program entry point."""
+ global VERBOSITY # pylint: disable=global-statement
+
+ payload = json.loads(to_text(base64.b64decode(PAYLOAD)))
+
+ VERBOSITY = payload['verbosity']
+
+ script = payload['script']
+ commands = payload['commands']
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-', suffix='-pip.py') as pip:
+ pip.write(to_bytes(script))
+ pip.flush()
+
+ for name, options in commands:
+ try:
+ globals()[name](pip.name, options)
+ except ApplicationError as ex:
+ print(ex)
+ sys.exit(1)
+
+
+def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None
+ """Perform a pip install."""
+ requirements = options['requirements']
+ constraints = options['constraints']
+ packages = options['packages']
+
+ tempdir = tempfile.mkdtemp(prefix='ansible-test-', suffix='-requirements')
+
+ try:
+ options = common_pip_options()
+ options.extend(packages)
+
+ for path, content in requirements:
+ write_text_file(os.path.join(tempdir, path), content, True)
+ options.extend(['-r', path])
+
+ for path, content in constraints:
+ write_text_file(os.path.join(tempdir, path), content, True)
+ options.extend(['-c', path])
+
+ command = [sys.executable, pip, 'install'] + options
+
+ execute_command(command, tempdir)
+ finally:
+ remove_tree(tempdir)
+
+
+def uninstall(pip, options): # type: (str, t.Dict[str, t.Any]) -> None
+ """Perform a pip uninstall."""
+ packages = options['packages']
+ ignore_errors = options['ignore_errors']
+
+ options = common_pip_options()
+ options.extend(packages)
+
+ command = [sys.executable, pip, 'uninstall', '-y'] + options
+
+ try:
+ execute_command(command, capture=True)
+ except SubprocessError:
+ if not ignore_errors:
+ raise
+
+
+def common_pip_options(): # type: () -> t.List[str]
+ """Return a list of common pip options."""
+ return [
+ '--disable-pip-version-check',
+ ]
+
+
+def devnull(): # type: () -> t.IO[bytes]
+ """Return a file object that references devnull."""
+ try:
+ return devnull.file
+ except AttributeError:
+ devnull.file = open(os.devnull, 'w+b') # pylint: disable=consider-using-with
+
+ return devnull.file
+
+
+class ApplicationError(Exception):
+ """Base class for application exceptions."""
+
+
+class SubprocessError(ApplicationError):
+ """A command returned a non-zero status."""
+ def __init__(self, cmd, status, stdout, stderr): # type: (t.List[str], int, str, str) -> None
+ message = 'A command failed with status %d: %s' % (status, ' '.join(cmd_quote(c) for c in cmd))
+
+ if stderr:
+ message += '\n>>> Standard Error\n%s' % stderr.strip()
+
+ if stdout:
+ message += '\n>>> Standard Output\n%s' % stdout.strip()
+
+ super(SubprocessError, self).__init__(message)
+
+
+def log(message, verbosity=0): # type: (str, int) -> None
+ """Log a message to the console if the verbosity is high enough."""
+ if verbosity > VERBOSITY:
+ return
+
+ print(message, file=CONSOLE)
+ CONSOLE.flush()
+
+
+def execute_command(cmd, cwd=None, capture=False): # type: (t.List[str], t.Optional[str], bool) -> None
+ """Execute the specified command."""
+ log('Execute command: %s' % ' '.join(cmd_quote(c) for c in cmd), verbosity=1)
+
+ cmd_bytes = [to_bytes(c) for c in cmd]
+
+ if capture:
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE
+ else:
+ stdout = None
+ stderr = None
+
+ process = subprocess.Popen(cmd_bytes, cwd=to_optional_bytes(cwd), stdin=devnull(), stdout=stdout, stderr=stderr) # pylint: disable=consider-using-with
+ stdout_bytes, stderr_bytes = process.communicate()
+ stdout_text = to_optional_text(stdout_bytes) or u''
+ stderr_text = to_optional_text(stderr_bytes) or u''
+
+ if process.returncode != 0:
+ raise SubprocessError(cmd, process.returncode, stdout_text, stderr_text)
+
+
+def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None
+ """Write the given text content to the specified path, optionally creating missing directories."""
+ if create_directories:
+ make_dirs(os.path.dirname(path))
+
+ with open_binary_file(path, 'wb') as file_obj:
+ file_obj.write(to_bytes(content))
+
+
+def remove_tree(path): # type: (str) -> None
+ """Remove the specified directory tree."""
+ try:
+ shutil.rmtree(to_bytes(path))
+ except OSError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+
+
+def make_dirs(path): # type: (str) -> None
+ """Create a directory at path, including any necessary parent directories."""
+ try:
+ os.makedirs(to_bytes(path))
+ except OSError as ex:
+ if ex.errno != errno.EEXIST:
+ raise
+
+
+def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
+ """Open the given path for binary access."""
+ if 'b' not in mode:
+ raise Exception('mode must include "b" for binary files: %s' % mode)
+
+ # noinspection PyTypeChecker
+ return io.open(to_bytes(path), mode) # pylint: disable=consider-using-with
+
+
+def to_optional_bytes(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[bytes]
+ """Return the given value as bytes encoded using UTF-8 if not already bytes, or None if the value is None."""
+ return None if value is None else to_bytes(value, errors)
+
+
+def to_optional_text(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[t.Text]
+ """Return the given value as text decoded using UTF-8 if not already text, or None if the value is None."""
+ return None if value is None else to_text(value, errors)
+
+
+def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes
+ """Return the given value as bytes encoded using UTF-8 if not already bytes."""
+ if isinstance(value, bytes):
+ return value
+
+ if isinstance(value, Text):
+ return value.encode(ENCODING, errors)
+
+ raise Exception('value is not bytes or text: %s' % type(value))
+
+
+def to_text(value, errors='strict'): # type: (t.AnyStr, str) -> t.Text
+ """Return the given value as text decoded using UTF-8 if not already text."""
+ if isinstance(value, bytes):
+ return value.decode(ENCODING, errors)
+
+ if isinstance(value, Text):
+ return value
+
+ raise Exception('value is not bytes or text: %s' % type(value))
+
+
+if __name__ == '__main__':
+ main()
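
The b'{payload}' placeholder above is populated by the controller before the script runs; that side is not part of this diff, so the following is a rough sketch only, with every name and value illustrative:

    import base64
    import json

    payload = dict(
        verbosity=0,
        script='<contents of the bundled pip bootstrap script>',  # illustrative
        commands=[
            ('install', dict(
                requirements=[('requirements.txt', 'pyyaml == 5.4.1\n')],
                constraints=[],
                packages=[],
            )),
        ],
    )

    encoded = base64.b64encode(json.dumps(payload).encode('utf-8')).decode('ascii')
    source = open('requirements.py').read().replace('{payload}', encoded)
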
diff --git a/test/lib/ansible_test/config/config.yml b/test/lib/ansible_test/config/config.yml
new file mode 100644
index 00000000..9fca7afb
--- /dev/null
+++ b/test/lib/ansible_test/config/config.yml
@@ -0,0 +1,41 @@
+# Sample ansible-test configuration file for collections.
+# Support for this feature was first added in ansible-core 2.12.
+# Use of this file is optional.
+# If used, this file must be placed in "tests/config.yml" relative to the base of the collection.
+
+modules:
+ # Configuration for modules/module_utils.
+ # These settings do not apply to other content in the collection.
+
+ python_requires: default
+ # Python versions supported by modules/module_utils.
+ # This setting is required.
+ #
+ # Possible values:
+ #
+ # - 'default' - All Python versions supported by Ansible.
+ # This is the default value if no configuration is provided.
+ # - 'controller' - All Python versions supported by the Ansible controller.
+ # This indicates the modules/module_utils can only run on the controller.
+ # Intended for use only with modules/module_utils that depend on ansible-connection, which only runs on the controller.
+ # Unit tests for modules/module_utils will be permitted to import any Ansible code, instead of only module_utils.
+ # - SpecifierSet - A PEP 440 specifier set indicating the supported Python versions.
+ # This is only needed when modules/module_utils do not support all Python versions supported by Ansible.
+ # It is not necessary to exclude versions which Ansible does not support, as this will be done automatically.
+ #
+ # What does this affect?
+ #
+ # - Unit tests will be skipped on any unsupported Python version.
+ # - Sanity tests that are Python version specific will be skipped on any unsupported Python version that is not supported by the controller.
+ #
+ # Sanity tests that are Python version specific will always be executed for Python versions supported by the controller, regardless of this setting.
+ # Reasons for this restriction include, but are not limited to:
+ #
+ # - AnsiballZ must be able to AST parse modules/module_utils on the controller, even though they may execute on a managed node.
+ # - ansible-doc must be able to AST parse modules/module_utils on the controller to display documentation.
+ # - ansible-test must be able to AST parse modules/module_utils to perform static analysis on them.
+ # - ansible-test must be able to execute portions of modules/module_utils to validate their argument specs.
+ #
+ # These settings only apply to modules/module_utils.
+ # It is not possible to declare supported Python versions for controller-only code.
+ # All Python versions supported by the controller must be supported by controller-only code.
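
For the SpecifierSet option, PEP 440 semantics apply. A quick sketch using the packaging library (the specifier value is illustrative):

    from packaging.specifiers import SpecifierSet

    # e.g. python_requires: '>=2.7,!=3.5.*' in tests/config.yml
    spec = SpecifierSet('>=2.7,!=3.5.*')
    print('3.8' in spec)  # True
    print('3.5' in spec)  # False
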
diff --git a/test/sanity/code-smell/ansible-requirements.json b/test/sanity/code-smell/ansible-requirements.json
index 4bc356be..b4b7f2b1 100644
--- a/test/sanity/code-smell/ansible-requirements.json
+++ b/test/sanity/code-smell/ansible-requirements.json
@@ -1,7 +1,7 @@
{
"prefixes": [
"requirements.txt",
- "test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt"
+ "test/lib/ansible_test/_data/requirements/ansible.txt"
],
"output": "path-line-column-message"
}
diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py
index c270b32d..48ecbaaf 100755..100644
--- a/test/sanity/code-smell/ansible-requirements.py
+++ b/test/sanity/code-smell/ansible-requirements.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -17,7 +16,7 @@ def read_file(path):
def main():
ORIGINAL_FILE = 'requirements.txt'
- VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt'
+ VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/ansible.txt'
original_requirements = read_file(ORIGINAL_FILE)
vendored_requirements = read_file(VENDORED_COPY)
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.json b/test/sanity/code-smell/ansible-test-future-boilerplate.json
new file mode 100644
index 00000000..e689ba5d
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.json
@@ -0,0 +1,9 @@
+{
+ "extensions": [
+ ".py"
+ ],
+ "prefixes": [
+ "test/lib/ansible_test/_internal/"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.py b/test/sanity/code-smell/ansible-test-future-boilerplate.py
new file mode 100644
index 00000000..55092a73
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.py
@@ -0,0 +1,44 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ast
+import sys
+
+
+def main():
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ with open(path, 'rb') as path_fd:
+ lines = path_fd.read().splitlines()
+
+ missing = True
+ if not lines:
+ # Files are allowed to be empty of everything including boilerplate
+ missing = False
+
+ for text in lines:
+ if text == b'from __future__ import annotations':
+ missing = False
+ break
+
+ if missing:
+ with open(path) as file:
+ contents = file.read()
+
+ # noinspection PyBroadException
+ try:
+ node = ast.parse(contents)
+
+ # files consisting of only assignments have no need for future import boilerplate
+ # the only exception would be division during assignment, but we'll overlook that for simplicity
+ # the most likely case is that of a documentation only python file
+ if all(isinstance(statement, ast.Assign) for statement in node.body):
+ missing = False
+ except Exception: # pylint: disable=broad-except
+ pass # the compile sanity test will report this error
+
+ if missing:
+ print('%s: missing: from __future__ import annotations' % path)
+
+
+if __name__ == '__main__':
+ main()
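
A short sketch of the assignment-only exemption above, assuming a documentation-only file:

    import ast

    contents = "DOCUMENTATION = '''...'''\nEXAMPLES = '''...'''\n"
    node = ast.parse(contents)

    # every top-level statement is an assignment, so the
    # future-import boilerplate requirement is waived
    print(all(isinstance(statement, ast.Assign) for statement in node.body))  # True
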
diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py
index 51dff20c..bd216106 100755..100644
--- a/test/sanity/code-smell/configure-remoting-ps1.py
+++ b/test/sanity/code-smell/configure-remoting-ps1.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -11,7 +10,7 @@ def main():
# required to be packaged with ansible-test and must match the original file, but cannot be a symbolic link
# the packaged version is needed to run tests when ansible-test has been installed
# keeping the packaged version identical to the original makes sure tests cover both files
- packaged = 'test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1'
+ packaged = 'test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1'
copy_valid = False
diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py
index 08e93c36..53cb2b93 100755..100644
--- a/test/sanity/code-smell/deprecated-config.py
+++ b/test/sanity/code-smell/deprecated-config.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2018, Matt Martz <matt@sivel.net>
#
@@ -25,7 +24,7 @@ import os
import re
import sys
-from distutils.version import StrictVersion
+from ansible.module_utils.compat.version import StrictVersion
import yaml
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
index 859c4ee7..a3a33e6c 100644
--- a/test/sanity/code-smell/deprecated-config.requirements.txt
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -1,2 +1,5 @@
-jinja2 # ansible-core requirement
-pyyaml
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1
+
+# dependencies
+MarkupSafe == 2.0.1
diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json
index 0218bfc5..a43fa923 100644
--- a/test/sanity/code-smell/docs-build.json
+++ b/test/sanity/code-smell/docs-build.json
@@ -1,5 +1,4 @@
{
- "intercept": true,
"disabled": true,
"no_targets": true,
"output": "path-line-column-message"
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
index 80eca15f..ff7d427a 100755..100644
--- a/test/sanity/code-smell/docs-build.py
+++ b/test/sanity/code-smell/docs-build.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt
index 2e143b03..36fc363a 100644
--- a/test/sanity/code-smell/docs-build.requirements.txt
+++ b/test/sanity/code-smell/docs-build.requirements.txt
@@ -1,8 +1,50 @@
-jinja2
-pyyaml
-resolvelib
-sphinx
-sphinx-notfound-page
-sphinx_ansible_theme
-straight.plugin
+jinja2 == 3.0.1
+pyyaml == 5.4.1
+resolvelib == 0.5.4
+sphinx == 2.1.2
+sphinx-notfound-page == 0.7.1
+sphinx-ansible-theme == 0.8.0
+straight.plugin == 1.5.0
antsibull == 0.26.0
+
+# dependencies
+MarkupSafe == 2.0.1
+aiofiles == 0.7.0
+aiohttp == 3.7.4.post0
+alabaster == 0.7.12
+ansible-pygments == 0.1.0
+antsibull-changelog == 0.9.0
+async-timeout == 3.0.1
+asyncio-pool == 0.5.2
+attrs == 21.2.0
+babel == 2.9.1
+certifi == 2021.5.30
+chardet == 4.0.0
+charset-normalizer == 2.0.5
+docutils == 0.17.1
+idna == 2.5
+imagesize == 1.2.0
+multidict == 5.1.0
+packaging == 21.0
+perky == 0.5.5
+pydantic == 1.8.2
+pygments == 2.10.0
+pyparsing == 2.4.7
+pytz == 2021.1
+requests == 2.26.0
+rstcheck == 3.3.1
+semantic-version == 2.8.5
+sh == 1.14.2
+six == 1.16.0
+snowballstemmer == 2.1.0
+sphinx-rtd-theme == 1.0.0
+sphinxcontrib-applehelp == 1.0.2
+sphinxcontrib-devhelp == 1.0.2
+sphinxcontrib-htmlhelp == 2.0.0
+sphinxcontrib-jsmath == 1.0.1
+sphinxcontrib-qthelp == 1.0.3
+sphinxcontrib-serializinghtml == 1.1.5
+twiggy == 0.5.1
+typing-extensions == 3.10.0.2
+urllib3 == 1.26.6
+yarl == 1.6.3
diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py
index bff09152..1b55c23e 100755..100644
--- a/test/sanity/code-smell/no-unwanted-files.py
+++ b/test/sanity/code-smell/no-unwanted-files.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Prevent unwanted files from being added to the source tree."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py
index e9ddc8a5..1fd98027 100755..100644
--- a/test/sanity/code-smell/obsolete-files.py
+++ b/test/sanity/code-smell/obsolete-files.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Prevent files from being added to directories that are now obsolete."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json
index 2b8a5326..0aa70a3c 100644
--- a/test/sanity/code-smell/package-data.json
+++ b/test/sanity/code-smell/package-data.json
@@ -1,5 +1,4 @@
{
- "intercept": true,
"disabled": true,
"all_targets": true,
"output": "path-message"
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
index ca5f5ef5..921cb197 100755..100644
--- a/test/sanity/code-smell/package-data.py
+++ b/test/sanity/code-smell/package-data.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -26,13 +25,18 @@ def assemble_files_to_ship(complete_file_list):
'.github/*/*',
'changelogs/fragments/*',
'hacking/backport/*',
- 'hacking/shippable/*',
+ 'hacking/azp/*',
'hacking/tests/*',
'hacking/ticket_stubs/*',
'test/sanity/code-smell/botmeta.*',
'test/utils/*',
'test/utils/*/*',
'test/utils/*/*/*',
+ 'test/results/.tmp/*',
+ 'test/results/.tmp/*/*',
+ 'test/results/.tmp/*/*/*',
+ 'test/results/.tmp/*/*/*/*',
+ 'test/results/.tmp/*/*/*/*/*',
'.git*',
)
ignore_files = frozenset((
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
index 41b3b577..8055b3c5 100644
--- a/test/sanity/code-smell/package-data.requirements.txt
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -1,9 +1,13 @@
-docutils
-jinja2
-packaging
-pyyaml # ansible-core requirement
-resolvelib # ansible-core requirement
-rstcheck
-setuptools
-straight.plugin
+docutils == 0.17.1
+jinja2 == 3.0.1
+packaging == 21.0
+pyyaml == 5.4.1 # ansible-core requirement
+resolvelib == 0.5.4 # ansible-core requirement
+rstcheck == 3.3.1
+straight.plugin == 1.5.0
antsibull-changelog == 0.9.0
+
+# dependencies
+MarkupSafe == 2.0.1
+pyparsing == 2.4.7
+semantic-version == 2.8.5
diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py
index f8003320..4e145062 100755..100644
--- a/test/sanity/code-smell/release-names.py
+++ b/test/sanity/code-smell/release-names.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2019, Ansible Project
#
diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt
index c3726e8b..cc530e42 100644
--- a/test/sanity/code-smell/release-names.requirements.txt
+++ b/test/sanity/code-smell/release-names.requirements.txt
@@ -1 +1 @@
-pyyaml
+pyyaml == 5.4.1
diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py
index 5ef410bd..d71ddeeb 100755..100644
--- a/test/sanity/code-smell/required-and-default-attributes.py
+++ b/test/sanity/code-smell/required-and-default-attributes.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py
index 885659c3..7f702846 100755..100644
--- a/test/sanity/code-smell/rstcheck.py
+++ b/test/sanity/code-smell/rstcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Sanity test using rstcheck and sphinx."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt
index 4674b8df..071bc5a1 100644
--- a/test/sanity/code-smell/rstcheck.requirements.txt
+++ b/test/sanity/code-smell/rstcheck.requirements.txt
@@ -1,2 +1,27 @@
-rstcheck
-sphinx # required for full functionality
+rstcheck == 3.3.1
+sphinx == 2.1.2 # required for full functionality
+
+# dependencies
+Jinja2 == 3.0.1
+MarkupSafe == 2.0.1
+Pygments == 2.10.0
+alabaster == 0.7.12
+babel == 2.9.1
+certifi == 2021.5.30
+charset-normalizer == 2.0.5
+docutils == 0.17.1
+idna == 2.5
+imagesize == 1.2.0
+packaging == 21.0
+pyparsing == 2.4.7
+pytz == 2021.1
+requests == 2.26.0
+rstcheck == 3.3.1
+snowballstemmer == 2.1.0
+sphinxcontrib-applehelp == 1.0.2
+sphinxcontrib-devhelp == 1.0.2
+sphinxcontrib-htmlhelp == 2.0.0
+sphinxcontrib-jsmath == 1.0.1
+sphinxcontrib-qthelp == 1.0.3
+sphinxcontrib-serializinghtml == 1.1.5
+urllib3 == 1.26.6
diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py
index 21dea5fa..8383235e 100755..100644
--- a/test/sanity/code-smell/test-constraints.py
+++ b/test/sanity/code-smell/test-constraints.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -12,11 +11,8 @@ def main():
requirements = {}
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- if path == 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt':
- # This file is an exact copy of requirements.txt that is used in the import
- # sanity test. There is a code-smell test which ensures that the two files
- # are identical, and it is only used inside an empty venv, so we can ignore
- # it here.
+ if path == 'test/lib/ansible_test/_data/requirements/ansible.txt':
+ # This file is an exact copy of the ansible requirements.txt and should not conflict with other constraints.
continue
with open(path, 'r') as path_fd:
requirements[path] = parse_requirements(path_fd.read().splitlines())
@@ -38,7 +34,7 @@ def main():
comment = requirement.group('comment')
is_sanity = path.startswith('test/lib/ansible_test/_data/requirements/sanity.') or path.startswith('test/sanity/code-smell/')
- is_pinned = re.search('^ *== *[0-9.]+$', constraints)
+ is_pinned = re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints)
is_constraints = path == constraints_path
if is_sanity:
@@ -63,11 +59,6 @@ def main():
print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
for name, requirements in frozen_sanity.items():
- for req in requirements:
- if name in non_sanity_requirements and req[3].group('constraints').strip():
- print('%s:%d:%d: sanity constraint (%s) for package `%s` is not allowed because `%s` is used outside sanity tests' % (
- req[0], req[1], req[3].start('constraints') + 1, req[3].group('constraints'), name, name))
-
if len(set(req[3].group('constraints').strip() for req in requirements)) != 1:
for req in requirements:
print('%s:%d:%d: sanity constraint (%s) does not match others for package `%s`' % (
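
The widened pin pattern now also accepts PEP 440 post-releases; a quick check using the same regex as above:

    import re

    def is_pinned(constraints):
        return bool(re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints))

    print(is_pinned(' == 3.7.4.post0'))  # True (e.g. the aiohttp pin above)
    print(is_pinned(' == 5.4.1'))        # True
    print(is_pinned(' >= 2.7'))          # False
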
diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py
index 3904b730..009f801b 100755..100644
--- a/test/sanity/code-smell/update-bundled.py
+++ b/test/sanity/code-smell/update-bundled.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2018, Ansible Project
#
@@ -29,7 +28,7 @@ import fnmatch
import json
import re
import sys
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
import packaging.specifiers
@@ -140,6 +139,9 @@ def main():
files_with_bundled_metadata = get_files_with_bundled_metadata(paths)
for filename in files_with_bundled_metadata.difference(bundled_libs):
+ if filename.startswith('test/support/'):
+ continue # bundled support code does not need to be updated or tracked
+
print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to'
' test/sanity/code-smell/update-bundled.py'.format(filename))
diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt
index 748809f7..101e3fdb 100644
--- a/test/sanity/code-smell/update-bundled.requirements.txt
+++ b/test/sanity/code-smell/update-bundled.requirements.txt
@@ -1 +1,4 @@
-packaging
+packaging == 21.0
+
+# dependencies
+pyparsing == 2.4.7
diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt
index d6e3c565..4a3bb620 100644
--- a/test/sanity/ignore.txt
+++ b/test/sanity/ignore.txt
@@ -1,54 +1,25 @@
docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
docs/docsite/rst/locales/ja/LC_MESSAGES/dev_guide.po no-smart-quotes # Translation of the no-smart-quotes rule
-docs/docsite/rst/user_guide/playbooks_filters.rst docs-build
-docs/docsite/rst/user_guide/playbooks_python_version.rst docs-build
examples/play.yml shebang
+examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/my_test.py shebang # example module but not in a normal module location
examples/scripts/my_test_facts.py shebang # example module but not in a normal module location
examples/scripts/my_test_info.py shebang # example module but not in a normal module location
-examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
-hacking/build-ansible.py shebang # only run by release engineers, Python 3.6+ required
-hacking/build_library/build_ansible/announce.py compile-2.6!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/announce.py compile-2.7!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/announce.py compile-3.5!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.6!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_config.py compile-2.7!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_config.py compile-3.5!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.6!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-2.7!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/dump_keywords.py compile-3.5!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.6!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/generate_man.py compile-2.7!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/generate_man.py compile-3.5!skip # docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.6!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-2.7!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/porting_guide.py compile-3.5!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.6!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-2.7!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/release_announcement.py compile-3.5!skip # release process only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.6!skip # release process and docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-2.7!skip # release process and docs build only, 3.6+ required
-hacking/build_library/build_ansible/command_plugins/update_intersphinx.py compile-3.5!skip # release process and docs build only, 3.6+ required
-hacking/build_library/build_ansible/commands.py compile-2.6!skip # release and docs process only, 3.6+ required
-hacking/build_library/build_ansible/commands.py compile-2.7!skip # release and docs process only, 3.6+ required
-hacking/build_library/build_ansible/commands.py compile-3.5!skip # release and docs process only, 3.6+ required
-lib/ansible/keyword_desc.yml no-unwanted-files
-lib/ansible/cli/console.py pylint:blacklisted-name
+lib/ansible/cli/console.py pylint:disallowed-name
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
-lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
+lib/ansible/executor/playbook_executor.py pylint:disallowed-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
-lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
-lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
-lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
-lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
-lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
-lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
+lib/ansible/executor/task_queue_manager.py pylint:disallowed-name
+lib/ansible/keyword_desc.yml no-unwanted-files
+lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
+lib/ansible/module_utils/compat/_selectors2.py pylint:disallowed-name
lib/ansible/module_utils/compat/selinux.py import-2.6!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-2.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.5!skip # pass/fail depends on presence of libselinux.so
@@ -56,16 +27,13 @@ lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends o
lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
-lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
-lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
-lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
lib/ansible/module_utils/distro/__init__.py empty-init # breaks namespacing, bundled, do not override
lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
lib/ansible/module_utils/distro/_distro.py no-assert
lib/ansible/module_utils/distro/_distro.py pep8!skip # bundled code we don't want to modify
lib/ansible/module_utils/facts/__init__.py empty-init # breaks namespacing, deprecate and eventually remove
-lib/ansible/module_utils/facts/network/linux.py pylint:blacklisted-name
+lib/ansible/module_utils/facts/network/linux.py pylint:disallowed-name
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.ArgvParser.psm1 pslint:PSUseApprovedVerbs
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSProvideCommentHelp # need to agree on best format for comment location
lib/ansible/module_utils/powershell/Ansible.ModuleUtils.CommandUtil.psm1 pslint:PSUseApprovedVerbs
@@ -84,7 +52,7 @@ lib/ansible/module_utils/six/__init__.py no-dict-iterkeys
lib/ansible/module_utils/six/__init__.py no-dict-itervalues
lib/ansible/module_utils/six/__init__.py pylint:self-assigning-variable
lib/ansible/module_utils/six/__init__.py replace-urlopen
-lib/ansible/module_utils/urls.py pylint:blacklisted-name
+lib/ansible/module_utils/urls.py pylint:disallowed-name
lib/ansible/module_utils/urls.py replace-urlopen
lib/ansible/modules/apt.py validate-modules:parameter-invalid
lib/ansible/modules/apt_key.py validate-modules:parameter-type-not-in-doc
@@ -101,7 +69,7 @@ lib/ansible/modules/command.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/command.py validate-modules:doc-missing-type
lib/ansible/modules/command.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/command.py validate-modules:undocumented-parameter
-lib/ansible/modules/copy.py pylint:blacklisted-name
+lib/ansible/modules/copy.py pylint:disallowed-name
lib/ansible/modules/copy.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/copy.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/copy.py validate-modules:undocumented-parameter
@@ -110,17 +78,17 @@ lib/ansible/modules/dnf.py validate-modules:parameter-invalid
lib/ansible/modules/file.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/file.py validate-modules:undocumented-parameter
lib/ansible/modules/find.py use-argspec-type-path # fix needed
-lib/ansible/modules/git.py pylint:blacklisted-name
+lib/ansible/modules/git.py pylint:disallowed-name
lib/ansible/modules/git.py use-argspec-type-path
lib/ansible/modules/git.py validate-modules:doc-missing-type
lib/ansible/modules/git.py validate-modules:doc-required-mismatch
lib/ansible/modules/hostname.py validate-modules:invalid-ansiblemodule-schema
-lib/ansible/modules/iptables.py pylint:blacklisted-name
+lib/ansible/modules/iptables.py pylint:disallowed-name
lib/ansible/modules/lineinfile.py validate-modules:doc-choices-do-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/lineinfile.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/package_facts.py validate-modules:doc-choices-do-not-match-spec
-lib/ansible/modules/pip.py pylint:blacklisted-name
+lib/ansible/modules/pip.py pylint:disallowed-name
lib/ansible/modules/pip.py validate-modules:invalid-ansiblemodule-schema
lib/ansible/modules/replace.py validate-modules:nonexistent-parameter-documented
lib/ansible/modules/service.py validate-modules:nonexistent-parameter-documented
@@ -133,27 +101,34 @@ lib/ansible/modules/systemd.py validate-modules:parameter-invalid
lib/ansible/modules/systemd.py validate-modules:return-syntax-error
lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error
lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented
-lib/ansible/modules/uri.py pylint:blacklisted-name
+lib/ansible/modules/uri.py pylint:disallowed-name
lib/ansible/modules/uri.py validate-modules:doc-required-mismatch
lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/user.py validate-modules:doc-default-incompatible-type
lib/ansible/modules/user.py validate-modules:use-run-command-not-popen
-lib/ansible/modules/yum.py pylint:blacklisted-name
+lib/ansible/modules/yum.py pylint:disallowed-name
lib/ansible/modules/yum.py validate-modules:parameter-invalid
lib/ansible/modules/yum_repository.py validate-modules:doc-default-does-not-match-spec
lib/ansible/modules/yum_repository.py validate-modules:parameter-type-not-in-doc
lib/ansible/modules/yum_repository.py validate-modules:undocumented-parameter
-lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
-lib/ansible/playbook/base.py pylint:blacklisted-name
+lib/ansible/parsing/vault/__init__.py pylint:disallowed-name
+lib/ansible/parsing/yaml/objects.py pylint:arguments-renamed
+lib/ansible/plugins/callback/__init__.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/advanced_host_list.py pylint:arguments-renamed
+lib/ansible/plugins/inventory/host_list.py pylint:arguments-renamed
+lib/ansible/plugins/lookup/random_choice.py pylint:arguments-renamed
+lib/ansible/plugins/shell/cmd.py pylint:arguments-renamed
+lib/ansible/playbook/base.py pylint:disallowed-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
-lib/ansible/playbook/helpers.py pylint:blacklisted-name
-lib/ansible/playbook/role/__init__.py pylint:blacklisted-name
+lib/ansible/playbook/helpers.py pylint:disallowed-name
lib/ansible/plugins/action/normal.py action-plugin-docs # default action plugin for modules without a dedicated action plugin
lib/ansible/plugins/cache/base.py ansible-doc!skip # not a plugin, but a stub for backwards compatibility
-lib/ansible/plugins/lookup/sequence.py pylint:blacklisted-name
-lib/ansible/plugins/strategy/__init__.py pylint:blacklisted-name
-lib/ansible/plugins/strategy/linear.py pylint:blacklisted-name
-lib/ansible/vars/hostvars.py pylint:blacklisted-name
+lib/ansible/plugins/lookup/sequence.py pylint:disallowed-name
+lib/ansible/plugins/strategy/__init__.py pylint:disallowed-name
+lib/ansible/plugins/strategy/linear.py pylint:disallowed-name
+lib/ansible/vars/hostvars.py pylint:disallowed-name
+lib/ansible/utils/collection_loader/_collection_finder.py pylint:deprecated-class
+lib/ansible/utils/collection_loader/_collection_meta.py pylint:deprecated-class
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/plugins/modules/hello.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test-docker/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
@@ -163,14 +138,12 @@ test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integrati
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/integration/targets/hello/files/bad.py pylint:ansible-bad-import-from # ignore, required for testing
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/module_utils/test_my_util.py pylint:relative-beyond-top-level
test/integration/targets/ansible-test/ansible_collections/ns/col/tests/unit/plugins/modules/test_hello.py pylint:relative-beyond-top-level
-test/integration/targets/collections_plugin_namespace/collection_root/ansible_collections/my_ns/my_col/plugins/lookup/lookup_no_future_boilerplate.py future-import-boilerplate # testing Python 2.x implicit relative imports
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util2.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/module_utils/my_util3.py pylint:relative-beyond-top-level
test/integration/targets/collections_relative_imports/collection_root/ansible_collections/my_ns/my_col/plugins/modules/my_module.py pylint:relative-beyond-top-level
test/integration/targets/gathering_facts/library/bogus_facts shebang
test/integration/targets/gathering_facts/library/facts_one shebang
test/integration/targets/gathering_facts/library/facts_two shebang
-test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xSetReboot/ANSIBLE_xSetReboot.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/DSCResources/ANSIBLE_xTestResource/ANSIBLE_xTestResource.psm1 pslint!skip
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.0/xTestDsc.psd1 pslint!skip
@@ -178,6 +151,7 @@ test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/DSCResources/AN
test/integration/targets/incidental_win_dsc/files/xTestDsc/1.0.1/xTestDsc.psd1 pslint!skip
test/integration/targets/incidental_win_ping/library/win_ping_syntax_error.ps1 pslint!skip
test/integration/targets/incidental_win_reboot/templates/post_reboot.ps1 pslint!skip
+test/integration/targets/json_cleanup/library/bad_json shebang
test/integration/targets/lookup_csvfile/files/crlf.csv line-endings
test/integration/targets/lookup_ini/lookup-8859-15.ini no-smart-quotes
test/integration/targets/module_precedence/lib_with_extension/a.ini shebang
@@ -185,11 +159,11 @@ test/integration/targets/module_precedence/lib_with_extension/ping.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/a.ini shebang
test/integration/targets/module_precedence/roles_with_extension/foo/library/ping.ini shebang
test/integration/targets/module_utils/library/test.py future-import-boilerplate # allow testing of Python 2.x implicit relative imports
-test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:blacklisted-name
-test/integration/targets/module_utils/module_utils/foo.py pylint:blacklisted-name
-test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:blacklisted-name
-test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:blacklisted-name
-test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:blacklisted-name
+test/integration/targets/module_utils/module_utils/bar0/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/foo.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/__init__.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/sub/bar/bar.py pylint:disallowed-name
+test/integration/targets/module_utils/module_utils/yak/zebra/foo.py pylint:disallowed-name
test/integration/targets/old_style_modules_posix/library/helloworld.sh shebang
test/integration/targets/template/files/encoding_1252_utf-8.expected no-smart-quotes
test/integration/targets/template/files/encoding_1252_windows-1252.expected no-smart-quotes
@@ -207,11 +181,11 @@ test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PS
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
-test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
-test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
-test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py use-compat-six
-test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
-test/lib/ansible_test/_data/setup/windows-httptester.ps1 pslint:PSCustomUseLiteralPath
+test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
+test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
+test/support/integration/plugins/inventory/aws_ec2.py pylint:use-a-generator
+test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/integration/plugins/modules/ec2_group.py pylint:use-a-generator
test/support/integration/plugins/module_utils/aws/core.py pylint:property-with-parameters
test/support/integration/plugins/module_utils/cloud.py future-import-boilerplate
test/support/integration/plugins/module_utils/cloud.py metaclass-boilerplate
@@ -224,60 +198,21 @@ test/support/integration/plugins/module_utils/database.py metaclass-boilerplate
test/support/integration/plugins/module_utils/mysql.py future-import-boilerplate
test/support/integration/plugins/module_utils/mysql.py metaclass-boilerplate
test/support/integration/plugins/module_utils/network/common/utils.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
test/support/integration/plugins/module_utils/network/common/utils.py metaclass-boilerplate
test/support/integration/plugins/module_utils/postgres.py future-import-boilerplate
test/support/integration/plugins/module_utils/postgres.py metaclass-boilerplate
-test/support/integration/plugins/modules/lvg.py pylint:blacklisted-name
-test/support/integration/plugins/modules/timezone.py pylint:blacklisted-name
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/netconf.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/doc_fragments/network_agnostic.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py metaclass-boilerplate
+test/support/integration/plugins/modules/lvg.py pylint:disallowed-name
+test/support/integration/plugins/modules/timezone.py pylint:disallowed-name
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py no-unicode-literals
test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/compat/ipaddress.py pep8:E203
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/cfg/base.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/config.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/netconf.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/network.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/parsing.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/netconf/netconf.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/restconf/restconf.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/module_utils/network/ios/ios.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_command.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py metaclass-boilerplate
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/facts/facts.py pylint:unnecessary-comprehension
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/netconf/default.py pylint:unnecessary-comprehension
+test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/utils.py pylint:use-a-generator
+test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/cliconf/ios.py pylint:arguments-renamed
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/cliconf/vyos.py pylint:arguments-renamed
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/modules/ios_config.py pep8:E501
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/doc_fragments/vyos.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/vyos.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py metaclass-boilerplate
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pep8:E231
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:blacklisted-name
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_config.py metaclass-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py future-import-boilerplate
-test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_facts.py metaclass-boilerplate
+test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/modules/vyos_command.py pylint:disallowed-name
test/support/windows-integration/plugins/modules/async_status.ps1 pslint!skip
test/support/windows-integration/plugins/modules/setup.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_copy.ps1 pslint!skip
@@ -286,24 +221,21 @@ test/support/windows-integration/plugins/modules/win_feature.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_find.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_lineinfile.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_regedit.ps1 pslint!skip
-test/support/windows-integration/plugins/modules/win_security_policy.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_shell.ps1 pslint!skip
test/support/windows-integration/plugins/modules/win_wait_for.ps1 pslint!skip
-test/units/executor/test_play_iterator.py pylint:blacklisted-name
+test/units/executor/test_play_iterator.py pylint:disallowed-name
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-no-version
test/units/module_utils/basic/test_deprecate_warn.py pylint:ansible-deprecated-version
-test/units/module_utils/basic/test_run_command.py pylint:blacklisted-name
+test/units/module_utils/basic/test_run_command.py pylint:disallowed-name
test/units/module_utils/urls/fixtures/multipart.txt line-endings # Fixture for HTTP tests that use CRLF
test/units/module_utils/urls/test_Request.py replace-urlopen
test/units/module_utils/urls/test_fetch_url.py replace-urlopen
-test/units/modules/test_apt.py pylint:blacklisted-name
-test/units/parsing/vault/test_vault.py pylint:blacklisted-name
-test/units/playbook/role/test_role.py pylint:blacklisted-name
-test/units/plugins/test_plugins.py pylint:blacklisted-name
-test/units/template/test_templar.py pylint:blacklisted-name
+test/units/modules/test_apt.py pylint:disallowed-name
+test/units/parsing/vault/test_vault.py pylint:disallowed-name
+test/units/playbook/role/test_role.py pylint:disallowed-name
+test/units/plugins/test_plugins.py pylint:disallowed-name
+test/units/template/test_templar.py pylint:disallowed-name
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
-test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate # test expects no boilerplate
-test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate # test expects no boilerplate
test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
diff --git a/test/support/integration/plugins/inventory/foreman.py b/test/support/integration/plugins/inventory/foreman.py
index 43073f81..39e0de33 100644
--- a/test/support/integration/plugins/inventory/foreman.py
+++ b/test/support/integration/plugins/inventory/foreman.py
@@ -81,7 +81,7 @@ password: secure
validate_certs: False
'''
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
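
The same one-line swap repeats across the support plugins and modules below: distutils is deprecated since Python 3.10 and removed in 3.12, so each file moves to the LooseVersion copy vendored under ansible.module_utils.compat.version. A minimal sketch of the pattern, with placeholder version strings:

    # Before (deprecated upstream):
    #   from distutils.version import LooseVersion
    # After: the vendored shim keeps the same comparison semantics.
    from ansible.module_utils.compat.version import LooseVersion

    if LooseVersion('1.10.2') > LooseVersion('1.9'):
        print('versions compare component-wise, not lexically')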
diff --git a/test/support/integration/plugins/module_utils/aws/core.py b/test/support/integration/plugins/module_utils/aws/core.py
index c4527b6d..909d0396 100644
--- a/test/support/integration/plugins/module_utils/aws/core.py
+++ b/test/support/integration/plugins/module_utils/aws/core.py
@@ -65,7 +65,7 @@ import re
import logging
import traceback
from functools import wraps
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
try:
from cStringIO import StringIO
diff --git a/test/support/integration/plugins/module_utils/crypto.py b/test/support/integration/plugins/module_utils/crypto.py
index e67eeff1..f3f43f07 100644
--- a/test/support/integration/plugins/module_utils/crypto.py
+++ b/test/support/integration/plugins/module_utils/crypto.py
@@ -31,7 +31,7 @@ __metaclass__ = type
import sys
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
try:
import OpenSSL
diff --git a/test/support/integration/plugins/module_utils/docker/common.py b/test/support/integration/plugins/module_utils/docker/common.py
index 03307250..08a87702 100644
--- a/test/support/integration/plugins/module_utils/docker/common.py
+++ b/test/support/integration/plugins/module_utils/docker/common.py
@@ -25,7 +25,7 @@ import platform
import re
import sys
from datetime import timedelta
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, env_fallback, missing_required_lib
diff --git a/test/support/integration/plugins/module_utils/postgres.py b/test/support/integration/plugins/module_utils/postgres.py
index 63811c30..0ccc6ed7 100644
--- a/test/support/integration/plugins/module_utils/postgres.py
+++ b/test/support/integration/plugins/module_utils/postgres.py
@@ -37,7 +37,7 @@ except ImportError:
from ansible.module_utils.basic import missing_required_lib
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
def postgres_common_argument_spec():
diff --git a/test/support/integration/plugins/modules/docker_swarm.py b/test/support/integration/plugins/modules/docker_swarm.py
index a2c076c5..c025b8dc 100644
--- a/test/support/integration/plugins/modules/docker_swarm.py
+++ b/test/support/integration/plugins/modules/docker_swarm.py
@@ -447,11 +447,6 @@ class SwarmManager(DockerBaseClass):
"inspect": self.inspect_swarm
}
- if self.state == 'inspect':
- self.client.module.deprecate(
- "The 'inspect' state is deprecated, please use 'docker_swarm_info' to inspect swarm cluster",
- version='2.12', collection_name='ansible.builtin')
-
choice_map.get(self.state)()
if self.client.module._diff or self.parameters.debug:
diff --git a/test/support/integration/plugins/modules/ec2.py b/test/support/integration/plugins/modules/ec2.py
index 952aa5a1..1e97effd 100644
--- a/test/support/integration/plugins/modules/ec2.py
+++ b/test/support/integration/plugins/modules/ec2.py
@@ -610,7 +610,7 @@ import time
import datetime
import traceback
from ast import literal_eval
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import get_aws_connection_info, ec2_argument_spec, ec2_connect
diff --git a/test/support/integration/plugins/modules/htpasswd.py b/test/support/integration/plugins/modules/htpasswd.py
index ad12b0c0..2c55a6bc 100644
--- a/test/support/integration/plugins/modules/htpasswd.py
+++ b/test/support/integration/plugins/modules/htpasswd.py
@@ -96,7 +96,7 @@ EXAMPLES = """
import os
import tempfile
import traceback
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native
diff --git a/test/support/integration/plugins/modules/mongodb_user.py b/test/support/integration/plugins/modules/mongodb_user.py
index 362b3aa4..7a18b159 100644
--- a/test/support/integration/plugins/modules/mongodb_user.py
+++ b/test/support/integration/plugins/modules/mongodb_user.py
@@ -200,7 +200,7 @@ user:
import os
import ssl as ssl_lib
import traceback
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from operator import itemgetter
try:
diff --git a/test/support/integration/plugins/modules/x509_crl.py b/test/support/integration/plugins/modules/x509_crl.py
index ef601eda..9bb83a5b 100644
--- a/test/support/integration/plugins/modules/x509_crl.py
+++ b/test/support/integration/plugins/modules/x509_crl.py
@@ -349,7 +349,7 @@ crl:
import os
import traceback
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils._text import to_native, to_text
diff --git a/test/support/integration/plugins/modules/x509_crl_info.py b/test/support/integration/plugins/modules/x509_crl_info.py
index b61db26f..b6d36320 100644
--- a/test/support/integration/plugins/modules/x509_crl_info.py
+++ b/test/support/integration/plugins/modules/x509_crl_info.py
@@ -129,7 +129,7 @@ revoked_certificates:
import traceback
-from distutils.version import LooseVersion
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils._text import to_native
diff --git a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/action/ios.py b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/action/ios.py
index e5ac2cd1..e3605d0b 100644
--- a/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/action/ios.py
+++ b/test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/action/ios.py
@@ -107,7 +107,7 @@ class ActionModule(ActionNetworkModule):
return {
"failed": True,
"msg": "unable to open shell. Please see: "
- + "https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell",
+ + "https://docs.ansible.com/ansible/latest/network/user_guide/network_debug_troubleshooting.html#category-unable-to-open-shell",
}
task_vars["ansible_socket"] = socket_path
diff --git a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/action/vyos.py b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/action/vyos.py
index cab2f3fd..b86a0c42 100644
--- a/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/action/vyos.py
+++ b/test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/action/vyos.py
@@ -103,7 +103,7 @@ class ActionModule(ActionNetworkModule):
return {
"failed": True,
"msg": "unable to open shell. Please see: "
- + "https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell",
+ + "https://docs.ansible.com/ansible/latest/network/user_guide/network_debug_troubleshooting.html#category-unable-to-open-shell",
}
task_vars["ansible_socket"] = socket_path
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1
new file mode 100644
index 00000000..8d077bd6
--- /dev/null
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/module_utils/WebRequest.psm1
@@ -0,0 +1,518 @@
+# Copyright (c) 2020 Ansible Project
+# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
+
+Function Get-AnsibleWindowsWebRequest {
+ <#
+ .SYNOPSIS
+ Creates a System.Net.WebRequest object based on common URL module options in Ansible.
+
+ .DESCRIPTION
+ Will create a WebRequest based on common input options within Ansible. This can be used manually or with
+ Invoke-AnsibleWindowsWebRequest.
+
+ .PARAMETER Uri
+ The URI to create the web request for.
+
+ .PARAMETER UrlMethod
+    The protocol method to use. If omitted, the default method for the specified URI protocol is used.
+
+ .PARAMETER FollowRedirects
+    Whether to follow redirect responses. This is only valid when using an HTTP URI.
+ all - Will follow all redirects
+ none - Will follow no redirects
+ safe - Will only follow redirects when GET or HEAD is used as the UrlMethod
+
+ .PARAMETER Headers
+    A hashtable or dictionary of header values to set on the request. This is only valid for an HTTP URI.
+
+    .PARAMETER HttpAgent
+    A string to set for the 'User-Agent' header. This is only valid for an HTTP URI.
+
+    .PARAMETER MaximumRedirection
+    The maximum number of redirections that will be followed. This is only valid for an HTTP URI.
+
+ .PARAMETER UrlTimeout
+ The timeout in seconds that defines how long to wait until the request times out.
+
+ .PARAMETER ValidateCerts
+    Whether to validate SSL certificates; defaults to True.
+
+ .PARAMETER ClientCert
+    The path to the PFX file to use for X509 authentication. This is only valid for an HTTP URI. This path can either
+ be a filesystem path (C:\folder\cert.pfx) or a PSPath to a credential (Cert:\CurrentUser\My\<thumbprint>).
+
+ .PARAMETER ClientCertPassword
+    The password for the PFX certificate if required. This is only valid for an HTTP URI.
+
+ .PARAMETER ForceBasicAuth
+    Whether to set the Basic auth header on the first request instead of when required. This is only valid for an
+ HTTP URI.
+
+ .PARAMETER UrlUsername
+ The username to use for authenticating with the target.
+
+ .PARAMETER UrlPassword
+ The password to use for authenticating with the target.
+
+ .PARAMETER UseDefaultCredential
+ Whether to use the current user's credentials if available. This will only work when using Become, using SSH with
+ password auth, or WinRM with CredSSP or Kerberos with credential delegation.
+
+ .PARAMETER UseProxy
+ Whether to use the default proxy defined in IE (WinINet) for the user or set no proxy at all. This should not
+ be set to True when ProxyUrl is also defined.
+
+ .PARAMETER ProxyUrl
+ An explicit proxy server to use for the request instead of relying on the default proxy in IE. This is only
+    valid for an HTTP URI.
+
+ .PARAMETER ProxyUsername
+ An optional username to use for proxy authentication.
+
+ .PARAMETER ProxyPassword
+ The password for ProxyUsername.
+
+ .PARAMETER ProxyUseDefaultCredential
+ Whether to use the current user's credentials for proxy authentication if available. This will only work when
+ using Become, using SSH with password auth, or WinRM with CredSSP or Kerberos with credential delegation.
+
+ .PARAMETER Module
+ The AnsibleBasic module that can be used as a backup parameter source or a way to return warnings back to the
+ Ansible controller.
+
+ .EXAMPLE
+ $spec = @{
+ options = @{}
+ }
+ $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec, @(Get-AnsibleWindowsWebRequestSpec))
+
+ $web_request = Get-AnsibleWindowsWebRequest -Module $module
+ #>
+ [CmdletBinding()]
+ [OutputType([System.Net.WebRequest])]
+ Param (
+ [Alias("url")]
+ [System.Uri]
+ $Uri,
+
+ [Alias("url_method")]
+ [System.String]
+ $UrlMethod,
+
+ [Alias("follow_redirects")]
+ [ValidateSet("all", "none", "safe")]
+ [System.String]
+ $FollowRedirects = "safe",
+
+ [System.Collections.IDictionary]
+ $Headers,
+
+ [Alias("http_agent")]
+ [System.String]
+ $HttpAgent = "ansible-httpget",
+
+ [Alias("maximum_redirection")]
+ [System.Int32]
+ $MaximumRedirection = 50,
+
+ [Alias("url_timeout")]
+ [System.Int32]
+ $UrlTimeout = 30,
+
+ [Alias("validate_certs")]
+ [System.Boolean]
+ $ValidateCerts = $true,
+
+ # Credential params
+ [Alias("client_cert")]
+ [System.String]
+ $ClientCert,
+
+ [Alias("client_cert_password")]
+ [System.String]
+ $ClientCertPassword,
+
+ [Alias("force_basic_auth")]
+ [Switch]
+ $ForceBasicAuth,
+
+ [Alias("url_username")]
+ [System.String]
+ $UrlUsername,
+
+ [Alias("url_password")]
+ [System.String]
+ $UrlPassword,
+
+ [Alias("use_default_credential")]
+ [Switch]
+ $UseDefaultCredential,
+
+ # Proxy params
+ [Alias("use_proxy")]
+ [System.Boolean]
+ $UseProxy = $true,
+
+ [Alias("proxy_url")]
+ [System.String]
+ $ProxyUrl,
+
+ [Alias("proxy_username")]
+ [System.String]
+ $ProxyUsername,
+
+ [Alias("proxy_password")]
+ [System.String]
+ $ProxyPassword,
+
+ [Alias("proxy_use_default_credential")]
+ [Switch]
+ $ProxyUseDefaultCredential,
+
+ [ValidateScript({ $_.GetType().FullName -eq 'Ansible.Basic.AnsibleModule' })]
+ [System.Object]
+ $Module
+ )
+
+ # Set module options for parameters unless they were explicitly passed in.
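+    # Resolution order: an explicitly bound parameter wins; otherwise the first module param matching the
+    # parameter name or one of its aliases is cast to the parameter's type.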
+ if ($Module) {
+ foreach ($param in $PSCmdlet.MyInvocation.MyCommand.Parameters.GetEnumerator()) {
+ if ($PSBoundParameters.ContainsKey($param.Key)) {
+                # Was set explicitly; we want to use that value
+ continue
+ }
+
+ foreach ($alias in @($Param.Key) + $param.Value.Aliases) {
+ if ($Module.Params.ContainsKey($alias)) {
+ $var_value = $Module.Params.$alias -as $param.Value.ParameterType
+ Set-Variable -Name $param.Key -Value $var_value
+ break
+ }
+ }
+ }
+ }
+
+ # Disable certificate validation if requested
+ # FUTURE: set this on ServerCertificateValidationCallback of the HttpWebRequest once .NET 4.5 is the minimum
+ if (-not $ValidateCerts) {
+ [System.Net.ServicePointManager]::ServerCertificateValidationCallback = { $true }
+ }
+
+    # Enable TLS1.1/TLS1.2 if they're available but disabled (e.g. .NET 4.5)
+ $security_protocols = [System.Net.ServicePointManager]::SecurityProtocol -bor [System.Net.SecurityProtocolType]::SystemDefault
+ if ([System.Net.SecurityProtocolType].GetMember("Tls11").Count -gt 0) {
+ $security_protocols = $security_protocols -bor [System.Net.SecurityProtocolType]::Tls11
+ }
+ if ([System.Net.SecurityProtocolType].GetMember("Tls12").Count -gt 0) {
+ $security_protocols = $security_protocols -bor [System.Net.SecurityProtocolType]::Tls12
+ }
+ [System.Net.ServicePointManager]::SecurityProtocol = $security_protocols
+
+ $web_request = [System.Net.WebRequest]::Create($Uri)
+ if ($UrlMethod) {
+ $web_request.Method = $UrlMethod
+ }
+ $web_request.Timeout = $UrlTimeout * 1000
+
+ if ($UseDefaultCredential -and $web_request -is [System.Net.HttpWebRequest]) {
+ $web_request.UseDefaultCredentials = $true
+ } elseif ($UrlUsername) {
+ if ($ForceBasicAuth) {
+ $auth_value = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(("{0}:{1}" -f $UrlUsername, $UrlPassword)))
+ $web_request.Headers.Add("Authorization", "Basic $auth_value")
+ } else {
+ $credential = New-Object -TypeName System.Net.NetworkCredential -ArgumentList $UrlUsername, $UrlPassword
+ $web_request.Credentials = $credential
+ }
+ }
+
+ if ($ClientCert) {
+ # Expecting either a filepath or PSPath (Cert:\CurrentUser\My\<thumbprint>)
+ $cert = Get-Item -LiteralPath $ClientCert -ErrorAction SilentlyContinue
+ if ($null -eq $cert) {
+ Write-Error -Message "Client certificate '$ClientCert' does not exist" -Category ObjectNotFound
+ return
+ }
+
+ $crypto_ns = 'System.Security.Cryptography.X509Certificates'
+ if ($cert.PSProvider.Name -ne 'Certificate') {
+ try {
+ $cert = New-Object -TypeName "$crypto_ns.X509Certificate2" -ArgumentList @(
+ $ClientCert, $ClientCertPassword
+ )
+ } catch [System.Security.Cryptography.CryptographicException] {
+ Write-Error -Message "Failed to read client certificate at '$ClientCert'" -Exception $_.Exception -Category SecurityError
+ return
+ }
+ }
+ $web_request.ClientCertificates = New-Object -TypeName "$crypto_ns.X509Certificate2Collection" -ArgumentList @(
+ $cert
+ )
+ }
+
+ if (-not $UseProxy) {
+ $proxy = $null
+ } elseif ($ProxyUrl) {
+ $proxy = New-Object -TypeName System.Net.WebProxy -ArgumentList $ProxyUrl, $true
+ } else {
+ $proxy = $web_request.Proxy
+ }
+
+    # $web_request.Proxy may return $null for an FTP web request. We only set the credentials if we have an actual
+ # proxy to work with, otherwise just ignore the credentials property.
+ if ($null -ne $proxy) {
+ if ($ProxyUseDefaultCredential) {
+            # Weird hack: $web_request.Proxy returns an IWebProxy object which only guarantees the Credentials
+ # property. We cannot set UseDefaultCredentials so we just set the Credentials to the
+ # DefaultCredentials in the CredentialCache which does the same thing.
+ $proxy.Credentials = [System.Net.CredentialCache]::DefaultCredentials
+ } elseif ($ProxyUsername) {
+ $proxy.Credentials = New-Object -TypeName System.Net.NetworkCredential -ArgumentList @(
+ $ProxyUsername, $ProxyPassword
+ )
+ } else {
+ $proxy.Credentials = $null
+ }
+ }
+
+ $web_request.Proxy = $proxy
+
+ # Some parameters only apply when dealing with a HttpWebRequest
+ if ($web_request -is [System.Net.HttpWebRequest]) {
+ if ($Headers) {
+ foreach ($header in $Headers.GetEnumerator()) {
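+                # .NET treats the cased keys below as restricted headers; they must be set through the
+                # typed request properties rather than Headers.Add().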
+ switch ($header.Key) {
+ Accept { $web_request.Accept = $header.Value }
+ Connection { $web_request.Connection = $header.Value }
+ Content-Length { $web_request.ContentLength = $header.Value }
+ Content-Type { $web_request.ContentType = $header.Value }
+ Expect { $web_request.Expect = $header.Value }
+ Date { $web_request.Date = $header.Value }
+ Host { $web_request.Host = $header.Value }
+ If-Modified-Since { $web_request.IfModifiedSince = $header.Value }
+ Range { $web_request.AddRange($header.Value) }
+ Referer { $web_request.Referer = $header.Value }
+ Transfer-Encoding {
+ $web_request.SendChunked = $true
+ $web_request.TransferEncoding = $header.Value
+ }
+ User-Agent { continue }
+ default { $web_request.Headers.Add($header.Key, $header.Value) }
+ }
+ }
+ }
+
+ # For backwards compatibility we need to support setting the User-Agent if the header was set in the task.
+        # We just need to make sure that if the http_agent module option was set explicitly then that takes priority.
+ if ($Headers -and $Headers.ContainsKey("User-Agent")) {
+ $options = (Get-AnsibleWindowsWebRequestSpec).options
+ if ($HttpAgent -eq $options.http_agent.default) {
+ $HttpAgent = $Headers['User-Agent']
+ } elseif ($null -ne $Module) {
+            $Module.Warn("The 'User-Agent' header and the 'http_agent' option were both set, using the 'http_agent' for the web request")
+ }
+ }
+ $web_request.UserAgent = $HttpAgent
+
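+        # 'safe' only auto-follows redirects for GET and HEAD; 'all' and 'none' apply regardless of method.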
+ switch ($FollowRedirects) {
+ none { $web_request.AllowAutoRedirect = $false }
+ safe {
+ if ($web_request.Method -in @("GET", "HEAD")) {
+ $web_request.AllowAutoRedirect = $true
+ } else {
+ $web_request.AllowAutoRedirect = $false
+ }
+ }
+ all { $web_request.AllowAutoRedirect = $true }
+ }
+
+ if ($MaximumRedirection -eq 0) {
+ $web_request.AllowAutoRedirect = $false
+ } else {
+ $web_request.MaximumAutomaticRedirections = $MaximumRedirection
+ }
+ }
+
+ return $web_request
+}
+
+Function Invoke-AnsibleWindowsWebRequest {
+ <#
+ .SYNOPSIS
+ Invokes a ScriptBlock with the WebRequest.
+
+ .DESCRIPTION
+    Invokes the ScriptBlock and handles extra concerns like accessing the response stream, closing those streams
+    safely, as well as setting common module return values.
+
+ .PARAMETER Module
+    The Ansible.Basic module to set the return values for. This will set the following return values:
+ elapsed - The total time, in seconds, that it took to send the web request and process the response
+ msg - The human readable description of the response status code
+ status_code - An int that is the response status code
+
+ .PARAMETER Request
+ The System.Net.WebRequest to call. This can either be manually crafted or created with
+ Get-AnsibleWindowsWebRequest.
+
+ .PARAMETER Script
+ The ScriptBlock to invoke during the web request. This ScriptBlock should take in the params
+ Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
+
+    This scriptblock should manage the response based on what it needs to do.
+
+ .PARAMETER Body
+ An optional Stream to send to the target during the request.
+
+ .PARAMETER IgnoreBadResponse
+    By default a WebException will be raised for a non-2xx status code and the Script will not be invoked. This
+ parameter can be set to process all responses regardless of the status code.
+
+ .EXAMPLE Basic module that downloads a file
+ $spec = @{
+ options = @{
+ path = @{ type = "path"; required = $true }
+ }
+ }
+    $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec, @(Get-AnsibleWindowsWebRequestSpec))
+
+ $web_request = Get-AnsibleWindowsWebRequest -Module $module
+
+ Invoke-AnsibleWindowsWebRequest -Module $module -Request $web_request -Script {
+ Param ([System.Net.WebResponse]$Response, [System.IO.Stream]$Stream)
+
+ $fs = [System.IO.File]::Create($module.Params.path)
+ try {
+ $Stream.CopyTo($fs)
+ $fs.Flush()
+ } finally {
+ $fs.Dispose()
+ }
+ }
+ #>
+ [CmdletBinding()]
+ param (
+ [Parameter(Mandatory=$true)]
+ [System.Object]
+ [ValidateScript({ $_.GetType().FullName -eq 'Ansible.Basic.AnsibleModule' })]
+ $Module,
+
+ [Parameter(Mandatory=$true)]
+ [System.Net.WebRequest]
+ $Request,
+
+ [Parameter(Mandatory=$true)]
+ [ScriptBlock]
+ $Script,
+
+ [AllowNull()]
+ [System.IO.Stream]
+ $Body,
+
+ [Switch]
+ $IgnoreBadResponse
+ )
+
+ $start = Get-Date
+ if ($null -ne $Body) {
+ $request_st = $Request.GetRequestStream()
+ try {
+ $Body.CopyTo($request_st)
+ $request_st.Flush()
+ } finally {
+ $request_st.Close()
+ }
+ }
+
+ try {
+ try {
+ $web_response = $Request.GetResponse()
+ } catch [System.Net.WebException] {
+ # A WebResponse with a status code not in the 200 range will raise a WebException. We check if the
+ # exception raised contains the actual response and continue on if IgnoreBadResponse is set. We also
+ # make sure we set the status_code return value on the Module object if possible
+
+ if ($_.Exception.PSObject.Properties.Name -match "Response") {
+ $web_response = $_.Exception.Response
+
+ if (-not $IgnoreBadResponse -or $null -eq $web_response) {
+                    $Module.Result.msg = $_.Exception.Response.StatusDescription
+ $Module.Result.status_code = $_.Exception.Response.StatusCode
+ throw $_
+ }
+ } else {
+ throw $_
+ }
+ }
+
+ if ($Request.RequestUri.IsFile) {
+ # A FileWebResponse won't have these properties set
+ $Module.Result.msg = "OK"
+ $Module.Result.status_code = 200
+ } else {
+ $Module.Result.msg = $web_response.StatusDescription
+ $Module.Result.status_code = $web_response.StatusCode
+ }
+
+ $response_stream = $web_response.GetResponseStream()
+ try {
+ # Invoke the ScriptBlock and pass in WebResponse and ResponseStream
+ &$Script -Response $web_response -Stream $response_stream
+ } finally {
+ $response_stream.Dispose()
+ }
+ } finally {
+ if ($web_response) {
+ $web_response.Close()
+ }
+        $Module.Result.elapsed = ((Get-Date) - $start).TotalSeconds
+ }
+}
+
+Function Get-AnsibleWindowsWebRequestSpec {
+ <#
+ .SYNOPSIS
+ Used by modules to get the argument spec fragment for AnsibleModule.
+
+    .EXAMPLE
+ $spec = @{
+ options = @{}
+ }
+ $module = [Ansible.Basic.AnsibleModule]::Create($args, $spec, @(Get-AnsibleWindowsWebRequestSpec))
+
+ .NOTES
+ The options here are reflected in the doc fragment 'ansible.windows.web_request' at
+ 'plugins/doc_fragments/web_request.py'.
+ #>
+ @{
+ options = @{
+ url_method = @{ type = 'str' }
+ follow_redirects = @{ type = 'str'; choices = @('all', 'none', 'safe'); default = 'safe' }
+ headers = @{ type = 'dict' }
+ http_agent = @{ type = 'str'; default = 'ansible-httpget' }
+ maximum_redirection = @{ type = 'int'; default = 50 }
+ url_timeout = @{ type = 'int'; default = 30 }
+ validate_certs = @{ type = 'bool'; default = $true }
+
+ # Credential options
+ client_cert = @{ type = 'str' }
+ client_cert_password = @{ type = 'str'; no_log = $true }
+ force_basic_auth = @{ type = 'bool'; default = $false }
+ url_username = @{ type = 'str' }
+ url_password = @{ type = 'str'; no_log = $true }
+ use_default_credential = @{ type = 'bool'; default = $false }
+
+ # Proxy options
+ use_proxy = @{ type = 'bool'; default = $true }
+ proxy_url = @{ type = 'str' }
+ proxy_username = @{ type = 'str' }
+ proxy_password = @{ type = 'str'; no_log = $true }
+ proxy_use_default_credential = @{ type = 'bool'; default = $false }
+ }
+ }
+}
+
+$export_members = @{
+ Function = "Get-AnsibleWindowsWebRequest", "Get-AnsibleWindowsWebRequestSpec", "Invoke-AnsibleWindowsWebRequest"
+}
+Export-ModuleMember @export_members
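
The exported trio is normally driven through Ansible.Basic.AnsibleModule, as the comment help above shows, but the request factory also works standalone. A minimal sketch, assuming the file is imported as a plain PowerShell module and using a placeholder URL:

    Import-Module -Name ./WebRequest.psm1

    $request = Get-AnsibleWindowsWebRequest -Uri 'https://example.com' -UrlMethod GET -UrlTimeout 10
    $response = $request.GetResponse()
    try {
        $reader = New-Object -TypeName System.IO.StreamReader -ArgumentList $response.GetResponseStream()
        try {
            $reader.ReadToEnd().Length  # body size in characters
        } finally {
            $reader.Dispose()
        }
    } finally {
        $response.Close()
    }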
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1 b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1
new file mode 100644
index 00000000..9d7c68be
--- /dev/null
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.ps1
@@ -0,0 +1,219 @@
+#!powershell
+
+# Copyright: (c) 2015, Corwin Brown <corwin@corwinbrown.com>
+# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#AnsibleRequires -CSharpUtil Ansible.Basic
+#Requires -Module Ansible.ModuleUtils.CamelConversion
+#Requires -Module Ansible.ModuleUtils.FileUtil
+#Requires -Module Ansible.ModuleUtils.Legacy
+#AnsibleRequires -PowerShell ..module_utils.WebRequest
+
+$spec = @{
+ options = @{
+ url = @{ type = "str"; required = $true }
+ content_type = @{ type = "str" }
+ body = @{ type = "raw" }
+ dest = @{ type = "path" }
+ creates = @{ type = "path" }
+ removes = @{ type = "path" }
+ return_content = @{ type = "bool"; default = $false }
+ status_code = @{ type = "list"; elements = "int"; default = @(200) }
+
+ # Defined for ease of use and backwards compatibility
+ url_timeout = @{
+ aliases = "timeout"
+ }
+ url_method = @{
+ aliases = "method"
+ default = "GET"
+ }
+
+ # Defined for the alias backwards compatibility, remove once aliases are removed
+ url_username = @{
+ aliases = @("user", "username")
+ deprecated_aliases = @(
+ @{ name = "user"; date = [DateTime]::ParseExact("2022-07-01", "yyyy-MM-dd", $null); collection_name = 'ansible.windows' },
+ @{ name = "username"; date = [DateTime]::ParseExact("2022-07-01", "yyyy-MM-dd", $null); collection_name = 'ansible.windows' }
+ )
+ }
+ url_password = @{
+ aliases = @("password")
+ deprecated_aliases = @(
+ @{ name = "password"; date = [DateTime]::ParseExact("2022-07-01", "yyyy-MM-dd", $null); collection_name = 'ansible.windows' }
+ )
+ }
+ }
+ supports_check_mode = $true
+}
+$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec, @(Get-AnsibleWindowsWebRequestSpec))
+
+$url = $module.Params.url
+$method = $module.Params.url_method.ToUpper()
+$content_type = $module.Params.content_type
+$body = $module.Params.body
+$dest = $module.Params.dest
+$creates = $module.Params.creates
+$removes = $module.Params.removes
+$return_content = $module.Params.return_content
+$status_code = $module.Params.status_code
+
+$JSON_CANDIDATES = @('text', 'json', 'javascript')
+
+$module.Result.elapsed = 0
+$module.Result.url = $url
+
+Function ConvertFrom-SafeJson {
+ <#
+ .SYNOPSIS
+    Safely converts a JSON string to an object; this is like ConvertFrom-Json except it respects -ErrorAction.
+
+    .PARAMETER InputObject
+ The input object string to convert from.
+ #>
+ [CmdletBinding()]
+ param (
+ [Parameter(Mandatory=$true)]
+ [AllowEmptyString()]
+ [AllowNull()]
+ [String]
+ $InputObject
+ )
+
+ if (-not $InputObject) {
+ return
+ }
+
+ try {
+        # The unary comma wraps the result so an array is returned as a single object instead of being unrolled
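+        # e.g. an input of '[1, 2]' comes back as one two-element array rather than two pipeline items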
+ ,[Ansible.Basic.AnsibleModule]::FromJson($InputObject)
+ } catch [System.ArgumentException] {
+ Write-Error -Message "Invalid json string as input object: $($_.Exception.Message)" -Exception $_.Exception
+ }
+}
+
+if (-not ($method -cmatch '^[A-Z]+$')) {
+ $module.FailJson("Parameter 'method' needs to be a single word in uppercase, like GET or POST.")
+}
+
+if ($creates -and (Test-AnsiblePath -Path $creates)) {
+ $module.Result.skipped = $true
+ $module.Result.msg = "The 'creates' file or directory ($creates) already exists."
+ $module.ExitJson()
+}
+
+if ($removes -and -not (Test-AnsiblePath -Path $removes)) {
+ $module.Result.skipped = $true
+ $module.Result.msg = "The 'removes' file or directory ($removes) does not exist."
+ $module.ExitJson()
+}
+
+$client = Get-AnsibleWindowsWebRequest -Uri $url -Module $module
+
+if ($null -ne $content_type) {
+ $client.ContentType = $content_type
+}
+
+$response_script = {
+ param($Response, $Stream)
+
+ ForEach ($prop in $Response.PSObject.Properties) {
+ $result_key = Convert-StringToSnakeCase -string $prop.Name
+ $prop_value = $prop.Value
+        # convert any DateTime values to the ISO 8601 standard
+ if ($prop_value -is [System.DateTime]) {
+ $prop_value = $prop_value.ToString("o", [System.Globalization.CultureInfo]::InvariantCulture)
+ }
+ $module.Result.$result_key = $prop_value
+ }
+
+ # manually get the headers as not all of them are in the response properties
+ foreach ($header_key in $Response.Headers.GetEnumerator()) {
+ $header_value = $Response.Headers[$header_key]
+        $header_key = $header_key.Replace("-", "") # strip '-' so the snake_case conversion yields e.g. content_length
+ $header_key = Convert-StringToSnakeCase -string $header_key
+ $module.Result.$header_key = $header_value
+ }
+
+ # we only care about the return body if we need to return the content or create a file
+ if ($return_content -or $dest) {
+ # copy to a MemoryStream so we can read it multiple times
+ $memory_st = New-Object -TypeName System.IO.MemoryStream
+ try {
+ $Stream.CopyTo($memory_st)
+
+ if ($return_content) {
+ $memory_st.Seek(0, [System.IO.SeekOrigin]::Begin) > $null
+ $content_bytes = $memory_st.ToArray()
+ $module.Result.content = [System.Text.Encoding]::UTF8.GetString($content_bytes)
+ if ($module.Result.ContainsKey("content_type") -and $module.Result.content_type -Match ($JSON_CANDIDATES -join '|')) {
+ $json = ConvertFrom-SafeJson -InputObject $module.Result.content -ErrorAction SilentlyContinue
+ if ($json) {
+ $module.Result.json = $json
+ }
+ }
+ }
+
+ if ($dest) {
+ $memory_st.Seek(0, [System.IO.SeekOrigin]::Begin) > $null
+ $changed = $true
+
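+            # Idempotency: hash any existing dest file and the downloaded content; only flag changed on a mismatch.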
+ if (Test-AnsiblePath -Path $dest) {
+ $actual_checksum = Get-FileChecksum -path $dest -algorithm "sha1"
+
+ $sp = New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider
+ $content_checksum = [System.BitConverter]::ToString($sp.ComputeHash($memory_st)).Replace("-", "").ToLower()
+
+ if ($actual_checksum -eq $content_checksum) {
+ $changed = $false
+ }
+ }
+
+ $module.Result.changed = $changed
+ if ($changed -and (-not $module.CheckMode)) {
+ $memory_st.Seek(0, [System.IO.SeekOrigin]::Begin) > $null
+ $file_stream = [System.IO.File]::Create($dest)
+ try {
+ $memory_st.CopyTo($file_stream)
+ } finally {
+ $file_stream.Flush()
+ $file_stream.Close()
+ }
+ }
+ }
+ } finally {
+ $memory_st.Close()
+ }
+ }
+
+ if ($status_code -notcontains $Response.StatusCode) {
+        $module.FailJson("Status code of request '$([int]$Response.StatusCode)' is not in the list of valid status codes $status_code.")
+ }
+}
+
+$body_st = $null
+if ($null -ne $body) {
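+    # Structured bodies (dict/list) are serialized to compact JSON; other non-string values fall back to ToString().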
+ if ($body -is [System.Collections.IDictionary] -or $body -is [System.Collections.IList]) {
+ $body_string = ConvertTo-Json -InputObject $body -Compress
+ } elseif ($body -isnot [String]) {
+ $body_string = $body.ToString()
+ } else {
+ $body_string = $body
+ }
+ $buffer = [System.Text.Encoding]::UTF8.GetBytes($body_string)
+
+ $body_st = New-Object -TypeName System.IO.MemoryStream -ArgumentList @(,$buffer)
+}
+
+try {
+ Invoke-AnsibleWindowsWebRequest -Module $module -Request $client -Script $response_script -Body $body_st -IgnoreBadResponse
+} catch {
+ $module.FailJson("Unhandled exception occurred when sending web request. Exception: $($_.Exception.Message)", $_)
+} finally {
+ if ($null -ne $body_st) {
+ $body_st.Dispose()
+ }
+}
+
+$module.ExitJson()
diff --git a/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.py b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.py
new file mode 100644
index 00000000..3b1094ea
--- /dev/null
+++ b/test/support/windows-integration/collections/ansible_collections/ansible/windows/plugins/modules/win_uri.py
@@ -0,0 +1,155 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2015, Corwin Brown <corwin@corwinbrown.com>
+# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r'''
+---
+module: win_uri
+short_description: Interacts with web services
+description:
+- Interacts with FTP, HTTP and HTTPS web services.
+- Supports Digest, Basic and WSSE HTTP authentication mechanisms.
+- For non-Windows targets, use the M(ansible.builtin.uri) module instead.
+options:
+ url:
+ description:
+ - Supports FTP, HTTP or HTTPS URLs in the form of (ftp|http|https)://host.domain:port/path.
+ type: str
+ required: yes
+ content_type:
+ description:
+ - Sets the "Content-Type" header.
+ type: str
+ body:
+ description:
+ - The body of the HTTP request/response to the web service.
+ type: raw
+ dest:
+ description:
+ - Output the response body to a file.
+ type: path
+ creates:
+ description:
+    - A filename; when it already exists, this step will be skipped.
+ type: path
+ removes:
+ description:
+    - A filename; when it does not exist, this step will be skipped.
+ type: path
+ return_content:
+ description:
+ - Whether or not to return the body of the response as a "content" key in
+ the dictionary result. If the reported Content-type is
+ "application/json", then the JSON is additionally loaded into a key
+ called C(json) in the dictionary results.
+ type: bool
+ default: no
+ status_code:
+ description:
+ - A valid, numeric, HTTP status code that signifies success of the request.
+    - Can also be a comma-separated list of status codes.
+ type: list
+ elements: int
+ default: [ 200 ]
+
+ url_method:
+ default: GET
+ aliases:
+ - method
+ url_timeout:
+ aliases:
+ - timeout
+
+ # Following defined in the web_request fragment but the module contains deprecated aliases for backwards compatibility.
+ url_username:
+ description:
+ - The username to use for authentication.
+    - The aliases I(user) and I(username) are deprecated and will be removed in
+      the major release after C(2022-07-01).
+ aliases:
+ - user
+ - username
+ url_password:
+ description:
+ - The password for I(url_username).
+    - The alias I(password) is deprecated and will be removed in the major
+ release after C(2022-07-01).
+ aliases:
+ - password
+extends_documentation_fragment:
+- ansible.windows.web_request
+
+seealso:
+- module: ansible.builtin.uri
+- module: ansible.windows.win_get_url
+author:
+- Corwin Brown (@blakfeld)
+- Dag Wieers (@dagwieers)
+'''
+
+EXAMPLES = r'''
+- name: Perform a GET and Store Output
+ ansible.windows.win_uri:
+ url: http://example.com/endpoint
+ register: http_output
+
+# Set a HOST header to hit an internal webserver:
+- name: Hit a Specific Host on the Server
+ ansible.windows.win_uri:
+ url: http://example.com/
+ method: GET
+ headers:
+ host: www.somesite.com
+
+- name: Perform a HEAD on an Endpoint
+ ansible.windows.win_uri:
+ url: http://www.example.com/
+ method: HEAD
+
+- name: POST a Body to an Endpoint
+ ansible.windows.win_uri:
+ url: http://www.somesite.com/
+ method: POST
+ body: "{ 'some': 'json' }"
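+
+# A sketch with a placeholder URL: register the response and read the parsed body.
+- name: GET an API and Inspect the Parsed JSON
+  ansible.windows.win_uri:
+    url: http://www.somesite.com/api
+    return_content: yes
+  register: api_response
+
+- name: Show the Parsed Body
+  ansible.builtin.debug:
+    var: api_response.json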
+'''
+
+RETURN = r'''
+elapsed:
+ description: The number of seconds that elapsed while performing the download.
+ returned: always
+ type: float
+ sample: 23.2
+url:
+ description: The Target URL.
+ returned: always
+ type: str
+ sample: https://www.ansible.com
+status_code:
+ description: The HTTP Status Code of the response.
+ returned: success
+ type: int
+ sample: 200
+status_description:
+ description: A summary of the status.
+ returned: success
+ type: str
+ sample: OK
+content:
+ description: The raw content of the HTTP response.
+ returned: success and return_content is True
+ type: str
+ sample: '{"foo": "bar"}'
+content_length:
+ description: The byte size of the response.
+ returned: success
+ type: int
+ sample: 54447
+json:
+  description: The JSON structure returned under content as a dictionary.
+ returned: success and Content-Type is "application/json" or "application/javascript" and return_content is True
+ type: dict
+ sample: {"this-is-dependent": "on the actual return content"}
+'''
diff --git a/test/support/windows-integration/plugins/modules/win_security_policy.ps1 b/test/support/windows-integration/plugins/modules/win_security_policy.ps1
deleted file mode 100644
index 274204b6..00000000
--- a/test/support/windows-integration/plugins/modules/win_security_policy.ps1
+++ /dev/null
@@ -1,196 +0,0 @@
-#!powershell
-
-# Copyright: (c) 2017, Jordan Borean <jborean93@gmail.com>
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-#Requires -Module Ansible.ModuleUtils.Legacy
-
-$ErrorActionPreference = 'Stop'
-
-$params = Parse-Args $args -supports_check_mode $true
-$check_mode = Get-AnsibleParam -obj $params -name "_ansible_check_mode" -type "bool" -default $false
-$diff_mode = Get-AnsibleParam -obj $Params -name "_ansible_diff" -type "bool" -default $false
-
-$section = Get-AnsibleParam -obj $params -name "section" -type "str" -failifempty $true
-$key = Get-AnsibleParam -obj $params -name "key" -type "str" -failifempty $true
-$value = Get-AnsibleParam -obj $params -name "value" -failifempty $true
-
-$result = @{
- changed = $false
- section = $section
- key = $key
- value = $value
-}
-
-if ($diff_mode) {
- $result.diff = @{}
-}
-
-Function Run-SecEdit($arguments) {
- $stdout = $null
- $stderr = $null
- $log_path = [IO.Path]::GetTempFileName()
- $arguments = $arguments + @("/log", $log_path)
-
- try {
- $stdout = &SecEdit.exe $arguments | Out-String
- } catch {
- $stderr = $_.Exception.Message
- }
- $log = Get-Content -Path $log_path
- Remove-Item -Path $log_path -Force
-
- $return = @{
- log = ($log -join "`n").Trim()
- stdout = $stdout
- stderr = $stderr
- rc = $LASTEXITCODE
- }
-
- return $return
-}
-
-Function Export-SecEdit() {
- $secedit_ini_path = [IO.Path]::GetTempFileName()
- # while this will technically make a change to the system in check mode by
- # creating a new file, we need these values to be able to do anything
- # substantial in check mode
- $export_result = Run-SecEdit -arguments @("/export", "/cfg", $secedit_ini_path, "/quiet")
-
- # check the return code and if the file has been populated, otherwise error out
- if (($export_result.rc -ne 0) -or ((Get-Item -Path $secedit_ini_path).Length -eq 0)) {
- Remove-Item -Path $secedit_ini_path -Force
- $result.rc = $export_result.rc
- $result.stdout = $export_result.stdout
- $result.stderr = $export_result.stderr
- Fail-Json $result "Failed to export secedit.ini file to $($secedit_ini_path)"
- }
- $secedit_ini = ConvertFrom-Ini -file_path $secedit_ini_path
-
- return $secedit_ini
-}
-
-Function Import-SecEdit($ini) {
- $secedit_ini_path = [IO.Path]::GetTempFileName()
- $secedit_db_path = [IO.Path]::GetTempFileName()
- Remove-Item -Path $secedit_db_path -Force # needs to be deleted for SecEdit.exe /import to work
-
- $ini_contents = ConvertTo-Ini -ini $ini
- Set-Content -Path $secedit_ini_path -Value $ini_contents
- $result.changed = $true
-
- $import_result = Run-SecEdit -arguments @("/configure", "/db", $secedit_db_path, "/cfg", $secedit_ini_path, "/quiet")
- $result.import_log = $import_result.log
- Remove-Item -Path $secedit_ini_path -Force
- if ($import_result.rc -ne 0) {
- $result.rc = $import_result.rc
- $result.stdout = $import_result.stdout
- $result.stderr = $import_result.stderr
- Fail-Json $result "Failed to import secedit.ini file from $($secedit_ini_path)"
- }
-}
-
-Function ConvertTo-Ini($ini) {
- $content = @()
- foreach ($key in $ini.GetEnumerator()) {
- $section = $key.Name
- $values = $key.Value
-
- $content += "[$section]"
- foreach ($value in $values.GetEnumerator()) {
- $value_key = $value.Name
- $value_value = $value.Value
-
- if ($null -ne $value_value) {
- $content += "$value_key = $value_value"
- }
- }
- }
-
- return $content -join "`r`n"
-}
-
-Function ConvertFrom-Ini($file_path) {
- $ini = @{}
- switch -Regex -File $file_path {
- "^\[(.+)\]" {
- $section = $matches[1]
- $ini.$section = @{}
- }
- "(.+?)\s*=(.*)" {
- $name = $matches[1].Trim()
- $value = $matches[2].Trim()
- if ($value -match "^\d+$") {
- $value = [int]$value
- } elseif ($value.StartsWith('"') -and $value.EndsWith('"')) {
- $value = $value.Substring(1, $value.Length - 2)
- }
-
- $ini.$section.$name = $value
- }
- }
-
- return $ini
-}
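
The two helpers above give SecEdit's INI dialect a minimal round-trip: ConvertFrom-Ini builds a nested hashtable from [section] headers and "key = value" pairs, coercing bare integers and stripping surrounding quotes, while ConvertTo-Ini serializes it back. As a rough cross-check only, the same parsing rules look like this in Python (parse_secedit_ini is an illustrative name, not part of the module):

import re

def parse_secedit_ini(text):
    # mirrors ConvertFrom-Ini: [section] headers, then "key = value" pairs
    ini, section = {}, None
    for line in text.splitlines():
        header = re.match(r'^\[(.+)\]', line)
        if header:
            section = header.group(1)
            ini[section] = {}
            continue
        pair = re.match(r'(.+?)\s*=(.*)', line)
        if pair and section is not None:
            name, value = pair.group(1).strip(), pair.group(2).strip()
            if re.match(r'^\d+$', value):
                value = int(value)                 # bare digits become ints
            elif value.startswith('"') and value.endswith('"'):
                value = value[1:-1]                # strip surrounding quotes
            ini[section][name] = value
    return ini
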
-
-if ($section -eq "Privilege Rights") {
-    Add-Warning -obj $result -message "Using this module to edit rights and privileges is error-prone; use the win_user_right module instead"
-}
-
-$will_change = $false
-$secedit_ini = Export-SecEdit
-if (-not ($secedit_ini.ContainsKey($section))) {
- Fail-Json $result "The section '$section' does not exist in SecEdit.exe output ini"
-}
-
-if ($secedit_ini.$section.ContainsKey($key)) {
- $current_value = $secedit_ini.$section.$key
-
- if ($current_value -cne $value) {
- if ($diff_mode) {
- $result.diff.prepared = @"
-[$section]
--$key = $current_value
-+$key = $value
-"@
- }
-
- $secedit_ini.$section.$key = $value
- $will_change = $true
- }
-} elseif ([string]$value -eq "") {
-    # Value was requested to be removed and is already absent; do nothing
-} else {
- if ($diff_mode) {
- $result.diff.prepared = @"
-[$section]
-+$key = $value
-"@
- }
- $secedit_ini.$section.$key = $value
- $will_change = $true
-}
-
-if ($will_change -eq $true) {
- $result.changed = $true
- if (-not $check_mode) {
- Import-SecEdit -ini $secedit_ini
-
-        # secedit doesn't error out on improper entries; re-export and verify
-        # that the changes occurred
- $verification_ini = Export-SecEdit
- $new_section_values = $verification_ini.$section
- if ($new_section_values.ContainsKey($key)) {
- $new_value = $new_section_values.$key
- if ($new_value -cne $value) {
- Fail-Json $result "Failed to change the value for key '$key' in section '$section', the value is still $new_value"
- }
- } elseif ([string]$value -eq "") {
- # Value was empty, so OK if no longer in the result
- } else {
- Fail-Json $result "The key '$key' in section '$section' is not a valid key, cannot set this value"
- }
- }
-}
-
-Exit-Json $result
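
Summarizing the flow above as a hedged Python sketch: because secedit silently ignores invalid entries, the module re-exports after importing and compares the key again. export_ini and import_ini below are stand-ins for Export-SecEdit and Import-SecEdit, not real functions:

def set_and_verify(export_ini, import_ini, section, key, value):
    ini = export_ini()
    ini[section][key] = value
    import_ini(ini)
    # re-export and confirm the change stuck (the module compares with -cne)
    new_section = export_ini().get(section, {})
    if key in new_section:
        if new_section[key] != value:
            raise RuntimeError("value for %r is still %r" % (key, new_section[key]))
    elif str(value) != '':
        raise RuntimeError("%r is not a valid key for section %r" % (key, section))
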
diff --git a/test/support/windows-integration/plugins/modules/win_security_policy.py b/test/support/windows-integration/plugins/modules/win_security_policy.py
deleted file mode 100644
index d582a532..00000000
--- a/test/support/windows-integration/plugins/modules/win_security_policy.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# this is a windows documentation stub, actual code lives in the .ps1
-# file of the same name
-
-ANSIBLE_METADATA = {'metadata_version': '1.1',
- 'status': ['preview'],
- 'supported_by': 'community'}
-
-DOCUMENTATION = r'''
----
-module: win_security_policy
-version_added: '2.4'
-short_description: Change local security policy settings
-description:
-- Allows you to set the local security policies that are configured by
- SecEdit.exe.
-options:
- section:
- description:
- - The ini section the key exists in.
- - If the section does not exist then the module will return an error.
- - Example sections to use are 'Account Policies', 'Local Policies',
- 'Event Log', 'Restricted Groups', 'System Services', 'Registry' and
-      'File System'.
-    - To edit the C(Privilege Rights) section, use the
-      M(win_user_right) module instead.
- type: str
- required: yes
- key:
- description:
- - The ini key of the section or policy name to modify.
- - The module will return an error if this key is invalid.
- type: str
- required: yes
- value:
- description:
- - The value for the ini key or policy name.
-    - If the key takes a boolean value, 0 = False and 1 = True.
- type: str
- required: yes
-notes:
-- This module uses the SecEdit.exe tool to configure the values; more details
-  of the areas and keys that can be configured can be found at
- U(https://msdn.microsoft.com/en-us/library/bb742512.aspx).
-- If you are in a domain environment these policies may be set by a GPO;
-  this module can temporarily change these values, but the GPO will override
-  them if they differ.
-- You can also run C(SecEdit.exe /export /cfg C:\temp\output.ini) to view the
- current policies set on your system.
-- When assigning user rights, use the M(win_user_right) module instead.
-seealso:
-- module: win_user_right
-author:
-- Jordan Borean (@jborean93)
-'''
-
-EXAMPLES = r'''
-- name: Change the guest account name
- win_security_policy:
- section: System Access
- key: NewGuestName
- value: Guest Account
-
-- name: Set the maximum password age
- win_security_policy:
- section: System Access
- key: MaximumPasswordAge
- value: 15
-
-- name: Do not store passwords using reversible encryption
- win_security_policy:
- section: System Access
- key: ClearTextPassword
- value: 0
-
-- name: Enable system events
- win_security_policy:
- section: Event Audit
- key: AuditSystemEvents
- value: 1
-'''
-
-RETURN = r'''
-rc:
- description: The return code after a failure when running SecEdit.exe.
- returned: failure with secedit calls
- type: int
- sample: -1
-stdout:
- description: The output of the STDOUT buffer after a failure when running
- SecEdit.exe.
- returned: failure with secedit calls
- type: str
- sample: check log for error details
-stderr:
- description: The output of the STDERR buffer after a failure when running
- SecEdit.exe.
- returned: failure with secedit calls
- type: str
- sample: failed to import security policy
-import_log:
- description: The log of the SecEdit.exe /configure job that configured the
- local policies. This is used for debugging purposes on failures.
- returned: secedit.exe /import run and change occurred
- type: str
- sample: Completed 6 percent (0/15) \tProcess Privilege Rights area.
-key:
- description: The key in the section passed to the module to modify.
- returned: success
- type: str
- sample: NewGuestName
-section:
- description: The section passed to the module to modify.
- returned: success
- type: str
- sample: System Access
-value:
-  description: The value passed to the module to set.
- returned: success
- type: str
- sample: Guest Account
-'''
diff --git a/test/units/_vendor/__init__.py b/test/units/_vendor/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/units/_vendor/__init__.py
diff --git a/test/units/_vendor/test_vendor.py b/test/units/_vendor/test_vendor.py
index 6a0fa385..fa9fdb25 100644
--- a/test/units/_vendor/test_vendor.py
+++ b/test/units/_vendor/test_vendor.py
@@ -62,4 +62,4 @@ def test_vendored_conflict():
import pkgutil
import sys
test_vendored(vendored_pkg_names=['sys', 'pkgutil']) # pass a real package we know is already loaded
- assert 'pkgutil, sys' in str(w[0].message) # ensure both conflicting modules are listed and sorted
+ assert any('pkgutil, sys' in str(msg.message) for msg in w) # ensure both conflicting modules are listed and sorted
diff --git a/test/units/ansible_test/ci/test_shippable.py b/test/units/ansible_test/ci/test_shippable.py
deleted file mode 100644
index 08b276c7..00000000
--- a/test/units/ansible_test/ci/test_shippable.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from .util import common_auth_test
-
-
-def test_auth():
- # noinspection PyProtectedMember
- from ansible_test._internal.ci.shippable import (
- ShippableAuthHelper,
- )
-
- class TestShippableAuthHelper(ShippableAuthHelper):
- def __init__(self):
- self.public_key_pem = None
- self.private_key_pem = None
-
- def publish_public_key(self, public_key_pem):
- # avoid publishing key
- self.public_key_pem = public_key_pem
-
- def initialize_private_key(self):
- # cache in memory instead of on disk
- if not self.private_key_pem:
- self.private_key_pem = self.generate_private_key()
-
- return self.private_key_pem
-
- auth = TestShippableAuthHelper()
-
- common_auth_test(auth)
diff --git a/test/units/ansible_test/test_docker_util.py b/test/units/ansible_test/test_docker_util.py
deleted file mode 100644
index 8427f0f2..00000000
--- a/test/units/ansible_test/test_docker_util.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# This file is part of Ansible
-# -*- coding: utf-8 -*-
-#
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-import pytest
-from units.compat.mock import call, patch, MagicMock
-
-# docker images quay.io/ansible/centos7-test-container --format '{{json .}}'
-DOCKER_OUTPUT_MULTIPLE = """
-{"Containers":"N/A","CreatedAt":"2020-06-11 17:05:58 -0500 CDT","CreatedSince":"3 months ago","Digest":"\u003cnone\u003e","ID":"b0f914b26cc1","Repository":"quay.io/ansible/centos7-test-container","SharedSize":"N/A","Size":"556MB","Tag":"1.17.0","UniqueSize":"N/A","VirtualSize":"555.6MB"}
-{"Containers":"N/A","CreatedAt":"2020-06-11 17:05:58 -0500 CDT","CreatedSince":"3 months ago","Digest":"\u003cnone\u003e","ID":"b0f914b26cc1","Repository":"quay.io/ansible/centos7-test-container","SharedSize":"N/A","Size":"556MB","Tag":"latest","UniqueSize":"N/A","VirtualSize":"555.6MB"}
-{"Containers":"N/A","CreatedAt":"2019-04-01 19:59:39 -0500 CDT","CreatedSince":"18 months ago","Digest":"\u003cnone\u003e","ID":"dd3d10e03dd3","Repository":"quay.io/ansible/centos7-test-container","SharedSize":"N/A","Size":"678MB","Tag":"1.8.0","UniqueSize":"N/A","VirtualSize":"678MB"}
-""".lstrip() # noqa: E501
-
-PODMAN_OUTPUT = """
-[
- {
- "id": "dd3d10e03dd3580de865560c3440c812a33fd7a1fca8ed8e4a1219ff3d809e3a",
- "names": [
- "quay.io/ansible/centos7-test-container:1.8.0"
- ],
- "digest": "sha256:6e5d9c99aa558779715a80715e5cf0c227a4b59d95e6803c148290c5d0d9d352",
- "created": "2019-04-02T00:59:39.234584184Z",
- "size": 702761933
- },
- {
- "id": "b0f914b26cc1088ab8705413c2f2cf247306ceeea51260d64c26894190d188bd",
- "names": [
- "quay.io/ansible/centos7-test-container:latest"
- ],
- "digest": "sha256:d8431aa74f60f4ff0f1bd36bc9a227bbb2066330acd8bf25e29d8614ee99e39c",
- "created": "2020-06-11T22:05:58.382459136Z",
- "size": 578513505
- }
-]
-""".lstrip()
-
-
-@pytest.fixture
-def docker_images():
- from ansible_test._internal.docker_util import docker_images
- return docker_images
-
-
-@pytest.fixture
-def ansible_test(ansible_test):
- import ansible_test
- return ansible_test
-
-
-@pytest.fixture
-def subprocess_error():
- from ansible_test._internal.util import SubprocessError
- return SubprocessError
-
-
-@pytest.mark.parametrize(
- ('returned_items_count', 'patched_dc_stdout'),
- (
- (3, (DOCKER_OUTPUT_MULTIPLE, '')),
- (2, (PODMAN_OUTPUT, '')),
- (0, ('', '')),
- ),
- ids=('docker JSONL', 'podman JSON sequence', 'empty output'))
-def test_docker_images(docker_images, mocker, returned_items_count, patched_dc_stdout):
- mocker.patch(
- 'ansible_test._internal.docker_util.docker_command',
- return_value=patched_dc_stdout)
- ret = docker_images('', 'quay.io/ansible/centos7-test-container')
- assert len(ret) == returned_items_count
-
-
-def test_podman_fallback(ansible_test, docker_images, subprocess_error, mocker):
- '''Test podman >2 && <2.2 fallback'''
-
- cmd = ['docker', 'images', 'quay.io/ansible/centos7-test-container', '--format', '{{json .}}']
- docker_command_results = [
- subprocess_error(cmd, status=1, stderr='function "json" not defined'),
- (PODMAN_OUTPUT, ''),
- ]
- mocker.patch(
- 'ansible_test._internal.docker_util.docker_command',
- side_effect=docker_command_results)
-
- ret = docker_images('', 'quay.io/ansible/centos7-test-container')
- calls = [
- call(
- '',
- ['images', 'quay.io/ansible/centos7-test-container', '--format', '{{json .}}'],
- capture=True,
- always=True),
- call(
- '',
- ['images', 'quay.io/ansible/centos7-test-container', '--format', 'json'],
- capture=True,
- always=True),
- ]
- ansible_test._internal.docker_util.docker_command.assert_has_calls(calls)
- assert len(ret) == 2
-
-
-def test_podman_no_such_image(ansible_test, docker_images, subprocess_error, mocker):
- '''Test podman "no such image" error'''
-
- cmd = ['docker', 'images', 'quay.io/ansible/centos7-test-container', '--format', '{{json .}}']
- exc = subprocess_error(cmd, status=1, stderr='no such image'),
- mocker.patch(
- 'ansible_test._internal.docker_util.docker_command',
- side_effect=exc)
- ret = docker_images('', 'quay.io/ansible/centos7-test-container')
- assert ret == []
diff --git a/test/units/cli/test_adhoc.py b/test/units/cli/test_adhoc.py
index bb2ed165..18775f5d 100644
--- a/test/units/cli/test_adhoc.py
+++ b/test/units/cli/test_adhoc.py
@@ -105,7 +105,7 @@ def test_ansible_version(capsys, mocker):
version_lines = version[0].splitlines()
assert len(version_lines) == 9, 'Incorrect number of lines in "ansible --version" output'
- assert re.match(r'ansible \[core [0-9.a-z]+\]', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
+ assert re.match(r'ansible \[core [0-9.a-z]+\]$', version_lines[0]), 'Incorrect ansible version line in "ansible --version" output'
assert re.match(' config file = .*$', version_lines[1]), 'Incorrect config file line in "ansible --version" output'
assert re.match(' configured module search path = .*$', version_lines[2]), 'Incorrect module search path in "ansible --version" output'
assert re.match(' ansible python module location = .*$', version_lines[3]), 'Incorrect python module location in "ansible --version" output'
diff --git a/test/units/cli/test_doc.py b/test/units/cli/test_doc.py
index 58feadf8..5cdf974b 100644
--- a/test/units/cli/test_doc.py
+++ b/test/units/cli/test_doc.py
@@ -27,6 +27,11 @@ TTY_IFY_DATA = {
'IBM(International Business Machines)': 'IBM(International Business Machines)',
'L(the user guide, https://docs.ansible.com/)': 'the user guide <https://docs.ansible.com/>',
'R(the user guide, user-guide)': 'the user guide',
+ # de-rsty refs and anchors
+ 'yolo :ref:`my boy` does stuff': 'yolo website for `my boy` does stuff',
+ '.. seealso:: Something amazing': 'See website for: Something amazing',
+    '.. seealso:: Troublesome multiline\n   Stuff goes there': 'See website for: Troublesome multiline\n   Stuff goes there',
+ '.. note:: boring stuff': 'Note: boring stuff',
}
diff --git a/test/units/executor/module_common/test_recursive_finder.py b/test/units/executor/module_common/test_recursive_finder.py
index 074dfb2f..8136a006 100644
--- a/test/units/executor/module_common/test_recursive_finder.py
+++ b/test/units/executor/module_common/test_recursive_finder.py
@@ -50,6 +50,7 @@ MODULE_UTILS_BASIC_FILES = frozenset(('ansible/__init__.py',
'ansible/module_utils/parsing/convert_bool.py',
'ansible/module_utils/common/__init__.py',
'ansible/module_utils/common/file.py',
+ 'ansible/module_utils/common/locale.py',
'ansible/module_utils/common/process.py',
'ansible/module_utils/common/sys_info.py',
'ansible/module_utils/common/text/__init__.py',
diff --git a/test/units/executor/test_interpreter_discovery.py b/test/units/executor/test_interpreter_discovery.py
index 10f97d63..5919d39f 100644
--- a/test/units/executor/test_interpreter_discovery.py
+++ b/test/units/executor/test_interpreter_discovery.py
@@ -29,10 +29,9 @@ def test_discovery_interpreter_linux_auto_legacy():
assert actual == u'/usr/bin/python'
assert len(mock_action.method_calls) == 3
- assert mock_action.method_calls[2][0] == '_discovery_deprecation_warnings.append'
+ assert mock_action.method_calls[2][0] == '_discovery_warnings.append'
assert u'Distribution Ubuntu 16.04 on host host-fóöbär should use /usr/bin/python3, but is using /usr/bin/python' \
- u' for backward compatibility' in mock_action.method_calls[2][1][0]['msg']
- assert mock_action.method_calls[2][1][0]['version'] == '2.12'
+ u' for backward compatibility' in mock_action.method_calls[2][1][0]
def test_discovery_interpreter_linux_auto_legacy_silent():
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index b27faf9b..8c01b339 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -246,7 +246,7 @@ class TestTaskExecutor(unittest.TestCase):
mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.netconf_suffix'
- module_prefix = action.split('_')[0]
+ module_prefix = action.split('_', 1)[0]
te._task.action = action
handler = te._get_action_handler(mock_connection, mock_templar)
@@ -281,7 +281,7 @@ class TestTaskExecutor(unittest.TestCase):
mock_connection = MagicMock()
mock_templar = MagicMock()
action = 'namespace.prefix_suffix'
- module_prefix = action.split('_')[0]
+ module_prefix = action.split('_', 1)[0]
te._task.action = action
handler = te._get_action_handler(mock_connection, mock_templar)
diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py
index 8575a55c..3de2e89a 100644
--- a/test/units/galaxy/test_collection.py
+++ b/test/units/galaxy/test_collection.py
@@ -17,8 +17,9 @@ from hashlib import sha256
from io import BytesIO
from units.compat.mock import MagicMock, mock_open, patch
+import ansible.constants as C
from ansible import context
-from ansible.cli.galaxy import GalaxyCLI
+from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
from ansible.module_utils._text import to_bytes, to_native, to_text
@@ -198,6 +199,48 @@ def manifest(manifest_info):
yield fake_file, sha256(b_data).hexdigest()
+@pytest.fixture()
+def server_config(monkeypatch):
+ monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1', 'server2', 'server3'])
+
+ default_options = dict((k, None) for k, v in SERVER_DEF)
+
+ server1 = dict(default_options)
+ server1.update({'url': 'https://galaxy.ansible.com/api/', 'validate_certs': False})
+
+ server2 = dict(default_options)
+ server2.update({'url': 'https://galaxy.ansible.com/api/', 'validate_certs': True})
+
+ server3 = dict(default_options)
+ server3.update({'url': 'https://galaxy.ansible.com/api/'})
+
+ return server1, server2, server3
+
+
+@pytest.mark.parametrize('global_ignore_certs', [True, False])
+def test_validate_certs(global_ignore_certs, server_config, monkeypatch):
+ get_plugin_options = MagicMock(side_effect=server_config)
+ monkeypatch.setattr(C.config, 'get_plugin_options', get_plugin_options)
+
+ cli_args = [
+ 'ansible-galaxy',
+ 'collection',
+ 'install',
+ 'namespace.collection:1.0.0',
+ ]
+ if global_ignore_certs:
+ cli_args.append('--ignore-certs')
+
+ galaxy_cli = GalaxyCLI(args=cli_args)
+ mock_execute_install = MagicMock()
+ monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
+ galaxy_cli.run()
+
+ assert galaxy_cli.api_servers[0].validate_certs is False
+ assert galaxy_cli.api_servers[1].validate_certs is True
+ assert galaxy_cli.api_servers[2].validate_certs is not global_ignore_certs
+
+
def test_build_collection_no_galaxy_yaml():
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
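
A hedged restatement of what test_validate_certs pins down above: a per-server validate_certs setting always wins, and only servers that leave it unset follow the global --ignore-certs flag. Illustrative helper name only:

def effective_validate_certs(server_validate_certs, global_ignore_certs):
    # explicit per-server configuration beats the CLI flag
    if server_validate_certs is not None:
        return server_validate_certs
    return not global_ignore_certs
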
diff --git a/test/units/galaxy/test_collection_install.py b/test/units/galaxy/test_collection_install.py
index 37e09970..d4565fd5 100644
--- a/test/units/galaxy/test_collection_install.py
+++ b/test/units/galaxy/test_collection_install.py
@@ -171,6 +171,63 @@ def galaxy_server():
return galaxy_api
+@pytest.mark.parametrize(
+ 'url,version,trailing_slash',
+ [
+ ('https://github.com/org/repo', 'commitish', False),
+ ('https://github.com/org/repo,commitish', None, False),
+ ('https://github.com/org/repo/,commitish', None, True),
+ ('https://github.com/org/repo#,commitish', None, False),
+ ]
+)
+def test_concrete_artifact_manager_scm_cmd(url, version, trailing_slash, monkeypatch):
+ mock_subprocess_check_call = MagicMock()
+ monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
+ mock_mkdtemp = MagicMock(return_value='')
+ monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
+
+ collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
+
+ assert mock_subprocess_check_call.call_count == 2
+
+ repo = 'https://github.com/org/repo'
+ if trailing_slash:
+ repo += '/'
+ clone_cmd = ('git', 'clone', repo, '')
+
+ assert mock_subprocess_check_call.call_args_list[0].args[0] == clone_cmd
+ assert mock_subprocess_check_call.call_args_list[1].args[0] == ('git', 'checkout', 'commitish')
+
+
+@pytest.mark.parametrize(
+ 'url,version,trailing_slash',
+ [
+ ('https://github.com/org/repo', 'HEAD', False),
+ ('https://github.com/org/repo,HEAD', None, False),
+ ('https://github.com/org/repo/,HEAD', None, True),
+ ('https://github.com/org/repo#,HEAD', None, False),
+ ('https://github.com/org/repo', None, False),
+ ]
+)
+def test_concrete_artifact_manager_scm_cmd_shallow(url, version, trailing_slash, monkeypatch):
+ mock_subprocess_check_call = MagicMock()
+ monkeypatch.setattr(collection.concrete_artifact_manager.subprocess, 'check_call', mock_subprocess_check_call)
+ mock_mkdtemp = MagicMock(return_value='')
+ monkeypatch.setattr(collection.concrete_artifact_manager, 'mkdtemp', mock_mkdtemp)
+
+ collection.concrete_artifact_manager._extract_collection_from_git(url, version, b'path')
+
+ assert mock_subprocess_check_call.call_count == 2
+
+ repo = 'https://github.com/org/repo'
+ if trailing_slash:
+ repo += '/'
+ shallow_clone_cmd = ('git', 'clone', '--depth=1', repo, '')
+
+ assert mock_subprocess_check_call.call_args_list[0].args[0] == shallow_clone_cmd
+ assert mock_subprocess_check_call.call_args_list[1].args[0] == ('git', 'checkout', 'HEAD')
+
+
def test_build_requirement_from_path(collection_artifact):
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
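
The parametrized URLs above exercise the 'url[,version]' convention for SCM requirements: a comma may carry the version, an empty '#' fragment is dropped, a trailing slash is kept, and a missing version means a shallow clone of HEAD. A rough sketch of that splitting, with an illustrative name rather than the real implementation:

def split_scm_requirement(url, version=None):
    if ',' in url:
        url, version = url.split(',', 1)   # version may ride along after a comma
    url = url.split('#', 1)[0]             # drop an (empty) fragment marker
    return url, (version or 'HEAD')        # no version: shallow-clone HEAD
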
diff --git a/test/units/galaxy/test_role_install.py b/test/units/galaxy/test_role_install.py
new file mode 100644
index 00000000..cf990b55
--- /dev/null
+++ b/test/units/galaxy/test_role_install.py
@@ -0,0 +1,151 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import os
+import pytest
+import tempfile
+
+from io import StringIO
+from ansible import context
+from ansible.cli.galaxy import GalaxyCLI
+from ansible.galaxy import api, role, Galaxy
+from ansible.module_utils._text import to_text
+from ansible.utils import context_objects as co
+
+
+def call_galaxy_cli(args):
+ orig = co.GlobalCLIArgs._Singleton__instance
+ co.GlobalCLIArgs._Singleton__instance = None
+ try:
+ GalaxyCLI(args=['ansible-galaxy', 'role'] + args).run()
+ finally:
+ co.GlobalCLIArgs._Singleton__instance = orig
+
+
+@pytest.fixture(autouse=True)
+def reset_cli_args():
+ co.GlobalCLIArgs._Singleton__instance = None
+ yield
+ co.GlobalCLIArgs._Singleton__instance = None
+
+
+@pytest.fixture(autouse=True)
+def galaxy_server():
+ context.CLIARGS._store = {'ignore_certs': False}
+ galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com')
+ return galaxy_api
+
+
+@pytest.fixture(autouse=True)
+def init_role_dir(tmp_path_factory):
+ test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Roles Input'))
+ namespace = 'ansible_namespace'
+ role = 'role'
+ skeleton_path = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'role_skeleton')
+ call_galaxy_cli(['init', '%s.%s' % (namespace, role), '-c', '--init-path', test_dir, '--role-skeleton', skeleton_path])
+
+
+def mock_NamedTemporaryFile(mocker, **args):
+ mock_ntf = mocker.MagicMock()
+ mock_ntf.write = mocker.MagicMock()
+ mock_ntf.close = mocker.MagicMock()
+ mock_ntf.name = None
+ return mock_ntf
+
+
+@pytest.fixture(autouse=True)
+def init_test(monkeypatch):
+ monkeypatch.setattr(tempfile, 'NamedTemporaryFile', mock_NamedTemporaryFile)
+
+
+@pytest.fixture(autouse=True)
+def mock_role_download_api(mocker, monkeypatch):
+ mock_role_api = mocker.MagicMock()
+ mock_role_api.side_effect = [
+ StringIO(u''),
+ ]
+ monkeypatch.setattr(role, 'open_url', mock_role_api)
+ return mock_role_api
+
+
+def test_role_download_github(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+ mock_api = mocker.MagicMock()
+ mock_api.side_effect = [
+ StringIO(u'{"available_versions":{"v1":"v1/"}}'),
+ StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
+ StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
+ ]
+ monkeypatch.setattr(api, 'open_url', mock_api)
+
+ role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
+
+ assert mock_role_download_api.call_count == 1
+ assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
+
+
+def test_role_download_github_default_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+ mock_api = mocker.MagicMock()
+ mock_api.side_effect = [
+ StringIO(u'{"available_versions":{"v1":"v1/"}}'),
+ StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
+ StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2"}]}'),
+ ]
+ monkeypatch.setattr(api, 'open_url', mock_api)
+
+ role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
+
+ assert mock_role_download_api.call_count == 1
+ assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.2.tar.gz'
+
+
+def test_role_download_github_no_download_url_for_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+ mock_api = mocker.MagicMock()
+ mock_api.side_effect = [
+ StringIO(u'{"available_versions":{"v1":"v1/"}}'),
+ StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
+ StringIO(u'{"results":[{"name": "0.0.1"},{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
+ ]
+ monkeypatch.setattr(api, 'open_url', mock_api)
+
+ role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
+
+ assert mock_role_download_api.call_count == 1
+ assert mock_role_download_api.mock_calls[0][1][0] == 'https://github.com/test_owner/test_role/archive/0.0.1.tar.gz'
+
+
+def test_role_download_url(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+ mock_api = mocker.MagicMock()
+ mock_api.side_effect = [
+ StringIO(u'{"available_versions":{"v1":"v1/"}}'),
+ StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
+ StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
+ u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
+ ]
+ monkeypatch.setattr(api, 'open_url', mock_api)
+
+ role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role', version="0.0.1").install()
+
+ assert mock_role_download_api.call_count == 1
+ assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.1.tar.gz'
+
+
+def test_role_download_url_default_version(mocker, galaxy_server, mock_role_download_api, monkeypatch):
+ mock_api = mocker.MagicMock()
+ mock_api.side_effect = [
+ StringIO(u'{"available_versions":{"v1":"v1/"}}'),
+ StringIO(u'{"results":[{"id":"123","github_user":"test_owner","github_repo": "test_role"}]}'),
+ StringIO(u'{"results":[{"name": "0.0.1","download_url":"http://localhost:8080/test_owner/test_role/0.0.1.tar.gz"},'
+ u'{"name": "0.0.2","download_url":"http://localhost:8080/test_owner/test_role/0.0.2.tar.gz"}]}'),
+ ]
+ monkeypatch.setattr(api, 'open_url', mock_api)
+
+ role.GalaxyRole(Galaxy(), galaxy_server, 'test_owner.test_role').install()
+
+ assert mock_role_download_api.call_count == 1
+ assert mock_role_download_api.mock_calls[0][1][0] == 'http://localhost:8080/test_owner/test_role/0.0.2.tar.gz'
diff --git a/test/units/galaxy/test_token.py b/test/units/galaxy/test_token.py
index 94449e28..13426688 100644
--- a/test/units/galaxy/test_token.py
+++ b/test/units/galaxy/test_token.py
@@ -8,8 +8,10 @@ __metaclass__ = type
import os
import pytest
+from units.compat.mock import MagicMock
import ansible.constants as C
+from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF
from ansible.galaxy.token import GalaxyToken, NoTokenSentinel
from ansible.module_utils._text import to_bytes, to_text
@@ -32,6 +34,47 @@ def b_token_file(request, tmp_path_factory):
C.GALAXY_TOKEN_PATH = orig_token_path
+def test_client_id(monkeypatch):
+ monkeypatch.setattr(C, 'GALAXY_SERVER_LIST', ['server1', 'server2'])
+
+ test_server_config = {option[0]: None for option in SERVER_DEF}
+ test_server_config.update(
+ {
+ 'url': 'http://my_galaxy_ng:8000/api/automation-hub/',
+ 'auth_url': 'http://my_keycloak:8080/auth/realms/myco/protocol/openid-connect/token',
+ 'client_id': 'galaxy-ng',
+ 'token': 'access_token',
+ }
+ )
+
+ test_server_default = {option[0]: None for option in SERVER_DEF}
+ test_server_default.update(
+ {
+ 'url': 'https://cloud.redhat.com/api/automation-hub/',
+ 'auth_url': 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token',
+ 'token': 'access_token',
+ }
+ )
+
+ get_plugin_options = MagicMock(side_effect=[test_server_config, test_server_default])
+ monkeypatch.setattr(C.config, 'get_plugin_options', get_plugin_options)
+
+ cli_args = [
+ 'ansible-galaxy',
+ 'collection',
+ 'install',
+ 'namespace.collection:1.0.0',
+ ]
+
+ galaxy_cli = GalaxyCLI(args=cli_args)
+ mock_execute_install = MagicMock()
+ monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install)
+ galaxy_cli.run()
+
+ assert galaxy_cli.api_servers[0].token.client_id == 'galaxy-ng'
+ assert galaxy_cli.api_servers[1].token.client_id == 'cloud-services'
+
+
def test_token_explicit(b_token_file):
assert GalaxyToken(token="explicit").get() == "explicit"
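
Hedged reading of test_client_id above: a configured client_id is used verbatim, while keycloak-style token auth without one falls back to 'cloud-services'. Roughly:

def resolve_client_id(server_options):
    # explicit client_id wins; otherwise the Red Hat SSO default applies
    return server_options.get('client_id') or 'cloud-services'
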
diff --git a/test/units/mock/loader.py b/test/units/mock/loader.py
index c47ec39e..f6ceb379 100644
--- a/test/units/mock/loader.py
+++ b/test/units/mock/loader.py
@@ -47,12 +47,12 @@ class DictDataLoader(DataLoader):
# TODO: the real _get_file_contents returns a bytestring, so we actually convert the
# unicode/text it's created with to utf-8
- def _get_file_contents(self, path):
- path = to_text(path)
+ def _get_file_contents(self, file_name):
+ path = to_text(file_name)
if path in self._file_mapping:
- return (to_bytes(self._file_mapping[path]), False)
+ return to_bytes(self._file_mapping[file_name]), False
else:
- raise AnsibleParserError("file not found: %s" % path)
+ raise AnsibleParserError("file not found: %s" % file_name)
def path_exists(self, path):
path = to_text(path)
diff --git a/test/units/module_utils/basic/test_argument_spec.py b/test/units/module_utils/basic/test_argument_spec.py
index 1b3f7035..24bbe2e9 100644
--- a/test/units/module_utils/basic/test_argument_spec.py
+++ b/test/units/module_utils/basic/test_argument_spec.py
@@ -15,6 +15,7 @@ import pytest
from units.compat.mock import MagicMock
from ansible.module_utils import basic
from ansible.module_utils.api import basic_auth_argument_spec, rate_limit_argument_spec, retry_argument_spec
+from ansible.module_utils.common import warnings
from ansible.module_utils.common.warnings import get_deprecation_messages, get_warning_messages
from ansible.module_utils.six import integer_types, string_types
from ansible.module_utils.six.moves import builtins
@@ -400,8 +401,10 @@ class TestComplexArgSpecs:
assert am.params['bar3'][1] == 'test/'
@pytest.mark.parametrize('stdin', [{'foo': 'hello', 'zodraz': 'one'}], indirect=['stdin'])
- def test_deprecated_alias(self, capfd, mocker, stdin, complex_argspec):
+ def test_deprecated_alias(self, capfd, mocker, stdin, complex_argspec, monkeypatch):
"""Test a deprecated alias"""
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
am = basic.AnsibleModule(**complex_argspec)
assert "Alias 'zodraz' is deprecated." in get_deprecation_messages()[0]['msg']
diff --git a/test/units/module_utils/basic/test_deprecate_warn.py b/test/units/module_utils/basic/test_deprecate_warn.py
index 351cf25b..7fd54ce0 100644
--- a/test/units/module_utils/basic/test_deprecate_warn.py
+++ b/test/units/module_utils/basic/test_deprecate_warn.py
@@ -10,6 +10,8 @@ import json
import pytest
+from ansible.module_utils.common import warnings
+
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_warn(am, capfd):
@@ -23,7 +25,9 @@ def test_warn(am, capfd):
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
-def test_deprecate(am, capfd):
+def test_deprecate(am, capfd, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
am.deprecate('deprecation1')
am.deprecate('deprecation2', '2.3') # pylint: disable=ansible-deprecated-no-collection-name
am.deprecate('deprecation3', version='2.4') # pylint: disable=ansible-deprecated-no-collection-name
diff --git a/test/units/module_utils/basic/test_exit_json.py b/test/units/module_utils/basic/test_exit_json.py
index fe7a8245..4afcb276 100644
--- a/test/units/module_utils/basic/test_exit_json.py
+++ b/test/units/module_utils/basic/test_exit_json.py
@@ -12,6 +12,7 @@ import datetime
import pytest
+from ansible.module_utils.common import warnings
EMPTY_INVOCATION = {u'module_args': {}}
DATETIME = datetime.datetime.strptime('2020-07-13 12:50:00', '%Y-%m-%d %H:%M:%S')
@@ -37,7 +38,9 @@ class TestAnsibleModuleExitJson:
# pylint bug: https://github.com/PyCQA/pylint/issues/511
# pylint: disable=undefined-variable
@pytest.mark.parametrize('args, expected, stdin', ((a, e, {}) for a, e in DATA), indirect=['stdin'])
- def test_exit_json_exits(self, am, capfd, args, expected):
+ def test_exit_json_exits(self, am, capfd, args, expected, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
with pytest.raises(SystemExit) as ctx:
am.exit_json(**args)
assert ctx.value.code == 0
@@ -51,7 +54,9 @@ class TestAnsibleModuleExitJson:
@pytest.mark.parametrize('args, expected, stdin',
((a, e, {}) for a, e in DATA if 'msg' in a), # pylint: disable=undefined-variable
indirect=['stdin'])
- def test_fail_json_exits(self, am, capfd, args, expected):
+ def test_fail_json_exits(self, am, capfd, args, expected, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
with pytest.raises(SystemExit) as ctx:
am.fail_json(**args)
assert ctx.value.code == 1
@@ -63,7 +68,9 @@ class TestAnsibleModuleExitJson:
assert return_val == expected
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
- def test_fail_json_msg_positional(self, am, capfd):
+ def test_fail_json_msg_positional(self, am, capfd, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
with pytest.raises(SystemExit) as ctx:
am.fail_json('This is the msg')
assert ctx.value.code == 1
@@ -75,8 +82,10 @@ class TestAnsibleModuleExitJson:
'invocation': EMPTY_INVOCATION}
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
- def test_fail_json_msg_as_kwarg_after(self, am, capfd):
+ def test_fail_json_msg_as_kwarg_after(self, am, capfd, monkeypatch):
"""Test that msg as a kwarg after other kwargs works"""
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
with pytest.raises(SystemExit) as ctx:
am.fail_json(arbitrary=42, msg='This is the msg')
assert ctx.value.code == 1
@@ -95,6 +104,8 @@ class TestAnsibleModuleExitJson:
if sys.version_info < (3,):
error_msg = "fail_json() takes exactly 2 arguments (1 given)"
+ elif sys.version_info >= (3, 10):
+ error_msg = "AnsibleModule.fail_json() missing 1 required positional argument: 'msg'"
else:
error_msg = "fail_json() missing 1 required positional argument: 'msg'"
@@ -139,7 +150,8 @@ class TestAnsibleModuleExitValuesRemoved:
(({'username': {}, 'password': {'no_log': True}, 'token': {'no_log': True}}, s, r, e)
for s, r, e in DATA), # pylint: disable=undefined-variable
indirect=['am', 'stdin'])
- def test_exit_json_removes_values(self, am, capfd, return_val, expected):
+ def test_exit_json_removes_values(self, am, capfd, return_val, expected, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
with pytest.raises(SystemExit):
am.exit_json(**return_val)
out, err = capfd.readouterr()
@@ -151,7 +163,8 @@ class TestAnsibleModuleExitValuesRemoved:
(({'username': {}, 'password': {'no_log': True}, 'token': {'no_log': True}}, s, r, e)
for s, r, e in DATA), # pylint: disable=undefined-variable
indirect=['am', 'stdin'])
- def test_fail_json_removes_values(self, am, capfd, return_val, expected):
+ def test_fail_json_removes_values(self, am, capfd, return_val, expected, monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
expected['failed'] = True
with pytest.raises(SystemExit):
am.fail_json(**return_val) == expected
diff --git a/test/units/module_utils/basic/test_heuristic_log_sanitize.py b/test/units/module_utils/basic/test_heuristic_log_sanitize.py
index f8a0929d..664b8a5a 100644
--- a/test/units/module_utils/basic/test_heuristic_log_sanitize.py
+++ b/test/units/module_utils/basic/test_heuristic_log_sanitize.py
@@ -87,3 +87,6 @@ class TestHeuristicLogSanitize(unittest.TestCase):
def test_hides_parameter_secrets(self):
output = heuristic_log_sanitize('token="secret", user="person", token_entry="test=secret"', frozenset(['secret']))
self.assertNotIn('secret', output)
+
+ def test_no_password(self):
+ self.assertEqual(heuristic_log_sanitize('foo@bar'), 'foo@bar')
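
The new case documents that a bare 'foo@bar' survives sanitization: only password-shaped ':secret@' runs are masked. A loose approximation of that rule, not the actual implementation:

import re

def mask_url_password(text):
    # only a ':password@' shape is rewritten, so 'foo@bar' passes through
    return re.sub(r'(?<=:)[^:@/]+(?=@)', '********', text)

assert mask_url_password('foo@bar') == 'foo@bar'
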
diff --git a/test/units/module_utils/basic/test_imports.py b/test/units/module_utils/basic/test_imports.py
index eb741de5..d1a5f379 100644
--- a/test/units/module_utils/basic/test_imports.py
+++ b/test/units/module_utils/basic/test_imports.py
@@ -43,10 +43,10 @@ class TestImports(ModuleTestCase):
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_selinux(self, mock_import):
- def _mock_import(name, *args, **kwargs):
- if name == 'ansible.module_utils.compat.selinux':
+ def _mock_import(name, globals=None, locals=None, fromlist=tuple(), level=0, **kwargs):
+ if name == 'ansible.module_utils.compat' and fromlist == ('selinux',):
raise ImportError
- return realimport(name, *args, **kwargs)
+ return realimport(name, globals=globals, locals=locals, fromlist=fromlist, level=level, **kwargs)
try:
self.clear_modules(['ansible.module_utils.compat.selinux', 'ansible.module_utils.basic'])
diff --git a/test/units/module_utils/basic/test_platform_distribution.py b/test/units/module_utils/basic/test_platform_distribution.py
index d7a4510c..3c1afb7d 100644
--- a/test/units/module_utils/basic/test_platform_distribution.py
+++ b/test/units/module_utils/basic/test_platform_distribution.py
@@ -42,12 +42,6 @@ def test_get_platform():
# get_distribution tests
#
-def test_get_distribution_not_linux():
- """If it's not Linux, then it has no distribution"""
- with patch('platform.system', return_value='Foo'):
- assert get_distribution() is None
-
-
@pytest.mark.usefixtures("platform_linux")
class TestGetDistribution:
"""Tests for get_distribution that have to find something"""
@@ -114,11 +108,6 @@ class TestGetDistribution:
# get_distribution_version tests
#
-def test_get_distribution_version_not_linux():
- """If it's not Linux, then it has no distribution"""
- with patch('platform.system', return_value='Foo'):
- assert get_distribution_version() is None
-
@pytest.mark.usefixtures("platform_linux")
def test_distro_found():
diff --git a/test/units/module_utils/basic/test_run_command.py b/test/units/module_utils/basic/test_run_command.py
index 25f1c48e..04211e2d 100644
--- a/test/units/module_utils/basic/test_run_command.py
+++ b/test/units/module_utils/basic/test_run_command.py
@@ -163,27 +163,22 @@ class TestRunCommandArgs:
class TestRunCommandCwd:
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_cwd(self, mocker, rc_am):
- rc_am._os.getcwd.return_value = '/old'
rc_am.run_command('/bin/ls', cwd='/new')
- assert rc_am._os.chdir.mock_calls == [mocker.call(b'/new'), mocker.call('/old'), ]
+ assert rc_am._subprocess.Popen.mock_calls[0][2]['cwd'] == b'/new'
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_cwd_relative_path(self, mocker, rc_am):
- rc_am._os.getcwd.return_value = '/old'
rc_am.run_command('/bin/ls', cwd='sub-dir')
- assert rc_am._os.chdir.mock_calls == [mocker.call(b'/old/sub-dir'), mocker.call('/old'), ]
+ assert rc_am._subprocess.Popen.mock_calls[0][2]['cwd'] == b'/home/foo/sub-dir'
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_cwd_not_a_dir(self, mocker, rc_am):
- rc_am._os.getcwd.return_value = '/old'
- rc_am._os.path.isdir.side_effect = lambda d: d != '/not-a-dir'
rc_am.run_command('/bin/ls', cwd='/not-a-dir')
- assert rc_am._os.chdir.mock_calls == [mocker.call('/old'), ]
+ assert rc_am._subprocess.Popen.mock_calls[0][2]['cwd'] == b'/not-a-dir'
@pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
def test_cwd_not_a_dir_noignore(self, rc_am):
- rc_am._os.getcwd.return_value = '/old'
- rc_am._os.path.isdir.side_effect = lambda d: d != '/not-a-dir'
+ rc_am._os.path.isdir.side_effect = lambda d: d != b'/not-a-dir'
with pytest.raises(SystemExit):
rc_am.run_command('/bin/ls', cwd='/not-a-dir', ignore_invalid_cwd=False)
assert rc_am.fail_json.called
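
The rewritten assertions reflect that run_command now hands the working directory straight to subprocess.Popen instead of chdir-ing in and out; in plain Python terms:

import subprocess

# cwd is passed through to Popen; no os.chdir() bookkeeping is needed
proc = subprocess.Popen(['/bin/ls'], cwd='/tmp',
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.communicate()
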
diff --git a/test/units/module_utils/common/arg_spec/test_module_validate.py b/test/units/module_utils/common/arg_spec/test_module_validate.py
index 14e6e1e7..5041d521 100644
--- a/test/units/module_utils/common/arg_spec/test_module_validate.py
+++ b/test/units/module_utils/common/arg_spec/test_module_validate.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import ansible.module_utils.common.warnings as warnings
+from ansible.module_utils.common import warnings
from ansible.module_utils.common.arg_spec import ModuleArgumentSpecValidator, ValidationResult
@@ -25,7 +25,9 @@ def test_module_validate():
assert result.validated_parameters == expected
-def test_module_alias_deprecations_warnings():
+def test_module_alias_deprecations_warnings(monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
arg_spec = {
'path': {
'aliases': ['source', 'src', 'flamethrower'],
diff --git a/test/units/module_utils/common/arg_spec/test_validate_valid.py b/test/units/module_utils/common/arg_spec/test_validate_valid.py
index b35b856f..7e41127e 100644
--- a/test/units/module_utils/common/arg_spec/test_validate_valid.py
+++ b/test/units/module_utils/common/arg_spec/test_validate_valid.py
@@ -7,8 +7,6 @@ __metaclass__ = type
import pytest
-import ansible.module_utils.common.warnings as warnings
-
from ansible.module_utils.common.arg_spec import ArgumentSpecValidator, ValidationResult
# Each item is id, argument_spec, parameters, expected, valid parameter names
diff --git a/test/units/module_utils/common/test_dict_transformations.py b/test/units/module_utils/common/test_dict_transformations.py
index ecb520b2..ba55299c 100644
--- a/test/units/module_utils/common/test_dict_transformations.py
+++ b/test/units/module_utils/common/test_dict_transformations.py
@@ -1,26 +1,20 @@
# -*- coding: utf-8 -*-
-# (c) 2017, Will Thames <will.thames@xvt.com.au>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# Copyright: (c) 2017, Will Thames <will.thames@xvt.com.au>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat import unittest
-from ansible.module_utils.common.dict_transformations import _camel_to_snake, _snake_to_camel, camel_dict_to_snake_dict, dict_merge
+import pytest
+
+from ansible.module_utils.common.dict_transformations import (
+ _camel_to_snake,
+ _snake_to_camel,
+ camel_dict_to_snake_dict,
+ dict_merge,
+ recursive_diff,
+)
+
EXPECTED_SNAKIFICATION = {
'alllower': 'alllower',
@@ -42,39 +36,39 @@ EXPECTED_REVERSIBLE = {
}
-class CamelToSnakeTestCase(unittest.TestCase):
+class TestCaseCamelToSnake:
def test_camel_to_snake(self):
for (k, v) in EXPECTED_SNAKIFICATION.items():
- self.assertEqual(_camel_to_snake(k), v)
+ assert _camel_to_snake(k) == v
def test_reversible_camel_to_snake(self):
for (k, v) in EXPECTED_REVERSIBLE.items():
- self.assertEqual(_camel_to_snake(k, reversible=True), v)
+ assert _camel_to_snake(k, reversible=True) == v
-class SnakeToCamelTestCase(unittest.TestCase):
+class TestCaseSnakeToCamel:
def test_snake_to_camel_reversed(self):
for (k, v) in EXPECTED_REVERSIBLE.items():
- self.assertEqual(_snake_to_camel(v, capitalize_first=True), k)
+ assert _snake_to_camel(v, capitalize_first=True) == k
-class CamelToSnakeAndBackTestCase(unittest.TestCase):
+class TestCaseCamelToSnakeAndBack:
def test_camel_to_snake_and_back(self):
for (k, v) in EXPECTED_REVERSIBLE.items():
- self.assertEqual(_snake_to_camel(_camel_to_snake(k, reversible=True), capitalize_first=True), k)
+ assert _snake_to_camel(_camel_to_snake(k, reversible=True), capitalize_first=True) == k
-class CamelDictToSnakeDictTestCase(unittest.TestCase):
+class TestCaseCamelDictToSnakeDict:
def test_ignore_list(self):
camel_dict = dict(Hello=dict(One='one', Two='two'), World=dict(Three='three', Four='four'))
snake_dict = camel_dict_to_snake_dict(camel_dict, ignore_list='World')
- self.assertEqual(snake_dict['hello'], dict(one='one', two='two'))
- self.assertEqual(snake_dict['world'], dict(Three='three', Four='four'))
+ assert snake_dict['hello'] == dict(one='one', two='two')
+ assert snake_dict['world'] == dict(Three='three', Four='four')
-class DictMergeTestCase(unittest.TestCase):
+class TestCaseDictMerge:
def test_dict_merge(self):
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
@@ -89,42 +83,42 @@ class DictMergeTestCase(unittest.TestCase):
result = dict_merge(base, other)
# string assertions
- self.assertTrue('one' in result)
- self.assertTrue('two' in result)
- self.assertEqual(result['three'], 4)
- self.assertEqual(result['four'], 4)
+ assert 'one' in result
+ assert 'two' in result
+ assert result['three'] == 4
+ assert result['four'] == 4
# dict assertions
- self.assertTrue('obj1' in result)
- self.assertTrue('key1' in result['obj1'])
- self.assertTrue('key2' in result['obj1'])
+ assert 'obj1' in result
+ assert 'key1' in result['obj1']
+ assert 'key2' in result['obj1']
# list assertions
# this line differs from the network_utils/common test of the function of the
# same name as this method does not merge lists
- self.assertEqual(result['l1'], [2, 1])
- self.assertTrue('l2' in result)
- self.assertEqual(result['l3'], [1])
- self.assertTrue('l4' in result)
+        assert result['l1'] == [2, 1]
+        assert 'l2' in result
+        assert result['l3'] == [1]
+ assert 'l4' in result
# nested assertions
- self.assertTrue('obj1' in result)
- self.assertEqual(result['obj1']['key1'], 2)
- self.assertTrue('key2' in result['obj1'])
+ assert 'obj1' in result
+        assert result['obj1']['key1'] == 2
+ assert 'key2' in result['obj1']
# bool assertions
- self.assertTrue('b1' in result)
- self.assertTrue('b2' in result)
- self.assertTrue(result['b3'])
- self.assertTrue(result['b4'])
+ assert 'b1' in result
+ assert 'b2' in result
+ assert result['b3']
+ assert result['b4']
-class AzureIncidentalTestCase(unittest.TestCase):
+class TestCaseAzureIncidental:
def test_dict_merge_invalid_dict(self):
''' if b is not a dict, return b '''
res = dict_merge({}, None)
- self.assertEqual(res, None)
+ assert res is None
def test_merge_sub_dicts(self):
'''merge sub dicts '''
@@ -132,4 +126,28 @@ class AzureIncidentalTestCase(unittest.TestCase):
b = {'a': {'b1': 2}}
c = {'a': {'a1': 1, 'b1': 2}}
res = dict_merge(a, b)
- self.assertEqual(res, c)
+ assert res == c
+
+
+class TestCaseRecursiveDiff:
+ def test_recursive_diff(self):
+ a = {'foo': {'bar': [{'baz': {'qux': 'ham_sandwich'}}]}}
+ c = {'foo': {'bar': [{'baz': {'qux': 'ham_sandwich'}}]}}
+ b = {'foo': {'bar': [{'baz': {'qux': 'turkey_sandwich'}}]}}
+
+ assert recursive_diff(a, b) is not None
+ assert len(recursive_diff(a, b)) == 2
+ assert recursive_diff(a, c) is None
+
+ @pytest.mark.parametrize(
+ 'p1, p2', (
+ ([1, 2], [2, 3]),
+ ({1: 2}, [2, 3]),
+ ([1, 2], {2: 3}),
+ ({2: 3}, 'notadict'),
+ ('notadict', {2: 3}),
+ )
+ )
+ def test_recursive_diff_negative(self, p1, p2):
+ with pytest.raises(TypeError, match="Unable to diff"):
+ recursive_diff(p1, p2)
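
As these tests encode, recursive_diff returns None for equal structures, otherwise a 2-tuple of (differences seen from the left, differences seen from the right), and raises TypeError for non-dict inputs. For instance:

from ansible.module_utils.common.dict_transformations import recursive_diff

assert recursive_diff({'a': 1}, {'a': 1}) is None
left, right = recursive_diff({'a': 1}, {'a': 2})  # roughly ({'a': 1}, {'a': 2})
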
diff --git a/test/units/module_utils/common/test_locale.py b/test/units/module_utils/common/test_locale.py
new file mode 100644
index 00000000..9d959860
--- /dev/null
+++ b/test/units/module_utils/common/test_locale.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# (c) Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from units.compat.mock import MagicMock
+
+from ansible.module_utils.common.locale import get_best_parsable_locale
+
+
+class TestLocale:
+ """Tests for get_best_paresable_locale"""
+
+ mock_module = MagicMock()
+ mock_module.get_bin_path = MagicMock(return_value='/usr/bin/locale')
+
+ def test_finding_best(self):
+ self.mock_module.run_command = MagicMock(return_value=(0, "C.utf8\nen_US.utf8\nC\nPOSIX\n", ''))
+ locale = get_best_parsable_locale(self.mock_module)
+ assert locale == 'C.utf8'
+
+ def test_finding_last(self):
+ self.mock_module.run_command = MagicMock(return_value=(0, "fr_FR.utf8\nen_UK.utf8\nC\nPOSIX\n", ''))
+ locale = get_best_parsable_locale(self.mock_module)
+ assert locale == 'C'
+
+ def test_finding_middle(self):
+ self.mock_module.run_command = MagicMock(return_value=(0, "fr_FR.utf8\nen_US.utf8\nC\nPOSIX\n", ''))
+ locale = get_best_parsable_locale(self.mock_module)
+ assert locale == 'en_US.utf8'
+
+    def test_finding_preferred(self):
+ self.mock_module.run_command = MagicMock(return_value=(0, "es_ES.utf8\nMINE\nC\nPOSIX\n", ''))
+ locale = get_best_parsable_locale(self.mock_module, preferences=['MINE', 'C.utf8'])
+ assert locale == 'MINE'
+
+ def test_finding_C_on_no_match(self):
+ self.mock_module.run_command = MagicMock(return_value=(0, "fr_FR.UTF8\nMINE\n", ''))
+ locale = get_best_parsable_locale(self.mock_module)
+ assert locale == 'C'
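
The selection rule these tests encode, as a hedged sketch: walk a preference list against the locales reported by `locale -a` and fall back to 'C' when nothing matches (the default preference order shown here is inferred from the tests, not the real constant):

DEFAULT_PREFERENCES = ['C.utf8', 'en_US.utf8']  # inferred defaults

def pick_locale(locale_a_output, preferences=None):
    available = set(locale_a_output.splitlines())
    for candidate in (preferences or DEFAULT_PREFERENCES):
        if candidate in available:
            return candidate
    return 'C'  # always-parsable fallback
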
diff --git a/test/units/module_utils/common/test_network.py b/test/units/module_utils/common/test_network.py
index 1267d0ce..27d95032 100644
--- a/test/units/module_utils/common/test_network.py
+++ b/test/units/module_utils/common/test_network.py
@@ -9,6 +9,7 @@ __metaclass__ = type
import pytest
from ansible.module_utils.common.network import (
+ to_bits,
to_masklen,
to_netmask,
to_subnet,
@@ -66,3 +67,13 @@ def test_to_ipv6_network():
assert '2001:db8::' == to_ipv6_network('2001:db8::')
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0000:0000:8a2e:0370:7334')
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0:0:8a2e:0370:7334')
+
+
+def test_to_bits():
+ assert to_bits('0') == '00000000'
+ assert to_bits('1') == '00000001'
+ assert to_bits('2') == '00000010'
+ assert to_bits('1337') == '10100111001'
+ assert to_bits('127.0.0.1') == '01111111000000000000000000000001'
+ assert to_bits('255.255.255.255') == '11111111111111111111111111111111'
+ assert to_bits('255.255.255.0') == '11111111111111111111111100000000'
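
The assertions suggest to_bits renders each dotted component as a zero-padded 8-bit field, with a single bare number simply growing past 8 bits. A minimal equivalent sketch, not the library code:

def to_bits_sketch(value):
    # '08b' pads each dotted component to at least 8 bits
    return ''.join(format(int(part), '08b') for part in str(value).split('.'))

assert to_bits_sketch('127.0.0.1') == '01111111000000000000000000000001'
assert to_bits_sketch('1337') == '10100111001'
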
diff --git a/test/units/module_utils/common/test_removed.py b/test/units/module_utils/common/test_removed.py
deleted file mode 100644
index 36c1c1e9..00000000
--- a/test/units/module_utils/common/test_removed.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2019, Andrew Klychkov @Andersson007 <aaklychkov@mail.ru>
-# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-import pytest
-
-from ansible.module_utils.common.removed import removed_module
-
-
-@pytest.mark.parametrize('input_data', [u'2.8', 2.8, 2, '', ])
-def test_removed_module_sys_exit(input_data):
- """Test for removed_module function, sys.exit()."""
-
- with pytest.raises(SystemExit) as wrapped_e:
- removed_module(input_data)
-
- assert wrapped_e.type == SystemExit
- assert wrapped_e.value.code == 1
-
-
-@pytest.mark.parametrize(
- 'input_data, expected_msg, expected_warn',
- [
- (
- u'2.8',
- u'This module has been removed. '
- 'The module documentation for Ansible-2.7 may contain hints for porting',
- u'',
- ),
- (
- 2.8,
- u'This module has been removed. '
- 'The module documentation for Ansible-2.7 may contain hints for porting',
- u'',
- ),
- (
- 2,
- u'This module has been removed. '
- 'The module documentation for Ansible-1 may contain hints for porting',
- u'',
- ),
- (
- u'café',
- u'This module has been removed',
- u'"warnings": ["removed modules should specify the version they were removed in"]',
- ),
- (
- 0.1,
- u'This module has been removed. '
- 'The module documentation for Ansible-0.0 may contain hints for porting',
- u'',
- ),
- ]
-)
-def test_removed_module_msgs(input_data, expected_msg, expected_warn, capsys):
- """Test for removed_module function, content of output messages."""
-
- captured = capsys.readouterr()
- assert expected_msg, expected_warn in captured.out
diff --git a/test/units/module_utils/common/test_sys_info.py b/test/units/module_utils/common/test_sys_info.py
index cd68225d..18aafe53 100644
--- a/test/units/module_utils/common/test_sys_info.py
+++ b/test/units/module_utils/common/test_sys_info.py
@@ -31,10 +31,19 @@ def platform_linux(mocker):
# get_distribution tests
#
-def test_get_distribution_not_linux():
- """If it's not Linux, then it has no distribution"""
- with patch('platform.system', return_value='Foo'):
- assert get_distribution() is None
+@pytest.mark.parametrize(
+ ('system', 'dist'),
+ (
+ ('Darwin', 'Darwin'),
+ ('SunOS', 'Solaris'),
+ ('FreeBSD', 'Freebsd'),
+ ),
+)
+def test_get_distribution_not_linux(system, dist, mocker):
+ """For platforms other than Linux, return the distribution"""
+ mocker.patch('platform.system', return_value=system)
+ mocker.patch('ansible.module_utils.common.sys_info.distro.id', return_value=dist)
+ assert get_distribution() == dist
@pytest.mark.usefixtures("platform_linux")
@@ -103,10 +112,19 @@ class TestGetDistribution:
# get_distribution_version tests
#
-def test_get_distribution_version_not_linux():
+@pytest.mark.parametrize(
+ ('system', 'version'),
+ (
+ ('Darwin', '19.6.0'),
+ ('SunOS', '11.4'),
+ ('FreeBSD', '12.1'),
+ ),
+)
+def test_get_distribution_version_not_linux(mocker, system, version):
"""If it's not Linux, then it has no distribution"""
- with patch('platform.system', return_value='Foo'):
- assert get_distribution_version() is None
+ mocker.patch('platform.system', return_value=system)
+ mocker.patch('ansible.module_utils.common.sys_info.distro.version', return_value=version)
+ assert get_distribution_version() == version
@pytest.mark.usefixtures("platform_linux")
diff --git a/test/units/module_utils/common/text/converters/test_to_str.py b/test/units/module_utils/common/text/converters/test_to_str.py
index b645db6d..712ed85b 100644
--- a/test/units/module_utils/common/text/converters/test_to_str.py
+++ b/test/units/module_utils/common/text/converters/test_to_str.py
@@ -13,7 +13,6 @@ import pytest
from ansible.module_utils.six import PY3
from ansible.module_utils.common.text.converters import to_text, to_bytes, to_native
-from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText
# Format: byte representation, text representation, encoding of byte representation
@@ -49,13 +48,3 @@ def test_to_bytes(in_string, encoding, expected):
def test_to_native(in_string, encoding, expected):
"""test happy path of encoding to native strings"""
assert to_native(in_string, encoding) == expected
-
-
-def test_to_text_unsafe():
- assert isinstance(to_text(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeText)
- assert to_text(AnsibleUnsafeBytes(b'foo')) == AnsibleUnsafeText(u'foo')
-
-
-def test_to_bytes_unsafe():
- assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
- assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
diff --git a/test/units/module_utils/common/validation/test_check_missing_parameters.py b/test/units/module_utils/common/validation/test_check_missing_parameters.py
new file mode 100644
index 00000000..6cbcb8bf
--- /dev/null
+++ b/test/units/module_utils/common/validation/test_check_missing_parameters.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import pytest
+
+from ansible.module_utils._text import to_native
+from ansible.module_utils.common.validation import check_missing_parameters
+
+
+@pytest.fixture
+def arguments_terms():
+ return {"path": ""}
+
+
+def test_check_missing_parameters():
+ assert check_missing_parameters([], {}) == []
+
+
+def test_check_missing_parameters_list():
+ expected = "missing required arguments: path"
+
+ with pytest.raises(TypeError) as e:
+ check_missing_parameters({}, ["path"])
+
+ assert to_native(e.value) == expected
+
+
+def test_check_missing_parameters_positive():
+ assert check_missing_parameters({"path": "/foo"}, ["path"]) == []
diff --git a/test/units/module_utils/common/validation/test_check_required_by.py b/test/units/module_utils/common/validation/test_check_required_by.py
new file mode 100644
index 00000000..62cccff3
--- /dev/null
+++ b/test/units/module_utils/common/validation/test_check_required_by.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import pytest
+
+from ansible.module_utils._text import to_native
+from ansible.module_utils.common.validation import check_required_by
+
+
+@pytest.fixture
+def path_arguments_terms():
+ return {
+ "path": ["mode", "owner"],
+ }
+
+
+def test_check_required_by():
+ arguments_terms = {}
+ params = {}
+ assert check_required_by(arguments_terms, params) == {}
+
+
+def test_check_required_by_missing():
+ arguments_terms = {
+ "force": "force_reason",
+ }
+ params = {"force": True}
+ expected = "missing parameter(s) required by 'force': force_reason"
+
+ with pytest.raises(TypeError) as e:
+ check_required_by(arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_by_multiple(path_arguments_terms):
+ params = {
+ "path": "/foo/bar",
+ }
+ expected = "missing parameter(s) required by 'path': mode, owner"
+
+ with pytest.raises(TypeError) as e:
+ check_required_by(path_arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_by_single(path_arguments_terms):
+ params = {"path": "/foo/bar", "mode": "0700"}
+ expected = "missing parameter(s) required by 'path': owner"
+
+ with pytest.raises(TypeError) as e:
+ check_required_by(path_arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_by_missing_none(path_arguments_terms):
+ params = {
+ "path": "/foo/bar",
+ "mode": "0700",
+ "owner": "root",
+ }
+ assert check_required_by(path_arguments_terms, params)
+
+
+def test_check_required_by_options_context(path_arguments_terms):
+ params = {"path": "/foo/bar", "mode": "0700"}
+
+ options_context = ["foo_context"]
+
+ expected = "missing parameter(s) required by 'path': owner found in foo_context"
+
+ with pytest.raises(TypeError) as e:
+ check_required_by(path_arguments_terms, params, options_context)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_by_missing_multiple_options_context(path_arguments_terms):
+ params = {
+ "path": "/foo/bar",
+ }
+ options_context = ["foo_context"]
+
+ expected = (
+ "missing parameter(s) required by 'path': mode, owner found in foo_context"
+ )
+
+ with pytest.raises(TypeError) as e:
+ check_required_by(path_arguments_terms, params, options_context)
+
+ assert to_native(e.value) == expected
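check_required_by() takes a mapping from one parameter to the parameters it drags in, and the tests above pin the exact error text, including the options_context suffix for nested options. A sketch of both paths:

    from ansible.module_utils.common.validation import check_required_by

    requirements = {"path": ["mode", "owner"]}

    # every dependency supplied: no exception is raised
    check_required_by(requirements, {"path": "/foo", "mode": "0700", "owner": "root"})

    # a dependency missing: TypeError("missing parameter(s) required by 'path': owner")
    try:
        check_required_by(requirements, {"path": "/foo", "mode": "0700"})
    except TypeError as exc:
        print(exc)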
diff --git a/test/units/module_utils/common/validation/test_check_required_if.py b/test/units/module_utils/common/validation/test_check_required_if.py
new file mode 100644
index 00000000..5b4b7983
--- /dev/null
+++ b/test/units/module_utils/common/validation/test_check_required_if.py
@@ -0,0 +1,79 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import pytest
+
+from ansible.module_utils._text import to_native
+from ansible.module_utils.common.validation import check_required_if
+
+
+def test_check_required_if():
+ arguments_terms = {}
+ params = {}
+ assert check_required_if(arguments_terms, params) == []
+
+
+def test_check_required_if_missing():
+ arguments_terms = [["state", "present", ("path",)]]
+ params = {"state": "present"}
+ expected = "state is present but all of the following are missing: path"
+
+ with pytest.raises(TypeError) as e:
+ check_required_if(arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_if_missing_required():
+ arguments_terms = [["state", "present", ("path", "owner"), True]]
+ params = {"state": "present"}
+ expected = "state is present but any of the following are missing: path, owner"
+
+ with pytest.raises(TypeError) as e:
+ check_required_if(arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_if_missing_multiple():
+ arguments_terms = [["state", "present", ("path", "owner")]]
+ params = {
+ "state": "present",
+ }
+ expected = "state is present but all of the following are missing: path, owner"
+
+ with pytest.raises(TypeError) as e:
+ check_required_if(arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_if_missing_multiple_with_context():
+ arguments_terms = [["state", "present", ("path", "owner")]]
+ params = {
+ "state": "present",
+ }
+ options_context = ["foo_context"]
+ expected = "state is present but all of the following are missing: path, owner found in foo_context"
+
+ with pytest.raises(TypeError) as e:
+ check_required_if(arguments_terms, params, options_context)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_if_multiple():
+ arguments_terms = [["state", "present", ("path", "owner")]]
+ params = {
+ "state": "present",
+ "path": "/foo",
+ "owner": "root",
+ }
+ options_context = ["foo_context"]
+ assert check_required_if(arguments_terms, params) == []
+ assert check_required_if(arguments_terms, params, options_context) == []
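Each required_if rule above is a 3- or 4-element sequence: (key, value, requirements) demands all of the listed parameters once key equals value, while a trailing True relaxes that to any one of them, which is why the expected messages switch between "all of" and "any of". A sketch:

    from ansible.module_utils.common.validation import check_required_if

    rules = [["state", "present", ("path", "owner"), True]]  # any one suffices

    # the condition is not triggered, so nothing is required
    assert check_required_if(rules, {"state": "absent"}) == []

    # triggered with neither parameter present:
    # TypeError("state is present but any of the following are missing: path, owner")
    try:
        check_required_if(rules, {"state": "present"})
    except TypeError as exc:
        print(exc)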
diff --git a/test/units/module_utils/common/validation/test_check_required_one_of.py b/test/units/module_utils/common/validation/test_check_required_one_of.py
new file mode 100644
index 00000000..b0818891
--- /dev/null
+++ b/test/units/module_utils/common/validation/test_check_required_one_of.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+# Copyright: (c) 2021, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import pytest
+
+from ansible.module_utils._text import to_native
+from ansible.module_utils.common.validation import check_required_one_of
+
+
+@pytest.fixture
+def arguments_terms():
+ return [["path", "owner"]]
+
+
+def test_check_required_one_of():
+ assert check_required_one_of([], {}) == []
+
+
+def test_check_required_one_of_missing(arguments_terms):
+ params = {"state": "present"}
+ expected = "one of the following is required: path, owner"
+
+ with pytest.raises(TypeError) as e:
+ check_required_one_of(arguments_terms, params)
+
+ assert to_native(e.value) == expected
+
+
+def test_check_required_one_of_provided(arguments_terms):
+ params = {"state": "present", "path": "/foo"}
+ assert check_required_one_of(arguments_terms, params) == []
+
+
+def test_check_required_one_of_context(arguments_terms):
+ params = {"state": "present"}
+ expected = "one of the following is required: path, owner found in foo_context"
+ option_context = ["foo_context"]
+
+ with pytest.raises(TypeError) as e:
+ check_required_one_of(arguments_terms, params, option_context)
+
+ assert to_native(e.value) == expected
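check_required_one_of() follows the same pattern: an empty list when at least one alternative (here path or owner) is present, a TypeError naming all of them otherwise, with any options_context appended as in the last test. A one-line sketch of the satisfied case:

    from ansible.module_utils.common.validation import check_required_one_of

    assert check_required_one_of([["path", "owner"]], {"owner": "root"}) == []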
diff --git a/test/units/module_utils/common/warnings/test_deprecate.py b/test/units/module_utils/common/warnings/test_deprecate.py
index 42046bfe..08c1b358 100644
--- a/test/units/module_utils/common/warnings/test_deprecate.py
+++ b/test/units/module_utils/common/warnings/test_deprecate.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-import ansible.module_utils.common.warnings as warnings
+from ansible.module_utils.common import warnings
from ansible.module_utils.common.warnings import deprecate, get_deprecation_messages
from ansible.module_utils.six import PY3
@@ -26,50 +26,55 @@ def deprecation_messages():
]
-def test_deprecate_message_only():
+@pytest.fixture
+def reset(monkeypatch):
+ monkeypatch.setattr(warnings, '_global_deprecations', [])
+
+
+def test_deprecate_message_only(reset):
deprecate('Deprecation message')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'version': None, 'collection_name': None}]
-def test_deprecate_with_collection():
+def test_deprecate_with_collection(reset):
deprecate(msg='Deprecation message', collection_name='ansible.builtin')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'version': None, 'collection_name': 'ansible.builtin'}]
-def test_deprecate_with_version():
+def test_deprecate_with_version(reset):
deprecate(msg='Deprecation message', version='2.14')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'version': '2.14', 'collection_name': None}]
-def test_deprecate_with_version_and_collection():
+def test_deprecate_with_version_and_collection(reset):
deprecate(msg='Deprecation message', version='2.14', collection_name='ansible.builtin')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'version': '2.14', 'collection_name': 'ansible.builtin'}]
-def test_deprecate_with_date():
+def test_deprecate_with_date(reset):
deprecate(msg='Deprecation message', date='2199-12-31')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'date': '2199-12-31', 'collection_name': None}]
-def test_deprecate_with_date_and_collection():
+def test_deprecate_with_date_and_collection(reset):
deprecate(msg='Deprecation message', date='2199-12-31', collection_name='ansible.builtin')
assert warnings._global_deprecations == [
{'msg': 'Deprecation message', 'date': '2199-12-31', 'collection_name': 'ansible.builtin'}]
-def test_multiple_deprecations(deprecation_messages):
+def test_multiple_deprecations(deprecation_messages, reset):
for d in deprecation_messages:
deprecate(**d)
assert deprecation_messages == warnings._global_deprecations
-def test_get_deprecation_messages(deprecation_messages):
+def test_get_deprecation_messages(deprecation_messages, reset):
for d in deprecation_messages:
deprecate(**d)
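The new reset fixture exists because deprecate() appends to module-global state; without clearing _global_deprecations between tests, the exact-list assertions above could see entries left over from earlier tests. A sketch of that accumulation:

    from ansible.module_utils.common import warnings
    from ansible.module_utils.common.warnings import deprecate, get_deprecation_messages

    warnings._global_deprecations = []  # what the fixture does via monkeypatch
    deprecate('first message')
    deprecate('second message', version='2.14')
    assert len(get_deprecation_messages()) == 2  # both calls accumulated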
diff --git a/test/units/module_utils/common/warnings/test_warn.py b/test/units/module_utils/common/warnings/test_warn.py
index 020b0625..41e1a7b3 100644
--- a/test/units/module_utils/common/warnings/test_warn.py
+++ b/test/units/module_utils/common/warnings/test_warn.py
@@ -7,7 +7,7 @@ __metaclass__ = type
import pytest
-import ansible.module_utils.common.warnings as warnings
+from ansible.module_utils.common import warnings
from ansible.module_utils.common.warnings import warn, get_warning_messages
from ansible.module_utils.six import PY3
diff --git a/test/units/module_utils/facts/network/test_fc_wwn.py b/test/units/module_utils/facts/network/test_fc_wwn.py
index b98ae378..32a3a43d 100644
--- a/test/units/module_utils/facts/network/test_fc_wwn.py
+++ b/test/units/module_utils/facts/network/test_fc_wwn.py
@@ -54,8 +54,43 @@ HBA Port WWN: 10000090fa1658de
NPIV Not Supported
"""
+IOSCAN_OUT = """
+Class I H/W Path Driver S/W State H/W Type Description
+==================================================================
+fc 0 2/0/10/1/0 fcd CLAIMED INTERFACE HP AB379-60101 4Gb Dual Port PCI/PCI-X Fibre Channel Adapter (FC Port 1)
+ /dev/fcd0
+"""
+
+FCMSUTIL_OUT = """
+ Vendor ID is = 0x1077
+ Device ID is = 0x2422
+ PCI Sub-system Vendor ID is = 0x103C
+ PCI Sub-system ID is = 0x12D7
+ PCI Mode = PCI-X 133 MHz
+ ISP Code version = 5.4.0
+ ISP Chip version = 3
+ Topology = PTTOPT_FABRIC
+ Link Speed = 4Gb
+ Local N_Port_id is = 0x010300
+ Previous N_Port_id is = None
+ N_Port Node World Wide Name = 0x50060b00006975ed
+ N_Port Port World Wide Name = 0x50060b00006975ec
+ Switch Port World Wide Name = 0x200300051e046c0f
+ Switch Node World Wide Name = 0x100000051e046c0f
+ N_Port Symbolic Port Name = server1_fcd0
+ N_Port Symbolic Node Name = server1_HP-UX_B.11.31
+ Driver state = ONLINE
+ Hardware Path is = 2/0/10/1/0
+ Maximum Frame Size = 2048
+ Driver-Firmware Dump Available = NO
+ Driver-Firmware Dump Timestamp = N/A
+ TYPE = PFC
+ NPIV Supported = YES
+ Driver Version = @(#) fcd B.11.31.1103 Dec 6 2010
+"""
+
-def mock_get_bin_path(cmd, required=False):
+def mock_get_bin_path(cmd, required=False, opt_dirs=None):
result = None
if cmd == 'lsdev':
result = '/usr/sbin/lsdev'
@@ -63,6 +98,10 @@ def mock_get_bin_path(cmd, required=False):
result = '/usr/sbin/lscfg'
elif cmd == 'fcinfo':
result = '/usr/sbin/fcinfo'
+ elif cmd == 'ioscan':
+ result = '/usr/bin/ioscan'
+ elif cmd == 'fcmsutil':
+ result = '/opt/fcms/bin/fcmsutil'
return result
@@ -74,6 +113,10 @@ def mock_run_command(cmd):
result = LSCFG_OUTPUT
elif 'fcinfo' in cmd:
result = FCINFO_OUTPUT
+ elif 'ioscan' in cmd:
+ result = IOSCAN_OUT
+ elif 'fcmsutil' in cmd:
+ result = FCMSUTIL_OUT
else:
rc = 1
result = 'Error'
@@ -87,7 +130,7 @@ def test_get_fc_wwn_info(mocker):
mocker.patch.object(module, 'get_bin_path', side_effect=mock_get_bin_path)
mocker.patch.object(module, 'run_command', side_effect=mock_run_command)
- d = {'aix6': ['10000090FA551508'], 'sunos5': ['10000090fa1658de']}
+ d = {'aix6': ['10000090FA551508'], 'sunos5': ['10000090fa1658de'], 'hp-ux11': ['0x50060b00006975ec']}
for key, value in d.items():
mocker.patch('sys.platform', key)
wwn_expected = {"fibre_channel_wwn": value}
diff --git a/test/units/module_utils/facts/test_collectors.py b/test/units/module_utils/facts/test_collectors.py
index 83d54871..5492582b 100644
--- a/test/units/module_utils/facts/test_collectors.py
+++ b/test/units/module_utils/facts/test_collectors.py
@@ -366,7 +366,10 @@ class TestServiceMgrFacts(BaseFactsTest):
# TODO: dedupe some of this test code
@patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
- def test_no_proc1(self, mock_gfc):
+ @patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed', return_value=False)
+ @patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed_offline', return_value=False)
+ @patch('ansible.module_utils.facts.system.service_mgr.os.path.exists', return_value=False)
+ def test_no_proc1(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
# no /proc/1/comm, ps returns non-0
# should fallback to 'service'
module = self._mock_module()
@@ -388,7 +391,10 @@ class TestServiceMgrFacts(BaseFactsTest):
self.assertEqual(facts_dict['service_mgr'], 'sys11')
@patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
- def test_clowncar(self, mock_gfc):
+ @patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed', return_value=False)
+ @patch('ansible.module_utils.facts.system.service_mgr.ServiceMgrFactCollector.is_systemd_managed_offline', return_value=False)
+ @patch('ansible.module_utils.facts.system.service_mgr.os.path.exists', return_value=False)
+ def test_clowncar(self, mock_gfc, mock_ism, mock_ismo, mock_ope):
# no /proc/1/comm, ps fails, distro and system are clowncar
# should end up return 'sys11'
module = self._mock_module()
@@ -401,6 +407,36 @@ class TestServiceMgrFacts(BaseFactsTest):
self.assertIsInstance(facts_dict, dict)
self.assertEqual(facts_dict['service_mgr'], 'service')
+ @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value='runit-init')
+ @patch('ansible.module_utils.facts.system.service_mgr.os.path.islink', side_effect=lambda x: x == '/sbin/init')
+ @patch('ansible.module_utils.facts.system.service_mgr.os.readlink', side_effect=lambda x: '/sbin/runit-init' if x == '/sbin/init' else '/bin/false')
+ def test_service_mgr_runit(self, mock_gfc, mock_opl, mock_orl):
+ # /proc/1/comm contains 'runit-init', ps fails, service manager is runit
+ # should end up returning 'runit'
+ module = self._mock_module()
+ module.run_command = Mock(return_value=(1, '', ''))
+ collected_facts = {'ansible_system': 'Linux'}
+ fact_collector = self.collector_class()
+ facts_dict = fact_collector.collect(module=module,
+ collected_facts=collected_facts)
+ self.assertIsInstance(facts_dict, dict)
+ self.assertEqual(facts_dict['service_mgr'], 'runit')
+
+ @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
+ @patch('ansible.module_utils.facts.system.service_mgr.os.path.islink', side_effect=lambda x: x == '/sbin/init')
+ @patch('ansible.module_utils.facts.system.service_mgr.os.readlink', side_effect=lambda x: '/sbin/runit-init' if x == '/sbin/init' else '/bin/false')
+ def test_service_mgr_runit_no_comm(self, mock_gfc, mock_opl, mock_orl):
+ # no /proc/1/comm, ps returns 'COMMAND\n', service manager is runit
+ # should end up returning 'runit'
+ module = self._mock_module()
+ module.run_command = Mock(return_value=(1, 'COMMAND\n', ''))
+ collected_facts = {'ansible_system': 'Linux'}
+ fact_collector = self.collector_class()
+ facts_dict = fact_collector.collect(module=module,
+ collected_facts=collected_facts)
+ self.assertIsInstance(facts_dict, dict)
+ self.assertEqual(facts_dict['service_mgr'], 'runit')
+
# TODO: reenable these tests when we can mock more easily
# @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
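The two new runit tests drive detection through /proc/1/comm and through the /sbin/init symlink respectively. A hedged sketch of the symlink check they imply; guess_service_mgr_from_init is a hypothetical helper inferred from the mocks, not the real collector code:

    import os

    def guess_service_mgr_from_init(path='/sbin/init'):
        # If init is a symlink pointing at runit-init, report 'runit'
        # (assumption drawn from the islink/readlink side effects above).
        if os.path.islink(path) and os.readlink(path).endswith('runit-init'):
            return 'runit'
        return None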
diff --git a/test/units/module_utils/facts/test_date_time.py b/test/units/module_utils/facts/test_date_time.py
index b57f0efa..6abc36a7 100644
--- a/test/units/module_utils/facts/test_date_time.py
+++ b/test/units/module_utils/facts/test_date_time.py
@@ -79,6 +79,8 @@ def test_date_time_epoch(fake_date_facts):
assert fake_date_facts['date_time']['epoch'].isdigit()
assert len(fake_date_facts['date_time']['epoch']) == 10 # This length will not change any time soon
+ assert fake_date_facts['date_time']['epoch_int'].isdigit()
+ assert len(fake_date_facts['date_time']['epoch_int']) == 10 # This length will not change any time soon
@pytest.mark.parametrize('fact_name', ('tz', 'tz_dst'))
diff --git a/test/units/module_utils/test_distro.py b/test/units/module_utils/test_distro.py
index 708e7bca..09af6249 100644
--- a/test/units/module_utils/test_distro.py
+++ b/test/units/module_utils/test_distro.py
@@ -28,11 +28,6 @@ class TestDistro():
assert isinstance(info, dict), \
- 'distro.info() returned %s (%s) which is not a dist' % (info, type(info))
+ 'distro.info() returned %s (%s) which is not a dict' % (info, type(info))
- def test_linux_distribution(self):
- linux_dist = distro.linux_distribution()
- assert isinstance(linux_dist, tuple), \
- 'linux_distrution() returned %s (%s) which is not a tuple' % (linux_dist, type(linux_dist))
-
def test_id(self):
id = distro.id()
assert isinstance(id, string_types), 'distro.id() returned %s (%s) which is not a string' % (id, type(id))
diff --git a/test/units/module_utils/urls/test_channel_binding.py b/test/units/module_utils/urls/test_channel_binding.py
index 05b461e0..ea9cd01c 100644
--- a/test/units/module_utils/urls/test_channel_binding.py
+++ b/test/units/module_utils/urls/test_channel_binding.py
@@ -9,7 +9,7 @@ import base64
import os.path
import pytest
-import ansible.module_utils.urls as urls
+from ansible.module_utils import urls
@pytest.mark.skipif(not urls.HAS_CRYPTOGRAPHY, reason='Requires cryptography to be installed')
diff --git a/test/units/module_utils/urls/test_fetch_url.py b/test/units/module_utils/urls/test_fetch_url.py
index 9cac2a35..4869bb0f 100644
--- a/test/units/module_utils/urls/test_fetch_url.py
+++ b/test/units/module_utils/urls/test_fetch_url.py
@@ -67,7 +67,7 @@ def test_fetch_url(open_url_mock, fake_ansible_module):
open_url_mock.assert_called_once_with('http://ansible.com/', client_cert=None, client_key=None, cookies=kwargs['cookies'], data=None,
follow_redirects='urllib2', force=False, force_basic_auth='', headers=None,
http_agent='ansible-httpget', last_mod_time=None, method=None, timeout=10, url_password='', url_username='',
- use_proxy=True, validate_certs=True, use_gssapi=False, unix_socket=None, ca_path=None)
+ use_proxy=True, validate_certs=True, use_gssapi=False, unix_socket=None, ca_path=None, unredirected_headers=None)
def test_fetch_url_params(open_url_mock, fake_ansible_module):
@@ -89,7 +89,7 @@ def test_fetch_url_params(open_url_mock, fake_ansible_module):
open_url_mock.assert_called_once_with('http://ansible.com/', client_cert='client.pem', client_key='client.key', cookies=kwargs['cookies'], data=None,
follow_redirects='all', force=False, force_basic_auth=True, headers=None,
http_agent='ansible-test', last_mod_time=None, method=None, timeout=10, url_password='passwd', url_username='user',
- use_proxy=True, validate_certs=False, use_gssapi=False, unix_socket=None, ca_path=None)
+ use_proxy=True, validate_certs=False, use_gssapi=False, unix_socket=None, ca_path=None, unredirected_headers=None)
def test_fetch_url_cookies(mocker, fake_ansible_module):
diff --git a/test/units/modules/test_apt_key.py b/test/units/modules/test_apt_key.py
new file mode 100644
index 00000000..e348db0c
--- /dev/null
+++ b/test/units/modules/test_apt_key.py
@@ -0,0 +1,32 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+
+from units.compat import mock
+from units.compat import unittest
+
+from ansible.modules import apt_key
+
+
+def returnc(x):
+ return 'C'
+
+
+class AptKeyTestCase(unittest.TestCase):
+
+ @mock.patch.object(apt_key, 'apt_key_bin', '/usr/bin/apt-key')
+ @mock.patch.object(apt_key, 'lang_env', returnc)
+ @mock.patch.dict(os.environ, {'HTTP_PROXY': 'proxy.example.com'})
+ def test_import_key_with_http_proxy(self):
+ m_mock = mock.Mock()
+ m_mock.run_command.return_value = (0, '', '')
+ apt_key.import_key(
+ m_mock, keyring=None, keyserver='keyserver.example.com',
+ key_id='0xDEADBEEF')
+ self.assertEqual(
+ m_mock.run_command.call_args_list[0][0][0],
+ '/usr/bin/apt-key adv --no-tty --keyserver keyserver.example.com'
+ ' --keyserver-options http-proxy=proxy.example.com'
+ ' --recv 0xDEADBEEF'
+ )
diff --git a/test/units/modules/test_async_wrapper.py b/test/units/modules/test_async_wrapper.py
index 762fc2fb..37b1fda3 100644
--- a/test/units/modules/test_async_wrapper.py
+++ b/test/units/modules/test_async_wrapper.py
@@ -42,11 +42,12 @@ class TestAsyncWrapper:
command = fn
jobid = 0
- jobpath = os.path.join(os.path.dirname(command), 'job')
+ job_path = os.path.join(os.path.dirname(command), 'job')
monkeypatch.setattr(async_wrapper, '_get_interpreter', mock_get_interpreter)
+ monkeypatch.setattr(async_wrapper, 'job_path', job_path)
- res = async_wrapper._run_module(command, jobid, jobpath)
+ res = async_wrapper._run_module(command, jobid)
with open(os.path.join(workdir, 'job'), 'r') as f:
jres = json.loads(f.read())
diff --git a/test/units/modules/test_hostname.py b/test/units/modules/test_hostname.py
new file mode 100644
index 00000000..2771293e
--- /dev/null
+++ b/test/units/modules/test_hostname.py
@@ -0,0 +1,34 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from units.compat.mock import patch, MagicMock, mock_open
+from ansible.module_utils.common._utils import get_all_subclasses
+from ansible.modules import hostname
+from units.modules.utils import ModuleTestCase, set_module_args
+from ansible.module_utils.six import PY2
+
+
+class TestHostname(ModuleTestCase):
+ @patch('os.path.isfile')
+ def test_strategy_get_never_writes_in_check_mode(self, isfile):
+ isfile.return_value = True
+
+ set_module_args({'name': 'fooname', '_ansible_check_mode': True})
+ subclasses = get_all_subclasses(hostname.BaseStrategy)
+ module = MagicMock()
+ for cls in subclasses:
+ instance = cls(module)
+
+ instance.module.run_command = MagicMock()
+ instance.module.run_command.return_value = (0, '', '')
+
+ m = mock_open()
+ builtins = 'builtins'
+ if PY2:
+ builtins = '__builtin__'
+ with patch('%s.open' % builtins, m):
+ instance.get_permanent_hostname()
+ instance.get_current_hostname()
+ self.assertFalse(
+ m.return_value.write.called,
+ msg='%s called write, should not have' % str(cls))
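The check-mode test depends on get_all_subclasses() walking the whole inheritance tree, so every hostname strategy is exercised, not just direct children of BaseStrategy. A self-contained sketch of that behaviour:

    from ansible.module_utils.common._utils import get_all_subclasses

    class Base(object):
        pass

    class Child(Base):
        pass

    class Grandchild(Child):
        pass

    # descendants at every depth are returned
    assert set(get_all_subclasses(Base)) == {Child, Grandchild}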
diff --git a/test/units/modules/test_iptables.py b/test/units/modules/test_iptables.py
index 9998122f..5a55434f 100644
--- a/test/units/modules/test_iptables.py
+++ b/test/units/modules/test_iptables.py
@@ -87,8 +87,6 @@ class TestIptables(ModuleTestCase):
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 2)
- # import pdb
- # pdb.set_trace()
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -123,8 +121,6 @@ class TestIptables(ModuleTestCase):
self.assertFalse(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 1)
- # import pdb
- # pdb.set_trace()
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -151,8 +147,6 @@ class TestIptables(ModuleTestCase):
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 1)
- # import pdb
- # pdb.set_trace()
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -188,8 +182,6 @@ class TestIptables(ModuleTestCase):
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 1)
- # import pdb
- # pdb.set_trace()
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
@@ -226,8 +218,6 @@ class TestIptables(ModuleTestCase):
self.assertTrue(result.exception.args[0]['changed'])
self.assertEqual(run_command.call_count, 2)
- # import pdb
- # pdb.set_trace()
self.assertEqual(run_command.call_args_list[0][0][0], [
'/sbin/iptables',
'-t',
diff --git a/test/units/modules/test_pip.py b/test/units/modules/test_pip.py
index 7f0f8b07..5640b805 100644
--- a/test/units/modules/test_pip.py
+++ b/test/units/modules/test_pip.py
@@ -15,6 +15,8 @@ pytestmark = pytest.mark.usefixtures('patch_ansible_module')
@pytest.mark.parametrize('patch_ansible_module', [{'name': 'six'}], indirect=['patch_ansible_module'])
def test_failure_when_pip_absent(mocker, capfd):
+ mocker.patch('ansible.modules.pip._have_pip_module').return_value = False
+
get_bin_path = mocker.patch('ansible.module_utils.basic.AnsibleModule.get_bin_path')
get_bin_path.return_value = None
diff --git a/test/units/modules/test_service.py b/test/units/modules/test_service.py
new file mode 100644
index 00000000..caabd744
--- /dev/null
+++ b/test/units/modules/test_service.py
@@ -0,0 +1,70 @@
+# Copyright: (c) 2021, Ansible Project
+# Copyright: (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import json
+import platform
+
+import pytest
+from ansible.modules import service
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.six import PY2
+from units.modules.utils import set_module_args
+
+
+def mocker_sunos_service(mocker):
+ """
+ Configure common mocker object for SunOSService
+ """
+ platform_system = mocker.patch.object(platform, "system")
+ platform_system.return_value = "SunOS"
+
+ get_bin_path = mocker.patch.object(AnsibleModule, "get_bin_path")
+ get_bin_path.return_value = "/usr/bin/svcs"
+
+ # Read a mocked /etc/release file
+ mocked_etc_release_data = mocker.mock_open(
+ read_data=" Oracle Solaris 12.0")
+ builtin_open = "__builtin__.open" if PY2 else "builtins.open"
+ mocker.patch(builtin_open, mocked_etc_release_data)
+
+ service_status = mocker.patch.object(
+ service.Service, "modify_service_state")
+ service_status.return_value = (0, "", "")
+
+ get_sunos_svcs_status = mocker.patch.object(
+ service.SunOSService, "get_sunos_svcs_status")
+ get_sunos_svcs_status.return_value = "offline"
+ get_service_status = mocker.patch.object(
+ service.Service, "get_service_status")
+ get_service_status.return_value = ""
+
+ mocker.patch('ansible.module_utils.common.sys_info.distro.id', return_value='')
+
+
+@pytest.fixture
+def mocked_sunos_service(mocker):
+ mocker_sunos_service(mocker)
+
+
+def test_sunos_service_start(mocked_sunos_service, capfd):
+ """
+ test SunOS Service Start
+ """
+ set_module_args(
+ {
+ "name": "environment",
+ "state": "started",
+ }
+ )
+ with pytest.raises(SystemExit):
+ service.main()
+
+ out, dummy = capfd.readouterr()
+ results = json.loads(out)
+ assert not results.get("failed")
+ assert results["changed"]
diff --git a/test/units/modules/test_yum.py b/test/units/modules/test_yum.py
index e5d601a6..8052effa 100644
--- a/test/units/modules/test_yum.py
+++ b/test/units/modules/test_yum.py
@@ -118,6 +118,17 @@ Security: kernel-3.10.0-327.28.2.el7.x86_64 is an installed security update
Security: kernel-3.10.0-327.22.2.el7.x86_64 is the currently running version
"""
+wrapped_output_multiple_empty_lines = """
+Loaded plugins: langpacks, product-id, search-disabled-repos, subscription-manager
+
+This system is not registered with an entitlement server. You can use subscription-manager to register.
+
+
+screen.x86_64 4.1.0-0.23.20120314git3c2946.el7_2
+ rhelosp-rhel-7.2-z
+sos.noarch 3.2-36.el7ost.2 rhelosp-9.0-puddle
+"""
+
longname = """
Loaded plugins: fastestmirror, priorities, rhnplugin
This system is receiving updates from RHN Classic or Red Hat Satellite.
@@ -205,3 +216,7 @@ class TestYumUpdateCheckParse(unittest.TestCase):
res
)
self._assert_expected(unwrapped_output_rhel7_expected_old_obsoletes_pkgs, obs)
+
+ def test_wrapped_output_multiple_empty_lines(self):
+ res, obs = YumModule.parse_check_update(wrapped_output_multiple_empty_lines)
+ self._assert_expected(['screen', 'sos'], res)
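parse_check_update() has to stitch wrapped package lines back together and skip blank separators, and the new fixture extends coverage to runs of consecutive empty lines. A usage sketch, reusing the fixture defined above:

    from ansible.modules.yum import YumModule

    updates, obsoletes = YumModule.parse_check_update(wrapped_output_multiple_empty_lines)
    assert sorted(updates) == ['screen', 'sos']  # keyed by package name, arch stripped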
diff --git a/test/units/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
index a9c4fc9e..0a9e395b 100644
--- a/test/units/parsing/vault/test_vault.py
+++ b/test/units/parsing/vault/test_vault.py
@@ -510,24 +510,6 @@ class TestVaultCipherAes256(unittest.TestCase):
b_key_cryptography = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
self.assertIsInstance(b_key_cryptography, six.binary_type)
- @pytest.mark.skipif(not vault.HAS_PYCRYPTO, reason='Not testing pycrypto key as pycrypto is not installed')
- def test_create_key_pycrypto(self):
- b_password = b'hunter42'
- b_salt = os.urandom(32)
-
- b_key_pycrypto = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertIsInstance(b_key_pycrypto, six.binary_type)
-
- @pytest.mark.skipif(not vault.HAS_PYCRYPTO,
- reason='Not comparing cryptography key to pycrypto key as pycrypto is not installed')
- def test_compare_new_keys(self):
- b_password = b'hunter42'
- b_salt = os.urandom(32)
- b_key_cryptography = self.vault_cipher._create_key_cryptography(b_password, b_salt, key_length=32, iv_length=16)
-
- b_key_pycrypto = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertEqual(b_key_cryptography, b_key_pycrypto)
-
def test_create_key_known_cryptography(self):
b_password = b'hunter42'
@@ -543,33 +525,6 @@ class TestVaultCipherAes256(unittest.TestCase):
self.assertIsInstance(b_key_2, six.binary_type)
self.assertEqual(b_key_1, b_key_2)
- # And again with pycrypto
- b_key_3 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertIsInstance(b_key_3, six.binary_type)
-
- # verify we get the same answer
- # we could potentially run a few iterations of this and time it to see if it's roughly constant time
- # and or that it exceeds some minimal time, but that would likely cause unreliable fails, esp in CI
- b_key_4 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertIsInstance(b_key_4, six.binary_type)
- self.assertEqual(b_key_3, b_key_4)
- self.assertEqual(b_key_1, b_key_4)
-
- def test_create_key_known_pycrypto(self):
- b_password = b'hunter42'
-
- # A fixed salt
- b_salt = b'q' * 32 # q is the most random letter.
- b_key_3 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertIsInstance(b_key_3, six.binary_type)
-
- # verify we get the same answer
- # we could potentially run a few iterations of this and time it to see if it's roughly constant time
- # and or that it exceeds some minimal time, but that would likely cause unreliable fails, esp in CI
- b_key_4 = self.vault_cipher._create_key_pycrypto(b_password, b_salt, key_length=32, iv_length=16)
- self.assertIsInstance(b_key_4, six.binary_type)
- self.assertEqual(b_key_3, b_key_4)
-
def test_is_equal_is_equal(self):
self.assertTrue(self.vault_cipher._is_equal(b'abcdefghijklmnopqrstuvwxyz', b'abcdefghijklmnopqrstuvwxyz'))
@@ -603,19 +558,6 @@ class TestVaultCipherAes256(unittest.TestCase):
self.assertRaises(TypeError, self.vault_cipher._is_equal, b"blue fish", 2)
-@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
- reason="Skipping Pycrypto tests because pycrypto is not installed")
-class TestVaultCipherAes256PyCrypto(TestVaultCipherAes256):
- def setUp(self):
- self.has_cryptography = vault.HAS_CRYPTOGRAPHY
- vault.HAS_CRYPTOGRAPHY = False
- super(TestVaultCipherAes256PyCrypto, self).setUp()
-
- def tearDown(self):
- vault.HAS_CRYPTOGRAPHY = self.has_cryptography
- super(TestVaultCipherAes256PyCrypto, self).tearDown()
-
-
class TestMatchSecrets(unittest.TestCase):
def test_empty_tuple(self):
secrets = [tuple()]
@@ -926,16 +868,3 @@ class TestVaultLib(unittest.TestCase):
plaintext = u"ansible"
self.v.encrypt(plaintext)
self.assertEqual(self.v.cipher_name, "AES256")
-
-
-@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
- reason="Skipping Pycrypto tests because pycrypto is not installed")
-class TestVaultLibPyCrypto(TestVaultLib):
- def setUp(self):
- self.has_cryptography = vault.HAS_CRYPTOGRAPHY
- vault.HAS_CRYPTOGRAPHY = False
- super(TestVaultLibPyCrypto, self).setUp()
-
- def tearDown(self):
- vault.HAS_CRYPTOGRAPHY = self.has_cryptography
- super(TestVaultLibPyCrypto, self).tearDown()
diff --git a/test/units/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
index 5601c49c..1483bb52 100644
--- a/test/units/parsing/vault/test_vault_editor.py
+++ b/test/units/parsing/vault/test_vault_editor.py
@@ -519,16 +519,3 @@ class TestVaultEditor(unittest.TestCase):
res = ve._real_path(file_link_path)
self.assertEqual(res, file_path)
-
-
-@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
- reason="Skipping pycrypto tests because pycrypto is not installed")
-class TestVaultEditorPyCrypto(unittest.TestCase):
- def setUp(self):
- self.has_cryptography = vault.HAS_CRYPTOGRAPHY
- vault.HAS_CRYPTOGRAPHY = False
- super(TestVaultEditorPyCrypto, self).setUp()
-
- def tearDown(self):
- vault.HAS_CRYPTOGRAPHY = self.has_cryptography
- super(TestVaultEditorPyCrypto, self).tearDown()
diff --git a/test/units/parsing/yaml/test_dumper.py b/test/units/parsing/yaml/test_dumper.py
index 8129ca3a..5fbc139b 100644
--- a/test/units/parsing/yaml/test_dumper.py
+++ b/test/units/parsing/yaml/test_dumper.py
@@ -19,16 +19,21 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import io
+import yaml
+
+from jinja2.exceptions import UndefinedError
from units.compat import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.module_utils.six import PY2
+from ansible.template import AnsibleUndefined
from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
from units.mock.yaml_helper import YamlTestUtils
from units.mock.vault_helper import TextVaultSecret
+from ansible.vars.manager import VarsWithSources
class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
@@ -101,3 +106,18 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
data_from_yaml = loader.get_single_data()
self.assertEqual(u_text, data_from_yaml)
+
+ def test_vars_with_sources(self):
+ try:
+ self._dump_string(VarsWithSources(), dumper=self.dumper)
+ except yaml.representer.RepresenterError:
+ self.fail("Dump VarsWithSources raised RepresenterError unexpectedly!")
+
+ def test_undefined(self):
+ undefined_object = AnsibleUndefined()
+ try:
+ yaml_out = self._dump_string(undefined_object, dumper=self.dumper)
+ except UndefinedError:
+ yaml_out = None
+
+ self.assertIsNone(yaml_out)
diff --git a/test/units/playbook/test_helpers.py b/test/units/playbook/test_helpers.py
index a4ed6178..8574cb4c 100644
--- a/test/units/playbook/test_helpers.py
+++ b/test/units/playbook/test_helpers.py
@@ -193,8 +193,7 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
self.assertEqual(len(res), 0)
def test_one_bogus_include_static(self):
- ds = [{'include': 'somefile.yml',
- 'static': 'true'}]
+ ds = [{'import_tasks': 'somefile.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
variable_manager=self.mock_variable_manager, loader=self.fake_loader)
self.assertIsInstance(res, list)
@@ -241,28 +240,6 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
self.assertIn('test_one_parent_include_tags_tag1', res[0].tags)
self.assertIn('and_another_tag2', res[0].tags)
- # It would be useful to be able to tell what kind of deprecation we encountered and where we encountered it.
- def test_one_include_tags_deprecated_mixed(self):
- ds = [{'include': "/dev/null/includes/other_test_include.yml",
- 'vars': {'tags': "['tag_on_include1', 'tag_on_include2']"},
- 'tags': 'mixed_tag1, mixed_tag2'
- }]
- self.assertRaisesRegexp(errors.AnsibleParserError, 'Mixing styles',
- helpers.load_list_of_tasks,
- ds, play=self.mock_play,
- variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
-
- def test_one_include_tags_deprecated_include(self):
- ds = [{'include': '/dev/null/includes/other_test_include.yml',
- 'vars': {'tags': ['include_tag1_deprecated', 'and_another_tagB_deprecated']}
- }]
- res = helpers.load_list_of_tasks(ds, play=self.mock_play,
- variable_manager=self.mock_variable_manager, loader=self.fake_include_loader)
- self._assert_is_task_list_or_blocks(res)
- self.assertIsInstance(res[0], Block)
- self.assertIn('include_tag1_deprecated', res[0].tags)
- self.assertIn('and_another_tagB_deprecated', res[0].tags)
-
def test_one_include_use_handlers(self):
ds = [{'include': '/dev/null/includes/other_test_include.yml'}]
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
@@ -286,11 +263,10 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
# figure out how to get the non-static errors to be raised, this seems to just ignore everything
def test_one_include_not_static(self):
ds = [{
- 'include': '/dev/null/includes/static_test_include.yml',
- 'static': False
+ 'include_tasks': '/dev/null/includes/static_test_include.yml',
}]
# a_block = Block()
- ti_ds = {'include': '/dev/null/includes/ssdftatic_test_include.yml'}
+ ti_ds = {'include_tasks': '/dev/null/includes/ssdftatic_test_include.yml'}
a_task_include = TaskInclude()
ti = a_task_include.load(ti_ds)
res = helpers.load_list_of_tasks(ds, play=self.mock_play,
diff --git a/test/units/playbook/test_play.py b/test/units/playbook/test_play.py
index 725c28ea..bcc1e5ea 100644
--- a/test/units/playbook/test_play.py
+++ b/test/units/playbook/test_play.py
@@ -19,114 +19,273 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from units.compat import unittest
-from units.compat.mock import patch, MagicMock
+import pytest
-from ansible.errors import AnsibleParserError
+from ansible.errors import AnsibleAssertionError, AnsibleParserError
+from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
+from ansible.playbook.block import Block
from ansible.playbook.play import Play
+from ansible.playbook.role import Role
+from ansible.playbook.task import Task
from units.mock.loader import DictDataLoader
-from units.mock.path import mock_unfrackpath_noop
-
-
-class TestPlay(unittest.TestCase):
-
- def test_empty_play(self):
- p = Play.load(dict())
- self.assertEqual(str(p), '')
-
- def test_basic_play(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- connection='local',
- remote_user="root",
- become=True,
- become_user="testing",
- ))
-
- def test_play_with_user_conflict(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- user="testing",
- gather_facts=False,
- ))
- self.assertEqual(p.remote_user, "testing")
-
- def test_play_with_user_conflict(self):
- play_data = dict(
- name="test play",
- hosts=['foo'],
- user="testing",
- remote_user="testing",
- )
- self.assertRaises(AnsibleParserError, Play.load, play_data)
-
- def test_play_with_tasks(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- tasks=[dict(action='shell echo "hello world"')],
- ))
-
- def test_play_with_handlers(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- handlers=[dict(action='shell echo "hello world"')],
- ))
-
- def test_play_with_pre_tasks(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- pre_tasks=[dict(action='shell echo "hello world"')],
- ))
-
- def test_play_with_post_tasks(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- post_tasks=[dict(action='shell echo "hello world"')],
- ))
-
- @patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
- def test_play_with_roles(self):
- fake_loader = DictDataLoader({
- '/etc/ansible/roles/foo/tasks.yml': """
- - name: role task
- shell: echo "hello world"
- """,
- })
-
- mock_var_manager = MagicMock()
- mock_var_manager.get_vars.return_value = dict()
-
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- roles=['foo'],
- ), loader=fake_loader, variable_manager=mock_var_manager)
-
- blocks = p.compile()
-
- def test_play_compile(self):
- p = Play.load(dict(
- name="test play",
- hosts=['foo'],
- gather_facts=False,
- tasks=[dict(action='shell echo "hello world"')],
- ))
-
- blocks = p.compile()
-
- # with a single block, there will still be three
- # implicit meta flush_handler blocks inserted
- self.assertEqual(len(blocks), 4)
+
+
+def test_empty_play():
+ p = Play.load({})
+
+ assert str(p) == ''
+
+
+def test_play_with_hosts_string():
+ p = Play.load({'hosts': 'foo'})
+
+ assert str(p) == 'foo'
+
+ # Test the caching since self.name should be set by previous call.
+ assert p.get_name() == 'foo'
+
+
+def test_basic_play():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ connection='local',
+ remote_user="root",
+ become=True,
+ become_user="testing",
+ ))
+
+ assert p.name == 'test play'
+ assert p.hosts == ['foo']
+ assert p.connection == 'local'
+
+
+def test_play_with_remote_user():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ user="testing",
+ gather_facts=False,
+ ))
+
+ assert p.remote_user == "testing"
+
+
+def test_play_with_user_conflict():
+ play_data = dict(
+ name="test play",
+ hosts=['foo'],
+ user="testing",
+ remote_user="testing",
+ )
+
+ with pytest.raises(AnsibleParserError):
+ Play.load(play_data)
+
+
+def test_play_with_bad_ds_type():
+ play_data = []
+ with pytest.raises(AnsibleAssertionError, match=r"while preprocessing data \(\[\]\), ds should be a dict but was a <(?:class|type) 'list'>"):
+ Play.load(play_data)
+
+
+def test_play_with_tasks():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ tasks=[dict(action='shell echo "hello world"')],
+ ))
+
+ assert len(p.tasks) == 1
+ assert isinstance(p.tasks[0], Block)
+ assert p.tasks[0].has_tasks() is True
+
+
+def test_play_with_handlers():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ handlers=[dict(action='shell echo "hello world"')],
+ ))
+
+ assert len(p.handlers) >= 1
+ assert len(p.get_handlers()) >= 1
+ assert isinstance(p.handlers[0], Block)
+ assert p.handlers[0].has_tasks() is True
+
+
+def test_play_with_pre_tasks():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ pre_tasks=[dict(action='shell echo "hello world"')],
+ ))
+
+ assert len(p.pre_tasks) >= 1
+ assert isinstance(p.pre_tasks[0], Block)
+ assert p.pre_tasks[0].has_tasks() is True
+
+ assert len(p.get_tasks()) >= 1
+ assert isinstance(p.get_tasks()[0][0], Task)
+ assert p.get_tasks()[0][0].action == 'shell'
+
+
+def test_play_with_post_tasks():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ post_tasks=[dict(action='shell echo "hello world"')],
+ ))
+
+ assert len(p.post_tasks) >= 1
+ assert isinstance(p.post_tasks[0], Block)
+ assert p.post_tasks[0].has_tasks() is True
+
+
+def test_play_with_roles(mocker):
+ mocker.patch('ansible.playbook.role.definition.RoleDefinition._load_role_path', return_value=('foo', '/etc/ansible/roles/foo'))
+ fake_loader = DictDataLoader({
+ '/etc/ansible/roles/foo/tasks.yml': """
+ - name: role task
+ shell: echo "hello world"
+ """,
+ })
+
+ mock_var_manager = mocker.MagicMock()
+ mock_var_manager.get_vars.return_value = {}
+
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ roles=['foo'],
+ ), loader=fake_loader, variable_manager=mock_var_manager)
+
+ blocks = p.compile()
+ assert len(blocks) > 1
+ assert all(isinstance(block, Block) for block in blocks)
+ assert isinstance(p.get_roles()[0], Role)
+
+
+def test_play_compile():
+ p = Play.load(dict(
+ name="test play",
+ hosts=['foo'],
+ gather_facts=False,
+ tasks=[dict(action='shell echo "hello world"')],
+ ))
+
+ blocks = p.compile()
+
+ # with a single block, there will still be three
+ # implicit meta flush_handler blocks inserted
+ assert len(blocks) == 4
+
+
+@pytest.mark.parametrize(
+ 'value, expected',
+ (
+ ('my_vars.yml', ['my_vars.yml']),
+ (['my_vars.yml'], ['my_vars.yml']),
+ (['my_vars1.yml', 'my_vars2.yml'], ['my_vars1.yml', 'my_vars2.yml']),
+ (None, []),
+ )
+)
+def test_play_with_vars_files(value, expected):
+ play = Play.load({
+ 'name': 'Play with vars_files',
+ 'hosts': ['testhost1'],
+ 'vars_files': value,
+ })
+
+ assert play.vars_files == value
+ assert play.get_vars_files() == expected
+
+
+@pytest.mark.parametrize('value', ([], tuple(), set(), {}, '', None, False, 0))
+def test_play_empty_hosts(value):
+ with pytest.raises(AnsibleParserError, match='Hosts list cannot be empty'):
+ Play.load({'hosts': value})
+
+
+@pytest.mark.parametrize('value', ([None], (None,), ['one', None]))
+def test_play_none_hosts(value):
+ with pytest.raises(AnsibleParserError, match="Hosts list cannot contain values of 'None'"):
+ Play.load({'hosts': value})
+
+
+@pytest.mark.parametrize(
+ 'value',
+ (
+ {'one': None},
+ {'one': 'two'},
+ True,
+ 1,
+ 1.75,
+ AnsibleVaultEncryptedUnicode('secret'),
+ )
+)
+def test_play_invalid_hosts_sequence(value):
+ with pytest.raises(AnsibleParserError, match='Hosts list must be a sequence or string'):
+ Play.load({'hosts': value})
+
+
+@pytest.mark.parametrize(
+ 'value',
+ (
+ [[1, 'two']],
+ [{'one': None}],
+ [set((None, 'one'))],
+ ['one', 'two', {'three': None}],
+ ['one', 'two', {'three': 'four'}],
+ [AnsibleVaultEncryptedUnicode('secret')],
+ )
+)
+def test_play_invalid_hosts_value(value):
+ with pytest.raises(AnsibleParserError, match='Hosts list contains an invalid host value'):
+ Play.load({'hosts': value})
+
+
+def test_play_with_vars():
+ play = Play.load({}, vars={'var1': 'val1'})
+
+ assert play.get_name() == ''
+ assert play.vars == {'var1': 'val1'}
+ assert play.get_vars() == {'var1': 'val1'}
+
+
+def test_play_no_name_hosts_sequence():
+ play = Play.load({'hosts': ['host1', 'host2']})
+
+ assert play.get_name() == 'host1,host2'
+
+
+def test_play_hosts_template_expression():
+ play = Play.load({'hosts': "{{ target_hosts }}"})
+
+ assert play.get_name() == '{{ target_hosts }}'
+
+
+@pytest.mark.parametrize(
+ 'call',
+ (
+ '_load_tasks',
+ '_load_pre_tasks',
+ '_load_post_tasks',
+ '_load_handlers',
+ '_load_roles',
+ )
+)
+def test_bad_blocks_roles(mocker, call):
+ mocker.patch('ansible.playbook.play.load_list_of_blocks', side_effect=AssertionError('Raised intentionally'))
+ mocker.patch('ansible.playbook.play.load_list_of_roles', side_effect=AssertionError('Raised intentionally'))
+
+ play = Play.load({})
+ with pytest.raises(AnsibleParserError, match='A malformed (block|(role declaration)) was encountered'):
+ getattr(play, call)('', None)
diff --git a/test/units/playbook/test_play_context.py b/test/units/playbook/test_play_context.py
index 0936775b..7c24de51 100644
--- a/test/units/playbook/test_play_context.py
+++ b/test/units/playbook/test_play_context.py
@@ -92,20 +92,3 @@ def test_play_context(mocker, parser, reset_cli_args):
mock_task.no_log = False
play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
assert play_context.no_log is False
-
-
-def test_play_context_make_become_bad(mocker, parser, reset_cli_args):
- options = parser.parse_args([])
- context._init_global_context(options)
- play_context = PlayContext()
-
- default_cmd = "/bin/foo"
- default_exe = "/bin/bash"
-
- play_context.become = True
- play_context.become_user = 'foo'
- play_context.set_become_plugin(become_loader.get('bad'))
- play_context.become_method = 'bad'
-
- with pytest.raises(AnsibleError):
- play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
diff --git a/test/units/plugins/action/test_gather_facts.py b/test/units/plugins/action/test_gather_facts.py
index 0c699bfe..8f860e4a 100644
--- a/test/units/plugins/action/test_gather_facts.py
+++ b/test/units/plugins/action/test_gather_facts.py
@@ -24,9 +24,8 @@ from units.compat.mock import MagicMock, patch
from ansible import constants as C
from ansible.playbook.task import Task
from ansible.plugins.action.gather_facts import ActionModule as GatherFactsAction
-from ansible.plugins import loader as plugin_loader
from ansible.template import Templar
-import ansible.executor.module_common as module_common
+from ansible.executor import module_common
from units.mock.loader import DictDataLoader
@@ -67,8 +66,9 @@ class TestNetworkFacts(unittest.TestCase):
# assert the correct module was found
self.assertEqual(get_module_args.call_count, 1)
- get_module_args.assert_called_once_with(
- 'ansible.legacy.ios_facts', {'ansible_network_os': 'ios'},
+ self.assertEqual(
+ get_module_args.call_args.args,
+ ('ansible.legacy.ios_facts', {'ansible_network_os': 'ios'},)
)
@patch.object(module_common, '_get_collection_metadata', return_value={})
@@ -92,39 +92,7 @@ class TestNetworkFacts(unittest.TestCase):
# assert the correct module was found
self.assertEqual(get_module_args.call_count, 1)
- get_module_args.assert_called_once_with(
- 'cisco.ios.ios_facts', {'ansible_network_os': 'cisco.ios.ios'},
+ self.assertEqual(
+ get_module_args.call_args.args,
+ ('cisco.ios.ios_facts', {'ansible_network_os': 'cisco.ios.ios'},)
)
-
- def test_network_gather_facts(self):
- self.task_vars = {'ansible_network_os': 'ios'}
- self.task.action = 'gather_facts'
- self.task.async_val = False
- self.task.args = {'gather_subset': 'min'}
- self.task.module_defaults = [{'ios_facts': {'gather_subset': 'min'}}]
-
- plugin = GatherFactsAction(self.task, self.connection, self.play_context, loader=None, templar=self.templar, shared_loader_obj=plugin_loader)
- plugin._execute_module = MagicMock()
-
- res = plugin.run(task_vars=self.task_vars)
- self.assertEqual(res['ansible_facts']['_ansible_facts_gathered'], True)
-
- mod_args = plugin._get_module_args('ios_facts', task_vars=self.task_vars)
- self.assertEqual(mod_args['gather_subset'], 'min')
-
- @patch.object(module_common, '_get_collection_metadata', return_value={})
- def test_network_gather_facts_fqcn(self, mock_collection_metadata):
- self.fqcn_task_vars = {'ansible_network_os': 'cisco.ios.ios'}
- self.task.action = 'gather_facts'
- self.task.async_val = False
- self.task.args = {'gather_subset': 'min'}
- self.task.module_defaults = [{'cisco.ios.ios_facts': {'gather_subset': 'min'}}]
-
- plugin = GatherFactsAction(self.task, self.connection, self.play_context, loader=None, templar=self.templar, shared_loader_obj=plugin_loader)
- plugin._execute_module = MagicMock()
-
- res = plugin.run(task_vars=self.fqcn_task_vars)
- self.assertEqual(res['ansible_facts']['_ansible_facts_gathered'], True)
-
- mod_args = plugin._get_module_args('cisco.ios.ios_facts', task_vars=self.fqcn_task_vars)
- self.assertEqual(mod_args['gather_subset'], 'min')
diff --git a/test/units/plugins/become/test_su.py b/test/units/plugins/become/test_su.py
index 73eb71dd..bf74a4c3 100644
--- a/test/units/plugins/become/test_su.py
+++ b/test/units/plugins/become/test_su.py
@@ -10,31 +10,21 @@ __metaclass__ = type
import re
from ansible import context
-from ansible.playbook.play_context import PlayContext
-from ansible.plugins.loader import become_loader
+from ansible.plugins.loader import become_loader, shell_loader
def test_su(mocker, parser, reset_cli_args):
options = parser.parse_args([])
context._init_global_context(options)
- play_context = PlayContext()
-
- default_cmd = "/bin/foo"
- default_exe = "/bin/bash"
- su_exe = 'su'
- su_flags = ''
-
- cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
- assert cmd == default_cmd
-
- success = 'BECOME-SUCCESS-.+?'
-
- play_context.become = True
- play_context.become_user = 'foo'
- play_context.become_pass = None
- play_context.become_method = 'su'
- play_context.set_become_plugin(become_loader.get('su'))
- play_context.become_flags = su_flags
- cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
- assert (re.match("""%s %s -c '%s -c '"'"'echo %s; %s'"'"''""" % (su_exe, play_context.become_user, default_exe,
- success, default_cmd), cmd) is not None)
+
+ su = become_loader.get('su')
+ sh = shell_loader.get('sh')
+ sh.executable = "/bin/bash"
+
+ su.set_options(direct={
+ 'become_user': 'foo',
+ 'become_flags': '',
+ })
+
+ cmd = su.build_become_command('/bin/foo', sh)
+ assert re.match(r"""su\s+foo -c '/bin/bash -c '"'"'echo BECOME-SUCCESS-.+?; /bin/foo'"'"''""", cmd)
diff --git a/test/units/plugins/become/test_sudo.py b/test/units/plugins/become/test_sudo.py
index ba501296..8ccb2a12 100644
--- a/test/units/plugins/become/test_sudo.py
+++ b/test/units/plugins/become/test_sudo.py
@@ -10,36 +10,31 @@ __metaclass__ = type
import re
from ansible import context
-from ansible.playbook.play_context import PlayContext
-from ansible.plugins.loader import become_loader
+from ansible.plugins.loader import become_loader, shell_loader
def test_sudo(mocker, parser, reset_cli_args):
options = parser.parse_args([])
context._init_global_context(options)
- play_context = PlayContext()
- default_cmd = "/bin/foo"
- default_exe = "/bin/bash"
- sudo_exe = 'sudo'
- sudo_flags = '-H -s -n'
+ sudo = become_loader.get('sudo')
+ sh = shell_loader.get('sh')
+ sh.executable = "/bin/bash"
- cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
- assert cmd == default_cmd
+ sudo.set_options(direct={
+ 'become_user': 'foo',
+ 'become_flags': '-n -s -H',
+ })
- success = 'BECOME-SUCCESS-.+?'
+ cmd = sudo.build_become_command('/bin/foo', sh)
- play_context.become = True
- play_context.become_user = 'foo'
- play_context.set_become_plugin(become_loader.get('sudo'))
- play_context.become_flags = sudo_flags
- cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
+ assert re.match(r"""sudo\s+-n -s -H\s+-u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
- assert (re.match("""%s %s -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags, play_context.become_user,
- default_exe, success, default_cmd), cmd) is not None)
+ sudo.set_options(direct={
+ 'become_user': 'foo',
+ 'become_flags': '-n -s -H',
+ 'become_pass': 'testpass',
+ })
- play_context.become_pass = 'testpass'
- cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
- assert (re.match("""%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n', ''),
- r"\[sudo via ansible, key=.+?\] password:", play_context.become_user,
- default_exe, success, default_cmd), cmd) is not None)
+ cmd = sudo.build_become_command('/bin/foo', sh)
+ assert re.match(r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd
diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py
index 0f6183f3..c4e0079a 100644
--- a/test/units/plugins/cache/test_cache.py
+++ b/test/units/plugins/cache/test_cache.py
@@ -19,12 +19,17 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import os
+import shutil
+import tempfile
+
from units.compat import unittest, mock
from ansible.errors import AnsibleError
-from ansible.plugins.cache import FactCache, CachePluginAdjudicator
+from ansible.plugins.cache import CachePluginAdjudicator
from ansible.plugins.cache.base import BaseCacheModule
from ansible.plugins.cache.memory import CacheModule as MemoryCache
from ansible.plugins.loader import cache_loader
+from ansible.vars.fact_cache import FactCache
import pytest
@@ -59,14 +64,14 @@ class TestCachePluginAdjudicator(unittest.TestCase):
assert self.cache.get('foo') is None
def test___getitem__(self):
- with pytest.raises(KeyError) as err:
+ with pytest.raises(KeyError):
self.cache['foo']
def test_pop_with_default(self):
assert self.cache.pop('foo', 'bar') == 'bar'
def test_pop_without_default(self):
- with pytest.raises(KeyError) as err:
+ with pytest.raises(KeyError):
assert self.cache.pop('foo')
def test_pop(self):
@@ -78,6 +83,15 @@ class TestCachePluginAdjudicator(unittest.TestCase):
self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
assert self.cache['cache_key']['key2'] == 'updatedvalue'
+ def test_update_cache_if_changed(self):
+ # Changes are stored in the CachePluginAdjudicator and will be
+ # persisted to the plugin when update_cache_if_changed() is called.
+ # The exception is flush(), which flushes the plugin immediately.
+ assert len(self.cache.keys()) == 2
+ assert len(self.cache._plugin.keys()) == 0
+ self.cache.update_cache_if_changed()
+ assert len(self.cache._plugin.keys()) == 2
+
def test_flush(self):
# Fake that the cache already has some data in it but the adjudicator
# hasn't loaded it in.
@@ -86,19 +100,71 @@ class TestCachePluginAdjudicator(unittest.TestCase):
self.cache._plugin.set('another wolf', 'another animal')
# The adjudicator doesn't know about the new entries
- assert len(self.cache) == 2
+ assert len(self.cache.keys()) == 2
# But the cache itself does
- assert len(self.cache._plugin._cache) == 3
+ assert len(self.cache._plugin.keys()) == 3
# If we call flush, both the adjudicator and the cache should flush
self.cache.flush()
- assert len(self.cache) == 0
- assert len(self.cache._plugin._cache) == 0
+ assert len(self.cache.keys()) == 0
+ assert len(self.cache._plugin.keys()) == 0
-class TestFactCache(unittest.TestCase):
+class TestJsonFileCache(TestCachePluginAdjudicator):
+ cache_prefix = ''
def setUp(self):
+ self.cache_dir = tempfile.mkdtemp(prefix='ansible-plugins-cache-')
+ self.cache = CachePluginAdjudicator(
+ plugin_name='jsonfile', _uri=self.cache_dir,
+ _prefix=self.cache_prefix)
+ self.cache['cache_key'] = {'key1': 'value1', 'key2': 'value2'}
+ self.cache['cache_key_2'] = {'key': 'value'}
+
+ def test_keys(self):
+ # A cache without a prefix will consider all files in the cache
+ # directory as valid cache entries.
+ self.cache._plugin._dump(
+ 'no prefix', os.path.join(self.cache_dir, 'no_prefix'))
+ self.cache._plugin._dump(
+ 'special cache', os.path.join(self.cache_dir, 'special_test'))
+
+ # The plugin does not know the CachePluginAdjudicator entries.
+ assert sorted(self.cache._plugin.keys()) == [
+ 'no_prefix', 'special_test']
+
+ assert 'no_prefix' in self.cache
+ assert 'special_test' in self.cache
+ assert 'test' not in self.cache
+ assert self.cache['no_prefix'] == 'no prefix'
+ assert self.cache['special_test'] == 'special cache'
+
+ def tearDown(self):
+ shutil.rmtree(self.cache_dir)
+
+
+class TestJsonFileCachePrefix(TestJsonFileCache):
+ cache_prefix = 'special_'
+
+ def test_keys(self):
+ # For caches with a prefix, only files that match the prefix are
+ # considered. The prefix is removed from the key name.
+ self.cache._plugin._dump(
+ 'no prefix', os.path.join(self.cache_dir, 'no_prefix'))
+ self.cache._plugin._dump(
+ 'special cache', os.path.join(self.cache_dir, 'special_test'))
+
+ # The plugin does not know the CachePluginAdjudicator entries.
+ assert sorted(self.cache._plugin.keys()) == ['test']
+
+ assert 'no_prefix' not in self.cache
+ assert 'special_test' not in self.cache
+ assert 'test' in self.cache
+ assert self.cache['test'] == 'special cache'
+
+
+class TestFactCache(unittest.TestCase):
+ def setUp(self):
with mock.patch('ansible.constants.CACHE_PLUGIN', 'memory'):
self.cache = FactCache()
@@ -109,6 +175,12 @@ class TestFactCache(unittest.TestCase):
self.assertEqual(type(a_copy), dict)
self.assertEqual(a_copy, dict(avocado='fruit', daisy='flower'))
+ def test_flush(self):
+ self.cache['motorcycle'] = 'vehicle'
+ self.cache['sock'] = 'clothing'
+ self.cache.flush()
+ assert len(self.cache.keys()) == 0
+
def test_plugin_load_failure(self):
# See https://github.com/ansible/ansible/issues/18751
# Note no fact_connection config set, so this will fail
@@ -121,16 +193,6 @@ class TestFactCache(unittest.TestCase):
self.cache.update({'cache_key': {'key2': 'updatedvalue'}})
assert self.cache['cache_key']['key2'] == 'updatedvalue'
- def test_update_legacy(self):
- self.cache.update('cache_key', {'key2': 'updatedvalue'})
- assert self.cache['cache_key']['key2'] == 'updatedvalue'
-
- def test_update_legacy_key_exists(self):
- self.cache['cache_key'] = {'key': 'value', 'key2': 'value2'}
- self.cache.update('cache_key', {'key': 'updatedvalue'})
- assert self.cache['cache_key']['key'] == 'updatedvalue'
- assert self.cache['cache_key']['key2'] == 'value2'
-
class TestAbstractClass(unittest.TestCase):
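A minimal sketch of the adjudicator behaviour these new tests pin down, assuming the builtin jsonfile cache plugin; a temporary directory stands in for a real fact-cache location:

import tempfile

from ansible.plugins.cache import CachePluginAdjudicator

cache_dir = tempfile.mkdtemp(prefix='adjudicator-demo-')
cache = CachePluginAdjudicator(plugin_name='jsonfile',
                               _uri=cache_dir, _prefix='special_')

cache['cache_key'] = {'key1': 'value1'}   # held in the adjudicator only
assert len(cache._plugin.keys()) == 0     # nothing persisted yet

cache.update_cache_if_changed()           # now written through to jsonfile
assert len(cache._plugin.keys()) == 1     # stored as special_cache_key on disk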
diff --git a/test/units/plugins/callback/test_callback.py b/test/units/plugins/callback/test_callback.py
index ac319986..c2ffbb4d 100644
--- a/test/units/plugins/callback/test_callback.py
+++ b/test/units/plugins/callback/test_callback.py
@@ -27,9 +27,15 @@ import types
from units.compat import unittest
from units.compat.mock import MagicMock
+from ansible.executor.task_result import TaskResult
+from ansible.inventory.host import Host
from ansible.plugins.callback import CallbackBase
+mock_task = MagicMock()
+mock_task.delegate_to = None
+
+
class TestCallback(unittest.TestCase):
# FIXME: This doesn't really test anything...
def test_init(self):
@@ -47,6 +53,20 @@ class TestCallback(unittest.TestCase):
cb = CallbackBase(display=display_mock)
self.assertIs(cb._display, display_mock)
+ def test_host_label(self):
+ result = TaskResult(host=Host('host1'), task=mock_task, return_data={})
+
+ self.assertEqual(CallbackBase.host_label(result), 'host1')
+
+ def test_host_label_delegated(self):
+ mock_task.delegate_to = 'host2'
+ result = TaskResult(
+ host=Host('host1'),
+ task=mock_task,
+ return_data={'_ansible_delegated_vars': {'ansible_host': 'host2'}},
+ )
+ self.assertEqual(CallbackBase.host_label(result), 'host1 -> host2')
+
# TODO: import callback module so we can patch callback.cli/callback.C
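A self-contained sketch of the new host_label() helper, assuming TaskResult and Host behave as the test above exercises; a delegated task renders as "host1 -> host2":

from unittest.mock import MagicMock

from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.plugins.callback import CallbackBase

task = MagicMock()
task.delegate_to = 'host2'

result = TaskResult(
    host=Host('host1'),
    task=task,
    return_data={'_ansible_delegated_vars': {'ansible_host': 'host2'}},
)
print(CallbackBase.host_label(result))   # host1 -> host2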
diff --git a/test/units/plugins/connection/test_connection.py b/test/units/plugins/connection/test_connection.py
index 17c2e085..0f484e62 100644
--- a/test/units/plugins/connection/test_connection.py
+++ b/test/units/plugins/connection/test_connection.py
@@ -160,10 +160,10 @@ debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzm
c.set_become_plugin(become_loader.get('sudo'))
c.become.prompt = '[sudo via ansible, key=ouzmdnewuhucvuaabtjmweasarviygqq] password: '
- self.assertTrue(c.check_password_prompt(local))
- self.assertTrue(c.check_password_prompt(ssh_pipelining_vvvv))
- self.assertTrue(c.check_password_prompt(ssh_nopipelining_vvvv))
- self.assertTrue(c.check_password_prompt(ssh_novvvv))
- self.assertTrue(c.check_password_prompt(dns_issue))
- self.assertFalse(c.check_password_prompt(nothing))
- self.assertFalse(c.check_password_prompt(in_front))
+ self.assertTrue(c.become.check_password_prompt(local))
+ self.assertTrue(c.become.check_password_prompt(ssh_pipelining_vvvv))
+ self.assertTrue(c.become.check_password_prompt(ssh_nopipelining_vvvv))
+ self.assertTrue(c.become.check_password_prompt(ssh_novvvv))
+ self.assertTrue(c.become.check_password_prompt(dns_issue))
+ self.assertFalse(c.become.check_password_prompt(nothing))
+ self.assertFalse(c.become.check_password_prompt(in_front))
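The prompt checks now live on the become plugin rather than the connection, which is why the assertions go through c.become. A sketch of that call path, assuming the 2.12 BecomeBase API in which check_password_prompt() inspects the raw byte output:

from ansible.plugins.loader import become_loader

become = become_loader.get('sudo')
become.prompt = '[sudo via ansible, key=abc] password: '

# the connection hands its captured output to the plugin for inspection
print(become.check_password_prompt(b'[sudo via ansible, key=abc] password: '))  # True
print(become.check_password_prompt(b'no prompt here'))                          # False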
diff --git a/test/units/plugins/connection/test_ssh.py b/test/units/plugins/connection/test_ssh.py
index 4c486ade..d693313f 100644
--- a/test/units/plugins/connection/test_ssh.py
+++ b/test/units/plugins/connection/test_ssh.py
@@ -80,6 +80,8 @@ class TestConnectionBaseClass(unittest.TestCase):
pc = PlayContext()
new_stdin = StringIO()
conn = connection_loader.get('ssh', pc, new_stdin)
+ conn.get_option = MagicMock()
+ conn.get_option.return_value = ""
conn._build_command('ssh', 'ssh')
def test_plugins_connection_ssh_exec_command(self):
@@ -104,10 +106,10 @@ class TestConnectionBaseClass(unittest.TestCase):
conn = connection_loader.get('ssh', pc, new_stdin)
conn.set_become_plugin(become_loader.get('sudo'))
- conn.check_password_prompt = MagicMock()
- conn.check_become_success = MagicMock()
- conn.check_incorrect_password = MagicMock()
- conn.check_missing_password = MagicMock()
+ conn.become.check_password_prompt = MagicMock()
+ conn.become.check_success = MagicMock()
+ conn.become.check_incorrect_password = MagicMock()
+ conn.become.check_missing_password = MagicMock()
def _check_password_prompt(line):
if b'foo' in line:
@@ -129,11 +131,6 @@ class TestConnectionBaseClass(unittest.TestCase):
return True
return False
- conn.become.check_password_prompt = MagicMock(side_effect=_check_password_prompt)
- conn.become.check_become_success = MagicMock(side_effect=_check_become_success)
- conn.become.check_incorrect_password = MagicMock(side_effect=_check_incorrect_password)
- conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)
-
# test examining output for prompt
conn._flags = dict(
become_prompt=False,
@@ -143,7 +140,13 @@ class TestConnectionBaseClass(unittest.TestCase):
)
pc.prompt = True
+
+ # override become plugin
conn.become.prompt = True
+ conn.become.check_password_prompt = MagicMock(side_effect=_check_password_prompt)
+ conn.become.check_success = MagicMock(side_effect=_check_become_success)
+ conn.become.check_incorrect_password = MagicMock(side_effect=_check_incorrect_password)
+ conn.become.check_missing_password = MagicMock(side_effect=_check_missing_password)
def get_option(option):
if option == 'become_pass':
diff --git a/test/units/plugins/filter/test_core.py b/test/units/plugins/filter/test_core.py
index 8a626d9a..df4e4725 100644
--- a/test/units/plugins/filter/test_core.py
+++ b/test/units/plugins/filter/test_core.py
@@ -3,6 +3,8 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
+from jinja2.runtime import Undefined
+from jinja2.exceptions import UndefinedError
__metaclass__ = type
import pytest
diff --git a/test/units/plugins/inventory/test_constructed.py b/test/units/plugins/inventory/test_constructed.py
index d12468d0..581e0253 100644
--- a/test/units/plugins/inventory/test_constructed.py
+++ b/test/units/plugins/inventory/test_constructed.py
@@ -205,3 +205,133 @@ def test_parent_group_templating_error(inventory_module):
)
# assert group was never added with invalid parent
assert 'betsy' not in inventory_module.inventory.groups
+
+
+def test_keyed_group_exclusive_argument(inventory_module):
+ inventory_module.inventory.add_host('cow')
+ inventory_module.inventory.set_variable('cow', 'nickname', 'betsy')
+ host = inventory_module.inventory.get_host('cow')
+ keyed_groups = [
+ {
+ 'key': 'tag',
+ 'separator': '_',
+ 'default_value': 'default_value_name',
+ 'trailing_separator': True
+ }
+ ]
+ with pytest.raises(AnsibleParserError) as err_message:
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=True
+ )
+ assert 'parameters are mutually exclusive' in err_message
+
+
+def test_keyed_group_empty_value(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags'
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ for group_name in ('tag_environment_prod', 'tag_status_'):
+ assert group_name in inventory_module.inventory.groups
+
+
+def test_keyed_group_dict_with_default_value(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags',
+ 'default_value': 'running'
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ for group_name in ('tag_environment_prod', 'tag_status_running'):
+ assert group_name in inventory_module.inventory.groups
+
+
+def test_keyed_group_str_no_default_value(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', '')
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags'
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ # when the value is an empty string, this group is not generated
+ assert "tag_" not in inventory_module.inventory.groups
+
+
+def test_keyed_group_str_with_default_value(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', '')
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags',
+ 'default_value': 'running'
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ assert "tag_running" in inventory_module.inventory.groups
+
+
+def test_keyed_group_list_with_default_value(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', ['test', ''])
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags',
+ 'default_value': 'prod'
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ for group_name in ('tag_test', 'tag_prod'):
+ assert group_name in inventory_module.inventory.groups
+
+
+def test_keyed_group_with_trailing_separator(inventory_module):
+ inventory_module.inventory.add_host('server0')
+ inventory_module.inventory.set_variable('server0', 'tags', {'environment': 'prod', 'status': ''})
+ host = inventory_module.inventory.get_host('server0')
+ keyed_groups = [
+ {
+ 'prefix': 'tag',
+ 'separator': '_',
+ 'key': 'tags',
+ 'trailing_separator': False
+ }
+ ]
+ inventory_module._add_host_to_keyed_groups(
+ keyed_groups, host.vars, host.name, strict=False
+ )
+ for group_name in ('tag_environment_prod', 'tag_status'):
+ assert group_name in inventory_module.inventory.groups
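The new tests above cover two keyed_groups options added in 2.12. A plain-data sketch of the semantics they assert, assuming the constructed inventory plugin consumes these dictionaries as the tests do:

host_tags = {'environment': 'prod', 'status': ''}

# empty values are replaced by default_value
keyed_groups_default = [{
    'prefix': 'tag', 'separator': '_', 'key': 'tags',
    'default_value': 'running',
}]
# -> groups: tag_environment_prod, tag_status_running

# or the dangling separator is simply dropped
keyed_groups_no_trailing = [{
    'prefix': 'tag', 'separator': '_', 'key': 'tags',
    'trailing_separator': False,
}]
# -> groups: tag_environment_prod, tag_status

# combining default_value and trailing_separator raises AnsibleParserError,
# as test_keyed_group_exclusive_argument asserts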
diff --git a/test/units/plugins/lookup/test_ini.py b/test/units/plugins/lookup/test_ini.py
index adf2bac2..b2d883cf 100644
--- a/test/units/plugins/lookup/test_ini.py
+++ b/test/units/plugins/lookup/test_ini.py
@@ -56,8 +56,9 @@ class TestINILookup(unittest.TestCase):
)
def test_parse_parameters(self):
+ pvals = {'file': '', 'section': '', 'key': '', 'type': '', 're': '', 'default': '', 'encoding': ''}
for testcase in self.old_style_params_data:
# print(testcase)
- params = _parse_params(testcase['term'])
+ params = _parse_params(testcase['term'], pvals)
params.sort()
self.assertEqual(params, testcase['expected'])
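A small sketch of the updated helper signature, assuming _parse_params() is importable as the test does: the second argument now supplies the recognised parameter names, so the parser can distinguish key=value options from the search term itself.

from ansible.plugins.lookup.ini import _parse_params

pvals = {'file': '', 'section': '', 'key': '', 'type': '', 're': '', 'default': '', 'encoding': ''}
params = _parse_params(u'keyA section=sectionA file=/path/to/file', pvals)
params.sort()   # the test sorts before comparing, so order is not guaranteed
print(params)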
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index 9871f4ab..f6cf10d1 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -50,7 +50,7 @@ old_style_params_data = (
dict(
term=u'/path/to/file',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
@@ -58,38 +58,39 @@ old_style_params_data = (
dict(
term=u'/path/with/embedded spaces and/file',
filename=u'/path/with/embedded spaces and/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/equals/cn=com.ansible',
filename=u'/path/with/equals/cn=com.ansible',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/unicode/くらとみ/file',
filename=u'/path/with/unicode/くらとみ/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
+
# Mix several special chars
dict(
term=u'/path/with/utf 8 and spaces/くらとみ/file',
filename=u'/path/with/utf 8 and spaces/くらとみ/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/encoding=unicode/くらとみ/file',
filename=u'/path/with/encoding=unicode/くらとみ/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/encoding=unicode/くらとみ/and spaces file',
filename=u'/path/with/encoding=unicode/くらとみ/and spaces file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
@@ -97,39 +98,48 @@ old_style_params_data = (
dict(
term=u'/path/to/file length=42',
filename=u'/path/to/file',
- params=dict(length=42, encrypt=None, chars=DEFAULT_CHARS),
+ params=dict(length=42, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/to/file encrypt=pbkdf2_sha256',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt='pbkdf2_sha256', chars=DEFAULT_CHARS),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt='pbkdf2_sha256', ident=None, chars=DEFAULT_CHARS, seed=None),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/to/file chars=abcdefghijklmnop',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abcdefghijklmnop']),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=[u'abcdefghijklmnop'], seed=None),
candidate_chars=u'abcdefghijklmnop',
),
dict(
term=u'/path/to/file chars=digits,abc,def',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc', u'def'])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'digits', u'abc', u'def']), seed=None),
candidate_chars=u'abcdef0123456789',
),
+ dict(
+ term=u'/path/to/file seed=1',
+ filename=u'/path/to/file',
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=DEFAULT_CHARS, seed='1'),
+ candidate_chars=DEFAULT_CANDIDATE_CHARS,
+ ),
# Including comma in chars
dict(
term=u'/path/to/file chars=abcdefghijklmnop,,digits',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'abcdefghijklmnop', u',', u'digits'])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'abcdefghijklmnop', u',', u'digits']), seed=None),
candidate_chars=u',abcdefghijklmnop0123456789',
),
dict(
term=u'/path/to/file chars=,,',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u',']),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=[u','], seed=None),
candidate_chars=u',',
),
@@ -137,13 +147,15 @@ old_style_params_data = (
dict(
term=u'/path/to/file chars=digits,=,,',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'=', u','])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'digits', u'=', u',']), seed=None),
candidate_chars=u',=0123456789',
),
dict(
term=u'/path/to/file chars=digits,abc=def',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc=def'])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'digits', u'abc=def']), seed=None),
candidate_chars=u'abc=def0123456789',
),
@@ -151,14 +163,16 @@ old_style_params_data = (
dict(
term=u'/path/to/file chars=digits,くらとみ,,',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'くらとみ', u','])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'digits', u'くらとみ', u',']), seed=None),
candidate_chars=u',0123456789くらとみ',
),
# Including only unicode in chars
dict(
term=u'/path/to/file chars=くらとみ',
filename=u'/path/to/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'くらとみ'])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'くらとみ']), seed=None),
candidate_chars=u'くらとみ',
),
@@ -166,7 +180,8 @@ old_style_params_data = (
dict(
term=u'/path/to/file_with:colon chars=ascii_letters,digits',
filename=u'/path/to/file_with:colon',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'ascii_letters', u'digits'])),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None,
+ chars=sorted([u'ascii_letters', u'digits']), seed=None),
candidate_chars=u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
),
@@ -175,19 +190,19 @@ old_style_params_data = (
dict(
term=u'/path/with/embedded spaces and/file chars=abc=def',
filename=u'/path/with/embedded spaces and/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=[u'abc=def'], seed=None),
candidate_chars=u'abc=def',
),
dict(
term=u'/path/with/equals/cn=com.ansible chars=abc=def',
filename=u'/path/with/equals/cn=com.ansible',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=[u'abc=def'], seed=None),
candidate_chars=u'abc=def',
),
dict(
term=u'/path/with/unicode/くらとみ/file chars=くらとみ',
filename=u'/path/with/unicode/くらとみ/file',
- params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
+ params=dict(length=password.DEFAULT_LENGTH, encrypt=None, ident=None, chars=[u'くらとみ'], seed=None),
candidate_chars=u'くらとみ',
),
)
@@ -280,6 +295,13 @@ class TestRandomPassword(unittest.TestCase):
self._assert_valid_chars(res, u'くらとみ')
self.assertEqual(len(res), 11)
+ def test_seed(self):
+ pw1 = password.random_password(seed=1)
+ pw2 = password.random_password(seed=1)
+ pw3 = password.random_password(seed=2)
+ self.assertEqual(pw1, pw2)
+ self.assertNotEqual(pw1, pw3)
+
def test_gen_password(self):
for testcase in old_style_params_data:
params = testcase['params']
@@ -499,3 +521,48 @@ class TestLookupModuleWithPasslib(BaseTestLookupModule):
results = self.password_lookup.run([u'/path/to/somewhere chars=anything encrypt=pbkdf2_sha256'], None)
for result in results:
self.assertEqual(result, u'$pbkdf2-sha256$20000$ODc2NTQzMjE$Uikde0cv0BKaRaAXMrUQB.zvG4GmnjClwjghwIRf2gU')
+
+
+@pytest.mark.skipif(passlib is None, reason='passlib must be installed to run these tests')
+class TestLookupModuleWithPasslibWrappedAlgo(BaseTestLookupModule):
+ def setUp(self):
+ super(TestLookupModuleWithPasslibWrappedAlgo, self).setUp()
+ self.os_path_exists = password.os.path.exists
+
+ def tearDown(self):
+ super(TestLookupModuleWithPasslibWrappedAlgo, self).tearDown()
+ password.os.path.exists = self.os_path_exists
+
+ @patch('ansible.plugins.lookup.password._write_password_file')
+ def test_encrypt_wrapped_crypt_algo(self, mock_write_file):
+
+ password.os.path.exists = self.password_lookup._loader.path_exists
+ with patch.object(builtins, 'open', mock_open(read_data=self.password_lookup._loader._get_file_contents('/path/to/somewhere')[0])) as m:
+ results = self.password_lookup.run([u'/path/to/somewhere encrypt=ldap_sha256_crypt'], None)
+
+ wrapper = getattr(passlib.hash, 'ldap_sha256_crypt')
+
+ self.assertEqual(len(results), 1)
+ result = results[0]
+ self.assertIsInstance(result, text_type)
+
+ expected_password_length = 76
+ self.assertEqual(len(result), expected_password_length)
+
+ # result should have 5 parts split by '$'
+ str_parts = result.split('$')
+ self.assertEqual(len(str_parts), 5)
+
+ # verify the string and passlib agree on the number of rounds
+ self.assertEqual(str_parts[2], "rounds=%s" % wrapper.default_rounds)
+
+ # verify it used the right algo type
+ self.assertEqual(str_parts[0], '{CRYPT}')
+
+ # verify the hash validates against the original plaintext
+ self.assertTrue(wrapper.verify(self.password_lookup._loader._get_file_contents('/path/to/somewhere')[0], result))
+
+ # verify a password with a non-default rounds value
+ # generated with: echo test | mkpasswd -s --rounds 660000 -m sha-256 --salt testansiblepass.
+ hashpw = '{CRYPT}$5$rounds=660000$testansiblepass.$KlRSdA3iFXoPI.dEwh7AixiXW3EtCkLrlQvlYA2sluD'
+ self.assertTrue(wrapper.verify('test', hashpw))
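A minimal sketch of the new seed parameter exercised by test_seed(), assuming the random_password() helper from ansible.plugins.lookup.password: equal seeds give reproducible passwords across runs.

from ansible.plugins.lookup.password import random_password

assert random_password(seed=1) == random_password(seed=1)
assert random_password(seed=1) != random_password(seed=2)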
diff --git a/test/units/requirements.txt b/test/units/requirements.txt
index 153500e3..9ed7268f 100644
--- a/test/units/requirements.txt
+++ b/test/units/requirements.txt
@@ -1,6 +1,6 @@
-pycrypto
-passlib
-pywinrm
+bcrypt ; python_version >= '3.8' # controller only
+passlib ; python_version >= '3.8' # controller only
+pexpect ; python_version >= '3.8' # controller only
pytz
+pywinrm ; python_version >= '3.8' # controller only
unittest2 ; python_version < '2.7'
-pexpect
diff --git a/test/units/utils/test_encrypt.py b/test/units/utils/test_encrypt.py
index abf2683c..72fe3b07 100644
--- a/test/units/utils/test_encrypt.py
+++ b/test/units/utils/test_encrypt.py
@@ -1,19 +1,5 @@
# (c) 2018, Matthias Fuchs <matthias.s.fuchs@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -54,14 +40,31 @@ def assert_hash(expected, secret, algorithm, **settings):
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_encrypt_with_rounds_no_passlib():
with passlib_off():
- assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
+ assert_hash("$5$rounds=5000$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000)
assert_hash("$5$rounds=10000$12345678$JBinliYMFEcBeAXKZnLjenhgEhTmJBvZn3aR8l70Oy/",
secret="123", algorithm="sha256_crypt", salt="12345678", rounds=10000)
- assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
+ assert_hash("$6$rounds=5000$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
secret="123", algorithm="sha512_crypt", salt="12345678", rounds=5000)
+@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
+def test_encrypt_with_ident():
+ assert_hash("$2$12$123456789012345678901ufd3hZRrev.WXCbemqGIV/gmWaTGLImm",
+ secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2')
+ assert_hash("$2y$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
+ secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2y')
+ assert_hash("$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
+ secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2a')
+ assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
+ secret="123", algorithm="bcrypt", salt='1234567890123456789012', ident='2b')
+ assert_hash("$2b$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu",
+ secret="123", algorithm="bcrypt", salt='1234567890123456789012')
+ # negative test: sha256_crypt does not take ident as a parameter, so it is ignored
+ assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
+ secret="123", algorithm="sha256_crypt", salt="12345678", rounds=5000, ident='invalid_ident')
+
+
# If passlib is not installed, this is identical to the test_encrypt_with_rounds_no_passlib() test
@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_encrypt_with_rounds():
@@ -91,9 +94,9 @@ def test_encrypt_default_rounds_no_passlib():
def test_encrypt_default_rounds():
assert_hash("$1$12345678$tRy4cXc3kmcfRZVj4iFXr/",
secret="123", algorithm="md5_crypt", salt="12345678")
- assert_hash("$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7",
+ assert_hash("$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv.",
secret="123", algorithm="sha256_crypt", salt="12345678")
- assert_hash("$6$12345678$LcV9LQiaPekQxZ.OfkMADjFdSO2k9zfbDQrHPVcYjSLqSdjLYpsgqviYvTEP/R41yPmhH3CCeEDqVhW1VHr3L.",
+ assert_hash("$6$rounds=656000$12345678$InMy49UwxyCh2pGJU1NpOhVSElDDzKeyuC6n6E9O34BCUGVNYADnI.rcA3m.Vro9BiZpYmjEoNhpREqQcbvQ80",
secret="123", algorithm="sha512_crypt", salt="12345678")
assert encrypt.PasslibHash("md5_crypt").hash("123")
@@ -109,15 +112,14 @@ def test_password_hash_filter_no_passlib():
get_encrypted_password("123", "crypt16", salt="12")
+@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_password_hash_filter_passlib():
- if not encrypt.PASSLIB_AVAILABLE:
- pytest.skip("passlib not available")
with pytest.raises(AnsibleFilterError):
get_encrypted_password("123", "sha257", salt="12345678")
- # Uses 5000 rounds by default for sha256 matching crypt behaviour
- assert get_encrypted_password("123", "sha256", salt="12345678") == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
+ # Uses passlib default rounds value for sha256 matching crypt behaviour
+ assert get_encrypted_password("123", "sha256", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv."
assert get_encrypted_password("123", "sha256", salt="12345678", rounds=5000) == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
assert (get_encrypted_password("123", "sha256", salt="12345678", rounds=10000) ==
@@ -133,6 +135,8 @@ def test_password_hash_filter_passlib():
# Try algorithm that uses a raw salt
assert get_encrypted_password("123", "pbkdf2_sha256")
+ # Try algorithm with ident
+ assert get_encrypted_password("123", "pbkdf2_sha256", ident='invalid_ident')
@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
@@ -145,20 +149,22 @@ def test_do_encrypt_no_passlib():
encrypt.do_encrypt("123", "crypt16", salt="12")
+@pytest.mark.skipif(not encrypt.PASSLIB_AVAILABLE, reason='passlib must be installed to run this test')
def test_do_encrypt_passlib():
- if not encrypt.PASSLIB_AVAILABLE:
- pytest.skip("passlib not available")
-
with pytest.raises(AnsibleError):
encrypt.do_encrypt("123", "sha257_crypt", salt="12345678")
- # Uses 5000 rounds by default for sha256 matching crypt behaviour.
- assert encrypt.do_encrypt("123", "sha256_crypt", salt="12345678") == "$5$12345678$uAZsE3BenI2G.nA8DpTl.9Dc8JiqacI53pEqRr5ppT7"
+ # Uses passlib default rounds value for sha256 matching crypt behaviour.
+ assert encrypt.do_encrypt("123", "sha256_crypt", salt="12345678") == "$5$rounds=535000$12345678$uy3TurUPaY71aioJi58HvUY8jkbhSQU8HepbyaNngv."
assert encrypt.do_encrypt("123", "md5_crypt", salt="12345678") == "$1$12345678$tRy4cXc3kmcfRZVj4iFXr/"
assert encrypt.do_encrypt("123", "crypt16", salt="12") == "12pELHK2ME3McUFlHxel6uMM"
+ assert encrypt.do_encrypt("123", "bcrypt",
+ salt='1234567890123456789012',
+ ident='2a') == "$2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu"
+
def test_random_salt():
res = encrypt.random_salt()
@@ -168,6 +174,7 @@ def test_random_salt():
assert res_char in expected_salt_candidate_chars
+@pytest.mark.skipif(sys.platform.startswith('darwin'), reason='macOS requires passlib')
def test_invalid_crypt_salt():
pytest.raises(
AnsibleError,
@@ -198,15 +205,16 @@ def test_passlib_bcrypt_salt(recwarn):
salt = '1234567890123456789012'
repaired_salt = '123456789012345678901u'
expected = '$2b$12$123456789012345678901uMv44x.2qmQeefEGb3bcIRc1mLuO7bqa'
+ ident = '2b'
p = encrypt.PasslibHash('bcrypt')
- result = p.hash(secret, salt=salt)
+ result = p.hash(secret, salt=salt, ident=ident)
passlib_warnings = [w.message for w in recwarn if isinstance(w.message, passlib_exc.PasslibHashWarning)]
assert len(passlib_warnings) == 0
assert result == expected
recwarn.clear()
- result = p.hash(secret, salt=repaired_salt)
+ result = p.hash(secret, salt=repaired_salt, ident=ident)
assert result == expected
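A short sketch of the new ident parameter, assuming passlib is installed: ident selects the bcrypt variant ($2a$, $2b$, ...) embedded in the resulting hash, and the expected value below is the one the updated test asserts.

from ansible.utils import encrypt

h = encrypt.do_encrypt('123', 'bcrypt',
                       salt='1234567890123456789012', ident='2a')
print(h)  # $2a$12$123456789012345678901ugbM1PeTfRQ0t6dCJu5lQA8hwrZOYgDu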
diff --git a/test/units/utils/test_unsafe_proxy.py b/test/units/utils/test_unsafe_proxy.py
index 205c0c65..ea653cfe 100644
--- a/test/units/utils/test_unsafe_proxy.py
+++ b/test/units/utils/test_unsafe_proxy.py
@@ -7,6 +7,7 @@ __metaclass__ = type
from ansible.module_utils.six import PY3
from ansible.utils.unsafe_proxy import AnsibleUnsafe, AnsibleUnsafeBytes, AnsibleUnsafeText, wrap_var
+from ansible.module_utils.common.text.converters import to_text, to_bytes
def test_wrap_var_text():
@@ -108,3 +109,13 @@ def test_AnsibleUnsafeText():
def test_AnsibleUnsafeBytes():
assert isinstance(AnsibleUnsafeBytes(b'foo'), AnsibleUnsafe)
+
+
+def test_to_text_unsafe():
+ assert isinstance(to_text(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeText)
+ assert to_text(AnsibleUnsafeBytes(b'foo')) == AnsibleUnsafeText(u'foo')
+
+
+def test_to_bytes_unsafe():
+ assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)
+ assert to_bytes(AnsibleUnsafeText(u'foo')) == AnsibleUnsafeBytes(b'foo')
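The two new tests assert that the text converters preserve the unsafe taint instead of degrading it to plain str/bytes; a direct sketch:

from ansible.module_utils.common.text.converters import to_text, to_bytes
from ansible.utils.unsafe_proxy import AnsibleUnsafeBytes, AnsibleUnsafeText

assert isinstance(to_text(AnsibleUnsafeBytes(b'foo')), AnsibleUnsafeText)
assert isinstance(to_bytes(AnsibleUnsafeText(u'foo')), AnsibleUnsafeBytes)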
diff --git a/test/units/utils/test_version.py b/test/units/utils/test_version.py
index 7d04c112..3c2cbaf4 100644
--- a/test/units/utils/test_version.py
+++ b/test/units/utils/test_version.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from distutils.version import LooseVersion, StrictVersion
+from ansible.module_utils.compat.version import LooseVersion, StrictVersion
import pytest
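The import swap replaces the deprecated distutils module with the copy vendored into ansible-core; usage is unchanged, as in this small sketch:

from ansible.module_utils.compat.version import LooseVersion, StrictVersion

assert LooseVersion('2.12.0') > LooseVersion('2.9.27')
assert StrictVersion('2.12.0') == StrictVersion('2.12.0')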