From 37d585600d2e2d1c64e33ffaaadce57e4c8407bd Mon Sep 17 00:00:00 2001 From: Lee Garrett Date: Sun, 6 Mar 2022 17:56:03 +0100 Subject: New upstream version 2.12.3 --- .../tasks/download.yml | 3 +- .../tasks/individual_collection_repo.yml | 2 +- .../tasks/requirements.yml | 6 +- test/integration/targets/ansible-test/aliases | 4 +- .../ns/col/plugins/lookup/vendor1.py | 33 ++++++ .../ns/col/plugins/lookup/vendor2.py | 33 ++++++ .../ns/col/plugins/modules/no_callable.py | 23 ++++ .../ns/ps_only/meta/runtime.yml | 1 + .../ns/ps_only/plugins/module_utils/validate.psm1 | 8 ++ .../ns/ps_only/plugins/modules/validate.ps1 | 8 ++ .../ns/ps_only/plugins/modules/validate.py | 14 +++ .../ansible-test/collection-tests/constraints.sh | 20 ---- .../ansible-test/collection-tests/coverage.sh | 13 +-- .../collection-tests/integration-constraints.sh | 8 ++ .../ansible-test/collection-tests/integration.sh | 8 ++ .../ansible-test/collection-tests/sanity-vendor.sh | 25 ++++ .../ansible-test/collection-tests/sanity.sh | 10 ++ .../collection-tests/units-constraints.sh | 11 ++ .../targets/ansible-test/collection-tests/units.sh | 11 ++ .../collection-tests/unsupported-directory.sh | 17 +++ .../ansible-test/collection-tests/update-ignore.py | 51 ++++++++ .../validate-modules-collection-loader.sh | 14 +++ .../ansible-test/collection-tests/venv-pythons.py | 47 ++++++++ .../targets/ansible-test/collection-tests/venv.sh | 49 -------- test/integration/targets/blockinfile/meta/main.yml | 1 + .../tasks/add_block_to_existing_file.yml | 8 +- .../tasks/block_without_trailing_newline.yml | 6 +- .../targets/blockinfile/tasks/create_file.yml | 6 +- .../integration/targets/blockinfile/tasks/diff.yml | 4 +- .../tasks/file_without_trailing_newline.yml | 8 +- .../targets/blockinfile/tasks/insertafter.yml | 8 +- .../targets/blockinfile/tasks/insertbefore.yml | 10 +- .../integration/targets/blockinfile/tasks/main.yml | 6 +- .../blockinfile/tasks/preserve_line_endings.yml | 6 +- .../targets/blockinfile/tasks/validate.yml | 6 +- .../callback_default.out.fqcn_free.stdout | 35 ++++++ test/integration/targets/callback_default/runme.sh | 1 + .../playbooks/roles/non_coll_role/tasks/main.yml | 2 +- .../roles/non_coll_role_to_call/tasks/main.yml | 4 +- test/integration/targets/connection_ssh/runme.sh | 4 + .../targets/delegate_to/delegate_facts_loop.yml | 40 +++++++ test/integration/targets/delegate_to/inventory | 8 ++ test/integration/targets/delegate_to/runme.sh | 3 +- .../targets/delegate_to/test_delegate_to.yml | 24 ++++ test/integration/targets/dnf/tasks/cacheonly.yml | 5 +- test/integration/targets/filter_mathstuff/runme.sh | 2 +- test/integration/targets/filter_urls/runme.sh | 2 +- test/integration/targets/git/meta/main.yml | 1 + .../targets/git/tasks/missing_hostkey.yml | 8 +- .../git/tasks/missing_hostkey_acceptnew.yml | 10 +- test/integration/targets/git/tasks/setup.yml | 8 +- test/integration/targets/git/vars/main.yml | 6 +- test/integration/targets/groupby_filter/runme.sh | 4 +- .../incidental_setup_mongodb/defaults/main.yml | 4 +- .../bar/plugins/filter/bad_collection_filter2.py | 10 ++ .../targets/jinja_plugins/tasks/main.yml | 4 +- .../targets/lookup_template/tasks/main.yml | 7 ++ .../targets/lookup_template/templates/dict.j2 | 1 + test/integration/targets/meta_tasks/runme.sh | 7 ++ .../meta_tasks/test_end_play_multiple_plays.yml | 18 +++ test/integration/targets/pip/tasks/pip.yml | 21 ++-- .../roles/test1/meta/argument_specs.yml | 5 + .../roles_arg_spec/test_complex_role_fails.yml | 6 +- 
.../setup_paramiko/install-FreeBSD-python-3.yml | 2 + .../setup_paramiko/install-RedHat-8-python-3.yml | 2 + test/integration/targets/template/tasks/main.yml | 5 - test/integration/targets/unarchive/tasks/main.yml | 1 + .../tasks/test_different_language_var.yml | 41 +++++++ .../targets/unarchive/tasks/test_exclude.yml | 15 +++ test/integration/targets/uri/tasks/main.yml | 34 +++--- .../targets/uri/tasks/unexpected-failures.yml | 6 +- .../targets/user/tasks/test_ssh_key_passphrase.yml | 7 +- test/integration/targets/var_inheritance/aliases | 2 + .../targets/var_inheritance/tasks/main.yml | 16 +++ test/integration/targets/wait_for/tasks/main.yml | 2 +- test/lib/ansible_test/_data/completion/docker.txt | 6 +- .../_data/requirements/sanity.import.plugin.txt | 12 ++ .../_data/requirements/sanity.import.txt | 1 + test/lib/ansible_test/_data/requirements/units.txt | 1 + test/lib/ansible_test/_internal/cli/compat.py | 10 +- .../lib/ansible_test/_internal/cli/environments.py | 24 ++-- .../ansible_test/_internal/cli/parsers/helpers.py | 8 +- .../_internal/cli/parsers/host_config_parsers.py | 24 ++-- .../_internal/commands/integration/__init__.py | 19 +-- .../_internal/commands/integration/cloud/aws.py | 2 +- .../_internal/commands/integration/cloud/galaxy.py | 2 +- .../commands/integration/cloud/openshift.py | 2 +- .../_internal/commands/sanity/__init__.py | 13 +-- .../_internal/commands/sanity/import.py | 76 ++++++++---- .../commands/sanity/integration_aliases.py | 2 +- .../_internal/commands/units/__init__.py | 4 +- test/lib/ansible_test/_internal/completion.py | 27 ++++- test/lib/ansible_test/_internal/constants.py | 50 +++++++- test/lib/ansible_test/_internal/containers.py | 2 +- test/lib/ansible_test/_internal/delegation.py | 7 +- test/lib/ansible_test/_internal/host_configs.py | 20 ++-- test/lib/ansible_test/_internal/host_profiles.py | 6 +- .../ansible_test/_internal/python_requirements.py | 128 ++++++++++++++++++--- test/lib/ansible_test/_internal/test.py | 2 +- test/lib/ansible_test/_internal/venv.py | 43 +++++-- .../sanity/code-smell/runtime-metadata.py | 2 +- .../validate-modules/validate_modules/main.py | 46 +++++--- .../_util/controller/sanity/yamllint/yamllinter.py | 2 +- .../_util/controller/tools/virtualenvcheck.py | 9 +- .../_util/controller/tools/yaml_to_json.py | 28 +++++ .../ansible_test/_util/target/common/constants.py | 44 +------ .../_util/target/sanity/import/importer.py | 50 ++++++-- .../_util/target/sanity/import/yaml_to_json.py | 28 ----- .../ansible_test/_util/target/setup/bootstrap.sh | 12 +- .../ansible_test/_util/target/setup/quiet_pip.py | 16 ++- .../_util/target/setup/requirements.py | 78 ++++++++++++- test/sanity/code-smell/package-data.py | 3 +- test/sanity/ignore.txt | 2 + test/units/galaxy/test_collection.py | 83 ++++++++++++- .../units/module_utils/facts/virtual/test_linux.py | 24 ++++ test/units/parsing/yaml/test_loader.py | 8 +- test/units/plugins/become/test_sudo.py | 27 +++++ test/units/plugins/connection/test_ssh.py | 40 ++++--- 118 files changed, 1416 insertions(+), 448 deletions(-) create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor1.py create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor2.py create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/no_callable.py create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/ps_only/meta/runtime.yml create mode 100644 
test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/module_utils/validate.psm1 create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.ps1 create mode 100644 test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.py delete mode 100755 test/integration/targets/ansible-test/collection-tests/constraints.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/integration-constraints.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/integration.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/sanity-vendor.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/sanity.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/units-constraints.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/units.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/update-ignore.py create mode 100755 test/integration/targets/ansible-test/collection-tests/validate-modules-collection-loader.sh create mode 100755 test/integration/targets/ansible-test/collection-tests/venv-pythons.py delete mode 100755 test/integration/targets/ansible-test/collection-tests/venv.sh create mode 100644 test/integration/targets/callback_default/callback_default.out.fqcn_free.stdout create mode 100644 test/integration/targets/delegate_to/delegate_facts_loop.yml create mode 100644 test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter2.py create mode 100644 test/integration/targets/lookup_template/templates/dict.j2 create mode 100644 test/integration/targets/meta_tasks/test_end_play_multiple_plays.yml create mode 100644 test/integration/targets/unarchive/tasks/test_different_language_var.yml create mode 100644 test/integration/targets/var_inheritance/aliases create mode 100644 test/integration/targets/var_inheritance/tasks/main.yml create mode 100644 test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt create mode 100644 test/lib/ansible_test/_data/requirements/sanity.import.txt mode change 120000 => 100644 test/lib/ansible_test/_internal/constants.py create mode 100644 test/lib/ansible_test/_util/controller/tools/yaml_to_json.py delete mode 100644 test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/download.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/download.yml index b1017e78..6b52bd1d 100644 --- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/download.yml +++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/download.yml @@ -8,6 +8,7 @@ ansible-galaxy collection download git+https://github.com/ansible-collections/amazon.aws.git,37875c5b4ba5bf3cc43e07edf29f3432fd76def5 git+https://github.com/AlanCoding/awx.git#awx_collection,750c22a150d04eef1cb625fd4f83cce57949416c + --no-deps args: chdir: '{{ galaxy_dir }}/download' register: download_collection @@ -30,7 +31,7 @@ - download_collection_awx_actual.stat.exists - name: test the downloaded repository can be installed - command: 'ansible-galaxy collection install -r requirements.yml' + command: 'ansible-galaxy collection install -r requirements.yml --no-deps' args: chdir: '{{ 
galaxy_dir }}/download/collections/' diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/individual_collection_repo.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/individual_collection_repo.yml index 9ca7c8f0..e51c3a99 100644 --- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/individual_collection_repo.yml +++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/individual_collection_repo.yml @@ -4,7 +4,7 @@ dest: '{{ scm_path }}/amazon.aws/' - name: install - command: 'ansible-galaxy collection install git+file://{{ scm_path }}/amazon.aws/.git' + command: 'ansible-galaxy collection install git+file://{{ scm_path }}/amazon.aws/.git --no-deps' - name: list installed collections command: 'ansible-galaxy collection list' diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/requirements.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/requirements.yml index 235ed126..c7743426 100644 --- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/requirements.yml +++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/requirements.yml @@ -66,7 +66,7 @@ appear to be a git repository") - name: test using name as a git repo without git+ prefix - command: 'ansible-galaxy collection install -r name_without_type.yml' + command: 'ansible-galaxy collection install -r name_without_type.yml --no-deps' register: result ignore_errors: true args: @@ -83,13 +83,13 @@ dest: '{{ scm_path }}/amazon.aws/' - name: test using name as a git repo - command: 'ansible-galaxy collection install -r git_prefix_name.yml' + command: 'ansible-galaxy collection install -r git_prefix_name.yml --no-deps' register: result args: chdir: '{{ galaxy_dir }}/requirements' - name: test using name plus type as a git repo - command: 'ansible-galaxy collection install -r name_and_type.yml --force' + command: 'ansible-galaxy collection install -r name_and_type.yml --force --no-deps' register: result args: chdir: '{{ galaxy_dir }}/requirements' diff --git a/test/integration/targets/ansible-test/aliases b/test/integration/targets/ansible-test/aliases index 13e01f0c..b98e7bb2 100644 --- a/test/integration/targets/ansible-test/aliases +++ b/test/integration/targets/ansible-test/aliases @@ -1,2 +1,4 @@ -shippable/posix/group1 +shippable/posix/group1 # runs in the distro test containers +shippable/generic/group1 # runs in the default test container context/controller +destructive # adds and then removes packages into lib/ansible/_vendor/ diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor1.py b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor1.py new file mode 100644 index 00000000..f59b9091 --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor1.py @@ -0,0 +1,33 @@ +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = ''' +name: vendor1 +short_description: lookup +description: Lookup. 
+author: + - Ansible Core Team +''' + +EXAMPLES = '''#''' +RETURN = '''#''' + +from ansible.plugins.lookup import LookupBase +# noinspection PyUnresolvedReferences +from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded + +try: + import demo +except ImportError: + pass +else: + raise Exception('demo import found when it should not be') + + +class LookupModule(LookupBase): + def run(self, terms, variables, **kwargs): + self.set_options(var_options=variables, direct=kwargs) + + return terms diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor2.py b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor2.py new file mode 100644 index 00000000..22b4236a --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/lookup/vendor2.py @@ -0,0 +1,33 @@ +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = ''' +name: vendor2 +short_description: lookup +description: Lookup. +author: + - Ansible Core Team +''' + +EXAMPLES = '''#''' +RETURN = '''#''' + +from ansible.plugins.lookup import LookupBase +# noinspection PyUnresolvedReferences +from ansible.plugins import loader # import the loader to verify it works when the collection loader has already been loaded + +try: + import demo +except ImportError: + pass +else: + raise Exception('demo import found when it should not be') + + +class LookupModule(LookupBase): + def run(self, terms, variables, **kwargs): + self.set_options(var_options=variables, direct=kwargs) + + return terms diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/no_callable.py b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/no_callable.py new file mode 100644 index 00000000..176376ab --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/modules/no_callable.py @@ -0,0 +1,23 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = ''' +module: no_callable +short_description: No callale test module +description: No callable test module. 
+author: + - Ansible Core Team +''' + +EXAMPLES = '''#''' +RETURN = '''''' + +from ansible.module_utils.basic import AnsibleModule + + +if __name__ == '__main__': + module = AnsibleModule(argument_spec=dict()) + module.exit_json() diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/meta/runtime.yml b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/meta/runtime.yml new file mode 100644 index 00000000..1602a255 --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/meta/runtime.yml @@ -0,0 +1 @@ +requires_ansible: '>=2.9' diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/module_utils/validate.psm1 b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/module_utils/validate.psm1 new file mode 100644 index 00000000..7072b311 --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/module_utils/validate.psm1 @@ -0,0 +1,8 @@ +function Validate { + <# + .SYNOPSIS + validate + #> +} + +Export-ModuleMember -Function "Validate" diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.ps1 b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.ps1 new file mode 100644 index 00000000..a587af80 --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.ps1 @@ -0,0 +1,8 @@ +#!powershell +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +#AnsibleRequires -CSharpUtil Ansible.Basic +#AnsibleRequires -PowerShell ..module_utils.validate + +$module = [Ansible.Basic.AnsibleModule]::Create($args, @{}) +$module.ExitJson() diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.py b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.py new file mode 100644 index 00000000..ee1fb138 --- /dev/null +++ b/test/integration/targets/ansible-test/ansible_collections/ns/ps_only/plugins/modules/validate.py @@ -0,0 +1,14 @@ +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +DOCUMENTATION = r''' +module: validate +short_description: validate +description: validate +author: "validate (@validate)" +''' + +EXAMPLES = r''' +''' + +RETURN = r''' +''' diff --git a/test/integration/targets/ansible-test/collection-tests/constraints.sh b/test/integration/targets/ansible-test/collection-tests/constraints.sh deleted file mode 100755 index d3bbc6ab..00000000 --- a/test/integration/targets/ansible-test/collection-tests/constraints.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash - -set -eux -o pipefail - -cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" -cd "${WORK_DIR}/ansible_collections/ns/col_constraints" - -# common args for all tests -# each test will be run in a separate venv to verify that requirements have been properly specified -common=(--venv --python "${ANSIBLE_TEST_PYTHON_VERSION}" --color --truncate 0 "${@}") - -# unit tests - -rm -rf "tests/output" -ansible-test units "${common[@]}" - -# integration tests - -rm -rf "tests/output" -ansible-test integration "${common[@]}" diff --git a/test/integration/targets/ansible-test/collection-tests/coverage.sh b/test/integration/targets/ansible-test/collection-tests/coverage.sh index 221ae66a..c2336a32 100755 --- 
a/test/integration/targets/ansible-test/collection-tests/coverage.sh +++ b/test/integration/targets/ansible-test/collection-tests/coverage.sh @@ -5,22 +5,13 @@ set -eux -o pipefail cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" cd "${WORK_DIR}/ansible_collections/ns/col" -# rename the sanity ignore file to match the current ansible version and update import ignores with the python version -ansible_version="$(python -c 'import ansible.release; print(".".join(ansible.release.__version__.split(".")[:2]))')" -if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^3\.[567] ]]; then - # Non-module/module_utils plugins are not checked on these remote-only Python versions - sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" | grep -v 'plugins/[^m].* import' > "tests/sanity/ignore-${ansible_version}.txt" -else - sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" > "tests/sanity/ignore-${ansible_version}.txt" -fi -cat "tests/sanity/ignore-${ansible_version}.txt" +"${TEST_DIR}/collection-tests/update-ignore.py" # common args for all tests common=(--venv --color --truncate 0 "${@}") -test_common=("${common[@]}" --python "${ANSIBLE_TEST_PYTHON_VERSION}") # run a lightweight test that generates code coverge output -ansible-test sanity --test import "${test_common[@]}" --coverage +ansible-test sanity --test import "${common[@]}" --coverage # report on code coverage in all supported formats ansible-test coverage report "${common[@]}" diff --git a/test/integration/targets/ansible-test/collection-tests/integration-constraints.sh b/test/integration/targets/ansible-test/collection-tests/integration-constraints.sh new file mode 100755 index 00000000..35e5a26b --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/integration-constraints.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col_constraints" + +ansible-test integration --venv --color --truncate 0 "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/integration.sh b/test/integration/targets/ansible-test/collection-tests/integration.sh new file mode 100755 index 00000000..b257093a --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/integration.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col" + +ansible-test integration --venv --color --truncate 0 "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/sanity-vendor.sh b/test/integration/targets/ansible-test/collection-tests/sanity-vendor.sh new file mode 100755 index 00000000..0fcd659b --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/sanity-vendor.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col" + +"${TEST_DIR}/collection-tests/update-ignore.py" + +vendor_dir="$(python -c 'import pathlib, ansible._vendor; print(pathlib.Path(ansible._vendor.__file__).parent)')" + +cleanup() { + rm -rf "${vendor_dir}/demo/" +} + +trap cleanup EXIT + +# Verify that packages installed in the vendor directory are not available to the import test. +# If they are, the vendor logic will generate a warning which will be turned into an error. 
+# Testing this requires at least two plugins (not modules) to be run through the import test. + +mkdir "${vendor_dir}/demo/" +touch "${vendor_dir}/demo/__init__.py" + +ansible-test sanity --test import --color --truncate 0 plugins/lookup/vendor1.py plugins/lookup/vendor2.py "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/sanity.sh b/test/integration/targets/ansible-test/collection-tests/sanity.sh new file mode 100755 index 00000000..21e8607b --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/sanity.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col" + +"${TEST_DIR}/collection-tests/update-ignore.py" + +ansible-test sanity --color --truncate 0 "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/units-constraints.sh b/test/integration/targets/ansible-test/collection-tests/units-constraints.sh new file mode 100755 index 00000000..3440eb12 --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/units-constraints.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col_constraints" + +options=$("${TEST_DIR}"/collection-tests/venv-pythons.py) +IFS=', ' read -r -a pythons <<< "${options}" + +ansible-test units --color --truncate 0 "${pythons[@]}" "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/units.sh b/test/integration/targets/ansible-test/collection-tests/units.sh new file mode 100755 index 00000000..ecb2e162 --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/units.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/col" + +options=$("${TEST_DIR}"/collection-tests/venv-pythons.py) +IFS=', ' read -r -a pythons <<< "${options}" + +ansible-test units --color --truncate 0 "${pythons[@]}" "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh b/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh new file mode 100755 index 00000000..713bd5d6 --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/unsupported-directory.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cd "${WORK_DIR}" + +if ansible-test --help 1>stdout 2>stderr; then + echo "ansible-test did not fail" + exit 1 +fi + +grep '^Current working directory: ' stderr + +if grep raise stderr; then + echo "ansible-test failed with a traceback instead of an error message" + exit 2 +fi diff --git a/test/integration/targets/ansible-test/collection-tests/update-ignore.py b/test/integration/targets/ansible-test/collection-tests/update-ignore.py new file mode 100755 index 00000000..51ddf9ac --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/update-ignore.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +"""Rewrite a sanity ignore file to expand Python versions for import ignores and write the file out with the correct Ansible version in the name.""" + +import os +import sys + +from ansible import release + + +def main(): + ansible_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(release.__file__)))) + source_root = os.path.join(ansible_root, 'test', 'lib') + + sys.path.insert(0, source_root) + + from ansible_test._internal import 
constants + + src_path = 'tests/sanity/ignore.txt' + directory = os.path.dirname(src_path) + name, ext = os.path.splitext(os.path.basename(src_path)) + major_minor = '.'.join(release.__version__.split('.')[:2]) + dst_path = os.path.join(directory, f'{name}-{major_minor}{ext}') + + with open(src_path) as src_file: + src_lines = src_file.read().splitlines() + + dst_lines = [] + + for line in src_lines: + path, rule = line.split(' ') + + if rule != 'import': + dst_lines.append(line) + continue + + if path.startswith('plugins/module'): + python_versions = constants.SUPPORTED_PYTHON_VERSIONS + else: + python_versions = constants.CONTROLLER_PYTHON_VERSIONS + + for python_version in python_versions: + dst_lines.append(f'{line}-{python_version}') + + ignores = '\n'.join(dst_lines) + '\n' + + with open(dst_path, 'w') as dst_file: + dst_file.write(ignores) + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test/collection-tests/validate-modules-collection-loader.sh b/test/integration/targets/ansible-test/collection-tests/validate-modules-collection-loader.sh new file mode 100755 index 00000000..3f77a6af --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/validate-modules-collection-loader.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -eux -o pipefail + +cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" +cd "${WORK_DIR}/ansible_collections/ns/ps_only" + +if ! command -V pwsh; then + echo "skipping test since pwsh is not available" + exit 0 +fi + +# Use a PowerShell-only collection to verify that validate-modules does not load the collection loader multiple times. +ansible-test sanity --test validate-modules --color --truncate 0 "${@}" diff --git a/test/integration/targets/ansible-test/collection-tests/venv-pythons.py b/test/integration/targets/ansible-test/collection-tests/venv-pythons.py new file mode 100755 index 00000000..ad41b1f5 --- /dev/null +++ b/test/integration/targets/ansible-test/collection-tests/venv-pythons.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +"""Return target Python options for use with ansible-test.""" + +import os +import shutil +import subprocess +import sys + +from ansible import release + + +def main(): + ansible_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(release.__file__)))) + source_root = os.path.join(ansible_root, 'test', 'lib') + + sys.path.insert(0, source_root) + + from ansible_test._internal import constants + + args = [] + + for python_version in constants.SUPPORTED_PYTHON_VERSIONS: + executable = shutil.which(f'python{python_version}') + + if executable: + if python_version == '2.6': + # skip Python 2.6 since it requires docker to provide a PyPI proxy container + print(f'{executable} - skip', file=sys.stderr) + continue + + if python_version.startswith('2.'): + cmd = [executable, '-m', 'virtualenv', '--version'] + else: + cmd = [executable, '-m', 'venv', '--help'] + + process = subprocess.run(cmd, capture_output=True, check=False) + + print(f'{executable} - {"fail" if process.returncode else "pass"}', file=sys.stderr) + + if not process.returncode: + args.extend(['--target-python', f'venv/{python_version}']) + + print(' '.join(args)) + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test/collection-tests/venv.sh b/test/integration/targets/ansible-test/collection-tests/venv.sh deleted file mode 100755 index 42dbfde4..00000000 --- a/test/integration/targets/ansible-test/collection-tests/venv.sh +++ /dev/null @@ -1,49 +0,0 @@ 
-#!/usr/bin/env bash - -set -eux -o pipefail - -cp -a "${TEST_DIR}/ansible_collections" "${WORK_DIR}" -cd "${WORK_DIR}/ansible_collections/ns/col" - -# rename the sanity ignore file to match the current ansible version and update import ignores with the python version -ansible_version="$(python -c 'import ansible.release; print(".".join(ansible.release.__version__.split(".")[:2]))')" -if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^3\.[567] ]]; then - # Non-module/module_utils plugins are not checked on these remote-only Python versions - sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" | grep -v 'plugins/[^m].* import' > "tests/sanity/ignore-${ansible_version}.txt" -else - sed "s/ import$/ import-${ANSIBLE_TEST_PYTHON_VERSION}/;" < "tests/sanity/ignore.txt" > "tests/sanity/ignore-${ansible_version}.txt" -fi -cat "tests/sanity/ignore-${ansible_version}.txt" - -# common args for all tests -# each test will be run in a separate venv to verify that requirements have been properly specified -common=(--venv --python "${ANSIBLE_TEST_PYTHON_VERSION}" --color --truncate 0 "${@}") - -# sanity tests - -tests=() - -set +x - -while IFS='' read -r line; do - tests+=("$line"); -done < <( - ansible-test sanity --list-tests -) - -set -x - -for test in "${tests[@]}"; do - rm -rf "tests/output" - ansible-test sanity "${common[@]}" --test "${test}" -done - -# unit tests - -rm -rf "tests/output" -ansible-test units "${common[@]}" - -# integration tests - -rm -rf "tests/output" -ansible-test integration "${common[@]}" diff --git a/test/integration/targets/blockinfile/meta/main.yml b/test/integration/targets/blockinfile/meta/main.yml index 07faa217..cb6005d0 100644 --- a/test/integration/targets/blockinfile/meta/main.yml +++ b/test/integration/targets/blockinfile/meta/main.yml @@ -1,2 +1,3 @@ dependencies: - prepare_tests + - setup_remote_tmp_dir diff --git a/test/integration/targets/blockinfile/tasks/add_block_to_existing_file.yml b/test/integration/targets/blockinfile/tasks/add_block_to_existing_file.yml index 7093ed2b..c610905c 100644 --- a/test/integration/targets/blockinfile/tasks/add_block_to_existing_file.yml +++ b/test/integration/targets/blockinfile/tasks/add_block_to_existing_file.yml @@ -1,11 +1,11 @@ - name: copy the sshd_config to the test dir copy: src: sshd_config - dest: "{{ output_dir_test }}" + dest: "{{ remote_tmp_dir_test }}" - name: insert/update "Match User" configuration block in sshd_config blockinfile: - path: "{{ output_dir_test }}/sshd_config" + path: "{{ remote_tmp_dir_test }}/sshd_config" block: | Match User ansible-agent PasswordAuthentication no @@ -18,7 +18,7 @@ - "'backup_file' in blockinfile_test0" - name: check content - shell: 'grep -c -e "Match User ansible-agent" -e "PasswordAuthentication no" {{ output_dir_test }}/sshd_config' + shell: 'grep -c -e "Match User ansible-agent" -e "PasswordAuthentication no" {{ remote_tmp_dir_test }}/sshd_config' register: blockinfile_test0_grep - debug: @@ -40,7 +40,7 @@ - name: check idemptotence blockinfile: - path: "{{ output_dir_test }}/sshd_config" + path: "{{ remote_tmp_dir_test }}/sshd_config" block: | Match User ansible-agent PasswordAuthentication no diff --git a/test/integration/targets/blockinfile/tasks/block_without_trailing_newline.yml b/test/integration/targets/blockinfile/tasks/block_without_trailing_newline.yml index 57dac60e..466e86c0 100644 --- a/test/integration/targets/blockinfile/tasks/block_without_trailing_newline.yml +++ 
b/test/integration/targets/blockinfile/tasks/block_without_trailing_newline.yml @@ -1,6 +1,6 @@ - name: Add block without trailing line separator blockinfile: - path: "{{ output_dir_test }}/chomped_block_test.txt" + path: "{{ remote_tmp_dir_test }}/chomped_block_test.txt" create: yes content: |- one @@ -10,7 +10,7 @@ - name: Add block without trailing line separator again blockinfile: - path: "{{ output_dir_test }}/chomped_block_test.txt" + path: "{{ remote_tmp_dir_test }}/chomped_block_test.txt" content: |- one two @@ -19,7 +19,7 @@ - name: Check output file stat: - path: "{{ output_dir_test }}/chomped_block_test.txt" + path: "{{ remote_tmp_dir_test }}/chomped_block_test.txt" register: chomptest_file - name: Ensure chomptest results are correct diff --git a/test/integration/targets/blockinfile/tasks/create_file.yml b/test/integration/targets/blockinfile/tasks/create_file.yml index 94e47203..c8ded300 100644 --- a/test/integration/targets/blockinfile/tasks/create_file.yml +++ b/test/integration/targets/blockinfile/tasks/create_file.yml @@ -1,6 +1,6 @@ - name: Create a file with blockinfile blockinfile: - path: "{{ output_dir_test }}/empty.txt" + path: "{{ remote_tmp_dir_test }}/empty.txt" block: | Hey there @@ -10,7 +10,7 @@ - name: Run a task that results in an empty file blockinfile: - path: "{{ output_dir_test }}/empty.txt" + path: "{{ remote_tmp_dir_test }}/empty.txt" block: | Hey there @@ -19,7 +19,7 @@ register: empty_test_2 - stat: - path: "{{ output_dir_test }}/empty.txt" + path: "{{ remote_tmp_dir_test }}/empty.txt" register: empty_test_stat - name: Ensure empty file was created diff --git a/test/integration/targets/blockinfile/tasks/diff.yml b/test/integration/targets/blockinfile/tasks/diff.yml index 4a2f9454..56ef08dc 100644 --- a/test/integration/targets/blockinfile/tasks/diff.yml +++ b/test/integration/targets/blockinfile/tasks/diff.yml @@ -1,11 +1,11 @@ - name: Create a test file copy: content: diff test - dest: "{{ output_dir_test }}/diff.txt" + dest: "{{ remote_tmp_dir_test }}/diff.txt" - name: Add block to file with diff blockinfile: - path: "{{ output_dir_test }}/diff.txt" + path: "{{ remote_tmp_dir_test }}/diff.txt" block: | line 1 line 2 diff --git a/test/integration/targets/blockinfile/tasks/file_without_trailing_newline.yml b/test/integration/targets/blockinfile/tasks/file_without_trailing_newline.yml index fe4e2abc..797ffc5b 100644 --- a/test/integration/targets/blockinfile/tasks/file_without_trailing_newline.yml +++ b/test/integration/targets/blockinfile/tasks/file_without_trailing_newline.yml @@ -1,13 +1,13 @@ - name: Create file without trailing newline copy: content: '# File with no newline' - dest: "{{ output_dir_test }}/no_newline_at_end.txt" + dest: "{{ remote_tmp_dir_test }}/no_newline_at_end.txt" register: no_newline - name: Add block to file that does not have a newline at the end blockinfile: - path: "{{ output_dir_test }}/no_newline_at_end.txt" + path: "{{ remote_tmp_dir_test }}/no_newline_at_end.txt" content: | one two @@ -16,7 +16,7 @@ - name: Add block to file that does not have a newline at the end again blockinfile: - path: "{{ output_dir_test }}/no_newline_at_end.txt" + path: "{{ remote_tmp_dir_test }}/no_newline_at_end.txt" content: | one two @@ -25,7 +25,7 @@ - name: Stat the file stat: - path: "{{ output_dir_test }}/no_newline_at_end.txt" + path: "{{ remote_tmp_dir_test }}/no_newline_at_end.txt" register: no_newline_file - name: Ensure block was correctly written to file with no newline at end diff --git 
a/test/integration/targets/blockinfile/tasks/insertafter.yml b/test/integration/targets/blockinfile/tasks/insertafter.yml index daf7bcf1..a4cdd5f6 100644 --- a/test/integration/targets/blockinfile/tasks/insertafter.yml +++ b/test/integration/targets/blockinfile/tasks/insertafter.yml @@ -1,6 +1,6 @@ - name: Create insertafter test file copy: - dest: "{{ output_dir }}/after.txt" + dest: "{{ remote_tmp_dir }}/after.txt" content: | line1 line2 @@ -8,7 +8,7 @@ - name: Add block using insertafter blockinfile: - path: "{{ output_dir }}/after.txt" + path: "{{ remote_tmp_dir }}/after.txt" insertafter: line2 block: | block1 @@ -17,7 +17,7 @@ - name: Add block using insertafter again blockinfile: - path: "{{ output_dir }}/after.txt" + path: "{{ remote_tmp_dir }}/after.txt" insertafter: line2 block: | block1 @@ -26,7 +26,7 @@ - name: Stat the after.txt file stat: - path: "{{ output_dir }}/after.txt" + path: "{{ remote_tmp_dir }}/after.txt" register: after_file - name: Ensure insertafter worked correctly diff --git a/test/integration/targets/blockinfile/tasks/insertbefore.yml b/test/integration/targets/blockinfile/tasks/insertbefore.yml index 6089af15..03e51c99 100644 --- a/test/integration/targets/blockinfile/tasks/insertbefore.yml +++ b/test/integration/targets/blockinfile/tasks/insertbefore.yml @@ -1,6 +1,6 @@ - name: Create insertbefore test file copy: - dest: "{{ output_dir }}/before.txt" + dest: "{{ remote_tmp_dir }}/before.txt" content: | line1 line2 @@ -8,7 +8,7 @@ - name: Add block using insertbefore blockinfile: - path: "{{ output_dir }}/before.txt" + path: "{{ remote_tmp_dir }}/before.txt" insertbefore: line2 block: | block1 @@ -17,7 +17,7 @@ - name: Add block using insertbefore again blockinfile: - path: "{{ output_dir }}/before.txt" + path: "{{ remote_tmp_dir }}/before.txt" insertbefore: line2 block: | block1 @@ -26,10 +26,10 @@ - name: Stat the before.txt file stat: - path: "{{ output_dir }}/before.txt" + path: "{{ remote_tmp_dir }}/before.txt" register: after_file -- command: cat {{ output_dir }}/before.txt +- command: cat {{ remote_tmp_dir }}/before.txt - name: Ensure insertbefore worked correctly assert: diff --git a/test/integration/targets/blockinfile/tasks/main.yml b/test/integration/targets/blockinfile/tasks/main.yml index 4bc0b8d1..03507684 100644 --- a/test/integration/targets/blockinfile/tasks/main.yml +++ b/test/integration/targets/blockinfile/tasks/main.yml @@ -17,16 +17,16 @@ # along with Ansible. If not, see . 
- set_fact: - output_dir_test: "{{ output_dir }}/test_blockinfile" + remote_tmp_dir_test: "{{ remote_tmp_dir }}/test_blockinfile" - name: make sure our testing sub-directory does not exist file: - path: "{{ output_dir_test }}" + path: "{{ remote_tmp_dir_test }}" state: absent - name: create our testing sub-directory file: - path: "{{ output_dir_test }}" + path: "{{ remote_tmp_dir_test }}" state: directory - import_tasks: add_block_to_existing_file.yml diff --git a/test/integration/targets/blockinfile/tasks/preserve_line_endings.yml b/test/integration/targets/blockinfile/tasks/preserve_line_endings.yml index bb2dee29..0528c3bf 100644 --- a/test/integration/targets/blockinfile/tasks/preserve_line_endings.yml +++ b/test/integration/targets/blockinfile/tasks/preserve_line_endings.yml @@ -1,19 +1,19 @@ - name: create line_endings_test.txt in the test dir copy: - dest: "{{ output_dir_test }}/line_endings_test.txt" + dest: "{{ remote_tmp_dir_test }}/line_endings_test.txt" # generating the content like this instead of copying a fixture file # prevents sanity checks from warning about mixed line endings content: "unix\nunix\nunix\n\ndos\r\ndos\r\ndos\r\n\nunix\nunix\n# BEGIN ANSIBLE MANAGED BLOCK\ndos\r\n# END ANSIBLE MANAGED BLOCK\nunix\nunix\nunix\nunix\n" - name: insert/update "dos" configuration block in line_endings_test.txt blockinfile: - path: "{{ output_dir_test }}/line_endings_test.txt" + path: "{{ remote_tmp_dir_test }}/line_endings_test.txt" block: "dos\r\ndos\r\ndos\r\n" register: blockinfile_test2 - name: check content # using the more precise `grep -Pc "^dos\\r$" ...` fails on BSD/macOS - shell: 'grep -c "^dos.$" {{ output_dir_test }}/line_endings_test.txt' + shell: 'grep -c "^dos.$" {{ remote_tmp_dir_test }}/line_endings_test.txt' register: blockinfile_test2_grep - name: validate line_endings_test.txt results diff --git a/test/integration/targets/blockinfile/tasks/validate.yml b/test/integration/targets/blockinfile/tasks/validate.yml index 105bca53..aa7aa636 100644 --- a/test/integration/targets/blockinfile/tasks/validate.yml +++ b/test/integration/targets/blockinfile/tasks/validate.yml @@ -1,6 +1,6 @@ - name: EXPECTED FAILURE test improper validate blockinfile: - path: "{{ output_dir }}/validate.txt" + path: "{{ remote_tmp_dir }}/validate.txt" block: | line1 line2 @@ -10,7 +10,7 @@ - name: EXPECTED FAILURE test failure to validate blockinfile: - path: "{{ output_dir }}/validate.txt" + path: "{{ remote_tmp_dir }}/validate.txt" block: | line1 line2 @@ -20,7 +20,7 @@ - name: Test proper validate blockinfile: - path: "{{ output_dir }}/validate.txt" + path: "{{ remote_tmp_dir }}/validate.txt" block: | line1 line2 diff --git a/test/integration/targets/callback_default/callback_default.out.fqcn_free.stdout b/test/integration/targets/callback_default/callback_default.out.fqcn_free.stdout new file mode 100644 index 00000000..0ec04479 --- /dev/null +++ b/test/integration/targets/callback_default/callback_default.out.fqcn_free.stdout @@ -0,0 +1,35 @@ + +PLAY [nonlockstep] ************************************************************* + +TASK [command] ***************************************************************** +changed: [testhost10] + +TASK [command] ***************************************************************** +changed: [testhost10] + +TASK [command] ***************************************************************** +changed: [testhost10] + +TASK [command] ***************************************************************** +changed: [testhost11] + +TASK [command] 
***************************************************************** +changed: [testhost11] + +TASK [command] ***************************************************************** +changed: [testhost11] + +TASK [command] ***************************************************************** +changed: [testhost12] + +TASK [command] ***************************************************************** +changed: [testhost12] + +TASK [command] ***************************************************************** +changed: [testhost12] + +PLAY RECAP ********************************************************************* +testhost10 : ok=3 changed=3 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +testhost11 : ok=3 changed=3 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 +testhost12 : ok=3 changed=3 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 + diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh index f9b60b6b..50b4cf70 100755 --- a/test/integration/targets/callback_default/runme.sh +++ b/test/integration/targets/callback_default/runme.sh @@ -211,4 +211,5 @@ rm -f meta_test.out # Ensure free/host_pinned non-lockstep strategies display correctly diff -u callback_default.out.free.stdout <(ANSIBLE_STRATEGY=free ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null) +diff -u callback_default.out.fqcn_free.stdout <(ANSIBLE_STRATEGY=ansible.builtin.free ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null) diff -u callback_default.out.host_pinned.stdout <(ANSIBLE_STRATEGY=host_pinned ansible-playbook -i inventory test_non_lockstep.yml 2>/dev/null) diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role/tasks/main.yml b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role/tasks/main.yml index d41ae90e..3fab7fe9 100644 --- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role/tasks/main.yml +++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role/tasks/main.yml @@ -26,4 +26,4 @@ - assert: that: - - test_role_output.msg == test_role_input + - non_coll_role_to_call_test_role_output.msg == test_role_input diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role_to_call/tasks/main.yml b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role_to_call/tasks/main.yml index 98445ce3..2b1c15f2 100644 --- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role_to_call/tasks/main.yml +++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/playbooks/roles/non_coll_role_to_call/tasks/main.yml @@ -1,7 +1,7 @@ - debug: msg: '{{ test_role_input | default("(undefined)") }}' - register: test_role_output + register: non_coll_role_to_call_test_role_output - assert: that: - - test_role_input is not defined or test_role_input == test_role_output.msg + - 'non_coll_role_to_call_test_role_output.msg == "include another non-coll role"' diff --git a/test/integration/targets/connection_ssh/runme.sh b/test/integration/targets/connection_ssh/runme.sh index cbadf1d5..4d430263 100755 --- 
a/test/integration/targets/connection_ssh/runme.sh +++ b/test/integration/targets/connection_ssh/runme.sh @@ -71,3 +71,7 @@ ansible-playbook check_ssh_defaults.yml "$@" -i test_connection.inventory # ensure we can load from ini cfg ANSIBLE_CONFIG=./test_ssh_defaults.cfg ansible-playbook verify_config.yml "$@" + +# ensure we handle cp with spaces correctly, otherwise would fail with +# `"Failed to connect to the host via ssh: command-line line 0: keyword controlpath extra arguments at end of line"` +ANSIBLE_SSH_CONTROL_PATH='/tmp/ssh cp with spaces' ansible -m ping all -e ansible_connection=ssh -i test_connection.inventory "$@" diff --git a/test/integration/targets/delegate_to/delegate_facts_loop.yml b/test/integration/targets/delegate_to/delegate_facts_loop.yml new file mode 100644 index 00000000..28a1488d --- /dev/null +++ b/test/integration/targets/delegate_to/delegate_facts_loop.yml @@ -0,0 +1,40 @@ +- hosts: localhost + gather_facts: no + tasks: + - set_fact: + test: 123 + delegate_to: "{{ item }}" + delegate_facts: true + loop: "{{ groups['all'] | difference(['localhost']) }}" + + - name: ensure we didnt create it on current host + assert: + that: + - test is undefined + + - name: ensure facts get created + assert: + that: + - "'test' in hostvars[item]" + - hostvars[item]['test'] == 123 + loop: "{{ groups['all'] | difference(['localhost']) }}" + + +- name: test that we don't polute whole group with one value + hosts: localhost + gather_facts: no + vars: + cluster_name: bleh + tasks: + - name: construct different fact per host in loop + set_fact: + vm_name: "{{ cluster_name }}-{{item}}" + delegate_to: "{{ item }}" + delegate_facts: True + with_items: "{{ groups['all'] }}" + + - name: ensure the fact is personalized for each host + assert: + that: + - hostvars[item]['vm_name'].endswith(item) + loop: "{{ groups['all'] }}" diff --git a/test/integration/targets/delegate_to/inventory b/test/integration/targets/delegate_to/inventory index f7ad0a33..ebc33254 100644 --- a/test/integration/targets/delegate_to/inventory +++ b/test/integration/targets/delegate_to/inventory @@ -7,3 +7,11 @@ testhost5 ansible_connection=fakelocal [all:vars] ansible_python_interpreter="{{ ansible_playbook_python }}" + +[delegated_vars] +testhost6 myhost=127.0.0.3 +testhost7 myhost=127.0.0.4 + +[delegated_vars:vars] +ansible_host={{myhost}} +ansible_connection=ssh diff --git a/test/integration/targets/delegate_to/runme.sh b/test/integration/targets/delegate_to/runme.sh index af090cdf..1bdf27cf 100755 --- a/test/integration/targets/delegate_to/runme.sh +++ b/test/integration/targets/delegate_to/runme.sh @@ -48,7 +48,7 @@ ANSIBLE_SSH_ARGS='-C -o ControlMaster=auto -o ControlPersist=60s -o UserKnownHos # this test is not doing what it says it does, also relies on var that should not be available #ansible-playbook test_loop_control.yml -v "$@" -ansible-playbook test_delegate_to_loop_randomness.yml -v "$@" +ansible-playbook test_delegate_to_loop_randomness.yml -i inventory -v "$@" ansible-playbook delegate_and_nolog.yml -i inventory -v "$@" @@ -75,3 +75,4 @@ ansible-playbook resolve_vars.yml -i inventory -v "$@" ansible-playbook test_delegate_to_lookup_context.yml -i inventory -v "$@" ansible-playbook delegate_local_from_root.yml -i inventory -v "$@" -e 'ansible_user=root' ansible-playbook delegate_with_fact_from_delegate_host.yml "$@" +ansible-playbook delegate_facts_loop.yml -i inventory -v "$@" diff --git a/test/integration/targets/delegate_to/test_delegate_to.yml 
b/test/integration/targets/delegate_to/test_delegate_to.yml index 05b0536e..dcfa9d03 100644 --- a/test/integration/targets/delegate_to/test_delegate_to.yml +++ b/test/integration/targets/delegate_to/test_delegate_to.yml @@ -56,3 +56,27 @@ - name: remove test file file: path={{ output_dir }}/tmp.txt state=absent + + +- name: verify delegation with per host vars + hosts: testhost6 + gather_facts: yes + tasks: + - debug: msg={{ansible_facts['env']}} + + - name: ensure normal facts still work as expected + assert: + that: + - '"127.0.0.3" in ansible_facts["env"]["SSH_CONNECTION"]' + + - name: Test delegate_to with other host defined using same named var + setup: + register: setup_results + delegate_to: testhost7 + + - debug: msg={{setup_results.ansible_facts.ansible_env}} + + - name: verify ssh plugin resolves variable for ansible_host correctly + assert: + that: + - '"127.0.0.4" in setup_results.ansible_facts.ansible_env["SSH_CONNECTION"]' diff --git a/test/integration/targets/dnf/tasks/cacheonly.yml b/test/integration/targets/dnf/tasks/cacheonly.yml index a5c84a37..eb19156c 100644 --- a/test/integration/targets/dnf/tasks/cacheonly.yml +++ b/test/integration/targets/dnf/tasks/cacheonly.yml @@ -8,8 +8,9 @@ state: latest cacheonly: true register: dnf_result + ignore_errors: true -- name: Verify dnf has not changed +- name: Verify dnf failed or has not changed assert: that: - - "not dnf_result is changed" + - "dnf_result is failed or not dnf_result is changed" diff --git a/test/integration/targets/filter_mathstuff/runme.sh b/test/integration/targets/filter_mathstuff/runme.sh index 36503003..fad8443c 100755 --- a/test/integration/targets/filter_mathstuff/runme.sh +++ b/test/integration/targets/filter_mathstuff/runme.sh @@ -11,7 +11,7 @@ source virtualenv.sh # Install Jinja < 2.10 since we want to test the fallback to Ansible's custom # unique filter. Jinja < 2.10 does not have do_unique so we will trigger the # fallback. -pip install 'jinja2 < 2.10' +pip install 'jinja2 < 2.10' 'markupsafe < 2.1' # Run the playbook again in the venv with Jinja < 2.10 ansible-playbook runme.yml "$@" diff --git a/test/integration/targets/filter_urls/runme.sh b/test/integration/targets/filter_urls/runme.sh index f6460acb..2ed1cd84 100755 --- a/test/integration/targets/filter_urls/runme.sh +++ b/test/integration/targets/filter_urls/runme.sh @@ -16,7 +16,7 @@ pip install 'setuptools<45' # Install Jinja 2.6 since we want to test the fallback to Ansible's custom # urlencode functions. Jinja 2.6 does not have urlencode so we will trigger the # fallback. 
-pip install 'jinja2 >= 2.6, < 2.7' +pip install 'jinja2 >= 2.6, < 2.7' 'markupsafe < 2.1' # Run the playbook again in the venv with Jinja 2.6 ansible-playbook runme.yml "$@" diff --git a/test/integration/targets/git/meta/main.yml b/test/integration/targets/git/meta/main.yml index 34a77cb7..5e461383 100644 --- a/test/integration/targets/git/meta/main.yml +++ b/test/integration/targets/git/meta/main.yml @@ -1,3 +1,4 @@ dependencies: - prepare_tests - setup_gnutar + - setup_remote_tmp_dir diff --git a/test/integration/targets/git/tasks/missing_hostkey.yml b/test/integration/targets/git/tasks/missing_hostkey.yml index 6e4d53c3..136c5d5d 100644 --- a/test/integration/targets/git/tasks/missing_hostkey.yml +++ b/test/integration/targets/git/tasks/missing_hostkey.yml @@ -2,7 +2,7 @@ git: repo: '{{ repo_format2 }}' dest: '{{ checkout_dir }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result ignore_errors: true @@ -16,7 +16,7 @@ dest: '{{ checkout_dir }}' accept_hostkey: true key_file: '{{ github_ssh_private_key }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result when: github_ssh_private_key is defined @@ -38,7 +38,7 @@ version: 'master' accept_hostkey: false # should already have been accepted key_file: '{{ github_ssh_private_key }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result when: github_ssh_private_key is defined @@ -49,7 +49,7 @@ - name: MISSING-HOSTEKY | Remove github.com hostkey from known_hosts lineinfile: - dest: '{{ output_dir }}/known_hosts' + dest: '{{ remote_tmp_dir }}/known_hosts' regexp: "github.com" state: absent when: github_ssh_private_key is defined diff --git a/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml index fb8bb063..3fd19067 100644 --- a/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml +++ b/test/integration/targets/git/tasks/missing_hostkey_acceptnew.yml @@ -9,7 +9,7 @@ repo: '{{ repo_format2 }}' dest: '{{ checkout_dir }}' accept_newhostkey: true - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result ignore_errors: true @@ -24,7 +24,7 @@ git: repo: '{{ repo_format2 }}' dest: '{{ checkout_dir }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result ignore_errors: true @@ -39,7 +39,7 @@ dest: '{{ checkout_dir }}' accept_newhostkey: true key_file: '{{ github_ssh_private_key }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result - assert: @@ -58,7 +58,7 @@ version: 'master' accept_newhostkey: false # should already have been accepted key_file: '{{ github_ssh_private_key }}' - ssh_opts: '-o UserKnownHostsFile={{ output_dir }}/known_hosts' + ssh_opts: '-o UserKnownHostsFile={{ remote_tmp_dir }}/known_hosts' register: git_result - assert: @@ -67,7 +67,7 @@ - name: MISSING-HOSTEKY | Remove github.com hostkey from known_hosts lineinfile: - dest: '{{ output_dir }}/known_hosts' + dest: '{{ remote_tmp_dir }}/known_hosts' regexp: "github.com" state: absent diff --git 
a/test/integration/targets/git/tasks/setup.yml b/test/integration/targets/git/tasks/setup.yml index 3158bf62..06511053 100644 --- a/test/integration/targets/git/tasks/setup.yml +++ b/test/integration/targets/git/tasks/setup.yml @@ -1,11 +1,11 @@ -- name: SETUP | clean out the output_dir +- name: SETUP | clean out the remote_tmp_dir file: - path: "{{ output_dir }}" + path: "{{ remote_tmp_dir }}" state: absent -- name: SETUP | create clean output_dir +- name: SETUP | create clean remote_tmp_dir file: - path: "{{ output_dir }}" + path: "{{ remote_tmp_dir }}" state: directory - name: SETUP | install git diff --git a/test/integration/targets/git/vars/main.yml b/test/integration/targets/git/vars/main.yml index a1bbfca9..b38531f3 100644 --- a/test/integration/targets/git/vars/main.yml +++ b/test/integration/targets/git/vars/main.yml @@ -27,9 +27,9 @@ git_list_commands: tgz: tar -tf zip: unzip -Z1 -checkout_dir: '{{ output_dir }}/git' -repo_dir: '{{ output_dir }}/local_repos' -separate_git_dir: '{{ output_dir }}/sep_git_dir' +checkout_dir: '{{ remote_tmp_dir }}/git' +repo_dir: '{{ remote_tmp_dir }}/local_repos' +separate_git_dir: '{{ remote_tmp_dir }}/sep_git_dir' repo_format1: 'https://github.com/jimi-c/test_role' repo_format2: 'git@github.com:jimi-c/test_role.git' repo_format3: 'ssh://git@github.com/jimi-c/test_role.git' diff --git a/test/integration/targets/groupby_filter/runme.sh b/test/integration/targets/groupby_filter/runme.sh index 09b47d55..9c9c6f03 100755 --- a/test/integration/targets/groupby_filter/runme.sh +++ b/test/integration/targets/groupby_filter/runme.sh @@ -7,10 +7,10 @@ source virtualenv.sh # Update pip in the venv to a version that supports constraints pip install --requirement requirements.txt -pip install -U jinja2==2.9.4 --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt" +pip install -U jinja2==2.9.4 "markupsafe<2.1.0" --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt" ansible-playbook -i ../../inventory test_jinja2_groupby.yml -v "$@" -pip install -U "jinja2<2.9.0" --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt" +pip install -U "jinja2<2.9.0" "markupsafe<2.1.0" --constraint "../../../lib/ansible_test/_data/requirements/constraints.txt" ansible-playbook -i ../../inventory test_jinja2_groupby.yml -v "$@" diff --git a/test/integration/targets/incidental_setup_mongodb/defaults/main.yml b/test/integration/targets/incidental_setup_mongodb/defaults/main.yml index b205013c..de0b76d3 100644 --- a/test/integration/targets/incidental_setup_mongodb/defaults/main.yml +++ b/test/integration/targets/incidental_setup_mongodb/defaults/main.yml @@ -42,5 +42,5 @@ redhat_packages_py3: # Do not install requests[security] via pip. It will cause test failures. 
# See https://github.com/ansible/ansible/pull/66319 pip_packages: - - psutil - - pymongo + - psutil==5.8.0 + - pymongo==3.12.2 diff --git a/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter2.py b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter2.py new file mode 100644 index 00000000..96e726ac --- /dev/null +++ b/test/integration/targets/jinja_plugins/collections/ansible_collections/foo/bar/plugins/filter/bad_collection_filter2.py @@ -0,0 +1,10 @@ +# Copyright (c) 2021 Matt Martz +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +class FilterModule: + pass diff --git a/test/integration/targets/jinja_plugins/tasks/main.yml b/test/integration/targets/jinja_plugins/tasks/main.yml index 012ec954..013baecb 100644 --- a/test/integration/targets/jinja_plugins/tasks/main.yml +++ b/test/integration/targets/jinja_plugins/tasks/main.yml @@ -17,6 +17,8 @@ - | result.stderr|regex_findall('bad_test')|length == 2 - | - result.stderr|regex_findall('bad_collection_filter')|length == 2 + result.stderr|regex_findall('bad_collection_filter')|length == 3 + - | + result.stderr|regex_findall('bad_collection_filter2')|length == 1 - | result.stderr|regex_findall('bad_collection_test')|length == 2 diff --git a/test/integration/targets/lookup_template/tasks/main.yml b/test/integration/targets/lookup_template/tasks/main.yml index 36a8ee31..9ebdf0c5 100644 --- a/test/integration/targets/lookup_template/tasks/main.yml +++ b/test/integration/targets/lookup_template/tasks/main.yml @@ -25,3 +25,10 @@ - assert: that: - "hello_world_comment|trim == 'Hello world!'" + +# 77004 +- assert: + that: + - lookup('template', 'dict.j2') is mapping + - lookup('template', 'dict.j2', convert_data=True) is mapping + - lookup('template', 'dict.j2', convert_data=False) is not mapping diff --git a/test/integration/targets/lookup_template/templates/dict.j2 b/test/integration/targets/lookup_template/templates/dict.j2 new file mode 100644 index 00000000..0439155b --- /dev/null +++ b/test/integration/targets/lookup_template/templates/dict.j2 @@ -0,0 +1 @@ +{"foo": "{{ 'bar' }}"} diff --git a/test/integration/targets/meta_tasks/runme.sh b/test/integration/targets/meta_tasks/runme.sh index f5916ec7..c29579bf 100755 --- a/test/integration/targets/meta_tasks/runme.sh +++ b/test/integration/targets/meta_tasks/runme.sh @@ -55,6 +55,13 @@ for test_strategy in linear free; do [ "$(grep -c "Testing end_play on host" <<< "$out" )" -eq 1 ] grep -q "META: ending play" <<< "$out" grep -qv 'Failed to end using end_play' <<< "$out" + + out="$(ansible-playbook test_end_play_multiple_plays.yml -i inventory.yml -e test_strategy=$test_strategy -vv "$@")" + + grep -q "META: ending play" <<< "$out" + grep -q "Play 1" <<< "$out" + grep -q "Play 2" <<< "$out" + grep -qv 'Failed to end using end_play' <<< "$out" done # test end_batch meta task diff --git a/test/integration/targets/meta_tasks/test_end_play_multiple_plays.yml b/test/integration/targets/meta_tasks/test_end_play_multiple_plays.yml new file mode 100644 index 00000000..2cc8d1e6 --- /dev/null +++ b/test/integration/targets/meta_tasks/test_end_play_multiple_plays.yml @@ -0,0 +1,18 @@ +- name: Testing end_play with multiple plays with strategy {{ test_strategy | default('linear') }} + hosts: testhost + 
gather_facts: no + strategy: "{{ test_strategy | default('linear') }}" + tasks: + - debug: + msg: "Play 1" + - meta: end_play + - fail: + msg: 'Failed to end using end_play' + +- name: Testing end_play with multiple plays with strategy {{ test_strategy | default('linear') }} + hosts: testhost + gather_facts: no + strategy: "{{ test_strategy | default('linear') }}" + tasks: + - debug: + msg: "Play 2" diff --git a/test/integration/targets/pip/tasks/pip.yml b/test/integration/targets/pip/tasks/pip.yml index 2b9ad561..2ffb3dfa 100644 --- a/test/integration/targets/pip/tasks/pip.yml +++ b/test/integration/targets/pip/tasks/pip.yml @@ -580,27 +580,24 @@ # https://github.com/ansible/ansible/issues/68592 # Handle pre-release version numbers in check_mode for already-installed # packages. -# TODO: Limiting to py3 test boxes for now so the example of 'black' installs, -# we should probably find another package to use with a similar versioning -# scheme or make a small one and enable this test for py2 as well. - block: - - name: Install a beta version of a package + - name: Install a pre-release version of a package pip: - name: black - version: 19.10b0 + name: fallible + version: 0.0.1a2 state: present - name: Use check_mode and ensure that the package is shown as installed check_mode: true pip: - name: black + name: fallible state: present register: pip_prereleases - - name: Uninstall the beta package if we need to + - name: Uninstall the pre-release package if we need to pip: - name: black - version: 19.10b0 + name: fallible + version: 0.0.1a2 state: absent when: pip_prereleases is changed @@ -608,6 +605,4 @@ that: - pip_prereleases is successful - pip_prereleases is not changed - - '"black==19.10b0" in pip_prereleases.stdout_lines' - - when: ansible_python.version.major == 3 + - '"fallible==0.0.1a2" in pip_prereleases.stdout_lines' diff --git a/test/integration/targets/roles_arg_spec/roles/test1/meta/argument_specs.yml b/test/integration/targets/roles_arg_spec/roles/test1/meta/argument_specs.yml index 02edac66..427946e5 100644 --- a/test/integration/targets/roles_arg_spec/roles/test1/meta/argument_specs.yml +++ b/test/integration/targets/roles_arg_spec/roles/test1/meta/argument_specs.yml @@ -73,6 +73,11 @@ argument_specs: some_second_level: type: "bool" default: true + some_more_dict_options: + type: "dict" + options: + some_second_level: + type: "str" some_str_removed_in: type: "str" removed_in: 2.10 diff --git a/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml b/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml index a04785fb..923e92f7 100644 --- a/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml +++ b/test/integration/targets/roles_arg_spec/test_complex_role_fails.yml @@ -34,7 +34,10 @@ "value of test1_choices must be one of: this paddle game, the astray, this remote control, the chair, got: My dog", "value of some_choices must be one of: choice1, choice2, got: choice4", "argument 'some_second_level' is of type found in 'some_dict_options'. and we were unable to convert to bool: The value 'not-a-bool' is not a valid boolean. ", - "argument 'third_level' is of type found in 'multi_level_option -> second_level'. and we were unable to convert to int: cannot be converted to an int" + "argument 'third_level' is of type found in 'multi_level_option -> second_level'. 
and we were unable to convert to int: cannot be converted to an int", + "argument 'some_more_dict_options' is of type and we were unable to convert to dict: dictionary requested, could not parse JSON or key=value", + "value of 'some_more_dict_options' must be of type dict or list of dicts", + "dictionary requested, could not parse JSON or key=value", ] tasks: @@ -86,6 +89,7 @@ some_str_removed_in: "foo" some_dict_options: some_second_level: "not-a-bool" + some_more_dict_options: "not-a-dict" multi_level_option: second_level: third_level: "should_be_int" diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml index 27daf3cf..f737fe30 100644 --- a/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml +++ b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml @@ -4,3 +4,5 @@ pip: # no package in pkg, just use pip name: paramiko extra_args: "-c {{ remote_constraints }}" + environment: + SETUPTOOLS_USE_DISTUTILS: stdlib diff --git a/test/integration/targets/setup_paramiko/install-RedHat-8-python-3.yml b/test/integration/targets/setup_paramiko/install-RedHat-8-python-3.yml index 19fd3f63..55677f21 100644 --- a/test/integration/targets/setup_paramiko/install-RedHat-8-python-3.yml +++ b/test/integration/targets/setup_paramiko/install-RedHat-8-python-3.yml @@ -4,3 +4,5 @@ pip: # no python3-paramiko package exists for RHEL 8 name: paramiko extra_args: "-c {{ remote_constraints }}" + environment: + SETUPTOOLS_USE_DISTUTILS: stdlib diff --git a/test/integration/targets/template/tasks/main.yml b/test/integration/targets/template/tasks/main.yml index f8848ef5..14ef6180 100644 --- a/test/integration/targets/template/tasks/main.yml +++ b/test/integration/targets/template/tasks/main.yml @@ -250,11 +250,6 @@ # VERIFY CONTENTS -- name: check what python version ansible is running on - command: "{{ ansible_python.executable }} -c 'import distutils.sysconfig ; print(distutils.sysconfig.get_python_version())'" - register: pyver - delegate_to: localhost - - name: copy known good into place copy: src=foo.txt dest={{output_dir}}/foo.txt diff --git a/test/integration/targets/unarchive/tasks/main.yml b/test/integration/targets/unarchive/tasks/main.yml index 035a5561..baa2a8cf 100644 --- a/test/integration/targets/unarchive/tasks/main.yml +++ b/test/integration/targets/unarchive/tasks/main.yml @@ -17,3 +17,4 @@ - import_tasks: test_symlink.yml - import_tasks: test_download.yml - import_tasks: test_unprivileged_user.yml +- import_tasks: test_different_language_var.yml diff --git a/test/integration/targets/unarchive/tasks/test_different_language_var.yml b/test/integration/targets/unarchive/tasks/test_different_language_var.yml new file mode 100644 index 00000000..1d7d3f60 --- /dev/null +++ b/test/integration/targets/unarchive/tasks/test_different_language_var.yml @@ -0,0 +1,41 @@ +- name: test non-ascii with different LANGUAGE + when: ansible_os_family == 'Debian' + block: + - name: install de language pack + apt: + name: language-pack-de + state: present + + - name: create our unarchive destination + file: + path: "{{ remote_tmp_dir }}/test-unarchive-nonascii-くらとみ-tar-gz" + state: directory + + - name: test that unarchive works with an archive that contains non-ascii filenames + unarchive: + # Both the filename of the tarball and the filename inside the tarball have + # nonascii chars + src: "test-unarchive-nonascii-くらとみ.tar.gz" + dest: "{{ remote_tmp_dir 
}}/test-unarchive-nonascii-くらとみ-tar-gz" + mode: "u+rwX,go+rX" + remote_src: no + register: nonascii_result0 + + - name: Check that file is really there + stat: + path: "{{ remote_tmp_dir }}/test-unarchive-nonascii-くらとみ-tar-gz/storage/àâæçéèïîôœ(copy)!@#$%^&-().jpg" + register: nonascii_stat0 + + - name: Assert that nonascii tests succeeded + assert: + that: + - "nonascii_result0.changed == true" + - "nonascii_stat0.stat.exists == true" + + - name: remove nonascii test + file: + path: "{{ remote_tmp_dir }}/test-unarchive-nonascii-くらとみ-tar-gz" + state: absent + + environment: + LANGUAGE: de_DE:en \ No newline at end of file diff --git a/test/integration/targets/unarchive/tasks/test_exclude.yml b/test/integration/targets/unarchive/tasks/test_exclude.yml index bf9f14fb..8d3183c3 100644 --- a/test/integration/targets/unarchive/tasks/test_exclude.yml +++ b/test/integration/targets/unarchive/tasks/test_exclude.yml @@ -11,13 +11,28 @@ src: "{{ remote_tmp_dir }}/unarchive-00.{{item}}" dest: "{{ remote_tmp_dir }}/exclude-{{item}}" remote_src: yes + list_files: yes exclude: - "exclude/exclude-*.txt" - "other/exclude-1.ext" + register: result_of_unarchive with_items: - zip - tar +- name: Make sure unarchive module reported back extracted files + assert: + that: + - "'include/include-1.txt' in item.files" + - "'include/include-2.txt' in item.files" + - "'include/include-3.txt' in item.files" + - "'other/include-1.ext' in item.files" + - "'other/include-2.ext' in item.files" + - "'other/exclude-2.ext' in item.files" + - "'other/other-1.ext' in item.files" + - "'other/other-2.ext' in item.files" + loop: "{{ result_of_unarchive.results }}" + - name: verify that the file was unarchived shell: find {{ remote_tmp_dir }}/exclude-{{item}} chdir={{ remote_tmp_dir }} register: unarchive00 diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml index 700e7f10..755cd432 100644 --- a/test/integration/targets/uri/tasks/main.yml +++ b/test/integration/targets/uri/tasks/main.yml @@ -19,8 +19,8 @@ - name: set role facts set_fact: http_port: 15260 - files_dir: '{{ output_dir|expanduser }}/files' - checkout_dir: '{{ output_dir }}/git' + files_dir: '{{ remote_tmp_dir|expanduser }}/files' + checkout_dir: '{{ remote_tmp_dir }}/git' - name: create a directory to serve files from file: @@ -39,10 +39,10 @@ - copy: src: "testserver.py" - dest: "{{ output_dir }}/testserver.py" + dest: "{{ remote_tmp_dir }}/testserver.py" - name: start SimpleHTTPServer - shell: cd {{ files_dir }} && {{ ansible_python.executable }} {{ output_dir}}/testserver.py {{ http_port }} + shell: cd {{ files_dir }} && {{ ansible_python.executable }} {{ remote_tmp_dir}}/testserver.py {{ http_port }} async: 120 # this test set can take ~1m to run on FreeBSD (via Shippable) poll: 0 @@ -91,12 +91,12 @@ - name: test https fetch to a site with mismatched hostname and certificate uri: url: "https://{{ badssl_host }}/" - dest: "{{ output_dir }}/shouldnotexist.html" + dest: "{{ remote_tmp_dir }}/shouldnotexist.html" ignore_errors: True register: result - stat: - path: "{{ output_dir }}/shouldnotexist.html" + path: "{{ remote_tmp_dir }}/shouldnotexist.html" register: stat_result - name: Assert that the file was not downloaded @@ -111,18 +111,18 @@ - name: Clean up any cruft from the results directory file: - name: "{{ output_dir }}/kreitz.html" + name: "{{ remote_tmp_dir }}/kreitz.html" state: absent - name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no uri: url: "https://{{ 
badssl_host }}/" - dest: "{{ output_dir }}/kreitz.html" + dest: "{{ remote_tmp_dir }}/kreitz.html" validate_certs: no register: result - stat: - path: "{{ output_dir }}/kreitz.html" + path: "{{ remote_tmp_dir }}/kreitz.html" register: stat_result - name: Assert that the file was downloaded @@ -139,13 +139,13 @@ - name: test https fetch to a site with self signed certificate using ca_path uri: url: "https://{{ self_signed_host }}:444/" - dest: "{{ output_dir }}/self-signed_using_ca_path.html" + dest: "{{ remote_tmp_dir }}/self-signed_using_ca_path.html" ca_path: "{{ remote_tmp_dir }}/ca2cert.pem" validate_certs: yes register: result - stat: - path: "{{ output_dir }}/self-signed_using_ca_path.html" + path: "{{ remote_tmp_dir }}/self-signed_using_ca_path.html" register: stat_result - name: Assert that the file was downloaded @@ -157,13 +157,13 @@ - name: test https fetch to a site with self signed certificate without using ca_path uri: url: "https://{{ self_signed_host }}:444/" - dest: "{{ output_dir }}/self-signed-without_using_ca_path.html" + dest: "{{ remote_tmp_dir }}/self-signed-without_using_ca_path.html" validate_certs: yes register: result ignore_errors: true - stat: - path: "{{ output_dir }}/self-signed-without_using_ca_path.html" + path: "{{ remote_tmp_dir }}/self-signed-without_using_ca_path.html" register: stat_result - name: Assure that https access to a host with self-signed certificate without providing ca_path fails @@ -653,16 +653,16 @@ - name: Create a testing file copy: content: "content" - dest: "{{ output_dir }}/output" + dest: "{{ remote_tmp_dir }}/output" - name: Download a file from non existing location uri: url: http://does/not/exist - dest: "{{ output_dir }}/output" + dest: "{{ remote_tmp_dir }}/output" ignore_errors: yes - name: Save testing file's output - command: "cat {{ output_dir }}/output" + command: "cat {{ remote_tmp_dir }}/output" register: file_out - name: Test the testing file was not overwritten @@ -672,7 +672,7 @@ - name: Clean up file: - dest: "{{ output_dir }}/output" + dest: "{{ remote_tmp_dir }}/output" state: absent - name: Test follow_redirects=none diff --git a/test/integration/targets/uri/tasks/unexpected-failures.yml b/test/integration/targets/uri/tasks/unexpected-failures.yml index ac38871c..f00380ab 100644 --- a/test/integration/targets/uri/tasks/unexpected-failures.yml +++ b/test/integration/targets/uri/tasks/unexpected-failures.yml @@ -1,6 +1,6 @@ --- # same as expanduser & expandvars called on managed host -- command: 'echo {{ output_dir }}' +- command: 'echo {{ remote_tmp_dir }}' register: echo - set_fact: @@ -8,13 +8,13 @@ - name: ensure test directory doesn't exist file: - path: '{{ output_dir }}/non/existent/path' + path: '{{ remote_tmp_dir }}/non/existent/path' state: absent - name: destination doesn't exist uri: url: 'https://{{ httpbin_host }}/get' - dest: '{{ output_dir }}/non/existent/path' + dest: '{{ remote_tmp_dir }}/non/existent/path' ignore_errors: true register: ret diff --git a/test/integration/targets/user/tasks/test_ssh_key_passphrase.yml b/test/integration/targets/user/tasks/test_ssh_key_passphrase.yml index bb0486da..f0725ed6 100644 --- a/test/integration/targets/user/tasks/test_ssh_key_passphrase.yml +++ b/test/integration/targets/user/tasks/test_ssh_key_passphrase.yml @@ -10,11 +10,12 @@ state: present generate_ssh_key: yes force: yes - ssh_key_file: "{{ output_dir }}/test_id_rsa" + ssh_key_file: .ssh/test_id_rsa ssh_key_passphrase: secret_passphrase + register: ansibulluser_create_with_ssh_key - name: Unlock ssh key 
- command: "ssh-keygen -y -f {{ output_dir }}/test_id_rsa -P secret_passphrase" + command: "ssh-keygen -y -f {{ ansibulluser_create_with_ssh_key.ssh_key_file|quote }} -P secret_passphrase" register: result - name: Check that ssh key was unlocked successfully @@ -24,6 +25,6 @@ - name: Clean ssh key file: - path: "{{ output_dir }}/test_id_rsa" + path: "{{ ansibulluser_create_with_ssh_key.ssh_key_file }}" state: absent when: ansible_os_family == 'FreeBSD' diff --git a/test/integration/targets/var_inheritance/aliases b/test/integration/targets/var_inheritance/aliases new file mode 100644 index 00000000..498fedd5 --- /dev/null +++ b/test/integration/targets/var_inheritance/aliases @@ -0,0 +1,2 @@ +shippable/posix/group4 +context/controller diff --git a/test/integration/targets/var_inheritance/tasks/main.yml b/test/integration/targets/var_inheritance/tasks/main.yml new file mode 100644 index 00000000..48d7b3d3 --- /dev/null +++ b/test/integration/targets/var_inheritance/tasks/main.yml @@ -0,0 +1,16 @@ +- name: outer + vars: + myvar: abc + block: + - assert: + that: + - "myvar == 'abc'" + + - name: inner block + vars: + myvar: 123 + block: + + - assert: + that: + - myvar|int == 123 diff --git a/test/integration/targets/wait_for/tasks/main.yml b/test/integration/targets/wait_for/tasks/main.yml index 67e07786..1af08924 100644 --- a/test/integration/targets/wait_for/tasks/main.yml +++ b/test/integration/targets/wait_for/tasks/main.yml @@ -150,7 +150,7 @@ - name: install psutil using pip (non-Linux only) pip: - name: psutil + name: psutil==5.8.0 when: ansible_system != 'Linux' - name: Copy zombie.py diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt index cbb4c362..05d173a6 100644 --- a/test/lib/ansible_test/_data/completion/docker.txt +++ b/test/lib/ansible_test/_data/completion/docker.txt @@ -1,7 +1,7 @@ base image=quay.io/ansible/base-test-container:1.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined -default image=quay.io/ansible/default-test-container:4.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection -default image=quay.io/ansible/ansible-core-test-container:4.1.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core -alpine3 image=quay.io/ansible/alpine3-test-container:3.1.0 python=3.9 +default image=quay.io/ansible/default-test-container:4.2.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection +default image=quay.io/ansible/ansible-core-test-container:4.2.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core +alpine3 image=quay.io/ansible/alpine3-test-container:3.3.0 python=3.9 centos6 image=quay.io/ansible/centos6-test-container:3.1.0 python=2.6 seccomp=unconfined centos7 image=quay.io/ansible/centos7-test-container:3.1.0 python=2.7 seccomp=unconfined centos8 image=quay.io/ansible/centos8-test-container:3.1.0 python=3.6 seccomp=unconfined diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt new file mode 100644 index 00000000..76d16725 --- /dev/null +++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt @@ -0,0 +1,12 @@ +jinja2 == 3.0.1 +PyYAML == 5.4.1 +cryptography == 3.3.2 +packaging == 21.0 +resolvelib == 0.5.4 + +# dependencies +MarkupSafe == 2.0.1 +cffi == 1.15.0 +pycparser == 2.20 +pyparsing == 2.4.7 +six == 1.16.0 diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt 
b/test/lib/ansible_test/_data/requirements/sanity.import.txt new file mode 100644 index 00000000..d77a09d7 --- /dev/null +++ b/test/lib/ansible_test/_data/requirements/sanity.import.txt @@ -0,0 +1 @@ +pyyaml == 5.4.1 # needed for yaml_to_json.py diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt index d723a65f..d2f56d35 100644 --- a/test/lib/ansible_test/_data/requirements/units.txt +++ b/test/lib/ansible_test/_data/requirements/units.txt @@ -2,4 +2,5 @@ mock pytest pytest-mock pytest-xdist +pytest-forked pyyaml # required by the collection loader (only needed for collections) diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py index cf6c01f1..2090aac7 100644 --- a/test/lib/ansible_test/_internal/cli/compat.py +++ b/test/lib/ansible_test/_internal/cli/compat.py @@ -26,8 +26,8 @@ from ..docker_util import ( ) from ..completion import ( - DOCKER_COMPLETION, - REMOTE_COMPLETION, + docker_completion, + remote_completion, filter_completion, ) @@ -68,7 +68,7 @@ def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str] def get_fallback_remote_controller(): # type: () -> str """Return the remote fallback platform for the controller.""" platform = 'freebsd' # lower cost than RHEL and macOS - candidates = [item for item in filter_completion(REMOTE_COMPLETION).values() if item.controller_supported and item.platform == platform] + candidates = [item for item in filter_completion(remote_completion()).values() if item.controller_supported and item.platform == platform] fallback = sorted(candidates, key=lambda value: str_to_version(value.version), reverse=True)[0] return fallback.name @@ -316,7 +316,7 @@ def get_legacy_host_config( targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default', system_site_packages=options.venv_system_site_packages))] elif options.docker: - docker_config = filter_completion(DOCKER_COMPLETION).get(options.docker) + docker_config = filter_completion(docker_completion()).get(options.docker) if docker_config: if options.python and options.python not in docker_config.supported_pythons: @@ -350,7 +350,7 @@ def get_legacy_host_config( targets = [DockerConfig(name=options.docker, python=native_python(options), privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)] elif options.remote: - remote_config = filter_completion(REMOTE_COMPLETION).get(options.remote) + remote_config = filter_completion(remote_completion()).get(options.remote) context, reason = None, None if remote_config: diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py index 640ff56b..3c0230ca 100644 --- a/test/lib/ansible_test/_internal/cli/environments.py +++ b/test/lib/ansible_test/_internal/cli/environments.py @@ -14,10 +14,10 @@ from ..constants import ( ) from ..completion import ( - DOCKER_COMPLETION, - NETWORK_COMPLETION, - REMOTE_COMPLETION, - WINDOWS_COMPLETION, + docker_completion, + network_completion, + remote_completion, + windows_completion, filter_completion, ) @@ -425,9 +425,9 @@ def add_environment_docker( ): # type: (...) 
-> None """Add environment arguments for running in docker containers.""" if target_mode in (TargetMode.POSIX_INTEGRATION, TargetMode.SHELL): - docker_images = sorted(filter_completion(DOCKER_COMPLETION)) + docker_images = sorted(filter_completion(docker_completion())) else: - docker_images = sorted(filter_completion(DOCKER_COMPLETION, controller_only=True)) + docker_images = sorted(filter_completion(docker_completion(), controller_only=True)) exclusive_parser.add_argument( '--docker', @@ -538,7 +538,7 @@ def complete_windows(prefix, parsed_args, **_): # type: (str, argparse.Namespac def complete_network_platform(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str] """Return a list of supported network platforms matching the given prefix, excluding platforms already parsed from the command line.""" - images = sorted(filter_completion(NETWORK_COMPLETION)) + images = sorted(filter_completion(network_completion())) return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)] @@ -546,7 +546,7 @@ def complete_network_platform(prefix, parsed_args, **_): # type: (str, argparse def complete_network_platform_collection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str] """Return a list of supported network platforms matching the given prefix, excluding collection platforms already parsed from the command line.""" left = prefix.split('=')[0] - images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values())) + images = sorted(set(image.platform for image in filter_completion(network_completion()).values())) return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])] @@ -554,21 +554,21 @@ def complete_network_platform_collection(prefix, parsed_args, **_): # type: (st def complete_network_platform_connection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) 
-> t.List[str] """Return a list of supported network platforms matching the given prefix, excluding connection platforms already parsed from the command line.""" left = prefix.split('=')[0] - images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values())) + images = sorted(set(image.platform for image in filter_completion(network_completion()).values())) return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])] def get_remote_platform_choices(controller=False): # type: (bool) -> t.List[str] """Return a list of supported remote platforms matching the given prefix.""" - return sorted(filter_completion(REMOTE_COMPLETION, controller_only=controller)) + return sorted(filter_completion(remote_completion(), controller_only=controller)) def get_windows_platform_choices(): # type: () -> t.List[str] """Return a list of supported Windows versions matching the given prefix.""" - return sorted(f'windows/{windows.version}' for windows in filter_completion(WINDOWS_COMPLETION).values()) + return sorted(f'windows/{windows.version}' for windows in filter_completion(windows_completion()).values()) def get_windows_version_choices(): # type: () -> t.List[str] """Return a list of supported Windows versions.""" - return sorted(windows.version for windows in filter_completion(WINDOWS_COMPLETION).values()) + return sorted(windows.version for windows in filter_completion(windows_completion()).values()) diff --git a/test/lib/ansible_test/_internal/cli/parsers/helpers.py b/test/lib/ansible_test/_internal/cli/parsers/helpers.py index 0cf13f8d..8dc7a65c 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/helpers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/helpers.py @@ -9,8 +9,8 @@ from ...constants import ( ) from ...completion import ( - DOCKER_COMPLETION, - REMOTE_COMPLETION, + docker_completion, + remote_completion, filter_completion, ) @@ -23,7 +23,7 @@ from ...host_configs import ( def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str] """Return a list of docker instance Python versions supported by the specified host config.""" - image_config = filter_completion(DOCKER_COMPLETION).get(name) + image_config = filter_completion(docker_completion()).get(name) available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS if not image_config: @@ -36,7 +36,7 @@ def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str] """Return a list of remote instance Python versions supported by the specified host config.""" - platform_config = filter_completion(REMOTE_COMPLETION).get(name) + platform_config = filter_completion(remote_completion()).get(name) available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS if not platform_config: diff --git a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py index 37322630..8a7e0ef9 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py @@ -4,10 +4,10 @@ from __future__ import annotations import typing as t from ...completion import ( - DOCKER_COMPLETION, - NETWORK_COMPLETION, - REMOTE_COMPLETION, - WINDOWS_COMPLETION, + docker_completion, + 
network_completion, + remote_completion, + windows_completion, filter_completion, ) @@ -108,7 +108,7 @@ class DockerParser(PairParser): def get_left_parser(self, state): # type: (ParserState) -> Parser """Return the parser for the left side.""" - return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(DOCKER_COMPLETION, controller_only=self.controller)), + return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(docker_completion(), controller_only=self.controller)), conditions=MatchConditions.CHOICE | MatchConditions.ANY)) def get_right_parser(self, choice): # type: (t.Any) -> Parser @@ -128,7 +128,7 @@ class DockerParser(PairParser): """Generate and return documentation for this parser.""" default = 'default' content = '\n'.join([f' {image} ({", ".join(get_docker_pythons(image, self.controller, False))})' - for image, item in filter_completion(DOCKER_COMPLETION, controller_only=self.controller).items()]) + for image, item in filter_completion(docker_completion(), controller_only=self.controller).items()]) content += '\n'.join([ '', @@ -151,7 +151,7 @@ class PosixRemoteParser(PairParser): def get_left_parser(self, state): # type: (ParserState) -> Parser """Return the parser for the left side.""" - return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(REMOTE_COMPLETION, controller_only=self.controller)))) + return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(remote_completion(), controller_only=self.controller)))) def get_right_parser(self, choice): # type: (t.Any) -> Parser """Return the parser for the right side.""" @@ -170,7 +170,7 @@ class PosixRemoteParser(PairParser): """Generate and return documentation for this parser.""" default = get_fallback_remote_controller() content = '\n'.join([f' {name} ({", ".join(get_remote_pythons(name, self.controller, False))})' - for name, item in filter_completion(REMOTE_COMPLETION, controller_only=self.controller).items()]) + for name, item in filter_completion(remote_completion(), controller_only=self.controller).items()]) content += '\n'.join([ '', @@ -190,7 +190,7 @@ class WindowsRemoteParser(PairParser): def get_left_parser(self, state): # type: (ParserState) -> Parser """Return the parser for the left side.""" - names = list(filter_completion(WINDOWS_COMPLETION)) + names = list(filter_completion(windows_completion())) for target in state.root_namespace.targets or []: # type: WindowsRemoteConfig names.remove(target.name) @@ -203,7 +203,7 @@ class WindowsRemoteParser(PairParser): def document(self, state): # type: (DocumentationState) -> t.Optional[str] """Generate and return documentation for this parser.""" - content = '\n'.join([f' {name}' for name, item in filter_completion(WINDOWS_COMPLETION).items()]) + content = '\n'.join([f' {name}' for name, item in filter_completion(windows_completion()).items()]) content += '\n'.join([ '', @@ -223,7 +223,7 @@ class NetworkRemoteParser(PairParser): def get_left_parser(self, state): # type: (ParserState) -> Parser """Return the parser for the left side.""" - names = list(filter_completion(NETWORK_COMPLETION)) + names = list(filter_completion(network_completion())) for target in state.root_namespace.targets or []: # type: NetworkRemoteConfig names.remove(target.name) @@ -236,7 +236,7 @@ class NetworkRemoteParser(PairParser): def document(self, state): # type: (DocumentationState) -> t.Optional[str] """Generate and return documentation for this parser.""" - content = '\n'.join([f' {name}' for name, item in 
filter_completion(NETWORK_COMPLETION).items()]) + content = '\n'.join([f' {name}' for name, item in filter_completion(network_completion()).items()]) content += '\n'.join([ '', diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py index 09eb889c..839b24ae 100644 --- a/test/lib/ansible_test/_internal/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py @@ -242,7 +242,7 @@ def integration_test_environment( ansible_config = ansible_config_src vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path) - yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file) + yield IntegrationEnvironment(data_context().content.root, integration_dir, targets_dir, inventory_path, ansible_config, vars_file) return # When testing a collection, the temporary directory must reside within the collection. @@ -320,7 +320,7 @@ def integration_test_environment( make_dirs(os.path.dirname(file_dst)) shutil.copy2(file_src, file_dst) - yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file) + yield IntegrationEnvironment(temp_dir, integration_dir, targets_dir, inventory_path, ansible_config, vars_file) finally: if not args.explain: remove_tree(temp_dir) @@ -440,9 +440,10 @@ def command_integration_filtered( if isinstance(target_profile, ControllerProfile): if host_state.controller_profile.python.path != target_profile.python.path: - install_requirements(args, target_python, command=True) # integration + install_requirements(args, target_python, command=True, controller=False) # integration elif isinstance(target_profile, SshTargetHostProfile): - install_requirements(args, target_python, command=True, connection=target_profile.get_controller_target_connections()[0]) # integration + connection = target_profile.get_controller_target_connections()[0] + install_requirements(args, target_python, command=True, controller=False, connection=connection) # integration coverage_manager = CoverageManager(args, host_state, inventory_path) coverage_manager.setup() @@ -589,7 +590,7 @@ def command_integration_script( if args.verbosity: cmd.append('-' + ('v' * args.verbosity)) - env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config) + env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config, test_env) cwd = os.path.join(test_env.targets_dir, target.relative_path) env.update(dict( @@ -710,7 +711,7 @@ def command_integration_role( if args.verbosity: cmd.append('-' + ('v' * args.verbosity)) - env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config) + env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config, test_env) cwd = test_env.integration_dir env.update(dict( @@ -765,6 +766,7 @@ def integration_environment( inventory_path, # type: str ansible_config, # type: t.Optional[str] env_config, # type: t.Optional[CloudEnvironmentConfig] + test_env, # type: IntegrationEnvironment ): # type: (...) 
-> t.Dict[str, str] """Return a dictionary of environment variables to use when running the given integration test target.""" env = ansible_environment(args, ansible_config=ansible_config) @@ -773,6 +775,8 @@ def integration_environment( integration = dict( JUNIT_OUTPUT_DIR=ResultType.JUNIT.path, + JUNIT_TASK_RELATIVE_PATH=test_env.test_dir, + JUNIT_REPLACE_OUT_OF_TREE_PATH='out-of-tree:', ANSIBLE_CALLBACKS_ENABLED=','.join(sorted(set(callback_plugins))), ANSIBLE_TEST_CI=args.metadata.ci_provider or get_ci_provider().code, ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''), @@ -796,7 +800,8 @@ def integration_environment( class IntegrationEnvironment: """Details about the integration environment.""" - def __init__(self, integration_dir, targets_dir, inventory_path, ansible_config, vars_file): + def __init__(self, test_dir, integration_dir, targets_dir, inventory_path, ansible_config, vars_file): + self.test_dir = test_dir self.integration_dir = integration_dir self.targets_dir = targets_dir self.inventory_path = inventory_path diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py index f4493933..a34d714d 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py @@ -127,5 +127,5 @@ class AwsCloudEnvironment(CloudEnvironment): """Callback to run when an integration target fails.""" if not tries and self.managed: display.notice('If %s failed due to permissions, the IAM test policy may need to be updated. ' - 'https://docs.ansible.com/ansible/devel/dev_guide/platforms/aws_guidelines.html#aws-permissions-for-integration-tests.' + 'https://docs.ansible.com/ansible-core/devel/dev_guide/platforms/aws_guidelines.html#aws-permissions-for-integration-tests.' % target.name) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py index 9c900071..de58cbf5 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py @@ -86,7 +86,7 @@ class GalaxyProvider(CloudProvider): # the newer update is available. self.pulp = os.environ.get( 'ANSIBLE_PULP_CONTAINER', - 'docker.io/pulp/pulp-galaxy-ng@sha256:b79a7be64eff86d8f58db9ca83ed4967bd8b4e45c99addb17a91d11926480cf1' + 'quay.io/ansible/pulp-galaxy-ng:b79a7be64eff' ) self.uses_docker = True diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py index c30785af..10f63ac0 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py @@ -36,7 +36,7 @@ class OpenShiftCloudProvider(CloudProvider): super().__init__(args, config_extension='.kubeconfig') # The image must be pinned to a specific version to guarantee CI passes with the version used. 
- self.image = 'openshift/origin:v3.9.0' + self.image = 'quay.io/ansible/openshift-origin:v3.9.0' self.uses_docker = True self.uses_config = True diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py index 542e078a..8c1340f2 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py +++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py @@ -242,7 +242,7 @@ def command_sanity(args): # type: (SanityConfig) -> None elif isinstance(test, SanitySingleVersion): # single version sanity tests use the controller python test_profile = host_state.controller_profile - virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name, context=test.name) + virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name) if virtualenv_python: virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python) @@ -1077,10 +1077,8 @@ def create_sanity_virtualenv( args, # type: SanityConfig python, # type: PythonConfig name, # type: str - ansible=False, # type: bool coverage=False, # type: bool minimize=False, # type: bool - context=None, # type: t.Optional[str] ): # type: (...) -> t.Optional[VirtualPythonConfig] """Return an existing sanity virtual environment matching the requested parameters or create a new one.""" commands = collect_requirements( # create_sanity_virtualenv() @@ -1088,13 +1086,11 @@ def create_sanity_virtualenv( controller=True, virtualenv=False, command=None, - # used by import tests - ansible=ansible, - cryptography=ansible, + ansible=False, + cryptography=False, coverage=coverage, minimize=minimize, - # used by non-import tests - sanity=context, + sanity=name, ) if commands: @@ -1129,6 +1125,7 @@ def create_sanity_virtualenv( write_text_file(meta_install, virtualenv_install) + # false positive: pylint: disable=no-member if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands): virtualenv_yaml = yamlcheck(virtualenv_python) else: diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py index 9a961015..aa0239d5 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/import.py +++ b/test/lib/ansible_test/_internal/commands/sanity/import.py @@ -2,7 +2,6 @@ from __future__ import annotations import os -import tempfile import typing as t from . import ( @@ -18,9 +17,14 @@ from . 
import ( ) from ...constants import ( + CONTROLLER_MIN_PYTHON_VERSION, REMOTE_ONLY_PYTHON_VERSIONS, ) +from ...io import ( + write_text_file, +) + from ...test import ( TestResult, ) @@ -30,14 +34,18 @@ from ...target import ( ) from ...util import ( + cache, SubprocessError, display, parse_to_list_of_dict, is_subdir, + ANSIBLE_TEST_TOOLS_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ) from ...util_common import ( ResultType, + create_temp_dir, ) from ...ansible_util import ( @@ -45,6 +53,7 @@ from ...ansible_util import ( ) from ...python_requirements import ( + PipUnavailableError, install_requirements, ) @@ -64,6 +73,10 @@ from ...host_configs import ( PythonConfig, ) +from ...venv import ( + get_virtualenv_version, +) + def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool] """Create a predicate which tests whether a path can be used by modules or not.""" @@ -91,18 +104,22 @@ class ImportTest(SanityMultipleVersion): paths = [target.path for target in targets.include] - if python.version.startswith('2.'): + if python.version.startswith('2.') and (get_virtualenv_version(args, python.path) or (0,)) < (13,): # hack to make sure that virtualenv is available under Python 2.x # on Python 3.x we can use the built-in venv - install_requirements(args, python, virtualenv=True) # sanity (import) + # version 13+ is required to use the `--no-wheel` option + try: + install_requirements(args, python, virtualenv=True, controller=False) # sanity (import) + except PipUnavailableError as ex: + display.warning(ex) temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import') messages = [] - for import_type, test, controller in ( - ('module', _get_module_test(True), False), - ('plugin', _get_module_test(False), True), + for import_type, test in ( + ('module', _get_module_test(True)), + ('plugin', _get_module_test(False)), ): if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS: continue @@ -112,7 +129,7 @@ class ImportTest(SanityMultipleVersion): if not data and not args.prime_venvs: continue - virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', ansible=controller, coverage=args.coverage, minimize=True) + virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', coverage=args.coverage, minimize=True) if not virtualenv_python: display.warning(f'Skipping sanity test "{self.name}" on Python {python.version} due to missing virtual environment support.') @@ -123,9 +140,6 @@ class ImportTest(SanityMultipleVersion): if virtualenv_yaml is False: display.warning(f'Sanity test "{self.name}" ({import_type}) on Python {python.version} may be slow due to missing libyaml support in PyYAML.') - if args.prime_venvs: - continue - env = ansible_environment(args, color=False) env.update( @@ -134,25 +148,30 @@ class ImportTest(SanityMultipleVersion): ) if data_context().content.collection: + external_python = create_sanity_virtualenv(args, args.controller_python, self.name) + env.update( SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name, - SANITY_EXTERNAL_PYTHON=python.path, + SANITY_EXTERNAL_PYTHON=external_python.path, + SANITY_YAML_TO_JSON=os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yaml_to_json.py'), + ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION=CONTROLLER_MIN_PYTHON_VERSION, + PYTHONPATH=':'.join((get_ansible_test_python_path(), env["PYTHONPATH"])), ) + if args.prime_venvs: + continue + display.info(import_type + ': ' + data, verbosity=4) cmd = ['importer.py'] - try: - with 
tempfile.TemporaryDirectory(prefix='ansible-test', suffix='-import') as temp_dir: - # make the importer available in the temporary directory - os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'importer.py')), os.path.join(temp_dir, 'importer.py')) - os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'yaml_to_json.py')), os.path.join(temp_dir, 'yaml_to_json.py')) - - # add the importer to the path so it can be accessed through the coverage injector - env['PATH'] = os.pathsep.join([temp_dir, env['PATH']]) + # add the importer to the path so it can be accessed through the coverage injector + env.update( + PATH=os.pathsep.join([os.path.join(TARGET_SANITY_ROOT, 'import'), env['PATH']]), + ) - stdout, stderr = cover_python(args, virtualenv_python, cmd, self.name, env, capture=True, data=data) + try: + stdout, stderr = cover_python(args, virtualenv_python, cmd, self.name, env, capture=True, data=data) if stdout or stderr: raise SubprocessError(cmd, stdout=stdout, stderr=stderr) @@ -182,3 +201,20 @@ class ImportTest(SanityMultipleVersion): return SanityFailure(self.name, messages=results, python_version=python.version) return SanitySuccess(self.name, python_version=python.version) + + +@cache +def get_ansible_test_python_path(): # type: () -> str + """ + Return a directory usable for PYTHONPATH, containing only the ansible-test collection loader. + The temporary directory created will be cached for the lifetime of the process and cleaned up at exit. + """ + python_path = create_temp_dir(prefix='ansible-test-') + ansible_test_path = os.path.join(python_path, 'ansible_test') + + # legacy collection loader required by all python versions not supported by the controller + write_text_file(os.path.join(ansible_test_path, '__init__.py'), '', True) + write_text_file(os.path.join(ansible_test_path, '_internal', '__init__.py'), '', True) + os.symlink(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader')) + + return python_path diff --git a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py index bc3ebc0d..6d29968b 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py +++ b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py @@ -64,7 +64,7 @@ class IntegrationAliasesTest(SanitySingleVersion): UNSTABLE = 'unstable/' UNSUPPORTED = 'unsupported/' - EXPLAIN_URL = 'https://docs.ansible.com/ansible/devel/dev_guide/testing/sanity/integration-aliases.html' + EXPLAIN_URL = 'https://docs.ansible.com/ansible-core/devel/dev_guide/testing/sanity/integration-aliases.html' TEMPLATE_DISABLED = """ The following integration tests are **disabled** [[explain]({explain_url}#disabled)]: diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py index d23d36fc..9a2b1827 100644 --- a/test/lib/ansible_test/_internal/commands/units/__init__.py +++ b/test/lib/ansible_test/_internal/commands/units/__init__.py @@ -227,7 +227,7 @@ def command_units(args): # type: (UnitsConfig) -> None controller = any(test_context == TestContext.controller for test_context, python, paths, env in final_candidates) if args.requirements_mode != 'skip': - install_requirements(args, target_profile.python, ansible=controller, command=True) # units + install_requirements(args, target_profile.python, ansible=controller, 
command=True, controller=False) # units test_sets.extend(final_candidates) @@ -237,7 +237,7 @@ def command_units(args): # type: (UnitsConfig) -> None for test_context, python, paths, env in test_sets: cmd = [ 'pytest', - '--boxed', + '--forked', '-r', 'a', '-n', str(args.num_workers) if args.num_workers else 'auto', '--color', diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py index 25cc6367..86674cb2 100644 --- a/test/lib/ansible_test/_internal/completion.py +++ b/test/lib/ansible_test/_internal/completion.py @@ -13,6 +13,7 @@ from .constants import ( from .util import ( ANSIBLE_TEST_DATA_ROOT, + cache, read_lines_without_comments, ) @@ -220,7 +221,25 @@ def filter_completion( return completion -DOCKER_COMPLETION = load_completion('docker', DockerCompletionConfig) -REMOTE_COMPLETION = load_completion('remote', PosixRemoteCompletionConfig) -WINDOWS_COMPLETION = load_completion('windows', WindowsRemoteCompletionConfig) -NETWORK_COMPLETION = load_completion('network', NetworkRemoteCompletionConfig) +@cache +def docker_completion(): # type: () -> t.Dict[str, DockerCompletionConfig] + """Return docker completion entries.""" + return load_completion('docker', DockerCompletionConfig) + + +@cache +def remote_completion(): # type: () -> t.Dict[str, PosixRemoteCompletionConfig] + """Return remote completion entries.""" + return load_completion('remote', PosixRemoteCompletionConfig) + + +@cache +def windows_completion(): # type: () -> t.Dict[str, WindowsRemoteCompletionConfig] + """Return windows completion entries.""" + return load_completion('windows', WindowsRemoteCompletionConfig) + + +@cache +def network_completion(): # type: () -> t.Dict[str, NetworkRemoteCompletionConfig] + """Return network completion entries.""" + return load_completion('network', NetworkRemoteCompletionConfig) diff --git a/test/lib/ansible_test/_internal/constants.py b/test/lib/ansible_test/_internal/constants.py deleted file mode 120000 index 07455d8e..00000000 --- a/test/lib/ansible_test/_internal/constants.py +++ /dev/null @@ -1 +0,0 @@ -../_util/target/common/constants.py \ No newline at end of file diff --git a/test/lib/ansible_test/_internal/constants.py b/test/lib/ansible_test/_internal/constants.py new file mode 100644 index 00000000..cac72408 --- /dev/null +++ b/test/lib/ansible_test/_internal/constants.py @@ -0,0 +1,49 @@ +"""Constants used by ansible-test. Imports should not be used in this file (other than to import the target common constants).""" +from __future__ import annotations + +from .._util.target.common.constants import ( + CONTROLLER_PYTHON_VERSIONS, + REMOTE_ONLY_PYTHON_VERSIONS, +) + +# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True. +# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time. +SOFT_RLIMIT_NOFILE = 1024 + +# File used to track the ansible-test test execution timeout. +TIMEOUT_PATH = '.ansible-test-timeout.json' + +CONTROLLER_MIN_PYTHON_VERSION = CONTROLLER_PYTHON_VERSIONS[0] +SUPPORTED_PYTHON_VERSIONS = REMOTE_ONLY_PYTHON_VERSIONS + CONTROLLER_PYTHON_VERSIONS + +COVERAGE_REQUIRED_VERSION = '4.5.4' + +REMOTE_PROVIDERS = [ + 'default', + 'aws', + 'azure', + 'ibmps', + 'parallels', +] + +SECCOMP_CHOICES = [ + 'default', + 'unconfined', +] + +# This bin symlink map must exactly match the contents of the bin directory. 
+# It is necessary for payload creation to reconstruct the bin directory when running ansible-test from an installed version of ansible. +# It is also used to construct the injector directory at runtime. +ANSIBLE_BIN_SYMLINK_MAP = { + 'ansible': '../lib/ansible/cli/scripts/ansible_cli_stub.py', + 'ansible-config': 'ansible', + 'ansible-connection': '../lib/ansible/cli/scripts/ansible_connection_cli_stub.py', + 'ansible-console': 'ansible', + 'ansible-doc': 'ansible', + 'ansible-galaxy': 'ansible', + 'ansible-inventory': 'ansible', + 'ansible-playbook': 'ansible', + 'ansible-pull': 'ansible', + 'ansible-test': '../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py', + 'ansible-vault': 'ansible', +} diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py index 97e84880..7ffbfb4c 100644 --- a/test/lib/ansible_test/_internal/containers.py +++ b/test/lib/ansible_test/_internal/containers.py @@ -829,7 +829,7 @@ def cleanup_ssh_ports( for process in ssh_processes: process.terminate() - display.info('Waiting for the %s host SSH port forwarding processs(es) to terminate.' % host_type, verbosity=1) + display.info('Waiting for the %s host SSH port forwarding process(es) to terminate.' % host_type, verbosity=1) for process in ssh_processes: process.wait() diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index aaee0dfa..83ff24c0 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -53,7 +53,6 @@ from .ci import ( from .host_configs import ( OriginConfig, PythonConfig, - VirtualPythonConfig, ) from .connections import ( @@ -254,9 +253,9 @@ def generate_command( ANSIBLE_TEST_CONTENT_ROOT=content_root, ) - if isinstance(args.controller.python, VirtualPythonConfig): - # Expose the ansible and ansible_test library directories to the virtual environment. - # This is only required when running from an install. + if isinstance(args.controller, OriginConfig): + # Expose the ansible and ansible_test library directories to the Python environment. + # This is only required when delegation is used on the origin host. 
library_path = process_scoped_temporary_directory(args) os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible')) diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py index a819652e..87030ae0 100644 --- a/test/lib/ansible_test/_internal/host_configs.py +++ b/test/lib/ansible_test/_internal/host_configs.py @@ -19,17 +19,17 @@ from .io import ( from .completion import ( CompletionConfig, - DOCKER_COMPLETION, + docker_completion, DockerCompletionConfig, InventoryCompletionConfig, - NETWORK_COMPLETION, + network_completion, NetworkRemoteCompletionConfig, PosixCompletionConfig, PosixRemoteCompletionConfig, PosixSshCompletionConfig, - REMOTE_COMPLETION, + remote_completion, RemoteCompletionConfig, - WINDOWS_COMPLETION, + windows_completion, WindowsRemoteCompletionConfig, filter_completion, ) @@ -277,7 +277,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): def get_defaults(self, context): # type: (HostContext) -> DockerCompletionConfig """Return the default settings.""" - return filter_completion(DOCKER_COMPLETION).get(self.name) or DockerCompletionConfig( + return filter_completion(docker_completion()).get(self.name) or DockerCompletionConfig( name=self.name, image=self.name, placeholder=True, @@ -285,7 +285,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig): def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] """Return the default targets for this host config.""" - if self.name in filter_completion(DOCKER_COMPLETION): + if self.name in filter_completion(docker_completion()): defaults = self.get_defaults(context) pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons} else: @@ -327,14 +327,14 @@ class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig): def get_defaults(self, context): # type: (HostContext) -> PosixRemoteCompletionConfig """Return the default settings.""" - return filter_completion(REMOTE_COMPLETION).get(self.name) or REMOTE_COMPLETION.get(self.platform) or PosixRemoteCompletionConfig( + return filter_completion(remote_completion()).get(self.name) or remote_completion().get(self.platform) or PosixRemoteCompletionConfig( name=self.name, placeholder=True, ) def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig] """Return the default targets for this host config.""" - if self.name in filter_completion(REMOTE_COMPLETION): + if self.name in filter_completion(remote_completion()): defaults = self.get_defaults(context) pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons} else: @@ -358,7 +358,7 @@ class WindowsRemoteConfig(RemoteConfig, WindowsConfig): """Configuration for a remoe Windows host.""" def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig """Return the default settings.""" - return filter_completion(WINDOWS_COMPLETION).get(self.name) or WindowsRemoteCompletionConfig( + return filter_completion(windows_completion()).get(self.name) or WindowsRemoteCompletionConfig( name=self.name, ) @@ -381,7 +381,7 @@ class NetworkRemoteConfig(RemoteConfig, NetworkConfig): def get_defaults(self, context): # type: (HostContext) -> NetworkRemoteCompletionConfig """Return the default settings.""" - return filter_completion(NETWORK_COMPLETION).get(self.name) or NetworkRemoteCompletionConfig( + return filter_completion(network_completion()).get(self.name) or NetworkRemoteCompletionConfig( name=self.name, ) 
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py index 0a08d68f..e3aeeeeb 100644 --- a/test/lib/ansible_test/_internal/host_profiles.py +++ b/test/lib/ansible_test/_internal/host_profiles.py @@ -209,11 +209,7 @@ class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta): python = self.config.python if isinstance(python, VirtualPythonConfig): - python = VirtualPythonConfig( - version=python.version, - system_site_packages=python.system_site_packages, - path=os.path.join(get_virtual_python(self.args, python), 'bin', 'python'), - ) + python = get_virtual_python(self.args, python) self.state['python'] = python diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py index 8fca7834..aaaf44b8 100644 --- a/test/lib/ansible_test/_internal/python_requirements.py +++ b/test/lib/ansible_test/_internal/python_requirements.py @@ -25,6 +25,7 @@ from .util import ( ANSIBLE_TEST_DATA_ROOT, ANSIBLE_TEST_TARGET_ROOT, ANSIBLE_TEST_TOOLS_ROOT, + ApplicationError, SubprocessError, display, find_executable, @@ -65,6 +66,12 @@ REQUIREMENTS_SCRIPT_PATH = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'requ # Pip Abstraction +class PipUnavailableError(ApplicationError): + """Exception raised when pip is not available.""" + def __init__(self, python): # type: (PythonConfig) -> None + super().__init__(f'Python {python.version} at "{python.path}" does not have pip available.') + + @dataclasses.dataclass(frozen=True) class PipCommand: """Base class for pip commands.""""" @@ -97,6 +104,18 @@ class PipUninstall(PipCommand): ignore_errors: bool +@dataclasses.dataclass(frozen=True) +class PipVersion(PipCommand): + """Details required to get the pip version.""" + + +@dataclasses.dataclass(frozen=True) +class PipBootstrap(PipCommand): + """Details required to bootstrap pip.""" + pip_version: str + packages: t.List[str] + + # Entry Points @@ -107,13 +126,12 @@ def install_requirements( command=False, # type: bool coverage=False, # type: bool virtualenv=False, # type: bool + controller=True, # type: bool connection=None, # type: t.Optional[Connection] ): # type: (...) 
-> None """Install requirements for the given Python using the specified arguments.""" create_result_directories(args) - controller = not connection - if not requirements_allowed(args, controller): return @@ -157,10 +175,25 @@ def install_requirements( run_pip(args, python, commands, connection) + # false positive: pylint: disable=no-member if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands): check_pyyaml(python) +def collect_bootstrap(python): # type: (PythonConfig) -> t.List[PipCommand] + """Return the details necessary to bootstrap pip into an empty virtual environment.""" + infrastructure_packages = get_venv_packages(python) + pip_version = infrastructure_packages['pip'] + packages = [f'{name}=={version}' for name, version in infrastructure_packages.items()] + + bootstrap = PipBootstrap( + pip_version=pip_version, + packages=packages, + ) + + return [bootstrap] + + def collect_requirements( python, # type: PythonConfig controller, # type: bool @@ -176,7 +209,10 @@ def collect_requirements( commands = [] # type: t.List[PipCommand] if virtualenv: - commands.extend(collect_package_install(packages=['virtualenv'])) + # sanity tests on Python 2.x install virtualenv when it is too old or is not already installed and the `--requirements` option is given + # the last version of virtualenv with no dependencies is used to minimize the changes made outside a virtual environment + virtualenv_version = '15.2.0' if python.version == '2.6' else '16.7.12' + commands.extend(collect_package_install(packages=[f'virtualenv=={virtualenv_version}'], constraints=False)) if coverage: commands.extend(collect_package_install(packages=[f'coverage=={COVERAGE_REQUIRED_VERSION}'], constraints=False)) @@ -196,15 +232,20 @@ def collect_requirements( if command in ('integration', 'windows-integration', 'network-integration'): commands.extend(collect_integration_install(command, controller)) - if minimize: - # In some environments pkg_resources is installed as a separate pip package which needs to be removed. - # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools. - # However, a venv is created with an additional pkg-resources package which is independent of setuptools. - # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv. - # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings. - # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings. 
- commands.extend(collect_uninstall(packages=['pkg-resources'], ignore_errors=True)) - commands.extend(collect_uninstall(packages=['setuptools', 'pip'])) + if (sanity or minimize) and any(isinstance(command, PipInstall) for command in commands): + # bootstrap the managed virtual environment, which will have been created without any installed packages + # sanity tests which install no packages skip this step + commands = collect_bootstrap(python) + commands + + # most infrastructure packages can be removed from sanity test virtual environments after they've been created + # removing them reduces the size of environments cached in containers + uninstall_packages = list(get_venv_packages(python)) + + if not minimize: + # installed packages may have run-time dependencies on setuptools + uninstall_packages.remove('setuptools') + + commands.extend(collect_uninstall(packages=uninstall_packages)) return commands @@ -220,7 +261,18 @@ def run_pip( script = prepare_pip_script(commands) if not args.explain: - connection.run([python.path], data=script) + try: + connection.run([python.path], data=script) + except SubprocessError: + script = prepare_pip_script([PipVersion()]) + + try: + connection.run([python.path], data=script, capture=True) + except SubprocessError as ex: + if 'pip is unavailable:' in ex.stdout + ex.stderr: + raise PipUnavailableError(python) + + raise # Collect @@ -355,6 +407,46 @@ def collect_uninstall(packages, ignore_errors=False): # type: (t.List[str], boo # Support +def get_venv_packages(python): # type: (PythonConfig) -> t.Dict[str, str] + """Return a dictionary of Python packages needed for a consistent virtual environment specific to the given Python version.""" + + # NOTE: This same information is needed for building the base-test-container image. + # See: https://github.com/ansible/base-test-container/blob/main/files/installer.py + + default_packages = dict( + pip='21.3.1', + setuptools='60.8.2', + wheel='0.37.1', + ) + + override_packages = { + '2.6': dict( + pip='9.0.3', # 10.0 requires Python 2.7+ + setuptools='36.8.0', # 37.0.0 requires Python 2.7+ + wheel='0.29.0', # 0.30.0 requires Python 2.7+ + ), + '2.7': dict( + pip='20.3.4', # 21.0 requires Python 3.6+ + setuptools='44.1.1', # 45.0.0 requires Python 3.5+ + wheel=None, + ), + '3.5': dict( + pip='20.3.4', # 21.0 requires Python 3.6+ + setuptools='50.3.2', # 51.0.0 requires Python 3.6+ + wheel=None, + ), + '3.6': dict( + pip='21.3.1', # 22.0 requires Python 3.7+ + setuptools='59.6.0', # 59.7.0 requires Python 3.7+ + wheel=None, + ), + } + + packages = {name: version or default_packages[name] for name, version in override_packages.get(python.version, default_packages).items()} + + return packages + + def requirements_allowed(args, controller): # type: (EnvironmentConfig, bool) -> bool """ Return True if requirements can be installed, otherwise return False. 
@@ -430,11 +522,11 @@ def get_cryptography_requirements(python): # type: (PythonConfig) -> t.List[str # pyopenssl 20.0.0 requires cryptography 3.2 or later pyopenssl = 'pyopenssl < 20.0.0' else: - # cryptography 3.4+ fails to install on many systems - # this is a temporary work-around until a more permanent solution is available - cryptography = 'cryptography < 3.4' - # no specific version of pyopenssl required, don't install it - pyopenssl = None + # cryptography 3.4+ builds require a working rust toolchain + # systems bootstrapped using ansible-core-ci can access additional wheels through the spare-tire package index + cryptography = 'cryptography' + # any future installation of pyopenssl is free to use any compatible version of cryptography + pyopenssl = '' requirements = [ cryptography, diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py index af21cbd6..b67addc3 100644 --- a/test/lib/ansible_test/_internal/test.py +++ b/test/lib/ansible_test/_internal/test.py @@ -347,7 +347,7 @@ class TestFailure(TestResult): if re.search(r'^[0-9.]+$', ansible_version): url_version = '.'.join(ansible_version.split('.')[:2]) - testing_docs_url = 'https://docs.ansible.com/ansible/%s/dev_guide/testing' % url_version + testing_docs_url = 'https://docs.ansible.com/ansible-core/%s/dev_guide/testing' % url_version url = '%s/%s/' % (testing_docs_url, self.command) diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py index 2cfd978d..cf436775 100644 --- a/test/lib/ansible_test/_internal/venv.py +++ b/test/lib/ansible_test/_internal/venv.py @@ -3,6 +3,7 @@ from __future__ import annotations import json import os +import pathlib import sys import typing as t @@ -31,11 +32,16 @@ from .host_configs import ( PythonConfig, ) +from .python_requirements import ( + collect_bootstrap, + run_pip, +) + def get_virtual_python( args, # type: EnvironmentConfig python, # type: VirtualPythonConfig -): +): # type: (...) 
-> VirtualPythonConfig """Create a virtual environment for the given Python and return the path to its root.""" if python.system_site_packages: suffix = '-ssp' @@ -43,24 +49,40 @@ def get_virtual_python( suffix = '' virtual_environment_path = os.path.join(ResultType.TMP.path, 'delegation', f'python{python.version}{suffix}') + virtual_environment_marker = os.path.join(virtual_environment_path, 'marker.txt') + + virtual_environment_python = VirtualPythonConfig( + version=python.version, + path=os.path.join(virtual_environment_path, 'bin', 'python'), + system_site_packages=python.system_site_packages, + ) + + if os.path.exists(virtual_environment_marker): + display.info('Using existing Python %s virtual environment: %s' % (python.version, virtual_environment_path), verbosity=1) + else: + # a virtualenv without a marker is assumed to have been partially created + remove_tree(virtual_environment_path) - if not create_virtual_environment(args, python, virtual_environment_path, python.system_site_packages): - raise ApplicationError(f'Python {python.version} does not provide virtual environment support.') + if not create_virtual_environment(args, python, virtual_environment_path, python.system_site_packages): + raise ApplicationError(f'Python {python.version} does not provide virtual environment support.') - return virtual_environment_path + commands = collect_bootstrap(virtual_environment_python) + + run_pip(args, virtual_environment_python, commands, None) # get_virtual_python() + + # touch the marker to keep track of when the virtualenv was last used + pathlib.Path(virtual_environment_marker).touch() + + return virtual_environment_python def create_virtual_environment(args, # type: EnvironmentConfig python, # type: PythonConfig path, # type: str system_site_packages=False, # type: bool - pip=True, # type: bool + pip=False, # type: bool ): # type: (...) 
-> bool """Create a virtual environment using venv or virtualenv for the requested Python version.""" - if os.path.isdir(path): - display.info('Using existing Python %s virtual environment: %s' % (python.version, path), verbosity=1) - return True - if not os.path.exists(python.path): # the requested python version could not be found return False @@ -207,6 +229,9 @@ def run_virtualenv(args, # type: EnvironmentConfig if not pip: cmd.append('--no-pip') + # these options provide consistency with venv, which does not install them without pip + cmd.append('--no-setuptools') + cmd.append('--no-wheel') cmd.append(path) diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py index cad82a55..929f371f 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py @@ -141,7 +141,7 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): current_version = get_collection_version() # Updates to schema MUST also be reflected in the documentation - # ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html + # ~https://docs.ansible.com/ansible-core/devel/dev_guide/developing_collections.html # plugin_routing schema diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py index 9f4f5c47..f9eaa02a 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py @@ -30,20 +30,48 @@ import subprocess import sys import tempfile import traceback +import warnings from collections import OrderedDict from contextlib import contextmanager -from ansible.module_utils.compat.version import StrictVersion, LooseVersion from fnmatch import fnmatch import yaml +from voluptuous.humanize import humanize_error + + +def setup_collection_loader(): + """ + Configure the collection loader if a collection is being tested. + This must be done before the plugin loader is imported. 
+ """ + if '--collection' not in sys.argv: + return + + # noinspection PyProtectedMember + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + + collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATH', '').split(os.pathsep) + collection_loader = _AnsibleCollectionFinder(collections_paths) + # noinspection PyProtectedMember + collection_loader._install() # pylint: disable=protected-access + + warnings.filterwarnings( + "ignore", + "AnsibleCollectionFinder has already been configured") + + +setup_collection_loader() + from ansible import __version__ as ansible_version from ansible.executor.module_common import REPLACER_WINDOWS from ansible.module_utils.common._collections_compat import Mapping from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS +from ansible.module_utils.compat.version import StrictVersion, LooseVersion +from ansible.module_utils.basic import to_bytes +from ansible.module_utils.six import PY3, with_metaclass, string_types from ansible.plugins.loader import fragment_loader -from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder from ansible.utils.plugin_docs import REJECTLIST, add_collection_to_versions_and_dates, add_fragments, get_docstring from ansible.utils.version import SemanticVersion @@ -52,9 +80,7 @@ from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, from .schema import ansible_module_kwargs_schema, doc_schema, return_schema from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate -from voluptuous.humanize import humanize_error -from ansible.module_utils.six import PY3, with_metaclass, string_types if PY3: # Because there is no ast.TryExcept in Python 3 ast module @@ -487,7 +513,7 @@ class ModuleValidator(Validator): path=self.object_path, code='use-short-gplv3-license', msg='Found old style GPLv3 license header: ' - 'https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html#copyright' + 'https://docs.ansible.com/ansible-core/devel/dev_guide/developing_modules_documenting.html#copyright' ) def _check_for_subprocess(self): @@ -606,7 +632,7 @@ class ModuleValidator(Validator): if isinstance(child, (ast.FunctionDef, ast.ClassDef)): linenos.append(child.lineno) - return min(linenos) + return min(linenos) if linenos else None def _find_has_import(self): for child in self.ast.body: @@ -2179,7 +2205,7 @@ class ModuleValidator(Validator): self._find_rejectlist_imports() self._find_module_utils() self._find_has_import() - first_callable = self._get_first_callable() + first_callable = self._get_first_callable() or 1000000 # use a bogus "high" line number if no callable exists self._ensure_imports_below_docs(doc_info, first_callable) self._check_for_subprocess() self._check_for_os_call() @@ -2238,11 +2264,6 @@ class PythonPackageValidator(Validator): ) -def setup_collection_loader(): - collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATH', '').split(os.pathsep) - _AnsibleCollectionFinder(collections_paths) - - def re_compile(value): """ Argparse expects things to raise TypeError, re.compile raises an re.error @@ -2295,7 +2316,6 @@ def run(): routing = None if args.collection: - setup_collection_loader() routing_file = 'meta/runtime.yml' # Load meta/runtime.yml if it exists, as it may contain deprecation information if os.path.isfile(routing_file): diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py 
b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py index 42822111..7f9df40a 100644 --- a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py @@ -13,7 +13,7 @@ import yaml from yaml.resolver import Resolver from yaml.constructor import SafeConstructor from yaml.error import MarkedYAMLError -from _yaml import CParser # pylint: disable=no-name-in-module +from yaml.cyaml import CParser from yamllint import linter from yamllint.config import YamlLintConfig diff --git a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py index 90dfa394..a38ad074 100644 --- a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py +++ b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py @@ -5,10 +5,17 @@ __metaclass__ = type import json try: + # virtualenv <20 from sys import real_prefix except ImportError: real_prefix = None +try: + # venv and virtualenv >= 20 + from sys import base_exec_prefix +except ImportError: + base_exec_prefix = None + print(json.dumps(dict( - real_prefix=real_prefix, + real_prefix=real_prefix or base_exec_prefix, ))) diff --git a/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py b/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py new file mode 100644 index 00000000..1164168e --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/yaml_to_json.py @@ -0,0 +1,28 @@ +"""Read YAML from stdin and write JSON to stdout.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import json +import sys + +from yaml import load + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +# unique ISO date marker matching the one present in importer.py +ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' + + +def default(value): + """Custom default serializer which supports datetime.date types.""" + if isinstance(value, datetime.date): + return '%s%s' % (ISO_DATE_MARKER, value.isoformat()) + + raise TypeError('cannot serialize type: %s' % type(value)) + + +json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout, default=default) diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py index 3c02eb2e..fdaa9e5f 100644 --- a/test/lib/ansible_test/_util/target/common/constants.py +++ b/test/lib/ansible_test/_util/target/common/constants.py @@ -1,17 +1,10 @@ -"""Constants used by ansible-test. Imports should not be used in this file.""" +"""Constants used by ansible-test's CLI entry point (as well as the rest of ansible-test). Imports should not be used in this file.""" # NOTE: This file resides in the _util/target directory to ensure compatibility with all supported Python versions. from __future__ import (absolute_import, division, print_function) __metaclass__ = type -# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True. -# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time. -SOFT_RLIMIT_NOFILE = 1024 - -# File used to track the ansible-test test execution timeout. 
-TIMEOUT_PATH = '.ansible-test-timeout.json' - REMOTE_ONLY_PYTHON_VERSIONS = ( '2.6', '2.7', @@ -25,38 +18,3 @@ CONTROLLER_PYTHON_VERSIONS = ( '3.9', '3.10', ) - -CONTROLLER_MIN_PYTHON_VERSION = CONTROLLER_PYTHON_VERSIONS[0] -SUPPORTED_PYTHON_VERSIONS = REMOTE_ONLY_PYTHON_VERSIONS + CONTROLLER_PYTHON_VERSIONS - -COVERAGE_REQUIRED_VERSION = '4.5.4' - -REMOTE_PROVIDERS = [ - 'default', - 'aws', - 'azure', - 'ibmps', - 'parallels', -] - -SECCOMP_CHOICES = [ - 'default', - 'unconfined', -] - -# This bin symlink map must exactly match the contents of the bin directory. -# It is necessary for payload creation to reconstruct the bin directory when running ansible-test from an installed version of ansible. -# It is also used to construct the injector directory at runtime. -ANSIBLE_BIN_SYMLINK_MAP = { - 'ansible': '../lib/ansible/cli/scripts/ansible_cli_stub.py', - 'ansible-config': 'ansible', - 'ansible-connection': '../lib/ansible/cli/scripts/ansible_connection_cli_stub.py', - 'ansible-console': 'ansible', - 'ansible-doc': 'ansible', - 'ansible-galaxy': 'ansible', - 'ansible-inventory': 'ansible', - 'ansible-playbook': 'ansible', - 'ansible-pull': 'ansible', - 'ansible-test': '../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py', - 'ansible-vault': 'ansible', -} diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py index 778643bb..60255da6 100644 --- a/test/lib/ansible_test/_util/target/sanity/import/importer.py +++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py @@ -8,25 +8,38 @@ def main(): Main program function used to isolate globals from imported code. Changes to globals in imported modules on Python 2.x will overwrite our own globals. """ + import os + import sys + import types + + # preload an empty ansible._vendor module to prevent use of any embedded modules during the import test + vendor_module_name = 'ansible._vendor' + + vendor_module = types.ModuleType(vendor_module_name) + vendor_module.__file__ = os.path.join(os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-8]), 'lib/ansible/_vendor/__init__.py') + vendor_module.__path__ = [] + vendor_module.__package__ = vendor_module_name + + sys.modules[vendor_module_name] = vendor_module + import ansible import contextlib import datetime import json - import os import re import runpy import subprocess - import sys import traceback - import types import warnings ansible_path = os.path.dirname(os.path.dirname(ansible.__file__)) temp_path = os.environ['SANITY_TEMP_PATH'] + os.path.sep - external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable + external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') + yaml_to_json_path = os.environ.get('SANITY_YAML_TO_JSON') collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') import_type = os.environ.get('SANITY_IMPORTER_TYPE') + ansible_controller_min_python_version = tuple(int(x) for x in os.environ.get('ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION', '0').split('.')) try: # noinspection PyCompatibility @@ -45,10 +58,17 @@ def main(): if collection_full_name: # allow importing code from collections when testing a collection from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native, text_type - from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder - from ansible.utils.collection_loader import _collection_finder - yaml_to_json_path = 
os.path.join(os.path.dirname(__file__), 'yaml_to_json.py') + if sys.version_info >= ansible_controller_min_python_version: + # noinspection PyProtectedMember + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + from ansible.utils.collection_loader import _collection_finder + else: + # noinspection PyProtectedMember + from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder + # noinspection PyProtectedMember + from ansible_test._internal.legacy_collection_loader import _collection_finder + yaml_to_dict_cache = {} # unique ISO date marker matching the one present in yaml_to_json.py @@ -101,8 +121,13 @@ def main(): # do not support collection loading when not testing a collection collection_loader = None - # remove all modules under the ansible package - list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) + if collection_loader and import_type == 'plugin': + # do not unload ansible code for collection plugin (not module) tests + # doing so could result in the collection loader being initialized multiple times + pass + else: + # remove all modules under the ansible package, except the preloaded vendor module + list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__ and m != vendor_module_name])) if import_type == 'module': # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading @@ -424,7 +449,7 @@ def main(): try: yield finally: - if import_type == 'plugin': + if import_type == 'plugin' and not collection_loader: from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder _AnsibleCollectionFinder._remove() # pylint: disable=protected-access @@ -473,6 +498,11 @@ def main(): with warnings.catch_warnings(): warnings.simplefilter('error') + if collection_loader and import_type == 'plugin': + warnings.filterwarnings( + "ignore", + "AnsibleCollectionFinder has already been configured") + if sys.version_info[0] == 2: warnings.filterwarnings( "ignore", diff --git a/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py deleted file mode 100644 index 1164168e..00000000 --- a/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Read YAML from stdin and write JSON to stdout.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import datetime -import json -import sys - -from yaml import load - -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader - -# unique ISO date marker matching the one present in importer.py -ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' - - -def default(value): - """Custom default serializer which supports datetime.date types.""" - if isinstance(value, datetime.date): - return '%s%s' % (ISO_DATE_MARKER, value.isoformat()) - - raise TypeError('cannot serialize type: %s' % type(value)) - - -json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout, default=default) diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh index 2d31945e..53e2ca71 100644 --- a/test/lib/ansible_test/_util/target/setup/bootstrap.sh +++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh @@ -49,8 +49,11 @@ customize_bashrc() install_pip() { if ! 
"${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then case "${python_version}" in + "2.7") + pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-20.3.4.py" + ;; *) - pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py" + pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-21.3.1.py" ;; esac @@ -180,6 +183,13 @@ bootstrap_remote_freebsd() sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config service sshd restart fi + + # make additional wheels available for packages which lack them for this platform + echo "# generated by ansible-test +[global] +extra-index-url = https://spare-tire.testing.ansible.com/simple/ +prefer-binary = yes +" > /etc/pip.conf } bootstrap_remote_macos() diff --git a/test/lib/ansible_test/_util/target/setup/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py index 83d4576b..fc65c88b 100644 --- a/test/lib/ansible_test/_util/target/setup/quiet_pip.py +++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py @@ -3,8 +3,10 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type import logging +import os import re import runpy +import sys import warnings BUILTIN_FILTERER_FILTER = logging.Filterer.filter @@ -69,7 +71,19 @@ def main(): # Python 2.7 cannot use the -W option to match warning text after a colon. This makes it impossible to match specific warning messages. warnings.filterwarnings('ignore', message_filter) - runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True) + get_pip = os.environ.get('GET_PIP') + + try: + if get_pip: + directory, filename = os.path.split(get_pip) + module = os.path.splitext(filename)[0] + sys.path.insert(0, directory) + runpy.run_module(module, run_name='__main__', alter_sys=True) + else: + runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True) + except ImportError as ex: + print('pip is unavailable: %s' % ex) + sys.exit(1) if __name__ == '__main__': diff --git a/test/lib/ansible_test/_util/target/setup/requirements.py b/test/lib/ansible_test/_util/target/setup/requirements.py index 0e3b1e63..f460c5c5 100644 --- a/test/lib/ansible_test/_util/target/setup/requirements.py +++ b/test/lib/ansible_test/_util/target/setup/requirements.py @@ -38,6 +38,11 @@ except ImportError: # noinspection PyProtectedMember from pipes import quote as cmd_quote +try: + from urllib.request import urlopen +except ImportError: + from urllib import urlopen + ENCODING = 'utf-8' PAYLOAD = b'{payload}' # base-64 encoded JSON payload which will be populated before this script is executed @@ -70,6 +75,38 @@ def main(): # type: () -> None sys.exit(1) +# noinspection PyUnusedLocal +def bootstrap(pip, options): # type: (str, t.Dict[str, t.Any]) -> None + """Bootstrap pip and related packages in an empty virtual environment.""" + pip_version = options['pip_version'] + packages = options['packages'] + + url = 'https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-%s.py' % pip_version + cache_path = os.path.expanduser('~/.ansible/test/cache/get_pip_%s.py' % pip_version.replace(".", "_")) + temp_path = cache_path + '.download' + + if os.path.exists(cache_path): + log('Using cached pip %s bootstrap script: %s' % (pip_version, cache_path)) + else: + log('Downloading pip %s bootstrap script: %s' % (pip_version, url)) + + make_dirs(os.path.dirname(cache_path)) + download_file(url, temp_path) + shutil.move(temp_path, cache_path) + + 
log('Cached pip %s bootstrap script: %s' % (pip_version, cache_path)) + + env = common_pip_environment() + env.update(GET_PIP=cache_path) + + options = common_pip_options() + options.extend(packages) + + command = [sys.executable, pip] + options + + execute_command(command, env=env) + + def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None """Perform a pip install.""" requirements = options['requirements'] @@ -92,7 +129,9 @@ def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None command = [sys.executable, pip, 'install'] + options - execute_command(command, tempdir) + env = common_pip_environment() + + execute_command(command, env=env, cwd=tempdir) finally: remove_tree(tempdir) @@ -107,13 +146,36 @@ def uninstall(pip, options): # type: (str, t.Dict[str, t.Any]) -> None command = [sys.executable, pip, 'uninstall', '-y'] + options + env = common_pip_environment() + try: - execute_command(command, capture=True) + execute_command(command, env=env, capture=True) except SubprocessError: if not ignore_errors: raise +# noinspection PyUnusedLocal +def version(pip, options): # type: (str, t.Dict[str, t.Any]) -> None + """Report the pip version.""" + del options + + options = common_pip_options() + + command = [sys.executable, pip, '-V'] + options + + env = common_pip_environment() + + execute_command(command, env=env, capture=True) + + +def common_pip_environment(): # type: () -> t.Dict[str, str] + """Return common environment variables used to run pip.""" + env = os.environ.copy() + + return env + + def common_pip_options(): # type: () -> t.List[str] """Return a list of common pip options.""" return [ @@ -131,6 +193,13 @@ def devnull(): # type: () -> t.IO[bytes] return devnull.file +def download_file(url, path): # type: (str, str) -> None + """Download the given URL to the specified file path.""" + with open(to_bytes(path), 'wb') as saved_file: + download = urlopen(url) + shutil.copyfileobj(download, saved_file) + + class ApplicationError(Exception): """Base class for application exceptions.""" @@ -158,7 +227,7 @@ def log(message, verbosity=0): # type: (str, int) -> None CONSOLE.flush() -def execute_command(cmd, cwd=None, capture=False): # type: (t.List[str], t.Optional[str], bool) -> None +def execute_command(cmd, cwd=None, capture=False, env=None): # type: (t.List[str], t.Optional[str], bool, t.Optional[t.Dict[str, str]]) -> None """Execute the specified command.""" log('Execute command: %s' % ' '.join(cmd_quote(c) for c in cmd), verbosity=1) @@ -171,7 +240,8 @@ def execute_command(cmd, cwd=None, capture=False): # type: (t.List[str], t.Opti stdout = None stderr = None - process = subprocess.Popen(cmd_bytes, cwd=to_optional_bytes(cwd), stdin=devnull(), stdout=stdout, stderr=stderr) # pylint: disable=consider-using-with + cwd_bytes = to_optional_bytes(cwd) + process = subprocess.Popen(cmd_bytes, cwd=cwd_bytes, stdin=devnull(), stdout=stdout, stderr=stderr, env=env) # pylint: disable=consider-using-with stdout_bytes, stderr_bytes = process.communicate() stdout_text = to_optional_text(stdout_bytes) or u'' stderr_text = to_optional_text(stderr_bytes) or u'' diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py index 921cb197..06f3f916 100644 --- a/test/sanity/code-smell/package-data.py +++ b/test/sanity/code-smell/package-data.py @@ -223,8 +223,9 @@ def install_sdist(tmp_dir, sdist_dir): raise Exception('sdist install failed:\n%s' % stderr) # Determine the prefix for the installed files - match = re.search('^creating 
(%s/.*?/(?:site|dist)-packages)/ansible$' % + match = re.search('^copying .* -> (%s/.*?/(?:site|dist)-packages)/ansible$' % tmp_dir, stdout, flags=re.M) + return match.group(1) diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt index 4a3bb620..058331e1 100644 --- a/test/sanity/ignore.txt +++ b/test/sanity/ignore.txt @@ -1,3 +1,4 @@ +.azure-pipelines/scripts/publish-codecov.py replace-urlopen docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes docs/docsite/rst/locales/ja/LC_MESSAGES/dev_guide.po no-smart-quotes # Translation of the no-smart-quotes rule examples/play.yml shebang @@ -183,6 +184,7 @@ test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint: test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath +test/lib/ansible_test/_util/target/setup/requirements.py replace-urlopen test/support/integration/plugins/inventory/aws_ec2.py pylint:use-a-generator test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator test/support/integration/plugins/modules/ec2_group.py pylint:use-a-generator diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py index 3de2e89a..65243df1 100644 --- a/test/units/galaxy/test_collection.py +++ b/test/units/galaxy/test_collection.py @@ -218,7 +218,49 @@ def server_config(monkeypatch): @pytest.mark.parametrize('global_ignore_certs', [True, False]) -def test_validate_certs(global_ignore_certs, server_config, monkeypatch): +def test_validate_certs(global_ignore_certs, monkeypatch): + cli_args = [ + 'ansible-galaxy', + 'collection', + 'install', + 'namespace.collection:1.0.0', + ] + if global_ignore_certs: + cli_args.append('--ignore-certs') + + galaxy_cli = GalaxyCLI(args=cli_args) + mock_execute_install = MagicMock() + monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install) + galaxy_cli.run() + + assert len(galaxy_cli.api_servers) == 1 + assert galaxy_cli.api_servers[0].validate_certs is not global_ignore_certs + + +@pytest.mark.parametrize('global_ignore_certs', [True, False]) +def test_validate_certs_with_server_url(global_ignore_certs, monkeypatch): + cli_args = [ + 'ansible-galaxy', + 'collection', + 'install', + 'namespace.collection:1.0.0', + '-s', + 'https://galaxy.ansible.com' + ] + if global_ignore_certs: + cli_args.append('--ignore-certs') + + galaxy_cli = GalaxyCLI(args=cli_args) + mock_execute_install = MagicMock() + monkeypatch.setattr(galaxy_cli, '_execute_install_collection', mock_execute_install) + galaxy_cli.run() + + assert len(galaxy_cli.api_servers) == 1 + assert galaxy_cli.api_servers[0].validate_certs is not global_ignore_certs + + +@pytest.mark.parametrize('global_ignore_certs', [True, False]) +def test_validate_certs_with_server_config(global_ignore_certs, server_config, monkeypatch): get_plugin_options = MagicMock(side_effect=server_config) monkeypatch.setattr(C.config, 'get_plugin_options', get_plugin_options) @@ -289,6 +331,45 @@ def test_build_existing_output_with_force(collection_input): assert tarfile.is_tarfile(existing_output) +def test_build_with_existing_files_and_manifest(collection_input): + input_dir, output_dir = collection_input + + with open(os.path.join(input_dir, 'MANIFEST.json'), "wb") as fd: + 
fd.write(b'{"collection_info": {"version": "6.6.6"}, "version": 1}') + + with open(os.path.join(input_dir, 'FILES.json'), "wb") as fd: + fd.write(b'{"files": [], "format": 1}') + + with open(os.path.join(input_dir, "plugins", "MANIFEST.json"), "wb") as fd: + fd.write(b"test data that should be in build") + + collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False) + + output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz') + assert tarfile.is_tarfile(output_artifact) + + with tarfile.open(output_artifact, mode='r') as actual: + members = actual.getmembers() + + manifest_file = next(m for m in members if m.path == "MANIFEST.json") + manifest_file_obj = actual.extractfile(manifest_file.name) + manifest_file_text = manifest_file_obj.read() + manifest_file_obj.close() + assert manifest_file_text != b'{"collection_info": {"version": "6.6.6"}, "version": 1}' + + json_file = next(m for m in members if m.path == "MANIFEST.json") + json_file_obj = actual.extractfile(json_file.name) + json_file_text = json_file_obj.read() + json_file_obj.close() + assert json_file_text != b'{"files": [], "format": 1}' + + sub_manifest_file = next(m for m in members if m.path == "plugins/MANIFEST.json") + sub_manifest_file_obj = actual.extractfile(sub_manifest_file.name) + sub_manifest_file_text = sub_manifest_file_obj.read() + sub_manifest_file_obj.close() + assert sub_manifest_file_text == b"test data that should be in build" + + @pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: value: broken'], indirect=True) def test_invalid_yaml_galaxy_file(galaxy_yml_dir): galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml') diff --git a/test/units/module_utils/facts/virtual/test_linux.py b/test/units/module_utils/facts/virtual/test_linux.py index 25503750..7c13299e 100644 --- a/test/units/module_utils/facts/virtual/test_linux.py +++ b/test/units/module_utils/facts/virtual/test_linux.py @@ -8,6 +8,30 @@ __metaclass__ = type from ansible.module_utils.facts.virtual import linux +def mock_os_path_is_file_docker(filename): + if filename in ('/.dockerenv', '/.dockerinit'): + return True + return False + + +def test_get_virtual_facts_docker(mocker): + mocker.patch('os.path.exists', mock_os_path_is_file_docker) + + module = mocker.Mock() + module.run_command.return_value = (0, '', '') + inst = linux.LinuxVirtual(module) + facts = inst.get_virtual_facts() + + expected = { + 'virtualization_role': 'guest', + 'virtualization_tech_host': set(), + 'virtualization_type': 'docker', + 'virtualization_tech_guest': set(['docker', 'container']), + } + + assert facts == expected + + def test_get_virtual_facts_bhyve(mocker): mocker.patch('os.path.exists', return_value=False) mocker.patch('ansible.module_utils.facts.virtual.linux.get_file_content', return_value='') diff --git a/test/units/parsing/yaml/test_loader.py b/test/units/parsing/yaml/test_loader.py index d6989f44..fbe69a97 100644 --- a/test/units/parsing/yaml/test_loader.py +++ b/test/units/parsing/yaml/test_loader.py @@ -35,12 +35,8 @@ from ansible.parsing.yaml.dumper import AnsibleDumper from units.mock.yaml_helper import YamlTestUtils from units.mock.vault_helper import TextVaultSecret -try: - from _yaml import ParserError - from _yaml import ScannerError -except ImportError: - from yaml.parser import ParserError - from yaml.scanner import ScannerError +from yaml.parser import ParserError +from yaml.scanner import ScannerError class NameStringIO(StringIO): diff --git 
a/test/units/plugins/become/test_sudo.py b/test/units/plugins/become/test_sudo.py index 8ccb2a12..67eb9a46 100644 --- a/test/units/plugins/become/test_sudo.py +++ b/test/units/plugins/become/test_sudo.py @@ -38,3 +38,30 @@ def test_sudo(mocker, parser, reset_cli_args): cmd = sudo.build_become_command('/bin/foo', sh) assert re.match(r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd + + sudo.set_options(direct={ + 'become_user': 'foo', + 'become_flags': '-snH', + 'become_pass': 'testpass', + }) + + cmd = sudo.build_become_command('/bin/foo', sh) + assert re.match(r"""sudo\s+-sH\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd + + sudo.set_options(direct={ + 'become_user': 'foo', + 'become_flags': '--non-interactive -s -H', + 'become_pass': 'testpass', + }) + + cmd = sudo.build_become_command('/bin/foo', sh) + assert re.match(r"""sudo\s+-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd + + sudo.set_options(direct={ + 'become_user': 'foo', + 'become_flags': '--non-interactive -nC5 -s -H', + 'become_pass': 'testpass', + }) + + cmd = sudo.build_become_command('/bin/foo', sh) + assert re.match(r"""sudo\s+-C5\s-s\s-H\s+-p "\[sudo via ansible, key=.+?\] password:" -u foo /bin/bash -c 'echo BECOME-SUCCESS-.+? ; /bin/foo'""", cmd), cmd diff --git a/test/units/plugins/connection/test_ssh.py b/test/units/plugins/connection/test_ssh.py index d693313f..9b3e3c9d 100644 --- a/test/units/plugins/connection/test_ssh.py +++ b/test/units/plugins/connection/test_ssh.py @@ -102,6 +102,7 @@ class TestConnectionBaseClass(unittest.TestCase): def test_plugins_connection_ssh__examine_output(self): pc = PlayContext() new_stdin = StringIO() + become_success_token = b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' conn = connection_loader.get('ssh', pc, new_stdin) conn.set_become_plugin(become_loader.get('sudo')) @@ -112,24 +113,16 @@ class TestConnectionBaseClass(unittest.TestCase): conn.become.check_missing_password = MagicMock() def _check_password_prompt(line): - if b'foo' in line: - return True - return False + return b'foo' in line def _check_become_success(line): - if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line: - return True - return False + return become_success_token in line def _check_incorrect_password(line): - if b'incorrect password' in line: - return True - return False + return b'incorrect password' in line def _check_missing_password(line): - if b'bad password' in line: - return True - return False + return b'bad password' in line # test examining output for prompt conn._flags = dict( @@ -172,9 +165,9 @@ class TestConnectionBaseClass(unittest.TestCase): pc.prompt = False conn.become.prompt = False - pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' - conn.become.success = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' - output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False) + pc.success_key = str(become_success_token) + conn.become.success = str(become_success_token) + output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\n%s\nline 3\n' % become_success_token, False) self.assertEqual(output, b'line 1\nline 2\nline 3\n') self.assertEqual(unprocessed, b'') self.assertFalse(conn._flags['become_prompt']) @@ -182,6 +175,23 @@ class 
TestConnectionBaseClass(unittest.TestCase): self.assertFalse(conn._flags['become_error']) self.assertFalse(conn._flags['become_nopasswd_error']) + # test we dont detect become success from ssh debug: lines + conn._flags = dict( + become_prompt=False, + become_success=False, + become_error=False, + become_nopasswd_error=False, + ) + + pc.prompt = False + conn.become.prompt = True + pc.success_key = str(become_success_token) + conn.become.success = str(become_success_token) + output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\ndebug1: %s\nline 3\n' % become_success_token, False) + self.assertEqual(output, b'line 1\nline 2\ndebug1: %s\nline 3\n' % become_success_token) + self.assertEqual(unprocessed, b'') + self.assertFalse(conn._flags['become_success']) + # test examining output for become failure conn._flags = dict( become_prompt=False, -- cgit v1.2.3
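The closing test_ssh.py hunk adds a regression test checking that a BECOME-SUCCESS token appearing inside ssh verbose output (a "debug1:" line) is not mistaken for a real privilege-escalation success and that such lines pass through unmodified. The connection-plugin change being tested is not part of this excerpt, so the sketch below only illustrates the kind of check the test exercises; check_become_success and the debug-prefix filter are assumptions made for the example, not the actual ansible code.

def check_become_success(line, success_token):  # type: (bytes, bytes) -> bool
    """Return True if the line signals become success, ignoring ssh debug output."""
    if line.startswith((b'debug1:', b'debug2:', b'debug3:')):
        return False
    return success_token in line


token = b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
assert check_become_success(b'%s\n' % token, token)
assert not check_become_success(b'debug1: %s\n' % token, token)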