Diffstat (limited to 'test/lib/ansible_test')
95 files changed, 1037 insertions, 656 deletions
diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt
index 9e1a9d5e..a863ecbf 100644
--- a/test/lib/ansible_test/_data/completion/docker.txt
+++ b/test/lib/ansible_test/_data/completion/docker.txt
@@ -1,9 +1,9 @@
-base image=quay.io/ansible/base-test-container:3.9.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10
-default image=quay.io/ansible/default-test-container:6.13.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10 context=collection
-default image=quay.io/ansible/ansible-core-test-container:6.13.0 python=3.11,2.7,3.5,3.6,3.7,3.8,3.9,3.10 context=ansible-core
-alpine3 image=quay.io/ansible/alpine3-test-container:4.8.0 python=3.10 cgroup=none audit=none
-centos7 image=quay.io/ansible/centos7-test-container:4.8.0 python=2.7 cgroup=v1-only
-fedora36 image=quay.io/ansible/fedora36-test-container:4.8.0 python=3.10
-opensuse15 image=quay.io/ansible/opensuse15-test-container:4.8.0 python=3.6
-ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:4.8.0 python=3.8
-ubuntu2204 image=quay.io/ansible/ubuntu2204-test-container:4.8.0 python=3.10
+base image=quay.io/ansible/base-test-container:5.10.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11
+default image=quay.io/ansible/default-test-container:8.12.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11 context=collection
+default image=quay.io/ansible/ansible-core-test-container:8.12.0 python=3.12,2.7,3.6,3.7,3.8,3.9,3.10,3.11 context=ansible-core
+alpine3 image=quay.io/ansible/alpine3-test-container:6.3.0 python=3.11 cgroup=none audit=none
+centos7 image=quay.io/ansible/centos7-test-container:6.3.0 python=2.7 cgroup=v1-only
+fedora38 image=quay.io/ansible/fedora38-test-container:6.3.0 python=3.11
+opensuse15 image=quay.io/ansible/opensuse15-test-container:6.3.0 python=3.6
+ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:6.3.0 python=3.8
+ubuntu2204 image=quay.io/ansible/ubuntu2204-test-container:6.3.0 python=3.10
diff --git a/test/lib/ansible_test/_data/completion/remote.txt b/test/lib/ansible_test/_data/completion/remote.txt
index 9cb8dee8..06d4b5ef 100644
--- a/test/lib/ansible_test/_data/completion/remote.txt
+++ b/test/lib/ansible_test/_data/completion/remote.txt
@@ -1,16 +1,14 @@
-alpine/3.16 python=3.10 become=doas_sudo provider=aws arch=x86_64
+alpine/3.18 python=3.11 become=doas_sudo provider=aws arch=x86_64
 alpine become=doas_sudo provider=aws arch=x86_64
-fedora/36 python=3.10 become=sudo provider=aws arch=x86_64
+fedora/38 python=3.11 become=sudo provider=aws arch=x86_64
 fedora become=sudo provider=aws arch=x86_64
-freebsd/12.4 python=3.9 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
-freebsd/13.2 python=3.8,3.7,3.9,3.10 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
+freebsd/13.2 python=3.9,3.11 python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
 freebsd python_dir=/usr/local/bin become=su_sudo provider=aws arch=x86_64
-macos/12.0 python=3.10 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
+macos/13.2 python=3.11 python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
 macos python_dir=/usr/local/bin become=sudo provider=parallels arch=x86_64
 rhel/7.9 python=2.7 become=sudo provider=aws arch=x86_64
-rhel/8.6 python=3.6,3.8,3.9 become=sudo provider=aws arch=x86_64
-rhel/9.0 python=3.9 become=sudo provider=aws arch=x86_64
+rhel/8.8 python=3.6,3.11 become=sudo provider=aws arch=x86_64
+rhel/9.2 python=3.9,3.11 become=sudo provider=aws arch=x86_64
 rhel become=sudo provider=aws arch=x86_64
-ubuntu/20.04 python=3.8,3.9 become=sudo provider=aws arch=x86_64
 ubuntu/22.04 python=3.10 become=sudo provider=aws arch=x86_64
 ubuntu become=sudo provider=aws arch=x86_64
diff --git a/test/lib/ansible_test/_data/completion/windows.txt b/test/lib/ansible_test/_data/completion/windows.txt
index 92b0d086..860a2e32 100644
--- a/test/lib/ansible_test/_data/completion/windows.txt
+++ b/test/lib/ansible_test/_data/completion/windows.txt
@@ -1,5 +1,3 @@
-windows/2012 provider=azure arch=x86_64
-windows/2012-R2 provider=azure arch=x86_64
 windows/2016 provider=aws arch=x86_64
 windows/2019 provider=aws arch=x86_64
 windows/2022 provider=aws arch=x86_64
diff --git a/test/lib/ansible_test/_data/requirements/ansible-test.txt b/test/lib/ansible_test/_data/requirements/ansible-test.txt
index f7cb9c27..17662f07 100644
--- a/test/lib/ansible_test/_data/requirements/ansible-test.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible-test.txt
@@ -1,4 +1,5 @@
 # The test-constraints sanity test verifies this file, but changes must be made manually to keep it in up-to-date.
 virtualenv == 16.7.12 ; python_version < '3'
-coverage == 6.5.0 ; python_version >= '3.7' and python_version <= '3.11'
+coverage == 7.3.2 ; python_version >= '3.8' and python_version <= '3.12'
+coverage == 6.5.0 ; python_version >= '3.7' and python_version <= '3.7'
 coverage == 4.5.4 ; python_version >= '2.6' and python_version <= '3.6'
diff --git a/test/lib/ansible_test/_data/requirements/ansible.txt b/test/lib/ansible_test/_data/requirements/ansible.txt
index 20562c3e..5eaf9f2c 100644
--- a/test/lib/ansible_test/_data/requirements/ansible.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible.txt
@@ -12,4 +12,4 @@ packaging
 # NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69
 # NOTE: When updating the upper bound, also update the latest version used
 # NOTE: in the ansible-galaxy-collection test suite.
-resolvelib >= 0.5.3, < 0.9.0 # dependency resolver used by ansible-galaxy
+resolvelib >= 0.5.3, < 1.1.0 # dependency resolver used by ansible-galaxy
diff --git a/test/lib/ansible_test/_data/requirements/constraints.txt b/test/lib/ansible_test/_data/requirements/constraints.txt
index 627f41df..dd837e3b 100644
--- a/test/lib/ansible_test/_data/requirements/constraints.txt
+++ b/test/lib/ansible_test/_data/requirements/constraints.txt
@@ -5,7 +5,6 @@ pywinrm >= 0.3.0 ; python_version < '3.11' # message encryption support
 pywinrm >= 0.4.3 ; python_version >= '3.11' # support for Python 3.11
 pytest < 5.0.0, >= 4.5.0 ; python_version == '2.7' # pytest 5.0.0 and later will no longer support python 2.7
 pytest >= 4.5.0 ; python_version > '2.7' # pytest 4.5.0 added support for --strict-markers
-pytest-forked >= 1.0.2 # pytest-forked before 1.0.2 does not work with pytest 4.2.0+
 ntlm-auth >= 1.3.0 # message encryption support using cryptography
 requests-ntlm >= 1.1.0 # message encryption support
 requests-credssp >= 0.1.0 # message encryption support
@@ -13,5 +12,4 @@ pyparsing < 3.0.0 ; python_version < '3.5' # pyparsing 3 and later require pytho
 mock >= 2.0.0 # needed for features backported from Python 3.6 unittest.mock (assert_called, assert_called_once...)
 pytest-mock >= 1.4.0 # needed for mock_use_standalone_module pytest option
 setuptools < 45 ; python_version == '2.7' # setuptools 45 and later require python 3.5 or later
-pyspnego >= 0.1.6 ; python_version >= '3.10' # bug in older releases breaks on Python 3.10
 wheel < 0.38.0 ; python_version < '3.7' # wheel 0.38.0 and later require python 3.7 or later
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index 580f0641..66801459 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,8 +1,5 @@
 # edit "sanity.ansible-doc.in" and generate with: hacking/update-sanity-requirements.py --test ansible-doc
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
 Jinja2==3.1.2
-MarkupSafe==2.1.1
-packaging==21.3
-pyparsing==3.0.9
-PyYAML==6.0
+MarkupSafe==2.1.3
+packaging==23.2
+PyYAML==6.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.in b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
index 7f231827..81d65ff8 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.in
@@ -1,3 +1,2 @@
-rstcheck < 4 # match version used in other sanity tests
+rstcheck < 6 # newer versions have too many dependencies
 antsibull-changelog
-docutils < 0.18 # match version required by sphinx in the docs-build sanity test
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index 1755a489..d763bad2 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1,10 +1,9 @@
 # edit "sanity.changelog.in" and generate with: hacking/update-sanity-requirements.py --test changelog
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
-antsibull-changelog==0.16.0
-docutils==0.17.1
-packaging==21.3
-pyparsing==3.0.9
-PyYAML==6.0
-rstcheck==3.5.0
+antsibull-changelog==0.23.0
+docutils==0.18.1
+packaging==23.2
+PyYAML==6.0.1
+rstcheck==5.0.0
 semantic-version==2.10.0
+types-docutils==0.18.3
+typing_extensions==4.8.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
index 93e147a5..56366b77 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
@@ -1,6 +1,4 @@
 # edit "sanity.import.plugin.in" and generate with: hacking/update-sanity-requirements.py --test import.plugin
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
 Jinja2==3.1.2
-MarkupSafe==2.1.1
-PyYAML==6.0
+MarkupSafe==2.1.3
+PyYAML==6.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
index 4fda120d..4d9d4f53 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.txt
@@ -1,4 +1,2 @@
 # edit "sanity.import.in" and generate with: hacking/update-sanity-requirements.py --test import
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
-PyYAML==6.0
+PyYAML==6.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index 51cc1ca3..17d60b6f 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1,4 +1,2 @@
 # edit "sanity.integration-aliases.in" and generate with: hacking/update-sanity-requirements.py --test integration-aliases
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
-PyYAML==6.0
+PyYAML==6.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.in b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
index 98dead6c..f01ae948 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.mypy.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.in
@@ -1,10 +1,10 @@
-mypy[python2] != 0.971 # regression in 0.971 (see https://github.com/python/mypy/pull/13223)
+mypy
+cryptography # type stubs not published separately
+jinja2 # type stubs not published separately
 packaging # type stubs not published separately
 types-backports
-types-jinja2
-types-paramiko < 2.8.14 # newer versions drop support for Python 2.7
-types-pyyaml < 6 # PyYAML 6+ stubs do not support Python 2.7
-types-cryptography < 3.3.16 # newer versions drop support for Python 2.7
+types-paramiko
+types-pyyaml
 types-requests
 types-setuptools
 types-toml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
index 9dffc8fb..f6a47fb0 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt
@@ -1,20 +1,18 @@
 # edit "sanity.mypy.in" and generate with: hacking/update-sanity-requirements.py --test mypy
-mypy==0.961
-mypy-extensions==0.4.3
-packaging==21.3
-pyparsing==3.0.9
+cffi==1.16.0
+cryptography==41.0.4
+Jinja2==3.1.2
+MarkupSafe==2.1.3
+mypy==1.5.1
+mypy-extensions==1.0.0
+packaging==23.2
+pycparser==2.21
 tomli==2.0.1
-typed-ast==1.5.4
 types-backports==0.1.3
-types-cryptography==3.3.15
-types-enum34==1.1.8
-types-ipaddress==1.0.8
-types-Jinja2==2.11.9
-types-MarkupSafe==1.1.10
-types-paramiko==2.8.13
-types-PyYAML==5.4.12
-types-requests==2.28.10
-types-setuptools==65.3.0
-types-toml==0.10.8
-types-urllib3==1.26.24
-typing_extensions==4.3.0
+types-paramiko==3.3.0.0
+types-PyYAML==6.0.12.12
+types-requests==2.31.0.7
+types-setuptools==68.2.0.0
+types-toml==0.10.8.7
+typing_extensions==4.8.0
+urllib3==2.0.6
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
index 60d5784f..1a36d4da 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pep8.txt
@@ -1,2 +1,2 @@
 # edit "sanity.pep8.in" and generate with: hacking/update-sanity-requirements.py --test pep8
-pycodestyle==2.9.1
+pycodestyle==2.11.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
index 68545c9e..df36d61a 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
+++ b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
@@ -28,8 +28,10 @@ Function Install-PSModule {
     }
 }

+# Versions changes should be made first in ansible-test which is then synced to
+# the default-test-container over time
 Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
-Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.20.0
+Install-PSModule -Name PSScriptAnalyzer -RequiredVersion 1.21.0

 if ($IsContainer) {
     # PSScriptAnalyzer contain lots of json files for the UseCompatibleCommands check. We don't use this rule so by
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.in b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
index fde21f12..ae189587 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.in
@@ -1,2 +1,2 @@
-pylint == 2.15.5 # currently vetted version
+pylint
 pyyaml # needed for collection_detail.py
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index 44d8b88c..c3144fe5 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,15 +1,11 @@
 # edit "sanity.pylint.in" and generate with: hacking/update-sanity-requirements.py --test pylint
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
-astroid==2.12.12
-dill==0.3.6
-isort==5.10.1
-lazy-object-proxy==1.7.1
+astroid==3.0.0
+dill==0.3.7
+isort==5.12.0
 mccabe==0.7.0
-platformdirs==2.5.2
-pylint==2.15.5
-PyYAML==6.0
+platformdirs==3.11.0
+pylint==3.0.1
+PyYAML==6.0.1
 tomli==2.0.1
-tomlkit==0.11.5
-typing_extensions==4.3.0
-wrapt==1.14.1
+tomlkit==0.12.1
+typing_extensions==4.8.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index b2b70567..4af9b95e 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,5 +1,3 @@
 # edit "sanity.runtime-metadata.in" and generate with: hacking/update-sanity-requirements.py --test runtime-metadata
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
-PyYAML==6.0
+PyYAML==6.0.1
 voluptuous==0.13.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
index efe94004..78e116f5 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.in
@@ -1,3 +1,4 @@
 jinja2 # ansible-core requirement
 pyyaml # needed for collection_detail.py
 voluptuous
+antsibull-docs-parser==1.0.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index 8a877bba..4e24d64d 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,7 +1,6 @@
 # edit "sanity.validate-modules.in" and generate with: hacking/update-sanity-requirements.py --test validate-modules
-# pre-build requirement: pyyaml == 6.0
-# pre-build constraint: Cython < 3.0
+antsibull-docs-parser==1.0.0
 Jinja2==3.1.2
-MarkupSafe==2.1.1
-PyYAML==6.0
+MarkupSafe==2.1.3
+PyYAML==6.0.1
 voluptuous==0.13.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index dd401113..bafd30b6 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,6 +1,4 @@
 # edit "sanity.yamllint.in" and generate with: hacking/update-sanity-requirements.py --test yamllint
-# pre-build
requirement: pyyaml == 6.0 -# pre-build constraint: Cython < 3.0 -pathspec==0.10.1 -PyYAML==6.0 -yamllint==1.28.0 +pathspec==0.11.2 +PyYAML==6.0.1 +yamllint==1.32.0 diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt index d2f56d35..d723a65f 100644 --- a/test/lib/ansible_test/_data/requirements/units.txt +++ b/test/lib/ansible_test/_data/requirements/units.txt @@ -2,5 +2,4 @@ mock pytest pytest-mock pytest-xdist -pytest-forked pyyaml # required by the collection loader (only needed for collections) diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py index 404f8056..ebf260b9 100644 --- a/test/lib/ansible_test/_internal/ci/azp.py +++ b/test/lib/ansible_test/_internal/ci/azp.py @@ -70,7 +70,7 @@ class AzurePipelines(CIProvider): os.environ['SYSTEM_JOBIDENTIFIER'], ) except KeyError as ex: - raise MissingEnvironmentVariable(name=ex.args[0]) + raise MissingEnvironmentVariable(name=ex.args[0]) from None return prefix @@ -121,7 +121,7 @@ class AzurePipelines(CIProvider): task_id=str(uuid.UUID(os.environ['SYSTEM_TASKINSTANCEID'])), ) except KeyError as ex: - raise MissingEnvironmentVariable(name=ex.args[0]) + raise MissingEnvironmentVariable(name=ex.args[0]) from None self.auth.sign_request(request) @@ -154,7 +154,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper): try: agent_temp_directory = os.environ['AGENT_TEMPDIRECTORY'] except KeyError as ex: - raise MissingEnvironmentVariable(name=ex.args[0]) + raise MissingEnvironmentVariable(name=ex.args[0]) from None # the temporary file cannot be deleted because we do not know when the agent has processed it # placing the file in the agent's temp directory allows it to be picked up when the job is running in a container @@ -181,7 +181,7 @@ class AzurePipelinesChanges: self.source_branch_name = os.environ['BUILD_SOURCEBRANCHNAME'] self.pr_branch_name = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH') except KeyError as ex: - raise MissingEnvironmentVariable(name=ex.args[0]) + raise MissingEnvironmentVariable(name=ex.args[0]) from None if self.source_branch.startswith('refs/tags/'): raise ChangeDetectionNotSupported('Change detection is not supported for tags.') diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py index 94cafae3..7b1fd1c2 100644 --- a/test/lib/ansible_test/_internal/cli/environments.py +++ b/test/lib/ansible_test/_internal/cli/environments.py @@ -146,12 +146,6 @@ def add_global_options( help='install command requirements', ) - global_parser.add_argument( - '--no-pip-check', - action='store_true', - help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility - ) - add_global_remote(global_parser, controller_mode) add_global_docker(global_parser, controller_mode) @@ -396,7 +390,6 @@ def add_global_docker( """Add global options for Docker.""" if controller_mode != ControllerMode.DELEGATED: parser.set_defaults( - docker_no_pull=False, docker_network=None, docker_terminate=None, prime_containers=False, @@ -407,12 +400,6 @@ def add_global_docker( return parser.add_argument( - '--docker-no-pull', - action='store_true', - help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility - ) - - parser.add_argument( '--docker-network', metavar='NET', help='run using the specified network', diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py 
b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py index ad6cf86f..64bb13b0 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py @@ -57,9 +57,9 @@ def load_report(report: dict[str, t.Any]) -> tuple[list[str], Arcs, Lines]: arc_data: dict[str, dict[str, int]] = report['arcs'] line_data: dict[str, dict[int, int]] = report['lines'] except KeyError as ex: - raise ApplicationError('Document is missing key "%s".' % ex.args) + raise ApplicationError('Document is missing key "%s".' % ex.args) from None except TypeError: - raise ApplicationError('Document is type "%s" instead of "dict".' % type(report).__name__) + raise ApplicationError('Document is type "%s" instead of "dict".' % type(report).__name__) from None arcs = dict((path, dict((parse_arc(arc), set(target_sets[index])) for arc, index in data.items())) for path, data in arc_data.items()) lines = dict((path, dict((int(line), set(target_sets[index])) for line, index in data.items())) for path, data in line_data.items()) @@ -72,12 +72,12 @@ def read_report(path: str) -> tuple[list[str], Arcs, Lines]: try: report = read_json_file(path) except Exception as ex: - raise ApplicationError('File "%s" is not valid JSON: %s' % (path, ex)) + raise ApplicationError('File "%s" is not valid JSON: %s' % (path, ex)) from None try: return load_report(report) except ApplicationError as ex: - raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex)) + raise ApplicationError('File "%s" is not an aggregated coverage data file. %s' % (path, ex)) from None def write_report(args: CoverageAnalyzeTargetsConfig, report: dict[str, t.Any], path: str) -> None: diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py index 12cb54e2..fdeac838 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py @@ -121,7 +121,7 @@ def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: Ho coverage_files = get_python_coverage_files() def _default_stub_value(source_paths: list[str]) -> dict[str, set[tuple[int, int]]]: - return {path: set() for path in source_paths} + return {path: {(0, 0)} for path in source_paths} counter = 0 sources = _get_coverage_targets(args, walk_compile_targets) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py index e8020ca9..136c5331 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py @@ -8,7 +8,6 @@ from ....config import ( ) from ....containers import ( - CleanupMode, run_support_container, ) @@ -22,8 +21,6 @@ from . import ( class ACMEProvider(CloudProvider): """ACME plugin. 
Sets up cloud resources for tests.""" - DOCKER_SIMULATOR_NAME = 'acme-simulator' - def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) @@ -51,17 +48,18 @@ class ACMEProvider(CloudProvider): 14000, # Pebble ACME CA ] - run_support_container( + descriptor = run_support_container( self.args, self.platform, self.image, - self.DOCKER_SIMULATOR_NAME, + 'acme-simulator', ports, - allow_existing=True, - cleanup=CleanupMode.YES, ) - self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME) + if not descriptor: + return + + self._set_cloud_config('acme_host', descriptor.name) def _setup_static(self) -> None: raise NotImplementedError() diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py index 8588df7d..8060804a 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py @@ -21,7 +21,6 @@ from ....docker_util import ( ) from ....containers import ( - CleanupMode, run_support_container, wait_for_file, ) @@ -36,12 +35,10 @@ from . import ( class CsCloudProvider(CloudProvider): """CloudStack cloud provider plugin. Sets up cloud resources before delegation.""" - DOCKER_SIMULATOR_NAME = 'cloudstack-sim' - def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) - self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0') + self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.6.1') self.host = '' self.port = 0 @@ -96,10 +93,8 @@ class CsCloudProvider(CloudProvider): self.args, self.platform, self.image, - self.DOCKER_SIMULATOR_NAME, + 'cloudstack-sim', ports, - allow_existing=True, - cleanup=CleanupMode.YES, ) if not descriptor: @@ -107,7 +102,7 @@ class CsCloudProvider(CloudProvider): # apply work-around for OverlayFS issue # https://github.com/docker/for-linux/issues/72#issuecomment-319904698 - docker_exec(self.args, self.DOCKER_SIMULATOR_NAME, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'], capture=True) + docker_exec(self.args, descriptor.name, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'], capture=True) if self.args.explain: values = dict( @@ -115,10 +110,10 @@ class CsCloudProvider(CloudProvider): PORT=str(self.port), ) else: - credentials = self._get_credentials(self.DOCKER_SIMULATOR_NAME) + credentials = self._get_credentials(descriptor.name) values = dict( - HOST=self.DOCKER_SIMULATOR_NAME, + HOST=descriptor.name, PORT=str(self.port), KEY=credentials['apikey'], SECRET=credentials['secretkey'], diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py index 1391cd84..f7053c8b 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py @@ -10,12 +10,21 @@ from ....config import ( from ....docker_util import ( docker_cp_to, + docker_exec, ) from ....containers import ( run_support_container, ) +from ....encoding import ( + to_text, +) + +from ....util import ( + display, +) + from . import ( CloudEnvironment, CloudEnvironmentConfig, @@ -23,53 +32,59 @@ from . 
import ( ) -# We add BasicAuthentication, to make the tasks that deal with -# direct API access easier to deal with across galaxy_ng and pulp -SETTINGS = b''' -CONTENT_ORIGIN = 'http://ansible-ci-pulp:80' -ANSIBLE_API_HOSTNAME = 'http://ansible-ci-pulp:80' -ANSIBLE_CONTENT_HOSTNAME = 'http://ansible-ci-pulp:80/pulp/content' -TOKEN_AUTH_DISABLED = True -GALAXY_REQUIRE_CONTENT_APPROVAL = False -GALAXY_AUTHENTICATION_CLASSES = [ - "rest_framework.authentication.SessionAuthentication", - "rest_framework.authentication.TokenAuthentication", - "rest_framework.authentication.BasicAuthentication", -] -''' - -SET_ADMIN_PASSWORD = b'''#!/usr/bin/execlineb -S0 -foreground { - redirfd -w 1 /dev/null - redirfd -w 2 /dev/null - export DJANGO_SETTINGS_MODULE pulpcore.app.settings - export PULP_CONTENT_ORIGIN localhost - s6-setuidgid postgres - if { /usr/local/bin/django-admin reset-admin-password --password password } - if { /usr/local/bin/pulpcore-manager create-group system:partner-engineers --users admin } -} -''' - -# There are 2 overrides here: -# 1. Change the gunicorn bind address from 127.0.0.1 to 0.0.0.0 now that Galaxy NG does not allow us to access the -# Pulp API through it. -# 2. Grant access allowing us to DELETE a namespace in Galaxy NG. This is as CI deletes and recreates repos and -# distributions in Pulp which now breaks the namespace in Galaxy NG. Recreating it is the "simple" fix to get it -# working again. -# These may not be needed in the future, especially if 1 becomes configurable by an env var but for now they must be -# done. -OVERRIDES = b'''#!/usr/bin/execlineb -S0 -foreground { - sed -i "0,/\\"127.0.0.1:24817\\"/s//\\"0.0.0.0:24817\\"/" /etc/services.d/pulpcore-api/run +GALAXY_HOST_NAME = 'galaxy-pulp' +SETTINGS = { + 'PULP_CONTENT_ORIGIN': f'http://{GALAXY_HOST_NAME}', + 'PULP_ANSIBLE_API_HOSTNAME': f'http://{GALAXY_HOST_NAME}', + 'PULP_GALAXY_API_PATH_PREFIX': '/api/galaxy/', + # These paths are unique to the container image which has an nginx location for /pulp/content to route + # requests to the content backend + 'PULP_ANSIBLE_CONTENT_HOSTNAME': f'http://{GALAXY_HOST_NAME}/pulp/content/api/galaxy/v3/artifacts/collections/', + 'PULP_CONTENT_PATH_PREFIX': '/pulp/content/api/galaxy/v3/artifacts/collections/', + 'PULP_GALAXY_AUTHENTICATION_CLASSES': [ + 'rest_framework.authentication.SessionAuthentication', + 'rest_framework.authentication.TokenAuthentication', + 'rest_framework.authentication.BasicAuthentication', + 'django.contrib.auth.backends.ModelBackend', + ], + # This should probably be false see https://issues.redhat.com/browse/AAH-2328 + 'PULP_GALAXY_REQUIRE_CONTENT_APPROVAL': 'true', + 'PULP_GALAXY_DEPLOYMENT_MODE': 'standalone', + 'PULP_GALAXY_AUTO_SIGN_COLLECTIONS': 'false', + 'PULP_GALAXY_COLLECTION_SIGNING_SERVICE': 'ansible-default', + 'PULP_RH_ENTITLEMENT_REQUIRED': 'insights', + 'PULP_TOKEN_AUTH_DISABLED': 'false', + 'PULP_TOKEN_SERVER': f'http://{GALAXY_HOST_NAME}/token/', + 'PULP_TOKEN_SIGNATURE_ALGORITHM': 'ES256', + 'PULP_PUBLIC_KEY_PATH': '/src/galaxy_ng/dev/common/container_auth_public_key.pem', + 'PULP_PRIVATE_KEY_PATH': '/src/galaxy_ng/dev/common/container_auth_private_key.pem', + 'PULP_ANALYTICS': 'false', + 'PULP_GALAXY_ENABLE_UNAUTHENTICATED_COLLECTION_ACCESS': 'true', + 'PULP_GALAXY_ENABLE_UNAUTHENTICATED_COLLECTION_DOWNLOAD': 'true', + 'PULP_GALAXY_ENABLE_LEGACY_ROLES': 'true', + 'PULP_GALAXY_FEATURE_FLAGS__execution_environments': 'false', + 'PULP_SOCIAL_AUTH_LOGIN_REDIRECT_URL': '/', + 'PULP_GALAXY_FEATURE_FLAGS__ai_deny_index': 'true', + 
'PULP_DEFAULT_ADMIN_PASSWORD': 'password' } -# This sed calls changes the first occurrence to "allow" which is conveniently the delete operation for a namespace. -# https://github.com/ansible/galaxy_ng/blob/master/galaxy_ng/app/access_control/statements/standalone.py#L9-L11. -backtick NG_PREFIX { python -c "import galaxy_ng; print(galaxy_ng.__path__[0], end='')" } -importas ng_prefix NG_PREFIX -foreground { - sed -i "0,/\\"effect\\": \\"deny\\"/s//\\"effect\\": \\"allow\\"/" ${ng_prefix}/app/access_control/statements/standalone.py -}''' + +GALAXY_IMPORTER = b''' +[galaxy-importer] +ansible_local_tmp=~/.ansible/tmp +ansible_test_local_image=false +check_required_tags=false +check_runtime_yaml=false +check_changelog=false +infra_osd=false +local_image_docker=false +log_level_main=INFO +require_v1_or_greater=false +run_ansible_doc=false +run_ansible_lint=false +run_ansible_test=false +run_flake8=false +'''.strip() class GalaxyProvider(CloudProvider): @@ -81,13 +96,9 @@ class GalaxyProvider(CloudProvider): def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) - # Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporatic issues with - # dropping published collections in CI. Try running the tests multiple times when updating. Will also need to - # comment out the cache tests in 'test/integration/targets/ansible-galaxy-collection/tasks/install.yml' when - # the newer update is available. - self.pulp = os.environ.get( + self.image = os.environ.get( 'ANSIBLE_PULP_CONTAINER', - 'quay.io/ansible/pulp-galaxy-ng:b79a7be64eff' + 'quay.io/pulp/galaxy:4.7.1' ) self.uses_docker = True @@ -96,48 +107,46 @@ class GalaxyProvider(CloudProvider): """Setup cloud resource before delegation and reg cleanup callback.""" super().setup() - galaxy_port = 80 - pulp_host = 'ansible-ci-pulp' - pulp_port = 24817 - - ports = [ - galaxy_port, - pulp_port, - ] - - # Create the container, don't run it, we need to inject configs before it starts - descriptor = run_support_container( - self.args, - self.platform, - self.pulp, - pulp_host, - ports, - start=False, - allow_existing=True, - ) + with tempfile.NamedTemporaryFile(mode='w+') as env_fd: + settings = '\n'.join( + f'{key}={value}' for key, value in SETTINGS.items() + ) + env_fd.write(settings) + env_fd.flush() + display.info(f'>>> galaxy_ng Configuration\n{settings}', verbosity=3) + descriptor = run_support_container( + self.args, + self.platform, + self.image, + GALAXY_HOST_NAME, + [ + 80, + ], + aliases=[ + GALAXY_HOST_NAME, + ], + start=True, + options=[ + '--env-file', env_fd.name, + ], + ) if not descriptor: return - if not descriptor.running: - pulp_id = descriptor.container_id - - injected_files = { - '/etc/pulp/settings.py': SETTINGS, - '/etc/cont-init.d/111-postgres': SET_ADMIN_PASSWORD, - '/etc/cont-init.d/000-ansible-test-overrides': OVERRIDES, - } - for path, content in injected_files.items(): - with tempfile.NamedTemporaryFile() as temp_fd: - temp_fd.write(content) - temp_fd.flush() - docker_cp_to(self.args, pulp_id, temp_fd.name, path) - - descriptor.start(self.args) - - self._set_cloud_config('PULP_HOST', pulp_host) - self._set_cloud_config('PULP_PORT', str(pulp_port)) - self._set_cloud_config('GALAXY_PORT', str(galaxy_port)) + injected_files = [ + ('/etc/galaxy-importer/galaxy-importer.cfg', GALAXY_IMPORTER, 'galaxy-importer'), + ] + for path, content, friendly_name in injected_files: + with tempfile.NamedTemporaryFile() as temp_fd: + temp_fd.write(content) + temp_fd.flush() + display.info(f'>>> 
{friendly_name} Configuration\n{to_text(content)}', verbosity=3) + docker_exec(self.args, descriptor.container_id, ['mkdir', '-p', os.path.dirname(path)], True) + docker_cp_to(self.args, descriptor.container_id, temp_fd.name, path) + docker_exec(self.args, descriptor.container_id, ['chown', 'pulp:pulp', path], True) + + self._set_cloud_config('PULP_HOST', GALAXY_HOST_NAME) self._set_cloud_config('PULP_USER', 'admin') self._set_cloud_config('PULP_PASSWORD', 'password') @@ -150,21 +159,19 @@ class GalaxyEnvironment(CloudEnvironment): pulp_user = str(self._get_cloud_config('PULP_USER')) pulp_password = str(self._get_cloud_config('PULP_PASSWORD')) pulp_host = self._get_cloud_config('PULP_HOST') - galaxy_port = self._get_cloud_config('GALAXY_PORT') - pulp_port = self._get_cloud_config('PULP_PORT') return CloudEnvironmentConfig( ansible_vars=dict( pulp_user=pulp_user, pulp_password=pulp_password, - pulp_api='http://%s:%s' % (pulp_host, pulp_port), - pulp_server='http://%s:%s/pulp_ansible/galaxy/' % (pulp_host, pulp_port), - galaxy_ng_server='http://%s:%s/api/galaxy/' % (pulp_host, galaxy_port), + pulp_api=f'http://{pulp_host}', + pulp_server=f'http://{pulp_host}/pulp_ansible/galaxy/', + galaxy_ng_server=f'http://{pulp_host}/api/galaxy/', ), env_vars=dict( PULP_USER=pulp_user, PULP_PASSWORD=pulp_password, - PULP_SERVER='http://%s:%s/pulp_ansible/galaxy/api/' % (pulp_host, pulp_port), - GALAXY_NG_SERVER='http://%s:%s/api/galaxy/' % (pulp_host, galaxy_port), + PULP_SERVER=f'http://{pulp_host}/pulp_ansible/galaxy/api/', + GALAXY_NG_SERVER=f'http://{pulp_host}/api/galaxy/', ), ) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py index 85065d6f..b3cf2d49 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py @@ -13,7 +13,6 @@ from ....config import ( ) from ....containers import ( - CleanupMode, run_support_container, ) @@ -62,8 +61,6 @@ class HttptesterProvider(CloudProvider): 'http-test-container', ports, aliases=aliases, - allow_existing=True, - cleanup=CleanupMode.YES, env={ KRB5_PASSWORD_ENV: generate_password(), }, diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py index 5bed8340..62dd1558 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py @@ -8,7 +8,6 @@ from ....config import ( ) from ....containers import ( - CleanupMode, run_support_container, ) @@ -22,8 +21,6 @@ from . import ( class NiosProvider(CloudProvider): """Nios plugin. Sets up NIOS mock server for tests.""" - DOCKER_SIMULATOR_NAME = 'nios-simulator' - # Default image to run the nios simulator. 
# # The simulator must be pinned to a specific version @@ -31,7 +28,7 @@ class NiosProvider(CloudProvider): # # It's source source itself resides at: # https://github.com/ansible/nios-test-container - DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.4.0' + DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:2.0.0' def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) @@ -65,17 +62,18 @@ class NiosProvider(CloudProvider): nios_port, ] - run_support_container( + descriptor = run_support_container( self.args, self.platform, self.image, - self.DOCKER_SIMULATOR_NAME, + 'nios-simulator', ports, - allow_existing=True, - cleanup=CleanupMode.YES, ) - self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME) + if not descriptor: + return + + self._set_cloud_config('NIOS_HOST', descriptor.name) def _setup_static(self) -> None: raise NotImplementedError() diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py index ddd434a8..6e8a5e4f 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py @@ -16,7 +16,6 @@ from ....config import ( ) from ....containers import ( - CleanupMode, run_support_container, wait_for_file, ) @@ -31,8 +30,6 @@ from . import ( class OpenShiftCloudProvider(CloudProvider): """OpenShift cloud provider plugin. Sets up cloud resources before delegation.""" - DOCKER_CONTAINER_NAME = 'openshift-origin' - def __init__(self, args: IntegrationConfig) -> None: super().__init__(args, config_extension='.kubeconfig') @@ -74,10 +71,8 @@ class OpenShiftCloudProvider(CloudProvider): self.args, self.platform, self.image, - self.DOCKER_CONTAINER_NAME, + 'openshift-origin', ports, - allow_existing=True, - cleanup=CleanupMode.YES, cmd=cmd, ) @@ -87,7 +82,7 @@ class OpenShiftCloudProvider(CloudProvider): if self.args.explain: config = '# Unknown' else: - config = self._get_config(self.DOCKER_CONTAINER_NAME, 'https://%s:%s/' % (self.DOCKER_CONTAINER_NAME, port)) + config = self._get_config(descriptor.name, 'https://%s:%s/' % (descriptor.name, port)) self._write_config(config) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py index 242b0204..b0ff7fe3 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py @@ -2,7 +2,6 @@ from __future__ import annotations import configparser -import os from ....util import ( ApplicationError, @@ -13,11 +12,6 @@ from ....config import ( IntegrationConfig, ) -from ....containers import ( - CleanupMode, - run_support_container, -) - from . import ( CloudEnvironment, CloudEnvironmentConfig, @@ -28,66 +22,16 @@ from . import ( class VcenterProvider(CloudProvider): """VMware vcenter/esx plugin. Sets up cloud resources for tests.""" - DOCKER_SIMULATOR_NAME = 'vcenter-simulator' - def __init__(self, args: IntegrationConfig) -> None: super().__init__(args) - # The simulator must be pinned to a specific version to guarantee CI passes with the version used. - if os.environ.get('ANSIBLE_VCSIM_CONTAINER'): - self.image = os.environ.get('ANSIBLE_VCSIM_CONTAINER') - else: - self.image = 'quay.io/ansible/vcenter-test-container:1.7.0' - - # VMware tests can be run on govcsim or BYO with a static config file. 
- # The simulator is the default if no config is provided. - self.vmware_test_platform = os.environ.get('VMWARE_TEST_PLATFORM', 'govcsim') - - if self.vmware_test_platform == 'govcsim': - self.uses_docker = True - self.uses_config = False - elif self.vmware_test_platform == 'static': - self.uses_docker = False - self.uses_config = True + self.uses_config = True def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" super().setup() - self._set_cloud_config('vmware_test_platform', self.vmware_test_platform) - - if self.vmware_test_platform == 'govcsim': - self._setup_dynamic_simulator() - self.managed = True - elif self.vmware_test_platform == 'static': - self._use_static_config() - self._setup_static() - else: - raise ApplicationError('Unknown vmware_test_platform: %s' % self.vmware_test_platform) - - def _setup_dynamic_simulator(self) -> None: - """Create a vcenter simulator using docker.""" - ports = [ - 443, - 8080, - 8989, - 5000, # control port for flask app in simulator - ] - - run_support_container( - self.args, - self.platform, - self.image, - self.DOCKER_SIMULATOR_NAME, - ports, - allow_existing=True, - cleanup=CleanupMode.YES, - ) - - self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME) - - def _setup_static(self) -> None: - if not os.path.exists(self.config_static_path): + if not self._use_static_config(): raise ApplicationError('Configuration file does not exist: %s' % self.config_static_path) @@ -96,37 +40,21 @@ class VcenterEnvironment(CloudEnvironment): def get_environment_config(self) -> CloudEnvironmentConfig: """Return environment configuration for use in the test environment after delegation.""" - try: - # We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM, - # We do a try/except instead - parser = configparser.ConfigParser() - parser.read(self.config_path) # static - - env_vars = {} - ansible_vars = dict( - resource_prefix=self.resource_prefix, - ) - ansible_vars.update(dict(parser.items('DEFAULT', raw=True))) - except KeyError: # govcsim - env_vars = dict( - VCENTER_HOSTNAME=str(self._get_cloud_config('vcenter_hostname')), - VCENTER_USERNAME='user', - VCENTER_PASSWORD='pass', - ) - - ansible_vars = dict( - vcsim=str(self._get_cloud_config('vcenter_hostname')), - vcenter_hostname=str(self._get_cloud_config('vcenter_hostname')), - vcenter_username='user', - vcenter_password='pass', - ) + # We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM, + # We do a try/except instead + parser = configparser.ConfigParser() + parser.read(self.config_path) # static + + ansible_vars = dict( + resource_prefix=self.resource_prefix, + ) + ansible_vars.update(dict(parser.items('DEFAULT', raw=True))) for key, value in ansible_vars.items(): if key.endswith('_password'): display.sensitive.add(value) return CloudEnvironmentConfig( - env_vars=env_vars, ansible_vars=ansible_vars, module_defaults={ 'group/vmware': { diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py index 0bc68a21..9b675e4a 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py +++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py @@ -127,9 +127,13 @@ TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity') # NOTE: must match ansible.constants.DOCUMENTABLE_PLUGINS, but with 'module' replaced by 'modules'! 
DOCUMENTABLE_PLUGINS = ( - 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'vars' + 'become', 'cache', 'callback', 'cliconf', 'connection', 'filter', 'httpapi', 'inventory', + 'lookup', 'netconf', 'modules', 'shell', 'strategy', 'test', 'vars', ) +# Plugin types that can have multiple plugins per file, and where filenames not always correspond to plugin names +MULTI_FILE_PLUGINS = ('filter', 'test', ) + created_venvs: list[str] = [] @@ -260,7 +264,7 @@ def command_sanity(args: SanityConfig) -> None: virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name) if virtualenv_python: - virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python) + virtualenv_yaml = args.explain or check_sanity_virtualenv_yaml(virtualenv_python) if test.require_libyaml and not virtualenv_yaml: result = SanitySkipped(test.name) @@ -875,6 +879,7 @@ class SanityCodeSmellTest(SanitySingleVersion): self.__include_directories: bool = self.config.get('include_directories') self.__include_symlinks: bool = self.config.get('include_symlinks') self.__py2_compat: bool = self.config.get('py2_compat', False) + self.__error_code: str | None = self.config.get('error_code', None) else: self.output = None self.extensions = [] @@ -890,6 +895,7 @@ class SanityCodeSmellTest(SanitySingleVersion): self.__include_directories = False self.__include_symlinks = False self.__py2_compat = False + self.__error_code = None if self.no_targets: mutually_exclusive = ( @@ -909,6 +915,11 @@ class SanityCodeSmellTest(SanitySingleVersion): raise ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems))) @property + def error_code(self) -> t.Optional[str]: + """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" + return self.__error_code + + @property def all_targets(self) -> bool: """True if test targets will not be filtered using includes, excludes, requires or changes. 
Mutually exclusive with no_targets.""" return self.__all_targets @@ -992,6 +1003,8 @@ class SanityCodeSmellTest(SanitySingleVersion): pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$' elif self.output == 'path-message': pattern = '^(?P<path>[^:]*): (?P<message>.*)$' + elif self.output == 'path-line-column-code-message': + pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[^:]*): (?P<message>.*)$' else: raise ApplicationError('Unsupported output type: %s' % self.output) @@ -1021,6 +1034,7 @@ class SanityCodeSmellTest(SanitySingleVersion): path=m['path'], line=int(m.get('line', 0)), column=int(m.get('column', 0)), + code=m.get('code'), ) for m in matches] messages = settings.process_errors(messages, paths) @@ -1166,20 +1180,23 @@ def create_sanity_virtualenv( run_pip(args, virtualenv_python, commands, None) # create_sanity_virtualenv() - write_text_file(meta_install, virtualenv_install) + if not args.explain: + write_text_file(meta_install, virtualenv_install) # false positive: pylint: disable=no-member if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands): - virtualenv_yaml = yamlcheck(virtualenv_python) + virtualenv_yaml = yamlcheck(virtualenv_python, args.explain) else: virtualenv_yaml = None - write_json_file(meta_yaml, virtualenv_yaml) + if not args.explain: + write_json_file(meta_yaml, virtualenv_yaml) created_venvs.append(f'{label}-{python.version}') - # touch the marker to keep track of when the virtualenv was last used - pathlib.Path(virtualenv_marker).touch() + if not args.explain: + # touch the marker to keep track of when the virtualenv was last used + pathlib.Path(virtualenv_marker).touch() return virtualenv_python diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py index 04080f60..ff035ef9 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py +++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py @@ -2,11 +2,13 @@ from __future__ import annotations import collections +import json import os import re from . import ( DOCUMENTABLE_PLUGINS, + MULTI_FILE_PLUGINS, SanitySingleVersion, SanityFailure, SanitySuccess, @@ -85,6 +87,44 @@ class AnsibleDocTest(SanitySingleVersion): doc_targets[plugin_type].append(plugin_fqcn) env = ansible_environment(args, color=False) + + for doc_type in MULTI_FILE_PLUGINS: + if doc_targets.get(doc_type): + # List plugins + cmd = ['ansible-doc', '-l', '--json', '-t', doc_type] + prefix = data_context().content.prefix if data_context().content.collection else 'ansible.builtin.' 
+ cmd.append(prefix[:-1]) + try: + stdout, stderr = intercept_python(args, python, cmd, env, capture=True) + status = 0 + except SubprocessError as ex: + stdout = ex.stdout + stderr = ex.stderr + status = ex.status + + if status: + summary = '%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr) + return SanityFailure(self.name, summary=summary) + + if stdout: + display.info(stdout.strip(), verbosity=3) + + if stderr: + summary = 'Output on stderr from ansible-doc is considered an error.\n\n%s' % SubprocessError(cmd, stderr=stderr) + return SanityFailure(self.name, summary=summary) + + if args.explain: + continue + + plugin_list_json = json.loads(stdout) + doc_targets[doc_type] = [] + for plugin_name, plugin_value in sorted(plugin_list_json.items()): + if plugin_value != 'UNDOCUMENTED': + doc_targets[doc_type].append(plugin_name) + + if not doc_targets[doc_type]: + del doc_targets[doc_type] + error_messages: list[SanityMessage] = [] for doc_type in sorted(doc_targets): diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py index b8083324..36f52415 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/import.py +++ b/test/lib/ansible_test/_internal/commands/sanity/import.py @@ -127,20 +127,26 @@ class ImportTest(SanityMultipleVersion): ('plugin', _get_module_test(False)), ): if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS: - continue + # Plugins are not supported on remote-only Python versions. + # However, the collection loader is used by the import sanity test and unit tests on remote-only Python versions. + # To support this, it is tested as a plugin, but using a venv which installs no requirements. + # Filtering of paths relevant to the Python version tested has already been performed by filter_remote_targets. + venv_type = 'empty' + else: + venv_type = import_type data = '\n'.join([path for path in paths if test(path)]) if not data and not args.prime_venvs: continue - virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', coverage=args.coverage, minimize=True) + virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{venv_type}', coverage=args.coverage, minimize=True) if not virtualenv_python: display.warning(f'Skipping sanity test "{self.name}" on Python {python.version} due to missing virtual environment support.') return SanitySkipped(self.name, python.version) - virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python) + virtualenv_yaml = args.explain or check_sanity_virtualenv_yaml(virtualenv_python) if virtualenv_yaml is False: display.warning(f'Sanity test "{self.name}" ({import_type}) on Python {python.version} may be slow due to missing libyaml support in PyYAML.') diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py index 57ce1277..c93474e8 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py +++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py @@ -19,6 +19,7 @@ from . 
import ( from ...constants import ( CONTROLLER_PYTHON_VERSIONS, REMOTE_ONLY_PYTHON_VERSIONS, + SUPPORTED_PYTHON_VERSIONS, ) from ...test import ( @@ -36,6 +37,7 @@ from ...util import ( ANSIBLE_TEST_CONTROLLER_ROOT, ApplicationError, is_subdir, + str_to_version, ) from ...util_common import ( @@ -71,9 +73,19 @@ class MypyTest(SanityMultipleVersion): """Return the given list of test targets, filtered to include only those relevant for the test.""" return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and target.path not in self.vendored_paths and ( target.path.startswith('lib/ansible/') or target.path.startswith('test/lib/ansible_test/_internal/') + or target.path.startswith('packaging/') or target.path.startswith('test/lib/ansible_test/_util/target/sanity/import/'))] @property + def supported_python_versions(self) -> t.Optional[tuple[str, ...]]: + """A tuple of supported Python versions or None if the test does not depend on specific Python versions.""" + # mypy 0.981 dropped support for Python 2 + # see: https://mypy-lang.blogspot.com/2022/09/mypy-0981-released.html + # cryptography dropped support for Python 3.5 in version 3.3 + # see: https://cryptography.io/en/latest/changelog/#v3-3 + return tuple(version for version in SUPPORTED_PYTHON_VERSIONS if str_to_version(version) >= (3, 6)) + + @property def error_code(self) -> t.Optional[str]: """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes.""" return 'ansible-test' @@ -105,6 +117,7 @@ class MypyTest(SanityMultipleVersion): MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], controller_python_versions), MyPyContext('ansible-core', ['lib/ansible/'], controller_python_versions), MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], remote_only_python_versions), + MyPyContext('packaging', ['packaging/'], controller_python_versions), ) unfiltered_messages: list[SanityMessage] = [] @@ -157,6 +170,9 @@ class MypyTest(SanityMultipleVersion): # However, it will also report issues on those files, which is not the desired behavior. messages = [message for message in messages if message.path in paths_set] + if args.explain: + return SanitySuccess(self.name, python_version=python.version) + results = settings.process_errors(messages, paths) if results: @@ -239,7 +255,7 @@ class MypyTest(SanityMultipleVersion): pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):((?P<column>[0-9]+):)? (?P<level>[^:]+): (?P<message>.*)$' - parsed = parse_to_list_of_dict(pattern, stdout) + parsed = parse_to_list_of_dict(pattern, stdout or '') messages = [SanityMessage( level=r['level'], diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py index c089f834..54b1952f 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py @@ -18,6 +18,11 @@ from . 
import ( SANITY_ROOT, ) +from ...constants import ( + CONTROLLER_PYTHON_VERSIONS, + REMOTE_ONLY_PYTHON_VERSIONS, +) + from ...io import ( make_dirs, ) @@ -38,6 +43,7 @@ from ...util import ( from ...util_common import ( run_command, + process_scoped_temporary_file, ) from ...ansible_util import ( @@ -81,6 +87,8 @@ class PylintTest(SanitySingleVersion): return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')] def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: + min_python_version_db_path = self.create_min_python_db(args, targets.targets) + plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins') plugin_names = sorted(p[0] for p in [ os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__') @@ -163,7 +171,7 @@ class PylintTest(SanitySingleVersion): continue context_start = datetime.datetime.now(tz=datetime.timezone.utc) - messages += self.pylint(args, context, context_paths, plugin_dir, plugin_names, python, collection_detail) + messages += self.pylint(args, context, context_paths, plugin_dir, plugin_names, python, collection_detail, min_python_version_db_path) context_end = datetime.datetime.now(tz=datetime.timezone.utc) context_times.append('%s: %d (%s)' % (context, len(context_paths), context_end - context_start)) @@ -194,6 +202,22 @@ class PylintTest(SanitySingleVersion): return SanitySuccess(self.name) + def create_min_python_db(self, args: SanityConfig, targets: t.Iterable[TestTarget]) -> str: + """Create a database of target file paths and their minimum required Python version, returning the path to the database.""" + target_paths = set(target.path for target in self.filter_remote_targets(list(targets))) + controller_min_version = CONTROLLER_PYTHON_VERSIONS[0] + target_min_version = REMOTE_ONLY_PYTHON_VERSIONS[0] + min_python_versions = { + os.path.abspath(target.path): target_min_version if target.path in target_paths else controller_min_version for target in targets + } + + min_python_version_db_path = process_scoped_temporary_file(args) + + with open(min_python_version_db_path, 'w') as database_file: + json.dump(min_python_versions, database_file) + + return min_python_version_db_path + @staticmethod def pylint( args: SanityConfig, @@ -203,6 +227,7 @@ class PylintTest(SanitySingleVersion): plugin_names: list[str], python: PythonConfig, collection_detail: CollectionDetail, + min_python_version_db_path: str, ) -> list[dict[str, str]]: """Run pylint using the config specified by the context on the specified paths.""" rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', context.split('/')[0] + '.cfg') @@ -234,6 +259,7 @@ class PylintTest(SanitySingleVersion): '--rcfile', rcfile, '--output-format', 'json', '--load-plugins', ','.join(sorted(load_plugins)), + '--min-python-version-db', min_python_version_db_path, ] + paths # fmt: skip if data_context().content.collection: diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py index 3153bc99..e29b5dec 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py +++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py @@ -10,6 +10,7 @@ import typing as t from . 
import ( DOCUMENTABLE_PLUGINS, + MULTI_FILE_PLUGINS, SanitySingleVersion, SanityMessage, SanityFailure, @@ -128,6 +129,10 @@ class ValidateModulesTest(SanitySingleVersion): for target in targets.include: target_per_type[self.get_plugin_type(target)].append(target) + # Remove plugins that cannot be associated to a single file (test and filter plugins). + for plugin_type in MULTI_FILE_PLUGINS: + target_per_type.pop(plugin_type, None) + cmd = [ python.path, os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'), diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py index 7d192e1b..71ce5c4d 100644 --- a/test/lib/ansible_test/_internal/commands/units/__init__.py +++ b/test/lib/ansible_test/_internal/commands/units/__init__.py @@ -253,7 +253,6 @@ def command_units(args: UnitsConfig) -> None: cmd = [ 'pytest', - '--forked', '-r', 'a', '-n', str(args.num_workers) if args.num_workers else 'auto', '--color', 'yes' if args.color else 'no', @@ -262,6 +261,7 @@ def command_units(args: UnitsConfig) -> None: '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)), '--strict-markers', # added in pytest 4.5.0 '--rootdir', data_context().content.root, + '--confcutdir', data_context().content.root, # avoid permission errors when running from an installed version and using pytest >= 8 ] # fmt:skip if not data_context().content.collection: @@ -275,6 +275,8 @@ def command_units(args: UnitsConfig) -> None: if data_context().content.collection: plugins.append('ansible_pytest_collections') + plugins.append('ansible_forked') + if plugins: env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'pytest/plugins') env['PYTEST_PLUGINS'] = ','.join(plugins) diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py index 4e697933..dbc137b5 100644 --- a/test/lib/ansible_test/_internal/config.py +++ b/test/lib/ansible_test/_internal/config.py @@ -8,7 +8,6 @@ import sys import typing as t from .util import ( - display, verify_sys_executable, version_to_str, type_guard, @@ -136,12 +135,6 @@ class EnvironmentConfig(CommonConfig): data_context().register_payload_callback(host_callback) - if args.docker_no_pull: - display.warning('The --docker-no-pull option is deprecated and has no effect. It will be removed in a future version of ansible-test.') - - if args.no_pip_check: - display.warning('The --no-pip-check option is deprecated and has no effect. 
It will be removed in a future version of ansible-test.') - @property def controller(self) -> ControllerHostConfig: """Host configuration for the controller.""" diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py index 869f1fba..92a40a48 100644 --- a/test/lib/ansible_test/_internal/containers.py +++ b/test/lib/ansible_test/_internal/containers.py @@ -3,7 +3,6 @@ from __future__ import annotations import collections.abc as c import contextlib -import enum import json import random import time @@ -46,6 +45,7 @@ from .docker_util import ( get_docker_container_id, get_docker_host_ip, get_podman_host_ip, + get_session_container_name, require_docker, detect_host_properties, ) @@ -101,14 +101,6 @@ class HostType: managed = 'managed' -class CleanupMode(enum.Enum): - """How container cleanup should be handled.""" - - YES = enum.auto() - NO = enum.auto() - INFO = enum.auto() - - def run_support_container( args: EnvironmentConfig, context: str, @@ -117,8 +109,7 @@ def run_support_container( ports: list[int], aliases: t.Optional[list[str]] = None, start: bool = True, - allow_existing: bool = False, - cleanup: t.Optional[CleanupMode] = None, + cleanup: bool = True, cmd: t.Optional[list[str]] = None, env: t.Optional[dict[str, str]] = None, options: t.Optional[list[str]] = None, @@ -128,6 +119,8 @@ def run_support_container( Start a container used to support tests, but not run them. Containers created this way will be accessible from tests. """ + name = get_session_container_name(args, name) + if args.prime_containers: docker_pull(args, image) return None @@ -165,46 +158,13 @@ def run_support_container( options.extend(['--ulimit', 'nofile=%s' % max_open_files]) - support_container_id = None - - if allow_existing: - try: - container = docker_inspect(args, name) - except ContainerNotFoundError: - container = None - - if container: - support_container_id = container.id - - if not container.running: - display.info('Ignoring existing "%s" container which is not running.' % name, verbosity=1) - support_container_id = None - elif not container.image: - display.info('Ignoring existing "%s" container which has the wrong image.' % name, verbosity=1) - support_container_id = None - elif publish_ports and not all(port and len(port) == 1 for port in [container.get_tcp_port(port) for port in ports]): - display.info('Ignoring existing "%s" container which does not have the required published ports.' % name, verbosity=1) - support_container_id = None - - if not support_container_id: - docker_rm(args, name) - if args.dev_systemd_debug: options.extend(('--env', 'SYSTEMD_LOG_LEVEL=debug')) - if support_container_id: - display.info('Using existing "%s" container.' % name) - running = True - existing = True - else: - display.info('Starting new "%s" container.' % name) - docker_pull(args, image) - support_container_id = run_container(args, image, name, options, create_only=not start, cmd=cmd) - running = start - existing = False - - if cleanup is None: - cleanup = CleanupMode.INFO if existing else CleanupMode.YES + display.info('Starting new "%s" container.' 
% name) + docker_pull(args, image) + support_container_id = run_container(args, image, name, options, create_only=not start, cmd=cmd) + running = start descriptor = ContainerDescriptor( image, @@ -215,7 +175,6 @@ def run_support_container( aliases, publish_ports, running, - existing, cleanup, env, ) @@ -694,8 +653,7 @@ class ContainerDescriptor: aliases: list[str], publish_ports: bool, running: bool, - existing: bool, - cleanup: CleanupMode, + cleanup: bool, env: t.Optional[dict[str, str]], ) -> None: self.image = image @@ -706,7 +664,6 @@ class ContainerDescriptor: self.aliases = aliases self.publish_ports = publish_ports self.running = running - self.existing = existing self.cleanup = cleanup self.env = env self.details: t.Optional[SupportContainer] = None @@ -805,10 +762,8 @@ def wait_for_file( def cleanup_containers(args: EnvironmentConfig) -> None: """Clean up containers.""" for container in support_containers.values(): - if container.cleanup == CleanupMode.YES: - docker_rm(args, container.container_id) - elif container.cleanup == CleanupMode.INFO: - display.notice(f'Remember to run `{require_docker().command} rm -f {container.name}` when finished testing.') + if container.cleanup: + docker_rm(args, container.name) def create_hosts_entries(context: dict[str, ContainerAccess]) -> list[str]: diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py index 6e44b3d9..77e6753f 100644 --- a/test/lib/ansible_test/_internal/core_ci.py +++ b/test/lib/ansible_test/_internal/core_ci.py @@ -28,7 +28,6 @@ from .io import ( from .util import ( ApplicationError, display, - ANSIBLE_TEST_TARGET_ROOT, mutex, ) @@ -292,18 +291,12 @@ class AnsibleCoreCI: """Start instance.""" display.info(f'Initializing new {self.label} instance using: {self._uri}', verbosity=1) - if self.platform == 'windows': - winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1')) - else: - winrm_config = None - data = dict( config=dict( platform=self.platform, version=self.version, architecture=self.arch, public_key=self.ssh_key.pub_contents, - winrm_config=winrm_config, ) ) diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py index ae640249..30176236 100644 --- a/test/lib/ansible_test/_internal/coverage_util.py +++ b/test/lib/ansible_test/_internal/coverage_util.py @@ -69,7 +69,8 @@ class CoverageVersion: COVERAGE_VERSIONS = ( # IMPORTANT: Keep this in sync with the ansible-test.txt requirements file. 
- CoverageVersion('6.5.0', 7, (3, 7), (3, 11)), + CoverageVersion('7.3.2', 7, (3, 8), (3, 12)), + CoverageVersion('6.5.0', 7, (3, 7), (3, 7)), CoverageVersion('4.5.4', 0, (2, 6), (3, 6)), ) """ @@ -250,7 +251,9 @@ def generate_ansible_coverage_config() -> str: coverage_config = ''' [run] branch = True -concurrency = multiprocessing +concurrency = + multiprocessing + thread parallel = True omit = @@ -271,7 +274,9 @@ def generate_collection_coverage_config(args: TestConfig) -> str: coverage_config = ''' [run] branch = True -concurrency = multiprocessing +concurrency = + multiprocessing + thread parallel = True disable_warnings = no-data-collected diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index f9e54455..84896830 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -328,7 +328,6 @@ def filter_options( ) -> c.Iterable[str]: """Return an iterable that filters out unwanted CLI options and injects new ones as requested.""" replace: list[tuple[str, int, t.Optional[t.Union[bool, str, list[str]]]]] = [ - ('--docker-no-pull', 0, False), ('--truncate', 1, str(args.truncate)), ('--color', 1, 'yes' if args.color else 'no'), ('--redact', 0, False), diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py index 2ddc2ff9..5a94aafc 100644 --- a/test/lib/ansible_test/_internal/diff.py +++ b/test/lib/ansible_test/_internal/diff.py @@ -143,7 +143,7 @@ class DiffParser: traceback.format_exc(), ) - raise ApplicationError(message.strip()) + raise ApplicationError(message.strip()) from None self.previous_line = self.line diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py index 06f383b5..52b9691e 100644 --- a/test/lib/ansible_test/_internal/docker_util.py +++ b/test/lib/ansible_test/_internal/docker_util.py @@ -300,7 +300,7 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties: options = ['--volume', '/sys/fs/cgroup:/probe:ro'] cmd = ['sh', '-c', ' && echo "-" && '.join(multi_line_commands)] - stdout = run_utility_container(args, f'ansible-test-probe-{args.session_name}', cmd, options)[0] + stdout = run_utility_container(args, 'ansible-test-probe', cmd, options)[0] if args.explain: return ContainerHostProperties( @@ -336,7 +336,7 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties: cmd = ['sh', '-c', 'ulimit -Hn'] try: - stdout = run_utility_container(args, f'ansible-test-ulimit-{args.session_name}', cmd, options)[0] + stdout = run_utility_container(args, 'ansible-test-ulimit', cmd, options)[0] except SubprocessError as ex: display.warning(str(ex)) else: @@ -402,6 +402,11 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties: return properties +def get_session_container_name(args: CommonConfig, name: str) -> str: + """Return the given container name with the current test session name applied to it.""" + return f'{name}-{args.session_name}' + + def run_utility_container( args: CommonConfig, name: str, @@ -410,6 +415,8 @@ def run_utility_container( data: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run the specified command using the ansible-test utility container, returning stdout and stderr.""" + name = get_session_container_name(args, name) + options = options + [ '--name', name, '--rm', diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py 
index a51eb693..09812456 100644 --- a/test/lib/ansible_test/_internal/host_profiles.py +++ b/test/lib/ansible_test/_internal/host_profiles.py @@ -99,7 +99,6 @@ from .ansible_util import ( ) from .containers import ( - CleanupMode, HostType, get_container_database, run_support_container, @@ -447,7 +446,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do @property def label(self) -> str: """Label to apply to resources related to this profile.""" - return f'{"controller" if self.controller else "target"}-{self.args.session_name}' + return f'{"controller" if self.controller else "target"}' def provision(self) -> None: """Provision the host before delegation.""" @@ -462,7 +461,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do ports=[22], publish_ports=not self.controller, # connections to the controller over SSH are not required options=init_config.options, - cleanup=CleanupMode.NO, + cleanup=False, cmd=self.build_init_command(init_config, init_probe), ) @@ -807,6 +806,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do - Avoid hanging indefinitely or for an unreasonably long time. NOTE: The container must have a POSIX-compliant default shell "sh" with a non-builtin "sleep" command. + The "sleep" command is invoked through "env" to avoid using a shell builtin "sleep" (if present). """ command = '' @@ -814,7 +814,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do command += f'{init_config.command} && ' if sleep or init_config.command_privileged: - command += 'sleep 60 ; ' + command += 'env sleep 60 ; ' if not command: return None @@ -838,7 +838,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do """Check the cgroup v1 systemd hierarchy to verify it is writeable for our container.""" probe_script = (read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'check_systemd_cgroup_v1.sh')) .replace('@MARKER@', self.MARKER) - .replace('@LABEL@', self.label)) + .replace('@LABEL@', f'{self.label}-{self.args.session_name}')) cmd = ['sh'] @@ -853,7 +853,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do def create_systemd_cgroup_v1(self) -> str: """Create a unique ansible-test cgroup in the v1 systemd hierarchy and return its path.""" - self.cgroup_path = f'/sys/fs/cgroup/systemd/ansible-test-{self.label}' + self.cgroup_path = f'/sys/fs/cgroup/systemd/ansible-test-{self.label}-{self.args.session_name}' # Privileged mode is required to create the cgroup directories on some hosts, such as Fedora 36 and RHEL 9.0. # The mkdir command will fail with "Permission denied" otherwise. 
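
The docker_util.py and host_profiles.py changes above move session scoping into a shared helper: names passed to run_utility_container() and run_support_container() are routed through get_session_container_name(), which appends args.session_name, and the systemd cgroup v1 path for a profile gains the same suffix. The following is a minimal sketch of that naming pattern only; the Session dataclass is a hypothetical stand-in for ansible-test's config object, and the helpers mirror the pattern visible in the diff rather than the real implementations.

    # Sketch only: per-session suffixing as introduced by get_session_container_name()
    # in docker_util.py. "Session" is a hypothetical stand-in for ansible-test's
    # config object, which carries the real session_name.
    from __future__ import annotations

    import dataclasses


    @dataclasses.dataclass(frozen=True)
    class Session:
        """Hypothetical holder for the name generated once per ansible-test run."""

        session_name: str


    def session_container_name(session: Session, name: str) -> str:
        """Return the base container name with the session name appended."""
        return f'{name}-{session.session_name}'


    def session_cgroup_path(session: Session, label: str) -> str:
        """Return a session-scoped systemd cgroup v1 path for a container profile."""
        return f'/sys/fs/cgroup/systemd/ansible-test-{label}-{session.session_name}'


    if __name__ == '__main__':
        session = Session(session_name='4f9c2b')  # illustrative value only
        # Concurrent sessions get distinct names, so cleanup can remove containers
        # by name without touching another session's resources.
        print(session_container_name(session, 'pypi-test-container'))  # pypi-test-container-4f9c2b
        print(session_cgroup_path(session, 'controller'))              # /sys/fs/cgroup/systemd/ansible-test-controller-4f9c2b

Because every support container name is now session-scoped, cleanup_containers() can remove containers by name (docker_rm(args, container.name)) instead of tracking the old CleanupMode enum and existing-container handling, and leftovers from one session never collide with another.
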
diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py index 8b4154bf..66afc60d 100644 --- a/test/lib/ansible_test/_internal/http.py +++ b/test/lib/ansible_test/_internal/http.py @@ -126,7 +126,7 @@ class HttpResponse: try: return json.loads(self.response) except ValueError: - raise HttpError(self.status_code, 'Cannot parse response to %s %s as JSON:\n%s' % (self.method, self.url, self.response)) + raise HttpError(self.status_code, 'Cannot parse response to %s %s as JSON:\n%s' % (self.method, self.url, self.response)) from None class HttpError(ApplicationError): diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py index 76c8878b..8c4dba01 100644 --- a/test/lib/ansible_test/_internal/junit_xml.py +++ b/test/lib/ansible_test/_internal/junit_xml.py @@ -15,7 +15,7 @@ from xml.dom import minidom from xml.etree import ElementTree as ET -@dataclasses.dataclass # type: ignore[misc] # https://github.com/python/mypy/issues/5374 +@dataclasses.dataclass class TestResult(metaclass=abc.ABCMeta): """Base class for the result of a test case.""" diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py index 5380dd9b..d119efa1 100644 --- a/test/lib/ansible_test/_internal/pypi_proxy.py +++ b/test/lib/ansible_test/_internal/pypi_proxy.py @@ -76,7 +76,7 @@ def run_pypi_proxy(args: EnvironmentConfig, targets_use_pypi: bool) -> None: args=args, context='__pypi_proxy__', image=image, - name=f'pypi-test-container-{args.session_name}', + name='pypi-test-container', ports=[port], ) diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py index 506b802c..81006e41 100644 --- a/test/lib/ansible_test/_internal/python_requirements.py +++ b/test/lib/ansible_test/_internal/python_requirements.py @@ -297,7 +297,7 @@ def run_pip( connection.run([python.path], data=script, capture=True) except SubprocessError as ex: if 'pip is unavailable:' in ex.stdout + ex.stderr: - raise PipUnavailableError(python) + raise PipUnavailableError(python) from None raise @@ -441,8 +441,8 @@ def get_venv_packages(python: PythonConfig) -> dict[str, str]: # See: https://github.com/ansible/base-test-container/blob/main/files/installer.py default_packages = dict( - pip='21.3.1', - setuptools='60.8.2', + pip='23.1.2', + setuptools='67.7.2', wheel='0.37.1', ) @@ -452,11 +452,6 @@ def get_venv_packages(python: PythonConfig) -> dict[str, str]: setuptools='44.1.1', # 45.0.0 requires Python 3.5+ wheel=None, ), - '3.5': dict( - pip='20.3.4', # 21.0 requires Python 3.6+ - setuptools='50.3.2', # 51.0.0 requires Python 3.6+ - wheel=None, - ), '3.6': dict( pip='21.3.1', # 22.0 requires Python 3.7+ setuptools='59.6.0', # 59.7.0 requires Python 3.7+ diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index 1859be5b..394c2632 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -31,11 +31,6 @@ from termios import TIOCGWINSZ # CAUTION: Avoid third-party imports in this module whenever possible. # Any third-party imports occurring here will result in an error if they are vendored by ansible-core. 
-try: - from typing_extensions import TypeGuard # TypeGuard was added in Python 3.10 -except ImportError: - TypeGuard = None - from .locale_util import ( LOCALE_WARNING, CONFIGURED_LOCALE, @@ -436,7 +431,7 @@ def raw_command( display.info(f'{description}: {escaped_cmd}', verbosity=cmd_verbosity, truncate=True) display.info('Working directory: %s' % cwd, verbosity=2) - program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning') + program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required=False) if program: display.info('Program found: %s' % program, verbosity=2) @@ -1155,7 +1150,7 @@ def verify_sys_executable(path: str) -> t.Optional[str]: return expected_executable -def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> TypeGuard[c.Sequence[C]]: +def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> t.TypeGuard[c.Sequence[C]]: """ Raises an exception if any item in the given sequence does not match the specified guard type. Use with assert so that type checkers are aware of the type guard. diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py index 222366e4..77a6165c 100644 --- a/test/lib/ansible_test/_internal/util_common.py +++ b/test/lib/ansible_test/_internal/util_common.py @@ -88,7 +88,7 @@ class ExitHandler: try: func(*args, **kwargs) - except BaseException as ex: # pylint: disable=broad-except + except BaseException as ex: # pylint: disable=broad-exception-caught last_exception = ex display.fatal(f'Exit handler failed: {ex}') @@ -498,9 +498,14 @@ def run_command( ) -def yamlcheck(python: PythonConfig) -> t.Optional[bool]: +def yamlcheck(python: PythonConfig, explain: bool = False) -> t.Optional[bool]: """Return True if PyYAML has libyaml support, False if it does not and None if it was not found.""" - result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0]) + stdout = raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True, explain=explain)[0] + + if explain: + return None + + result = json.loads(stdout) if not result['yaml']: return None diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json index 88858aeb..da4a0b10 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json @@ -2,6 +2,10 @@ "extensions": [ ".py" ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], "ignore_self": true, "output": "path-line-column-message" } diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json index 88858aeb..da4a0b10 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json @@ -2,6 +2,10 @@ "extensions": [ ".py" ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], "ignore_self": true, "output": "path-line-column-message" } diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py index 6cf27774..188d50fe 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py 
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py @@ -16,9 +16,19 @@ from voluptuous.humanize import humanize_error from ansible.module_utils.compat.version import StrictVersion, LooseVersion from ansible.module_utils.six import string_types +from ansible.utils.collection_loader import AnsibleCollectionRef from ansible.utils.version import SemanticVersion +def fqcr(value): + """Validate a FQCR.""" + if not isinstance(value, string_types): + raise Invalid('Must be a string that is a FQCR') + if not AnsibleCollectionRef.is_valid_fqcr(value): + raise Invalid('Must be a FQCR') + return value + + def isodate(value, check_deprecation_date=False, is_tombstone=False): """Validate a datetime.date or ISO 8601 date string.""" # datetime.date objects come from YAML dates, these are ok @@ -126,12 +136,15 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): with open(path, 'r', encoding='utf-8') as f_path: routing = yaml.safe_load(f_path) except yaml.error.MarkedYAMLError as ex: - print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line + - 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) + print('%s:%d:%d: YAML load failed: %s' % ( + path, + ex.context_mark.line + 1 if ex.context_mark else 0, + ex.context_mark.column + 1 if ex.context_mark else 0, + re.sub(r'\s+', ' ', str(ex)), + )) return except Exception as ex: # pylint: disable=broad-except - print('%s:%d:%d: YAML load failed: %s' % - (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + print('%s:%d:%d: YAML load failed: %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) return if is_ansible: @@ -184,17 +197,37 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): avoid_additional_data ) - plugin_routing_schema = Any( - Schema({ - ('deprecation'): Any(deprecation_schema), - ('tombstone'): Any(tombstoning_schema), - ('redirect'): Any(*string_types), - }, extra=PREVENT_EXTRA), + plugins_routing_common_schema = Schema({ + ('deprecation'): Any(deprecation_schema), + ('tombstone'): Any(tombstoning_schema), + ('redirect'): fqcr, + }, extra=PREVENT_EXTRA) + + plugin_routing_schema = Any(plugins_routing_common_schema) + + # Adjusted schema for modules only + plugin_routing_schema_modules = Any( + plugins_routing_common_schema.extend({ + ('action_plugin'): fqcr} + ) + ) + + # Adjusted schema for module_utils + plugin_routing_schema_mu = Any( + plugins_routing_common_schema.extend({ + ('redirect'): Any(*string_types)} + ), ) list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema} for str_type in string_types] + list_dict_plugin_routing_schema_mu = [{str_type: plugin_routing_schema_mu} + for str_type in string_types] + + list_dict_plugin_routing_schema_modules = [{str_type: plugin_routing_schema_modules} + for str_type in string_types] + plugin_schema = Schema({ ('action'): Any(None, *list_dict_plugin_routing_schema), ('become'): Any(None, *list_dict_plugin_routing_schema), @@ -207,8 +240,8 @@ def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): ('httpapi'): Any(None, *list_dict_plugin_routing_schema), ('inventory'): Any(None, *list_dict_plugin_routing_schema), ('lookup'): Any(None, *list_dict_plugin_routing_schema), - ('module_utils'): Any(None, *list_dict_plugin_routing_schema), - ('modules'): Any(None, *list_dict_plugin_routing_schema), + ('module_utils'): Any(None, *list_dict_plugin_routing_schema_mu), + ('modules'): Any(None, *list_dict_plugin_routing_schema_modules), ('netconf'): Any(None, 
*list_dict_plugin_routing_schema), ('shell'): Any(None, *list_dict_plugin_routing_schema), ('strategy'): Any(None, *list_dict_plugin_routing_schema), diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json index 776590b7..ccee80a2 100644 --- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json @@ -2,5 +2,9 @@ "extensions": [ ".py" ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], "output": "path-line-column-message" } diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini index 4d93f359..41d824b2 100644 --- a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini +++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-core.ini @@ -34,6 +34,9 @@ ignore_missing_imports = True [mypy-md5.*] ignore_missing_imports = True +[mypy-imp.*] +ignore_missing_imports = True + [mypy-scp.*] ignore_missing_imports = True diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini index 55738f87..6be35724 100644 --- a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini +++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini @@ -6,10 +6,10 @@ # There are ~350 errors reported in ansible-test when strict optional checking is enabled. # Until the number of occurrences are greatly reduced, it's better to disable strict checking. strict_optional = False -# There are ~25 errors reported in ansible-test under the 'misc' code. -# The majority of those errors are "Only concrete class can be given", which is due to a limitation of mypy. -# See: https://github.com/python/mypy/issues/5374 -disable_error_code = misc +# There are ~13 type-abstract errors reported in ansible-test. +# This is due to assumptions mypy makes about Type and abstract types. +# See: https://discuss.python.org/t/add-abstracttype-to-the-typing-module/21996/13 +disable_error_code = type-abstract [mypy-argcomplete] ignore_missing_imports = True diff --git a/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt index 659c7f59..4d1de692 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt +++ b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt @@ -2,3 +2,8 @@ E402 W503 W504 E741 + +# The E203 rule is not PEP 8 compliant. +# Unfortunately this means it also conflicts with the output from `black`. 
+# See: https://github.com/PyCQA/pycodestyle/issues/373 +E203 diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 index 2ae13b4c..7beb38c1 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 +++ b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 @@ -4,6 +4,9 @@ Enable = $true MaximumLineLength = 160 } + PSAvoidSemicolonsAsLineTerminators = @{ + Enable = $true + } PSPlaceOpenBrace = @{ Enable = $true OnSameLine = $true diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg index aa347729..f8a0a8af 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg @@ -10,6 +10,7 @@ disable= raise-missing-from, # Python 2.x does not support raise from super-with-arguments, # Python 2.x does not support super without arguments redundant-u-string-prefix, # Python 2.x support still required + broad-exception-raised, # many exceptions with no need for a custom type too-few-public-methods, too-many-arguments, too-many-branches, @@ -19,6 +20,7 @@ disable= too-many-nested-blocks, too-many-return-statements, too-many-statements, + use-dict-literal, # ignoring as a common style issue useless-return, # complains about returning None when the return type is optional [BASIC] @@ -55,3 +57,5 @@ preferred-modules = # Listing them here makes it possible to enable the import-error check. ignored-modules = py, + pytest, + _pytest.runner, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg index 1c03472c..5bec36fd 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg @@ -7,7 +7,7 @@ disable= deprecated-module, # results vary by Python version duplicate-code, # consistent results require running with --jobs 1 and testing all files import-outside-toplevel, # common pattern in ansible related code - raise-missing-from, # Python 2.x does not support raise from + broad-exception-raised, # many exceptions with no need for a custom type too-few-public-methods, too-many-public-methods, too-many-arguments, @@ -18,6 +18,7 @@ disable= too-many-nested-blocks, too-many-return-statements, too-many-statements, + use-dict-literal, # ignoring as a common style issue unspecified-encoding, # always run with UTF-8 encoding enforced useless-return, # complains about returning None when the return type is optional diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg index e3aa8eed..c30eb37a 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg @@ -17,6 +17,7 @@ disable= too-many-nested-blocks, too-many-return-statements, too-many-statements, + use-dict-literal, # ignoring as a common style issue unspecified-encoding, # always run with UTF-8 encoding enforced useless-return, # complains about returning None when the return type is optional diff --git 
a/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg index 38b8d2d0..762d488d 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg @@ -9,7 +9,8 @@ disable= attribute-defined-outside-init, bad-indentation, bad-mcs-classmethod-argument, - broad-except, + broad-exception-caught, + broad-exception-raised, c-extension-no-member, cell-var-from-loop, chained-comparison, @@ -29,6 +30,7 @@ disable= consider-using-max-builtin, consider-using-min-builtin, cyclic-import, # consistent results require running with --jobs 1 and testing all files + deprecated-comment, # custom plugin only used by ansible-core, not collections deprecated-method, # results vary by Python version deprecated-module, # results vary by Python version duplicate-code, # consistent results require running with --jobs 1 and testing all files @@ -95,8 +97,6 @@ disable= too-many-public-methods, too-many-return-statements, too-many-statements, - trailing-comma-tuple, - trailing-comma-tuple, try-except-raise, unbalanced-tuple-unpacking, undefined-loop-variable, @@ -110,10 +110,9 @@ disable= unsupported-delete-operation, unsupported-membership-test, unused-argument, - unused-import, unused-variable, unspecified-encoding, # always run with UTF-8 encoding enforced - use-dict-literal, # many occurrences + use-dict-literal, # ignoring as a common style issue use-list-literal, # many occurrences use-implicit-booleaness-not-comparison, # many occurrences useless-object-inheritance, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg index 6a242b8d..825e5df7 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg @@ -10,7 +10,8 @@ disable= attribute-defined-outside-init, bad-indentation, bad-mcs-classmethod-argument, - broad-except, + broad-exception-caught, + broad-exception-raised, c-extension-no-member, cell-var-from-loop, chained-comparison, @@ -61,8 +62,6 @@ disable= not-a-mapping, not-an-iterable, not-callable, - pointless-statement, - pointless-string-statement, possibly-unused-variable, protected-access, raise-missing-from, # Python 2.x does not support raise from @@ -91,8 +90,6 @@ disable= too-many-public-methods, too-many-return-statements, too-many-statements, - trailing-comma-tuple, - trailing-comma-tuple, try-except-raise, unbalanced-tuple-unpacking, undefined-loop-variable, @@ -105,10 +102,9 @@ disable= unsupported-delete-operation, unsupported-membership-test, unused-argument, - unused-import, unused-variable, unspecified-encoding, # always run with UTF-8 encoding enforced - use-dict-literal, # many occurrences + use-dict-literal, # ignoring as a common style issue use-list-literal, # many occurrences use-implicit-booleaness-not-comparison, # many occurrences useless-object-inheritance, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py index 79b8bf15..f6c83373 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py @@ -5,14 +5,31 @@ from __future__ import annotations import datetime 
+import functools +import json import re +import shlex import typing as t +from tokenize import COMMENT, TokenInfo import astroid -from pylint.interfaces import IAstroidChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import check_messages +# support pylint 2.x and 3.x -- remove when supporting only 3.x +try: + from pylint.interfaces import IAstroidChecker, ITokenChecker +except ImportError: + class IAstroidChecker: + """Backwards compatibility for 2.x / 3.x support.""" + + class ITokenChecker: + """Backwards compatibility for 2.x / 3.x support.""" + +try: + from pylint.checkers.utils import check_messages +except ImportError: + from pylint.checkers.utils import only_required_for_messages as check_messages + +from pylint.checkers import BaseChecker, BaseTokenChecker from ansible.module_utils.compat.version import LooseVersion from ansible.module_utils.six import string_types @@ -95,7 +112,7 @@ ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3])) def _get_expr_name(node): - """Funciton to get either ``attrname`` or ``name`` from ``node.func.expr`` + """Function to get either ``attrname`` or ``name`` from ``node.func.expr`` Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated`` """ @@ -106,6 +123,17 @@ def _get_expr_name(node): return node.func.expr.name +def _get_func_name(node): + """Function to get either ``attrname`` or ``name`` from ``node.func`` + + Created specifically for the case of ``from ansible.module_utils.common.warnings import deprecate`` + """ + try: + return node.func.attrname + except AttributeError: + return node.func.name + + def parse_isodate(value): """Parse an ISO 8601 date string.""" msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' @@ -118,7 +146,7 @@ def parse_isodate(value): try: return datetime.datetime.strptime(value, '%Y-%m-%d').date() except ValueError: - raise ValueError(msg) + raise ValueError(msg) from None class AnsibleDeprecatedChecker(BaseChecker): @@ -160,6 +188,8 @@ class AnsibleDeprecatedChecker(BaseChecker): self.add_message('ansible-deprecated-date', node=node, args=(date,)) def _check_version(self, node, version, collection_name): + if collection_name is None: + collection_name = 'ansible.builtin' if not isinstance(version, (str, float)): if collection_name == 'ansible.builtin': symbol = 'ansible-invalid-deprecated-version' @@ -197,12 +227,17 @@ class AnsibleDeprecatedChecker(BaseChecker): @property def collection_name(self) -> t.Optional[str]: """Return the collection name, or None if ansible-core is being tested.""" - return self.config.collection_name + return self.linter.config.collection_name @property def collection_version(self) -> t.Optional[SemanticVersion]: """Return the collection version, or None if ansible-core is being tested.""" - return SemanticVersion(self.config.collection_version) if self.config.collection_version is not None else None + if self.linter.config.collection_version is None: + return None + sem_ver = SemanticVersion(self.linter.config.collection_version) + # Ignore pre-release for version comparison to catch issues before the final release is cut. 
+ sem_ver.prerelease = () + return sem_ver @check_messages(*(MSGS.keys())) def visit_call(self, node): @@ -211,8 +246,9 @@ class AnsibleDeprecatedChecker(BaseChecker): date = None collection_name = None try: - if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or - node.func.attrname == 'deprecate' and _get_expr_name(node)): + funcname = _get_func_name(node) + if (funcname == 'deprecated' and 'display' in _get_expr_name(node) or + funcname == 'deprecate'): if node.keywords: for keyword in node.keywords: if len(node.keywords) == 1 and keyword.arg is None: @@ -258,6 +294,137 @@ class AnsibleDeprecatedChecker(BaseChecker): pass +class AnsibleDeprecatedCommentChecker(BaseTokenChecker): + """Checks for ``# deprecated:`` comments to ensure that the ``version`` + has not passed or met the time for removal + """ + + __implements__ = (ITokenChecker,) + + name = 'deprecated-comment' + msgs = { + 'E9601': ("Deprecated core version (%r) found: %s", + "ansible-deprecated-version-comment", + "Used when a '# deprecated:' comment specifies a version " + "less than or equal to the current version of Ansible", + {'minversion': (2, 6)}), + 'E9602': ("Deprecated comment contains invalid keys %r", + "ansible-deprecated-version-comment-invalid-key", + "Used when a '#deprecated:' comment specifies invalid data", + {'minversion': (2, 6)}), + 'E9603': ("Deprecated comment missing version", + "ansible-deprecated-version-comment-missing-version", + "Used when a '#deprecated:' comment specifies invalid data", + {'minversion': (2, 6)}), + 'E9604': ("Deprecated python version (%r) found: %s", + "ansible-deprecated-python-version-comment", + "Used when a '#deprecated:' comment specifies a python version " + "less than or equal to the minimum python version", + {'minversion': (2, 6)}), + 'E9605': ("Deprecated comment contains invalid version %r: %s", + "ansible-deprecated-version-comment-invalid-version", + "Used when a '#deprecated:' comment specifies an invalid version", + {'minversion': (2, 6)}), + } + + options = ( + ('min-python-version-db', { + 'default': None, + 'type': 'string', + 'metavar': '<path>', + 'help': 'The path to the DB mapping paths to minimum Python versions.', + }), + ) + + def process_tokens(self, tokens: list[TokenInfo]) -> None: + for token in tokens: + if token.type == COMMENT: + self._process_comment(token) + + def _deprecated_string_to_dict(self, token: TokenInfo, string: str) -> dict[str, str]: + valid_keys = {'description', 'core_version', 'python_version'} + data = dict.fromkeys(valid_keys) + for opt in shlex.split(string): + if '=' not in opt: + data[opt] = None + continue + key, _sep, value = opt.partition('=') + data[key] = value + if not any((data['core_version'], data['python_version'])): + self.add_message( + 'ansible-deprecated-version-comment-missing-version', + line=token.start[0], + col_offset=token.start[1], + ) + bad = set(data).difference(valid_keys) + if bad: + self.add_message( + 'ansible-deprecated-version-comment-invalid-key', + line=token.start[0], + col_offset=token.start[1], + args=(','.join(bad),) + ) + return data + + @functools.cached_property + def _min_python_version_db(self) -> dict[str, str]: + """A dictionary of absolute file paths and their minimum required Python version.""" + with open(self.linter.config.min_python_version_db) as db_file: + return json.load(db_file) + + def _process_python_version(self, token: TokenInfo, data: dict[str, str]) -> None: + current_file = self.linter.current_file + check_version = 
self._min_python_version_db[current_file] + + try: + if LooseVersion(data['python_version']) < LooseVersion(check_version): + self.add_message( + 'ansible-deprecated-python-version-comment', + line=token.start[0], + col_offset=token.start[1], + args=( + data['python_version'], + data['description'] or 'description not provided', + ), + ) + except (ValueError, TypeError) as exc: + self.add_message( + 'ansible-deprecated-version-comment-invalid-version', + line=token.start[0], + col_offset=token.start[1], + args=(data['python_version'], exc) + ) + + def _process_core_version(self, token: TokenInfo, data: dict[str, str]) -> None: + try: + if ANSIBLE_VERSION >= LooseVersion(data['core_version']): + self.add_message( + 'ansible-deprecated-version-comment', + line=token.start[0], + col_offset=token.start[1], + args=( + data['core_version'], + data['description'] or 'description not provided', + ) + ) + except (ValueError, TypeError) as exc: + self.add_message( + 'ansible-deprecated-version-comment-invalid-version', + line=token.start[0], + col_offset=token.start[1], + args=(data['core_version'], exc) + ) + + def _process_comment(self, token: TokenInfo) -> None: + if token.string.startswith('# deprecated:'): + data = self._deprecated_string_to_dict(token, token.string[13:].strip()) + if data['core_version']: + self._process_core_version(token, data) + if data['python_version']: + self._process_python_version(token, data) + + def register(linter): """required method to auto register this checker """ linter.register_checker(AnsibleDeprecatedChecker(linter)) + linter.register_checker(AnsibleDeprecatedCommentChecker(linter)) diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py index 934a9ae7..83c27734 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py @@ -5,23 +5,26 @@ from __future__ import annotations import astroid -from pylint.interfaces import IAstroidChecker -from pylint.checkers import BaseChecker -from pylint.checkers import utils -from pylint.checkers.utils import check_messages + +# support pylint 2.x and 3.x -- remove when supporting only 3.x +try: + from pylint.interfaces import IAstroidChecker +except ImportError: + class IAstroidChecker: + """Backwards compatibility for 2.x / 3.x support.""" + try: - from pylint.checkers.utils import parse_format_method_string + from pylint.checkers.utils import check_messages except ImportError: - # noinspection PyUnresolvedReferences - from pylint.checkers.strings import parse_format_method_string + from pylint.checkers.utils import only_required_for_messages as check_messages + +from pylint.checkers import BaseChecker +from pylint.checkers import utils MSGS = { - 'E9305': ("Format string contains automatic field numbering " - "specification", + 'E9305': ("disabled", # kept for backwards compatibility with inline ignores, remove after 2.14 is EOL "ansible-format-automatic-specification", - "Used when a PEP 3101 format string contains automatic " - "field numbering (e.g. 
'{}').", - {'minversion': (2, 6)}), + "disabled"), 'E9390': ("bytes object has no .format attribute", "ansible-no-format-on-bytestring", "Used when a bytestring was used as a PEP 3101 format string " @@ -64,20 +67,6 @@ class AnsibleStringFormatChecker(BaseChecker): if isinstance(strnode.value, bytes): self.add_message('ansible-no-format-on-bytestring', node=node) return - if not isinstance(strnode.value, str): - return - - if node.starargs or node.kwargs: - return - try: - num_args = parse_format_method_string(strnode.value)[1] - except utils.IncompleteFormatString: - return - - if num_args: - self.add_message('ansible-format-automatic-specification', - node=node) - return def register(linter): diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py index 1be42f51..f121ea58 100644 --- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py @@ -6,8 +6,14 @@ import typing as t import astroid +# support pylint 2.x and 3.x -- remove when supporting only 3.x +try: + from pylint.interfaces import IAstroidChecker +except ImportError: + class IAstroidChecker: + """Backwards compatibility for 2.x / 3.x support.""" + from pylint.checkers import BaseChecker -from pylint.interfaces import IAstroidChecker ANSIBLE_TEST_MODULES_PATH = os.environ['ANSIBLE_TEST_MODULES_PATH'] ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH'] @@ -94,10 +100,7 @@ class AnsibleUnwantedChecker(BaseChecker): )), # see https://docs.python.org/3/library/collections.abc.html - collections=UnwantedEntry('ansible.module_utils.common._collections_compat', - ignore_paths=( - '/lib/ansible/module_utils/common/_collections_compat.py', - ), + collections=UnwantedEntry('ansible.module_utils.six.moves.collections_abc', names=( 'MappingView', 'ItemsView', diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py index 25c61798..2b92a56c 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py @@ -33,6 +33,9 @@ from collections.abc import Mapping from contextlib import contextmanager from fnmatch import fnmatch +from antsibull_docs_parser import dom +from antsibull_docs_parser.parser import parse, Context + import yaml from voluptuous.humanize import humanize_error @@ -63,6 +66,7 @@ setup_collection_loader() from ansible import __version__ as ansible_version from ansible.executor.module_common import REPLACER_WINDOWS, NEW_STYLE_PYTHON_MODULE_RE +from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS from ansible.module_utils.compat.version import StrictVersion, LooseVersion from ansible.module_utils.basic import to_bytes @@ -74,9 +78,13 @@ from ansible.utils.version import SemanticVersion from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_argument_spec -from .schema import ansible_module_kwargs_schema, doc_schema, return_schema +from .schema import ( + ansible_module_kwargs_schema, + doc_schema, + return_schema, +) -from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate +from 
.utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, parse_yaml, parse_isodate if PY3: @@ -297,8 +305,6 @@ class ModuleValidator(Validator): # win_dsc is a dynamic arg spec, the docs won't ever match PS_ARG_VALIDATE_REJECTLIST = frozenset(('win_dsc.ps1', )) - ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function')) - def __init__(self, path, git_cache: GitCache, analyze_arg_spec=False, collection=None, collection_version=None, reporter=None, routing=None, plugin_type='module'): super(ModuleValidator, self).__init__(reporter=reporter or Reporter()) @@ -401,13 +407,10 @@ class ModuleValidator(Validator): if isinstance(child, ast.Expr) and isinstance(child.value, ast.Constant) and isinstance(child.value.value, str): continue - # allowed from __future__ imports + # allow __future__ imports (the specific allowed imports are checked by other sanity tests) if isinstance(child, ast.ImportFrom) and child.module == '__future__': - for future_import in child.names: - if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS: - break - else: - continue + continue + return False return True except AttributeError: @@ -636,29 +639,21 @@ class ModuleValidator(Validator): ) def _ensure_imports_below_docs(self, doc_info, first_callable): - min_doc_line = min(doc_info[key]['lineno'] for key in doc_info) + doc_line_numbers = [lineno for lineno in (doc_info[key]['lineno'] for key in doc_info) if lineno > 0] + + min_doc_line = min(doc_line_numbers) if doc_line_numbers else None max_doc_line = max(doc_info[key]['end_lineno'] for key in doc_info) import_lines = [] for child in self.ast.body: if isinstance(child, (ast.Import, ast.ImportFrom)): + # allow __future__ imports (the specific allowed imports are checked by other sanity tests) if isinstance(child, ast.ImportFrom) and child.module == '__future__': - # allowed from __future__ imports - for future_import in child.names: - if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS: - self.reporter.error( - path=self.object_path, - code='illegal-future-imports', - msg=('Only the following from __future__ imports are allowed: %s' - % ', '.join(self.ACCEPTLIST_FUTURE_IMPORTS)), - line=child.lineno - ) - break - else: # for-else. 
If we didn't find a problem nad break out of the loop, then this is a legal import - continue + continue + import_lines.append(child.lineno) - if child.lineno < min_doc_line: + if min_doc_line and child.lineno < min_doc_line: self.reporter.error( path=self.object_path, code='import-before-documentation', @@ -675,7 +670,7 @@ class ModuleValidator(Validator): for grandchild in bodies: if isinstance(grandchild, (ast.Import, ast.ImportFrom)): import_lines.append(grandchild.lineno) - if grandchild.lineno < min_doc_line: + if min_doc_line and grandchild.lineno < min_doc_line: self.reporter.error( path=self.object_path, code='import-before-documentation', @@ -813,22 +808,22 @@ class ModuleValidator(Validator): continue if grandchild.id == 'DOCUMENTATION': - docs['DOCUMENTATION']['value'] = child.value.s + docs['DOCUMENTATION']['value'] = child.value.value docs['DOCUMENTATION']['lineno'] = child.lineno docs['DOCUMENTATION']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) + child.lineno + len(child.value.value.splitlines()) ) elif grandchild.id == 'EXAMPLES': - docs['EXAMPLES']['value'] = child.value.s + docs['EXAMPLES']['value'] = child.value.value docs['EXAMPLES']['lineno'] = child.lineno docs['EXAMPLES']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) + child.lineno + len(child.value.value.splitlines()) ) elif grandchild.id == 'RETURN': - docs['RETURN']['value'] = child.value.s + docs['RETURN']['value'] = child.value.value docs['RETURN']['lineno'] = child.lineno docs['RETURN']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) + child.lineno + len(child.value.value.splitlines()) ) return docs @@ -1041,6 +1036,8 @@ class ModuleValidator(Validator): 'invalid-documentation', ) + self._validate_all_semantic_markup(doc, returns) + if not self.collection: existing_doc = self._check_for_new_args(doc) self._check_version_added(doc, existing_doc) @@ -1166,6 +1163,113 @@ class ModuleValidator(Validator): return doc_info, doc + def _check_sem_option(self, part: dom.OptionNamePart, current_plugin: dom.PluginIdentifier) -> None: + if part.plugin is None or part.plugin != current_plugin: + return + if part.entrypoint is not None: + return + if tuple(part.link) not in self._all_options: + self.reporter.error( + path=self.object_path, + code='invalid-documentation-markup', + msg='Directive "%s" contains a non-existing option "%s"' % (part.source, part.name) + ) + + def _check_sem_return_value(self, part: dom.ReturnValuePart, current_plugin: dom.PluginIdentifier) -> None: + if part.plugin is None or part.plugin != current_plugin: + return + if part.entrypoint is not None: + return + if tuple(part.link) not in self._all_return_values: + self.reporter.error( + path=self.object_path, + code='invalid-documentation-markup', + msg='Directive "%s" contains a non-existing return value "%s"' % (part.source, part.name) + ) + + def _validate_semantic_markup(self, object) -> None: + # Make sure we operate on strings + if is_iterable(object): + for entry in object: + self._validate_semantic_markup(entry) + return + if not isinstance(object, string_types): + return + + if self.collection: + fqcn = f'{self.collection_name}.{self.name}' + else: + fqcn = f'ansible.builtin.{self.name}' + current_plugin = dom.PluginIdentifier(fqcn=fqcn, type=self.plugin_type) + for par in parse(object, Context(current_plugin=current_plugin), errors='message', add_source=True): + for part in par: + # Errors are already covered during schema validation, we only check for option and + # return 
value references + if part.type == dom.PartType.OPTION_NAME: + self._check_sem_option(part, current_plugin) + if part.type == dom.PartType.RETURN_VALUE: + self._check_sem_return_value(part, current_plugin) + + def _validate_semantic_markup_collect(self, destination, sub_key, data, all_paths): + if not isinstance(data, dict): + return + for key, value in data.items(): + if not isinstance(value, dict): + continue + keys = {key} + if is_iterable(value.get('aliases')): + keys.update(value['aliases']) + new_paths = [path + [key] for path in all_paths for key in keys] + destination.update([tuple(path) for path in new_paths]) + self._validate_semantic_markup_collect(destination, sub_key, value.get(sub_key), new_paths) + + def _validate_semantic_markup_options(self, options): + if not isinstance(options, dict): + return + for key, value in options.items(): + self._validate_semantic_markup(value.get('description')) + self._validate_semantic_markup_options(value.get('suboptions')) + + def _validate_semantic_markup_return_values(self, return_vars): + if not isinstance(return_vars, dict): + return + for key, value in return_vars.items(): + self._validate_semantic_markup(value.get('description')) + self._validate_semantic_markup(value.get('returned')) + self._validate_semantic_markup_return_values(value.get('contains')) + + def _validate_all_semantic_markup(self, docs, return_docs): + if not isinstance(docs, dict): + docs = {} + if not isinstance(return_docs, dict): + return_docs = {} + + self._all_options = set() + self._all_return_values = set() + self._validate_semantic_markup_collect(self._all_options, 'suboptions', docs.get('options'), [[]]) + self._validate_semantic_markup_collect(self._all_return_values, 'contains', return_docs, [[]]) + + for string_keys in ('short_description', 'description', 'notes', 'requirements', 'todo'): + self._validate_semantic_markup(docs.get(string_keys)) + + if is_iterable(docs.get('seealso')): + for entry in docs.get('seealso'): + if isinstance(entry, dict): + self._validate_semantic_markup(entry.get('description')) + + if isinstance(docs.get('attributes'), dict): + for entry in docs.get('attributes').values(): + if isinstance(entry, dict): + for key in ('description', 'details'): + self._validate_semantic_markup(entry.get(key)) + + if isinstance(docs.get('deprecated'), dict): + for key in ('why', 'alternative'): + self._validate_semantic_markup(docs.get('deprecated').get(key)) + + self._validate_semantic_markup_options(docs.get('options')) + self._validate_semantic_markup_return_values(return_docs) + def _check_version_added(self, doc, existing_doc): version_added_raw = doc.get('version_added') try: @@ -1233,6 +1337,31 @@ class ModuleValidator(Validator): self._validate_argument_spec(docs, spec, kwargs) + if isinstance(docs, Mapping) and isinstance(docs.get('attributes'), Mapping): + if isinstance(docs['attributes'].get('check_mode'), Mapping): + support_value = docs['attributes']['check_mode'].get('support') + if not kwargs.get('supports_check_mode', False): + if support_value != 'none': + self.reporter.error( + path=self.object_path, + code='attributes-check-mode', + msg="The module does not declare support for check mode, but the check_mode attribute's" + " support value is '%s' and not 'none'" % support_value + ) + else: + if support_value not in ('full', 'partial', 'N/A'): + self.reporter.error( + path=self.object_path, + code='attributes-check-mode', + msg="The module does declare support for check mode, but the check_mode attribute's support value is '%s'" % 
support_value + ) + if support_value in ('partial', 'N/A') and docs['attributes']['check_mode'].get('details') in (None, '', []): + self.reporter.error( + path=self.object_path, + code='attributes-check-mode-details', + msg="The module declares it does not fully support check mode, but has no details on what exactly that means" + ) + def _validate_list_of_module_args(self, name, terms, spec, context): if terms is None: return @@ -1748,7 +1877,7 @@ class ModuleValidator(Validator): ) arg_default = None - if 'default' in data and not is_empty(data['default']): + if 'default' in data and data['default'] is not None: try: with CaptureStd(): arg_default = _type_checker(data['default']) @@ -1789,7 +1918,7 @@ class ModuleValidator(Validator): try: doc_default = None - if 'default' in doc_options_arg and not is_empty(doc_options_arg['default']): + if 'default' in doc_options_arg and doc_options_arg['default'] is not None: with CaptureStd(): doc_default = _type_checker(doc_options_arg['default']) except (Exception, SystemExit): diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py index 03a14019..1b712171 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py @@ -29,7 +29,7 @@ from contextlib import contextmanager from ansible.executor.powershell.module_manifest import PSModuleDepFinder from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule from ansible.module_utils.six import reraise -from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.text.converters import to_bytes, to_text from .utils import CaptureStd, find_executable, get_module_name_from_filename diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py index b2623ff7..a6068c60 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py @@ -11,7 +11,8 @@ from ansible.module_utils.compat.version import StrictVersion from functools import partial from urllib.parse import urlparse -from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid, Exclusive +from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, MultipleInvalid, Required, Schema, Self, ValueInvalid, Exclusive +from ansible.constants import DOCUMENTABLE_PLUGINS from ansible.module_utils.six import string_types from ansible.module_utils.common.collections import is_iterable from ansible.module_utils.parsing.convert_bool import boolean @@ -19,6 +20,9 @@ from ansible.parsing.quoting import unquote from ansible.utils.version import SemanticVersion from ansible.release import __version__ +from antsibull_docs_parser import dom +from antsibull_docs_parser.parser import parse, Context + from .utils import parse_isodate list_string_types = list(string_types) @@ -80,26 +84,8 @@ def date(error_code=None): return Any(isodate, error_code=error_code) -_MODULE = re.compile(r"\bM\(([^)]+)\)") -_LINK = re.compile(r"\bL\(([^)]+)\)") -_URL = re.compile(r"\bU\(([^)]+)\)") -_REF = 
re.compile(r"\bR\(([^)]+)\)") - - -def _check_module_link(directive, content): - if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(content): - raise _add_ansible_error_code( - Invalid('Directive "%s" must contain a FQCN' % directive), 'invalid-documentation-markup') - - -def _check_link(directive, content): - if ',' not in content: - raise _add_ansible_error_code( - Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup') - idx = content.rindex(',') - title = content[:idx] - url = content[idx + 1:].lstrip(' ') - _check_url(directive, url) +# Roles can also be referenced by semantic markup +_VALID_PLUGIN_TYPES = set(DOCUMENTABLE_PLUGINS + ('role', )) def _check_url(directive, content): @@ -107,15 +93,10 @@ def _check_url(directive, content): parsed_url = urlparse(content) if parsed_url.scheme not in ('', 'http', 'https'): raise ValueError('Schema must be HTTP, HTTPS, or not specified') - except ValueError as exc: - raise _add_ansible_error_code( - Invalid('Directive "%s" must contain an URL' % directive), 'invalid-documentation-markup') - - -def _check_ref(directive, content): - if ',' not in content: - raise _add_ansible_error_code( - Invalid('Directive "%s" must contain a comma' % directive), 'invalid-documentation-markup') + return [] + except ValueError: + return [_add_ansible_error_code( + Invalid('Directive %s must contain a valid URL' % directive), 'invalid-documentation-markup')] def doc_string(v): @@ -123,25 +104,55 @@ def doc_string(v): if not isinstance(v, string_types): raise _add_ansible_error_code( Invalid('Must be a string'), 'invalid-documentation') - for m in _MODULE.finditer(v): - _check_module_link(m.group(0), m.group(1)) - for m in _LINK.finditer(v): - _check_link(m.group(0), m.group(1)) - for m in _URL.finditer(v): - _check_url(m.group(0), m.group(1)) - for m in _REF.finditer(v): - _check_ref(m.group(0), m.group(1)) + errors = [] + for par in parse(v, Context(), errors='message', strict=True, add_source=True): + for part in par: + if part.type == dom.PartType.ERROR: + errors.append(_add_ansible_error_code(Invalid(part.message), 'invalid-documentation-markup')) + if part.type == dom.PartType.URL: + errors.extend(_check_url('U()', part.url)) + if part.type == dom.PartType.LINK: + errors.extend(_check_url('L()', part.url)) + if part.type == dom.PartType.MODULE: + if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.fqcn): + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.fqcn)), + 'invalid-documentation-markup')) + if part.type == dom.PartType.PLUGIN: + if not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn): + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)), + 'invalid-documentation-markup')) + if part.plugin.type not in _VALID_PLUGIN_TYPES: + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)), + 'invalid-documentation-markup')) + if part.type == dom.PartType.OPTION_NAME: + if part.plugin is not None and not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn): + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)), + 'invalid-documentation-markup')) + if part.plugin is not None and part.plugin.type not in _VALID_PLUGIN_TYPES: + errors.append(_add_ansible_error_code(Invalid( + 
'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)), + 'invalid-documentation-markup')) + if part.type == dom.PartType.RETURN_VALUE: + if part.plugin is not None and not FULLY_QUALIFIED_COLLECTION_RESOURCE_RE.match(part.plugin.fqcn): + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a FQCN; found "%s"' % (part.source, part.plugin.fqcn)), + 'invalid-documentation-markup')) + if part.plugin is not None and part.plugin.type not in _VALID_PLUGIN_TYPES: + errors.append(_add_ansible_error_code(Invalid( + 'Directive "%s" must contain a valid plugin type; found "%s"' % (part.source, part.plugin.type)), + 'invalid-documentation-markup')) + if len(errors) == 1: + raise errors[0] + if errors: + raise MultipleInvalid(errors) return v -def doc_string_or_strings(v): - """Match a documentation string, or list of strings.""" - if isinstance(v, string_types): - return doc_string(v) - if isinstance(v, (list, tuple)): - return [doc_string(vv) for vv in v] - raise _add_ansible_error_code( - Invalid('Must be a string or list of strings'), 'invalid-documentation') +doc_string_or_strings = Any(doc_string, [doc_string]) def is_callable(v): @@ -173,6 +184,11 @@ seealso_schema = Schema( 'description': doc_string, }, { + Required('plugin'): Any(*string_types), + Required('plugin_type'): Any(*DOCUMENTABLE_PLUGINS), + 'description': doc_string, + }, + { Required('ref'): Any(*string_types), Required('description'): doc_string, }, @@ -794,7 +810,7 @@ def author(value): def doc_schema(module_name, for_collection=False, deprecated_module=False, plugin_type='module'): - if module_name.startswith('_'): + if module_name.startswith('_') and not for_collection: module_name = module_name[1:] deprecated_module = True if for_collection is False and plugin_type == 'connection' and module_name == 'paramiko_ssh': @@ -864,9 +880,6 @@ def doc_schema(module_name, for_collection=False, deprecated_module=False, plugi 'action_group': add_default_attributes({ Required('membership'): list_string_types, }), - 'forced_action_plugin': add_default_attributes({ - Required('action_plugin'): any_string_types, - }), 'platform': add_default_attributes({ Required('platforms'): Any(list_string_types, *string_types) }), diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py index 88d5b01a..15cb7037 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py @@ -28,7 +28,7 @@ from io import BytesIO, TextIOWrapper import yaml import yaml.reader -from ansible.module_utils._text import to_text +from ansible.module_utils.common.text.converters import to_text from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.yaml import SafeLoader from ansible.module_utils.six import string_types diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py index d6de6117..ed1afcf3 100644 --- a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py @@ -181,15 +181,15 @@ class YamlChecker: if doc_types and target.id not in doc_types: continue - fmt_match = fmt_re.match(statement.value.s.lstrip()) + fmt_match = 
fmt_re.match(statement.value.value.lstrip()) fmt = 'yaml' if fmt_match: fmt = fmt_match.group(1) docs[target.id] = dict( - yaml=statement.value.s, + yaml=statement.value.value, lineno=statement.lineno, - end_lineno=statement.lineno + len(statement.value.s.splitlines()), + end_lineno=statement.lineno + len(statement.value.value.splitlines()), fmt=fmt.lower(), ) diff --git a/test/lib/ansible_test/_util/controller/tools/collection_detail.py b/test/lib/ansible_test/_util/controller/tools/collection_detail.py index 870ea59e..df52d099 100644 --- a/test/lib/ansible_test/_util/controller/tools/collection_detail.py +++ b/test/lib/ansible_test/_util/controller/tools/collection_detail.py @@ -50,7 +50,7 @@ def read_manifest_json(collection_path): ) validate_version(result['version']) except Exception as ex: # pylint: disable=broad-except - raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex)) + raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex)) from None return result @@ -71,7 +71,7 @@ def read_galaxy_yml(collection_path): ) validate_version(result['version']) except Exception as ex: # pylint: disable=broad-except - raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex)) + raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex)) from None return result diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py index 9bddfaf4..36a5a2c4 100644 --- a/test/lib/ansible_test/_util/target/common/constants.py +++ b/test/lib/ansible_test/_util/target/common/constants.py @@ -7,14 +7,14 @@ __metaclass__ = type REMOTE_ONLY_PYTHON_VERSIONS = ( '2.7', - '3.5', '3.6', '3.7', '3.8', + '3.9', ) CONTROLLER_PYTHON_VERSIONS = ( - '3.9', '3.10', '3.11', + '3.12', ) diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py index fefd6b0f..2f77c03b 100644 --- a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py +++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py @@ -32,6 +32,50 @@ def collection_pypkgpath(self): raise Exception('File "%s" not found in collection path "%s".' % (self.strpath, ANSIBLE_COLLECTIONS_PATH)) +def enable_assertion_rewriting_hook(): # type: () -> None + """ + Enable pytest's AssertionRewritingHook on Python 3.x. + This is necessary because the Ansible collection loader intercepts imports before the pytest provided loader ever sees them. + """ + import sys + + if sys.version_info[0] == 2: + return # Python 2.x is not supported + + hook_name = '_pytest.assertion.rewrite.AssertionRewritingHook' + hooks = [hook for hook in sys.meta_path if hook.__class__.__module__ + '.' + hook.__class__.__qualname__ == hook_name] + + if len(hooks) != 1: + raise Exception('Found {} instance(s) of "{}" in sys.meta_path.'.format(len(hooks), hook_name)) + + assertion_rewriting_hook = hooks[0] + + # This is based on `_AnsibleCollectionPkgLoaderBase.exec_module` from `ansible/utils/collection_loader/_collection_finder.py`. 
+ def exec_module(self, module): + # short-circuit redirect; avoid reinitializing existing modules + if self._redirect_module: # pylint: disable=protected-access + return + + # execute the module's code in its namespace + code_obj = self.get_code(self._fullname) # pylint: disable=protected-access + + if code_obj is not None: # things like NS packages that can't have code on disk will return None + # This logic is loosely based on `AssertionRewritingHook._should_rewrite` from pytest. + # See: https://github.com/pytest-dev/pytest/blob/779a87aada33af444f14841a04344016a087669e/src/_pytest/assertion/rewrite.py#L209 + should_rewrite = self._package_to_load == 'conftest' or self._package_to_load.startswith('test_') # pylint: disable=protected-access + + if should_rewrite: + # noinspection PyUnresolvedReferences + assertion_rewriting_hook.exec_module(module) + else: + exec(code_obj, module.__dict__) # pylint: disable=exec-used + + # noinspection PyProtectedMember + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionPkgLoaderBase + + _AnsibleCollectionPkgLoaderBase.exec_module = exec_module + + def pytest_configure(): """Configure this pytest plugin.""" try: @@ -40,6 +84,8 @@ def pytest_configure(): except AttributeError: pytest_configure.executed = True + enable_assertion_rewriting_hook() + # noinspection PyProtectedMember from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py index 44a5ddc9..38a73643 100644 --- a/test/lib/ansible_test/_util/target/sanity/import/importer.py +++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py @@ -552,13 +552,11 @@ def main(): "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," " and will be removed in the next release.") - if sys.version_info[:2] == (3, 5): - warnings.filterwarnings( - "ignore", - "Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.") - warnings.filterwarnings( - "ignore", - "Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.") + # ansible.utils.unsafe_proxy attempts patching sys.intern generating a warning if it was already patched + warnings.filterwarnings( + "ignore", + "skipped sys.intern patch; appears to have already been patched" + ) try: yield diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh index ea17dad3..65673da5 100644 --- a/test/lib/ansible_test/_util/target/setup/bootstrap.sh +++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh @@ -53,7 +53,7 @@ install_pip() { pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-20.3.4.py" ;; *) - pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-21.3.1.py" + pip_bootstrap_url="https://ci-files.testing.ansible.com/ansible-test/get-pip-23.1.2.py" ;; esac @@ -111,6 +111,15 @@ bootstrap_remote_alpine() echo "Failed to install packages. Sleeping before trying again..." sleep 10 done + + # Upgrade the `libexpat` package to ensure that an upgraded Python (`pyexpat`) continues to work. + while true; do + # shellcheck disable=SC2086 + apk upgrade -q libexpat \ + && break + echo "Failed to upgrade libexpat. Sleeping before trying again..." 
+ sleep 10 + done } bootstrap_remote_fedora() @@ -163,8 +172,6 @@ bootstrap_remote_freebsd() # Declare platform/python version combinations which do not have supporting OS packages available. # For these combinations ansible-test will use pip to install the requirements instead. case "${platform_version}/${python_version}" in - "12.4/3.9") - ;; *) jinja2_pkg="" # not available cryptography_pkg="" # not available @@ -261,7 +268,7 @@ bootstrap_remote_rhel_8() if [ "${python_version}" = "3.6" ]; then py_pkg_prefix="python3" else - py_pkg_prefix="python${python_package_version}" + py_pkg_prefix="python${python_version}" fi packages=" @@ -269,6 +276,14 @@ bootstrap_remote_rhel_8() ${py_pkg_prefix}-devel " + # pip isn't included in the Python devel package under Python 3.11 + if [ "${python_version}" != "3.6" ]; then + packages=" + ${packages} + ${py_pkg_prefix}-pip + " + fi + # Jinja2 is not installed with an OS package since the provided version is too old. # Instead, ansible-test will install it using pip. if [ "${controller}" ]; then @@ -278,9 +293,19 @@ bootstrap_remote_rhel_8() " fi + # Python 3.11 isn't a module like the earlier versions + if [ "${python_version}" = "3.6" ]; then + while true; do + # shellcheck disable=SC2086 + yum module install -q -y "python${python_package_version}" \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done + fi + while true; do # shellcheck disable=SC2086 - yum module install -q -y "python${python_package_version}" && \ yum install -q -y ${packages} \ && break echo "Failed to install packages. Sleeping before trying again..." @@ -292,22 +317,34 @@ bootstrap_remote_rhel_9() { - py_pkg_prefix="python3" + if [ "${python_version}" = "3.9" ]; then + py_pkg_prefix="python3" + else + py_pkg_prefix="python${python_version}" + fi packages=" gcc ${py_pkg_prefix}-devel " + # pip is not included in the Python devel package under Python 3.11 + if [ "${python_version}" != "3.9" ]; then + packages=" + ${packages} + ${py_pkg_prefix}-pip + " + fi + # Jinja2 is not installed with an OS package since the provided version is too old. # Instead, ansible-test will install it using pip. + # packaging and resolvelib are missing for Python 3.11 (and possibly later) so we just + # skip them and let ansible-test install them from PyPI. if [ "${controller}" ]; then packages=" ${packages} ${py_pkg_prefix}-cryptography - ${py_pkg_prefix}-packaging ${py_pkg_prefix}-pyyaml - ${py_pkg_prefix}-resolvelib " fi @@ -387,14 +424,6 @@ bootstrap_remote_ubuntu() echo "Failed to install packages. Sleeping before trying again..." sleep 10 done - - if [ "${controller}" ]; then - if [ "${platform_version}/${python_version}" = "20.04/3.9" ]; then - # Install pyyaml using pip so libyaml support is available on Python 3.9. - # The OS package install (which is installed by default) only has a .so file for Python 3.8. - pip_install "--upgrade pyyaml" - fi - fi } bootstrap_docker() diff --git a/test/lib/ansible_test/_util/target/setup/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py index 54f0f860..171ff8f3 100644 --- a/test/lib/ansible_test/_util/target/setup/quiet_pip.py +++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py @@ -27,10 +27,6 @@ WARNING_MESSAGE_FILTERS = ( # pip 21.0 will drop support for Python 2.7 in January 2021.
# More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support 'DEPRECATION: Python 2.7 reached the end of its life ', - - # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained. - # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality. - 'DEPRECATION: Python 3.5 reached the end of its life ', ) diff --git a/test/lib/ansible_test/config/cloud-config-aws.ini.template b/test/lib/ansible_test/config/cloud-config-aws.ini.template index 88b9fea6..503a14b3 100644 --- a/test/lib/ansible_test/config/cloud-config-aws.ini.template +++ b/test/lib/ansible_test/config/cloud-config-aws.ini.template @@ -6,7 +6,9 @@ # 2) Using the automatically provisioned AWS credentials in ansible-test. # # If you do not want to use the automatically provisioned temporary AWS credentials, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. +# If you need to omit optional fields like security_token, comment out that line. # This will cause ansible-test to use the given configuration instead of temporary credentials. # # NOTE: Automatic provisioning of AWS credentials requires an ansible-core-ci API key. diff --git a/test/lib/ansible_test/config/cloud-config-azure.ini.template b/test/lib/ansible_test/config/cloud-config-azure.ini.template index 766553d1..bf7cc022 100644 --- a/test/lib/ansible_test/config/cloud-config-azure.ini.template +++ b/test/lib/ansible_test/config/cloud-config-azure.ini.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned Azure credentials in ansible-test. # # If you do not want to use the automatically provisioned temporary Azure credentials, -# fill in the values below and save this file without the .template extension. +# fill in the values below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration instead of temporary credentials. # # NOTE: Automatic provisioning of Azure credentials requires an ansible-core-ci API key in ~/.ansible-core-ci.key diff --git a/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template b/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template index 1c99e9b8..8396e4c8 100644 --- a/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template +++ b/test/lib/ansible_test/config/cloud-config-cloudscale.ini.template @@ -4,6 +4,8 @@ # # 1) Running integration tests without using ansible-test. # +# Fill in the value below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. [default] cloudscale_api_token = @API_TOKEN diff --git a/test/lib/ansible_test/config/cloud-config-cs.ini.template b/test/lib/ansible_test/config/cloud-config-cs.ini.template index f8d8a915..0589fd5f 100644 --- a/test/lib/ansible_test/config/cloud-config-cs.ini.template +++ b/test/lib/ansible_test/config/cloud-config-cs.ini.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned cloudstack-sim docker container in ansible-test. 
# # If you do not want to use the automatically provided CloudStack simulator, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration and not launch the simulator. # # It is recommended that you DO NOT use this template unless you cannot use the simulator. diff --git a/test/lib/ansible_test/config/cloud-config-gcp.ini.template b/test/lib/ansible_test/config/cloud-config-gcp.ini.template index 00a20971..626063da 100644 --- a/test/lib/ansible_test/config/cloud-config-gcp.ini.template +++ b/test/lib/ansible_test/config/cloud-config-gcp.ini.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned cloudstack-sim docker container in ansible-test. # # If you do not want to use the automatically provided GCP simulator, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration and not launch the simulator. # # It is recommended that you DO NOT use this template unless you cannot use the simulator. diff --git a/test/lib/ansible_test/config/cloud-config-hcloud.ini.template b/test/lib/ansible_test/config/cloud-config-hcloud.ini.template index 8db658db..8fc7fa77 100644 --- a/test/lib/ansible_test/config/cloud-config-hcloud.ini.template +++ b/test/lib/ansible_test/config/cloud-config-hcloud.ini.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned Hetzner Cloud credentials in ansible-test. # # If you do not want to use the automatically provisioned temporary Hetzner Cloud credentials, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration instead of temporary credentials. # # NOTE: Automatic provisioning of Hetzner Cloud credentials requires an ansible-core-ci API key. diff --git a/test/lib/ansible_test/config/cloud-config-opennebula.ini.template b/test/lib/ansible_test/config/cloud-config-opennebula.ini.template index 00c56db1..f155d987 100644 --- a/test/lib/ansible_test/config/cloud-config-opennebula.ini.template +++ b/test/lib/ansible_test/config/cloud-config-opennebula.ini.template @@ -6,7 +6,8 @@ # 2) Running integration tests against previously recorded XMLRPC fixtures # # If you want to test against a Live OpenNebula platform, -# fill in the values below and save this file without the .template extension. +# fill in the values below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration. # # If you run with @FIXTURES enabled (true) then you can decide if you want to @@ -17,4 +18,4 @@ opennebula_url: @URL opennebula_username: @USERNAME opennebula_password: @PASSWORD opennebula_test_fixture: @FIXTURES -opennebula_test_fixture_replay: @REPLAY
\ No newline at end of file +opennebula_test_fixture_replay: @REPLAY diff --git a/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template b/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template index 0a10f23b..5c022cde 100644 --- a/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template +++ b/test/lib/ansible_test/config/cloud-config-openshift.kubeconfig.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned openshift-origin docker container in ansible-test. # # If you do not want to use the automatically provided OpenShift container, -# place your kubeconfig file next to this file, with the same name, but without the .template extension. +# place your kubeconfig file into the tests/integration directory of the collection you're testing, +# with the same name as this file, but without the .template extension. # This will cause ansible-test to use the given configuration and not launch the automatically provided container. # # It is recommended that you DO NOT use this template unless you cannot use the automatically provided container. diff --git a/test/lib/ansible_test/config/cloud-config-scaleway.ini.template b/test/lib/ansible_test/config/cloud-config-scaleway.ini.template index f10419e0..63e4e48f 100644 --- a/test/lib/ansible_test/config/cloud-config-scaleway.ini.template +++ b/test/lib/ansible_test/config/cloud-config-scaleway.ini.template @@ -5,7 +5,8 @@ # 1) Running integration tests without using ansible-test. # # If you want to test against the Scaleway public API, -# fill in the values below and save this file without the .template extension. +# fill in the values below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration. [default] diff --git a/test/lib/ansible_test/config/cloud-config-vcenter.ini.template b/test/lib/ansible_test/config/cloud-config-vcenter.ini.template index eff8bf74..4e980137 100644 --- a/test/lib/ansible_test/config/cloud-config-vcenter.ini.template +++ b/test/lib/ansible_test/config/cloud-config-vcenter.ini.template @@ -6,7 +6,8 @@ # 2) Using the automatically provisioned VMware credentials in ansible-test. # # If you do not want to use the automatically provisioned temporary VMware credentials, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration instead of temporary credentials. # # NOTE: Automatic provisioning of VMware credentials requires an ansible-core-ci API key. diff --git a/test/lib/ansible_test/config/cloud-config-vultr.ini.template b/test/lib/ansible_test/config/cloud-config-vultr.ini.template index 48b82108..4530c326 100644 --- a/test/lib/ansible_test/config/cloud-config-vultr.ini.template +++ b/test/lib/ansible_test/config/cloud-config-vultr.ini.template @@ -5,7 +5,8 @@ # 1) Running integration tests without using ansible-test. # # If you want to test against the Vultr public API, -# fill in the values below and save this file without the .template extension. +# fill in the values below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # This will cause ansible-test to use the given configuration.
[default] diff --git a/test/lib/ansible_test/config/inventory.networking.template b/test/lib/ansible_test/config/inventory.networking.template index a1545684..40a9f207 100644 --- a/test/lib/ansible_test/config/inventory.networking.template +++ b/test/lib/ansible_test/config/inventory.networking.template @@ -6,7 +6,8 @@ # 2) Using the `--platform` option to provision temporary network instances on EC2. # # If you do not want to use the automatically provisioned temporary network instances, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # # NOTE: Automatic provisioning of network instances on EC2 requires an ansible-core-ci API key. diff --git a/test/lib/ansible_test/config/inventory.winrm.template b/test/lib/ansible_test/config/inventory.winrm.template index 34bbee2d..3238b22e 100644 --- a/test/lib/ansible_test/config/inventory.winrm.template +++ b/test/lib/ansible_test/config/inventory.winrm.template @@ -6,7 +6,8 @@ # 1) Using the `--windows` option to provision temporary Windows instances on EC2. # # If you do not want to use the automatically provisioned temporary Windows instances, -# fill in the @VAR placeholders below and save this file without the .template extension. +# fill in the @VAR placeholders below and save this file without the .template extension, +# into the tests/integration directory of the collection you're testing. # # NOTE: Automatic provisioning of Windows instances on EC2 requires an ansible-core-ci API key. # |
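
As background for the validate-modules schema.py hunk above: the old regex scan for M()/L()/U()/R() directives is replaced by walking the part stream produced by antsibull_docs_parser. Below is a minimal sketch of that walk, assuming antsibull_docs_parser is installed and using only the calls already visible in the hunk; the sample text, variable names, and print statements are illustrative only and are not part of ansible-test.

from antsibull_docs_parser import dom
from antsibull_docs_parser.parser import parse, Context

# Hypothetical documentation string using the markup that doc_string() validates.
text = 'See M(ansible.builtin.copy), U(https://docs.ansible.com) and L(the docs, https://docs.ansible.com).'

for paragraph in parse(text, Context(), errors='message', strict=True, add_source=True):
    for part in paragraph:
        if part.type == dom.PartType.ERROR:
            # invalid markup is surfaced as an ERROR part carrying a message
            print('markup error:', part.message)
        elif part.type == dom.PartType.MODULE:
            # module references must be fully qualified collection names
            print('module reference:', part.fqcn)
        elif part.type in (dom.PartType.URL, dom.PartType.LINK):
            # link targets are checked to use http/https or no scheme
            print('link target:', part.url)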