summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorLee Garrett <lgarrett@rocketjump.eu>2023-06-16 15:40:52 +0200
committerLee Garrett <lgarrett@rocketjump.eu>2023-06-16 15:40:52 +0200
commit8d9a6d9cdf440b0a9b254a8a4bf063c0cb6a6201 (patch)
treee25b4160deb15b08aaf0aea65fc8c7bbc01dea12 /test
parent3cda7ad4dd15b514ff660905294b5b6330ecfb6f (diff)
downloaddebian-ansible-core-8d9a6d9cdf440b0a9b254a8a4bf063c0cb6a6201.zip
New upstream version 2.14.6
Diffstat (limited to 'test')
-rw-r--r--test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py12
-rw-r--r--test/integration/targets/ansible-doc/fix-urls.py15
-rw-r--r--test/integration/targets/ansible-doc/randommodule-text.output19
-rw-r--r--test/integration/targets/ansible-doc/randommodule.output25
-rwxr-xr-xtest/integration/targets/ansible-doc/runme.sh4
-rw-r--r--test/integration/targets/ansible-galaxy-collection/tasks/install.yml3
-rw-r--r--test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something2
-rw-r--r--test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases0
-rw-r--r--test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases0
-rwxr-xr-xtest/integration/targets/ansible-test-integration-targets/test.py12
-rw-r--r--test/integration/targets/ansible-test-vendoring/aliases5
-rw-r--r--test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml4
-rwxr-xr-xtest/integration/targets/ansible-test-vendoring/runme.sh33
-rw-r--r--test/integration/targets/async/tasks/main.yml8
-rw-r--r--test/integration/targets/async_fail/tasks/main.yml2
-rwxr-xr-xtest/integration/targets/callback_default/runme.sh4
-rw-r--r--test/integration/targets/canonical-pep517-self-packaging/aliases3
-rw-r--r--test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt16
-rw-r--r--test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt11
-rwxr-xr-xtest/integration/targets/canonical-pep517-self-packaging/runme.sh31
-rw-r--r--test/integration/targets/canonical-pep517-self-packaging/runme_test.py385
-rw-r--r--test/integration/targets/copy/tasks/check_mode.yml41
-rw-r--r--test/integration/targets/entry_points/aliases1
-rw-r--r--test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml7
-rw-r--r--test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml1
-rwxr-xr-xtest/integration/targets/include_import/runme.sh5
-rw-r--r--test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml28
-rw-r--r--test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml8
-rw-r--r--test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml4
-rwxr-xr-xtest/integration/targets/keyword_inheritance/runme.sh2
-rw-r--r--test/integration/targets/lookup_url/tasks/main.yml4
-rw-r--r--test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1162
-rw-r--r--test/integration/targets/template_jinja2_non_native/macro_override.yml15
-rwxr-xr-xtest/integration/targets/template_jinja2_non_native/runme.sh2
-rw-r--r--test/integration/targets/template_jinja2_non_native/templates/macro_override.j27
-rw-r--r--test/integration/targets/uri/tasks/main.yml4
-rw-r--r--test/integration/targets/win_async_wrapper/tasks/main.yml16
-rw-r--r--test/lib/ansible_test/_internal/__init__.py4
-rw-r--r--test/lib/ansible_test/_internal/ansible_util.py18
-rw-r--r--test/lib/ansible_test/_internal/become.py6
-rw-r--r--test/lib/ansible_test/_internal/bootstrap.py3
-rw-r--r--test/lib/ansible_test/_internal/cache.py1
-rw-r--r--test/lib/ansible_test/_internal/cgroup.py6
-rw-r--r--test/lib/ansible_test/_internal/ci/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/ci/azp.py23
-rw-r--r--test/lib/ansible_test/_internal/ci/local.py7
-rw-r--r--test/lib/ansible_test/_internal/classification/__init__.py95
-rw-r--r--test/lib/ansible_test/_internal/classification/python.py1
-rw-r--r--test/lib/ansible_test/_internal/cli/actions.py10
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/__init__.py5
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/actions.py1
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py28
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/parsers.py25
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/env.py2
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/network.py3
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/sanity.py20
-rw-r--r--test/lib/ansible_test/_internal/cli/compat.py6
-rw-r--r--test/lib/ansible_test/_internal/cli/environments.py7
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/__init__.py11
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py4
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py9
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py8
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/value_parsers.py16
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/__init__.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/combine.py9
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/report.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/env/__init__.py31
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/__init__.py46
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py23
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/acme.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/aws.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/azure.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/cs.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/nios.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/coverage.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/filters.py5
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/__init__.py29
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/compile.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ignores.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/import.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/mypy.py4
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pep8.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pslint.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pylint.py23
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/shellcheck.py3
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/validate_modules.py73
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/yamllint.py1
-rw-r--r--test/lib/ansible_test/_internal/commands/shell/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/units/__init__.py6
-rw-r--r--test/lib/ansible_test/_internal/compat/yaml.py1
-rw-r--r--test/lib/ansible_test/_internal/completion.py11
-rw-r--r--test/lib/ansible_test/_internal/config.py30
-rw-r--r--test/lib/ansible_test/_internal/connections.py102
-rw-r--r--test/lib/ansible_test/_internal/containers.py63
-rw-r--r--test/lib/ansible_test/_internal/core_ci.py35
-rw-r--r--test/lib/ansible_test/_internal/coverage_util.py2
-rw-r--r--test/lib/ansible_test/_internal/data.py36
-rw-r--r--test/lib/ansible_test/_internal/delegation.py3
-rw-r--r--test/lib/ansible_test/_internal/dev/container_probe.py2
-rw-r--r--test/lib/ansible_test/_internal/diff.py3
-rw-r--r--test/lib/ansible_test/_internal/docker_util.py35
-rw-r--r--test/lib/ansible_test/_internal/executor.py5
-rw-r--r--test/lib/ansible_test/_internal/git.py1
-rw-r--r--test/lib/ansible_test/_internal/host_configs.py20
-rw-r--r--test/lib/ansible_test/_internal/host_profiles.py30
-rw-r--r--test/lib/ansible_test/_internal/http.py3
-rw-r--r--test/lib/ansible_test/_internal/io.py14
-rw-r--r--test/lib/ansible_test/_internal/junit_xml.py13
-rw-r--r--test/lib/ansible_test/_internal/locale_util.py1
-rw-r--r--test/lib/ansible_test/_internal/metadata.py6
-rw-r--r--test/lib/ansible_test/_internal/payload.py79
-rw-r--r--test/lib/ansible_test/_internal/provider/__init__.py13
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/__init__.py64
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/ansible.py42
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/collection.py46
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/unsupported.py38
-rw-r--r--test/lib/ansible_test/_internal/provider/source/__init__.py1
-rw-r--r--test/lib/ansible_test/_internal/provider/source/git.py1
-rw-r--r--test/lib/ansible_test/_internal/provider/source/installed.py1
-rw-r--r--test/lib/ansible_test/_internal/provider/source/unsupported.py1
-rw-r--r--test/lib/ansible_test/_internal/provider/source/unversioned.py1
-rw-r--r--test/lib/ansible_test/_internal/provisioning.py1
-rw-r--r--test/lib/ansible_test/_internal/python_requirements.py21
-rw-r--r--test/lib/ansible_test/_internal/ssh.py8
-rw-r--r--test/lib/ansible_test/_internal/target.py25
-rw-r--r--test/lib/ansible_test/_internal/test.py16
-rw-r--r--test/lib/ansible_test/_internal/thread.py1
-rw-r--r--test/lib/ansible_test/_internal/timeout.py75
-rw-r--r--test/lib/ansible_test/_internal/util.py31
-rw-r--r--test/lib/ansible_test/_internal/util_common.py33
-rw-r--r--test/lib/ansible_test/_internal/venv.py41
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini3
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py216
-rw-r--r--test/lib/ansible_test/_util/target/setup/bootstrap.sh1
-rw-r--r--test/sanity/code-smell/package-data.py4
-rw-r--r--test/support/README.md11
-rw-r--r--test/units/plugins/lookup/test_password.py17
157 files changed, 2136 insertions, 615 deletions
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
index f251a69f..79b7a704 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
@@ -48,6 +48,18 @@ options:
version: '2.0.0'
extends_documentation_fragment:
- testns.testcol2.module
+seealso:
+ - module: ansible.builtin.ping
+ - module: ansible.builtin.uri
+ description: Use this to fetch an URI
+ - module: testns.testcol.test
+ - module: testns.testcol.fakemodule
+ description: A fake module
+ - link: https://docs.ansible.com
+ name: Ansible docsite
+ description: See also the Ansible docsite.
+ - ref: foo_bar
+ description: Some foo bar.
'''
EXAMPLES = '''
diff --git a/test/integration/targets/ansible-doc/fix-urls.py b/test/integration/targets/ansible-doc/fix-urls.py
new file mode 100644
index 00000000..1379a4e4
--- /dev/null
+++ b/test/integration/targets/ansible-doc/fix-urls.py
@@ -0,0 +1,15 @@
+"""Unwrap URLs to docs.ansible.com and remove version"""
+
+import re
+import sys
+
+
+def main():
+ data = sys.stdin.read()
+ data = re.sub('(https://docs\\.ansible\\.com/[^ ]+)\n +([^ ]+)\n', '\\1\\2\n', data, flags=re.MULTILINE)
+ data = re.sub('https://docs\\.ansible\\.com/ansible(|-core)/(?:[^/]+)/', 'https://docs.ansible.com/ansible\\1/devel/', data)
+ sys.stdout.write(data)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/integration/targets/ansible-doc/randommodule-text.output b/test/integration/targets/ansible-doc/randommodule-text.output
index 51d7930a..602d66ec 100644
--- a/test/integration/targets/ansible-doc/randommodule-text.output
+++ b/test/integration/targets/ansible-doc/randommodule-text.output
@@ -65,6 +65,25 @@ OPTIONS (= is mandatory):
type: str
+SEE ALSO:
+ * Module ansible.builtin.ping
+ The official documentation on the
+ ansible.builtin.ping module.
+ https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/ping_module.html
+ * Module ansible.builtin.uri
+ Use this to fetch an URI
+ https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/uri_module.html
+ * Module testns.testcol.test
+ * Module testns.testcol.fakemodule
+ A fake module
+ * Ansible docsite
+ See also the Ansible docsite.
+ https://docs.ansible.com
+ * Ansible documentation [foo_bar]
+ Some foo bar.
+ https://docs.ansible.com/ansible-core/devel/#stq=foo_bar&stp=1
+
+
AUTHOR: Ansible Core Team
EXAMPLES:
diff --git a/test/integration/targets/ansible-doc/randommodule.output b/test/integration/targets/ansible-doc/randommodule.output
index 25f46c36..cf036000 100644
--- a/test/integration/targets/ansible-doc/randommodule.output
+++ b/test/integration/targets/ansible-doc/randommodule.output
@@ -70,6 +70,31 @@
"type": "str"
}
},
+ "seealso": [
+ {
+ "module": "ansible.builtin.ping"
+ },
+ {
+ "description": "Use this to fetch an URI",
+ "module": "ansible.builtin.uri"
+ },
+ {
+ "module": "testns.testcol.test"
+ },
+ {
+ "description": "A fake module",
+ "module": "testns.testcol.fakemodule"
+ },
+ {
+ "description": "See also the Ansible docsite.",
+ "link": "https://docs.ansible.com",
+ "name": "Ansible docsite"
+ },
+ {
+ "description": "Some foo bar.",
+ "ref": "foo_bar"
+ }
+ ],
"short_description": "A random module",
"version_added": "1.0.0",
"version_added_collection": "testns.testcol"
diff --git a/test/integration/targets/ansible-doc/runme.sh b/test/integration/targets/ansible-doc/runme.sh
index 887d3c41..f51fa8a4 100755
--- a/test/integration/targets/ansible-doc/runme.sh
+++ b/test/integration/targets/ansible-doc/runme.sh
@@ -19,8 +19,8 @@ current_out="$(ansible-doc --playbook-dir ./ testns.testcol.fakemodule | sed '1
expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.FAKEMODULE\).*(.*)$/\1/' fakemodule.output)"
test "$current_out" == "$expected_out"
-# we use sed to strip the module path from the first line
-current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/')"
+# we use sed to strip the plugin path from the first line, and fix-urls.py to unbreak and replace URLs from stable-X branches
+current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' | python fix-urls.py)"
expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' randommodule-text.output)"
test "$current_out" == "$expected_out"
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
index 8916faf5..cca83c7b 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
@@ -5,7 +5,7 @@
state: directory
- name: install simple collection from first accessible server
- command: ansible-galaxy collection install namespace1.name1 {{ galaxy_verbosity }}
+ command: ansible-galaxy collection install namespace1.name1 -vvvv
environment:
ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
register: from_first_good_server
@@ -30,6 +30,7 @@
- install_normal_files.files[1].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
- install_normal_files.files[2].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
- (install_normal_manifest.content | b64decode | from_json).collection_info.version == '1.0.9'
+ - 'from_first_good_server.stdout|regex_findall("has not signed namespace1\.name1")|length == 1'
- name: Remove the collection
file:
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something
new file mode 100644
index 00000000..b3065492
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something
@@ -0,0 +1,2 @@
+one-part
+two_part
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases
diff --git a/test/integration/targets/ansible-test-integration-targets/test.py b/test/integration/targets/ansible-test-integration-targets/test.py
index 443ed59d..8effb647 100755
--- a/test/integration/targets/ansible-test-integration-targets/test.py
+++ b/test/integration/targets/ansible-test-integration-targets/test.py
@@ -31,5 +31,17 @@ class OptionsTest(unittest.TestCase):
raise Exception(f'{ex}:\n>>> Standard Output:\n{ex.stdout}\n>>> Standard Error:\n{ex.stderr}') from ex
+class PrefixesTest(unittest.TestCase):
+ def test_prefixes(self):
+ try:
+ command = ['ansible-test', 'integration', '--list-targets']
+
+ something = subprocess.run([*command, 'something/'], text=True, capture_output=True, check=True)
+
+ self.assertEqual(something.stdout.splitlines(), ['one-part_test', 'two_part_test'])
+ except subprocess.CalledProcessError as ex:
+ raise Exception(f'{ex}:\n>>> Standard Output:\n{ex.stdout}\n>>> Standard Error:\n{ex.stderr}') from ex
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/integration/targets/ansible-test-vendoring/aliases b/test/integration/targets/ansible-test-vendoring/aliases
new file mode 100644
index 00000000..09cbf4b8
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/aliases
@@ -0,0 +1,5 @@
+shippable/posix/group3 # runs in the distro test containers
+shippable/generic/group1 # runs in the default test container
+context/controller
+needs/target/collection
+destructive # adds and then removes packages into lib/ansible/_vendor/
diff --git a/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml b/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml
new file mode 100644
index 00000000..c73de69d
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml
@@ -0,0 +1,4 @@
+# This config file is included to cause ansible-test to import the `packaging` module.
+
+modules:
+ python_requires: default
diff --git a/test/integration/targets/ansible-test-vendoring/runme.sh b/test/integration/targets/ansible-test-vendoring/runme.sh
new file mode 100755
index 00000000..fa6f652a
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/runme.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+set -eux
+
+# Run import sanity tests which require modifications to the source directory.
+
+vendor_dir="$(python -c 'import pathlib, ansible._vendor; print(pathlib.Path(ansible._vendor.__file__).parent)')"
+
+mkdir "${vendor_dir}/packaging/" # intended to fail if packaging is already present (to avoid deleting it later)
+
+cleanup() {
+ rm -rf "${vendor_dir}/packaging/"
+}
+
+trap cleanup EXIT
+
+# Verify that packages installed in the vendor directory are loaded by ansible-test.
+# This is done by injecting a broken `packaging` package, which should cause ansible-test to fail.
+
+echo 'raise Exception("intentional failure from ansible-test-vendoring integration test")' > "${vendor_dir}/packaging/__init__.py"
+
+if ansible-test sanity --test import --color --truncate 0 "${@}" > output.log 2>&1; then
+ echo "ansible-test did not exit with a non-zero status"
+ cat output.log
+ exit 1
+fi
+
+if ! grep '^Exception: intentional failure from ansible-test-vendoring integration test$' output.log; then
+ echo "ansible-test did not fail with the expected output"
+ cat output.log
+ exit 1
+fi
+
diff --git a/test/integration/targets/async/tasks/main.yml b/test/integration/targets/async/tasks/main.yml
index 05c789e6..f5e5c992 100644
--- a/test/integration/targets/async/tasks/main.yml
+++ b/test/integration/targets/async/tasks/main.yml
@@ -122,7 +122,7 @@
- name: assert task failed correctly
assert:
that:
- - async_result.ansible_job_id is match('\d+\.\d+')
+ - async_result.ansible_job_id is match('j\d+\.\d+')
- async_result.finished == 1
- async_result is finished
- async_result is not changed
@@ -140,7 +140,7 @@
- name: validate response
assert:
that:
- - async_result.ansible_job_id is match('\d+\.\d+')
+ - async_result.ansible_job_id is match('j\d+\.\d+')
- async_result.finished == 1
- async_result is finished
- async_result.changed == false
@@ -159,7 +159,7 @@
- name: validate response
assert:
that:
- - async_result.ansible_job_id is match('\d+\.\d+')
+ - async_result.ansible_job_id is match('j\d+\.\d+')
- async_result.finished == 1
- async_result is finished
- async_result.changed == true
@@ -176,7 +176,7 @@
- name: validate response
assert:
that:
- - async_result.ansible_job_id is match('\d+\.\d+')
+ - async_result.ansible_job_id is match('j\d+\.\d+')
- async_result.finished == 1
- async_result is finished
- async_result.changed == true
diff --git a/test/integration/targets/async_fail/tasks/main.yml b/test/integration/targets/async_fail/tasks/main.yml
index 40f72e10..24cea1d5 100644
--- a/test/integration/targets/async_fail/tasks/main.yml
+++ b/test/integration/targets/async_fail/tasks/main.yml
@@ -28,7 +28,7 @@
- name: validate that by the end of the retry interval, we succeeded
assert:
that:
- - async_result.ansible_job_id is match('\d+\.\d+')
+ - async_result.ansible_job_id is match('j\d+\.\d+')
- async_result.finished == 1
- async_result is finished
- async_result is changed
diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh
index 0ee4259d..a815132a 100755
--- a/test/integration/targets/callback_default/runme.sh
+++ b/test/integration/targets/callback_default/runme.sh
@@ -135,8 +135,8 @@ run_test default test.yml
# Check for async output
# NOTE: regex to match 1 or more digits works for both BSD and GNU grep
ansible-playbook -i inventory test_async.yml 2>&1 | tee async_test.out
-grep "ASYNC OK .* jid=[0-9]\{1,\}" async_test.out
-grep "ASYNC FAILED .* jid=[0-9]\{1,\}" async_test.out
+grep "ASYNC OK .* jid=j[0-9]\{1,\}" async_test.out
+grep "ASYNC FAILED .* jid=j[0-9]\{1,\}" async_test.out
rm -f async_test.out
# Hide skipped
diff --git a/test/integration/targets/canonical-pep517-self-packaging/aliases b/test/integration/targets/canonical-pep517-self-packaging/aliases
new file mode 100644
index 00000000..4667aa4f
--- /dev/null
+++ b/test/integration/targets/canonical-pep517-self-packaging/aliases
@@ -0,0 +1,3 @@
+shippable/posix/group3
+context/controller
+packaging
diff --git a/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt
new file mode 100644
index 00000000..ea5d8084
--- /dev/null
+++ b/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt
@@ -0,0 +1,16 @@
+# Lowest supporting Python 3.9 and 3.10:
+setuptools == 57.0.0; python_version == "3.9" or python_version == "3.10"
+
+# Lowest supporting Python 3.11:
+setuptools == 60.0.0; python_version >= "3.11"
+
+
+# An arbitrary old version that was released before Python 3.9.0:
+wheel == 0.33.6
+
+# Conditional dependencies:
+docutils == 0.16
+Jinja2 == 3.0.0
+MarkupSafe == 2.0.0
+PyYAML == 5.3
+straight.plugin == 1.4.2
diff --git a/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt
new file mode 100644
index 00000000..7f744afd
--- /dev/null
+++ b/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt
@@ -0,0 +1,11 @@
+setuptools == 67.4.0
+
+# Wheel-only build dependency
+wheel == 0.38.4
+
+# Conditional dependencies:
+docutils == 0.19
+Jinja2 == 3.1.2
+MarkupSafe == 2.1.2
+PyYAML == 6.0
+straight.plugin == 1.5.0 # WARNING: v1.5.0 doesn't have a Git tag / src
diff --git a/test/integration/targets/canonical-pep517-self-packaging/runme.sh b/test/integration/targets/canonical-pep517-self-packaging/runme.sh
new file mode 100755
index 00000000..028348f8
--- /dev/null
+++ b/test/integration/targets/canonical-pep517-self-packaging/runme.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+
+if [[ "${ANSIBLE_DEBUG}" == true ]] # `ansible-test` invoked with `--debug`
+then
+ PYTEST_VERY_VERBOSE_FLAG=-vvvvv
+ SET_DEBUG_MODE=-x
+else
+ ANSIBLE_DEBUG=false
+ PYTEST_VERY_VERBOSE_FLAG=
+ SET_DEBUG_MODE=+x
+fi
+
+
+set -eEuo pipefail
+
+source virtualenv.sh
+
+set "${SET_DEBUG_MODE}"
+
+export PIP_DISABLE_PIP_VERSION_CHECK=true
+export PIP_NO_PYTHON_VERSION_WARNING=true
+export PIP_NO_WARN_SCRIPT_LOCATION=true
+
+python -Im pip install 'pytest ~= 7.2.0'
+python -Im pytest ${PYTEST_VERY_VERBOSE_FLAG} \
+ --basetemp="${OUTPUT_DIR}/pytest-tmp" \
+ --color=yes \
+ --showlocals \
+ -p no:forked \
+ -p no:mock \
+ -ra
diff --git a/test/integration/targets/canonical-pep517-self-packaging/runme_test.py b/test/integration/targets/canonical-pep517-self-packaging/runme_test.py
new file mode 100644
index 00000000..86b0f753
--- /dev/null
+++ b/test/integration/targets/canonical-pep517-self-packaging/runme_test.py
@@ -0,0 +1,385 @@
+"""Smoke tests for the in-tree PEP 517 backend."""
+
+from __future__ import annotations
+
+from filecmp import dircmp
+from os import chdir, environ, PathLike
+from pathlib import Path
+from shutil import rmtree
+from subprocess import check_call, check_output, PIPE
+from sys import executable as current_interpreter, version_info
+from tarfile import TarFile
+import typing as t
+
+# `contextlib.chdir` only exists in newer Pythons (3.11+); fall back to a
+# hand-rolled equivalent on older interpreters.
+try:
+    from contextlib import chdir as _chdir_cm
+except ImportError:
+    from contextlib import contextmanager as _contextmanager
+
+    @_contextmanager
+    def _chdir_cm(path: PathLike) -> t.Iterator[None]:
+        """Temporarily change the working directory to *path*."""
+        original_wd = Path.cwd()
+        chdir(path)
+        try:
+            yield
+        finally:
+            # Restore the original working directory even on error.
+            chdir(original_wd)
+
+import pytest
+
+
+# Distribution naming: the wheel-style (underscored) project name vs. the
+# sdist/PyPI-style (dashed) one.
+DIST_NAME = 'ansible_core'
+DIST_FILENAME_BASE = 'ansible-core'
+# `OUTPUT_DIR` is supplied via the environment; the source checkout root
+# is derived from it by walking up the directory tree.
+OUTPUT_DIR = Path(environ['OUTPUT_DIR']).resolve().absolute()
+SRC_ROOT_DIR = OUTPUT_DIR.parents[3]
+GENERATED_MANPAGES_SUBDIR = SRC_ROOT_DIR / 'docs' / 'man' / 'man1'
+# Constraint files pinning the lowest-supported and a modern set of
+# build dependencies, respectively.
+LOWEST_SUPPORTED_BUILD_DEPS_FILE = (
+    Path(__file__).parent / 'minimum-build-constraints.txt'
+).resolve().absolute()
+MODERNISH_BUILD_DEPS_FILE = (
+    Path(__file__).parent / 'modernish-build-constraints.txt'
+).resolve().absolute()
+# The package version is scraped from the `__version__ = '...'` line of
+# `lib/ansible/release.py`.
+RELEASE_MODULE = SRC_ROOT_DIR / 'lib' / 'ansible' / 'release.py'
+VERSION_LINE_PREFIX = "__version__ = '"
+PKG_DIST_VERSION = next(
+    line[len(VERSION_LINE_PREFIX):-1]
+    for line in RELEASE_MODULE.read_text().splitlines()
+    if line.startswith(VERSION_LINE_PREFIX)
+)
+# Expected artifact file names for the version under test.
+EXPECTED_SDIST_NAME_BASE = f'{DIST_FILENAME_BASE}-{PKG_DIST_VERSION}'
+EXPECTED_SDIST_NAME = f'{EXPECTED_SDIST_NAME_BASE}.tar.gz'
+EXPECTED_WHEEL_NAME = f'{DIST_NAME}-{PKG_DIST_VERSION}-py3-none-any.whl'
+
+IS_PYTHON310_PLUS = version_info[:2] >= (3, 10)
+
+
+def wipe_generated_manpages() -> None:
+    """Ensure man1 pages aren't present in the source checkout."""
+    # Cleaning up the gitignored manpages...
+    # The directory is gitignored, so removing it wholesale is safe.
+    if not GENERATED_MANPAGES_SUBDIR.exists():
+        return
+
+    rmtree(GENERATED_MANPAGES_SUBDIR)
+    # Removed the generated manpages...
+
+
+def contains_man1_pages(sdist_tarball: Path) -> bool:
+    """Check if the man1 pages are present in given tarball."""
+    with sdist_tarball.open(mode='rb') as tarball_fd:
+        with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball:
+            try:
+                tarball.getmember(
+                    name=f'{EXPECTED_SDIST_NAME_BASE}/docs/man/man1',
+                )
+            except KeyError:
+                # `getmember()` raises KeyError when the member is absent.
+                return False
+
+    return True
+
+
+def unpack_sdist(sdist_tarball: Path, target_directory: Path) -> Path:
+    """Unarchive given tarball.
+
+    :returns: Path of the package source checkout.
+    """
+    with sdist_tarball.open(mode='rb') as tarball_fd:
+        with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball:
+            tarball.extractall(path=target_directory)
+    # The tarball's top-level directory is `<EXPECTED_SDIST_NAME_BASE>/`,
+    # so that is where the unpacked sources end up.
+    return target_directory / EXPECTED_SDIST_NAME_BASE
+
+
+def assert_dirs_equal(*dir_paths: Path) -> None:
+    """Assert that the given directories have identical contents.
+
+    Only the top-level entries are compared here — `dircmp` attributes
+    like `left_only` do not recurse into subdirectories.
+    """
+    dir_comparison = dircmp(*dir_paths)
+    assert not dir_comparison.left_only
+    assert not dir_comparison.right_only
+    assert not dir_comparison.diff_files
+    assert not dir_comparison.funny_files
+
+
+def normalize_unpacked_rebuilt_sdist(sdist_path: Path) -> None:
+    """Normalize the metadata files of an unpacked sdist in place.
+
+    Old (v39) and recent setuptools emit slightly different `PKG-INFO`
+    and `entry_points.txt` contents; this mutates the unpacked tree so
+    sdists built by either can be compared directly.
+    """
+    top_pkg_info_path = sdist_path / 'PKG-INFO'
+    nested_pkg_info_path = (
+        sdist_path / 'lib' / f'{DIST_NAME}.egg-info' / 'PKG-INFO'
+    )
+    entry_points_path = nested_pkg_info_path.parent / 'entry_points.txt'
+
+    # setuptools v39 writes out two trailing empty lines and an unknown
+    # platform, while recent versions don't
+    top_pkg_info_path.write_text(
+        top_pkg_info_path.read_text().replace(
+            'Classifier: Development Status :: 5',
+            'Platform: UNKNOWN\nClassifier: Development Status :: 5',
+        ) + '\n\n'
+    )
+    nested_pkg_info_path.write_text(
+        nested_pkg_info_path.read_text().replace(
+            'Classifier: Development Status :: 5',
+            'Platform: UNKNOWN\nClassifier: Development Status :: 5',
+        ) + '\n\n'
+    )
+
+    # setuptools v39 writes out one trailing empty line, while recent
+    # versions don't
+    entry_points_path.write_text(entry_points_path.read_text() + '\n')
+
+
+@pytest.fixture
+def venv_python_exe(tmp_path: Path) -> t.Iterator[Path]:
+    """Yield the Python executable of a freshly created virtualenv."""
+    venv_path = tmp_path / 'pytest-managed-venv'
+    mkvenv_cmd = (
+        current_interpreter, '-m', 'venv', str(venv_path),
+    )
+    # An empty `env` keeps the caller's environment from leaking into the
+    # venv creation; stdout/stderr are captured to keep the log clean.
+    check_call(mkvenv_cmd, env={}, stderr=PIPE, stdout=PIPE)
+    yield venv_path / 'bin' / 'python'
+    # Teardown: dispose of the venv once the test is done with it.
+    rmtree(venv_path)
+
+
+def run_with_venv_python(
+        python_exe: Path, *cli_args: str,
+        env_vars: t.Optional[t.Dict[str, str]] = None,
+) -> bytes:
+    """Run the given venv's Python with *cli_args* and capture stdout.
+
+    :returns: the raw captured standard output (bytes — no text decoding
+        is requested from `check_output`).
+    """
+    if env_vars is None:
+        env_vars = {}
+    full_cmd = str(python_exe), *cli_args
+    return check_output(full_cmd, env=env_vars, stderr=PIPE)
+
+
+def build_dists(
+        python_exe: Path, *cli_args: str,
+        env_vars: t.Dict[str, str],
+) -> bytes:
+    """Invoke `python -m build` in the given venv.
+
+    `env_vars` is keyword-only and mandatory here — callers always pass
+    a `PIP_CONSTRAINT` pin for the build dependencies.
+    """
+    return run_with_venv_python(
+        python_exe, '-m', 'build',
+        *cli_args, env_vars=env_vars,
+    )
+
+
+def pip_install(
+        python_exe: Path, *cli_args: str,
+        env_vars: t.Optional[t.Dict[str, str]] = None,
+) -> bytes:
+    """Invoke `pip install` in the given venv with *cli_args*."""
+    return run_with_venv_python(
+        python_exe, '-m', 'pip', 'install',
+        *cli_args, env_vars=env_vars,
+    )
+
+
+def test_installing_sdist_build_with_modern_deps_to_old_env(
+        venv_python_exe: Path, tmp_path: Path,
+) -> None:
+    """Test installing a modern-tooling-built sdist with old pips.
+
+    An sdist is built with modern build deps, then installed with pip
+    versions that predate in-tree build backend support (and, where the
+    interpreter allows it, that predate PEP 517 support entirely).
+    """
+    pip_install(venv_python_exe, 'build ~= 0.10.0')
+    tmp_dir_sdist_w_modern_tools = tmp_path / 'sdist-w-modern-tools'
+    build_dists(
+        venv_python_exe, '--sdist',
+        '--config-setting=--build-manpages',
+        f'--outdir={tmp_dir_sdist_w_modern_tools!s}',
+        str(SRC_ROOT_DIR),
+        env_vars={
+            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
+        },
+    )
+    tmp_path_sdist_w_modern_tools = (
+        tmp_dir_sdist_w_modern_tools / EXPECTED_SDIST_NAME
+    )
+
+    # Downgrading pip, because v20+ supports in-tree build backends
+    pip_install(venv_python_exe, 'pip ~= 19.3.1')
+
+    # Smoke test — installing an sdist with pip that does not support
+    # in-tree build backends.
+    pip_install(
+        venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
+    )
+
+    # Downgrading pip, because versions that support PEP 517 don't allow
+    # disabling it with `--no-use-pep517` when `build-backend` is set in
+    # the `[build-system]` section of `pyproject.toml`, considering this
+    # an explicit opt-in.
+    if not IS_PYTHON310_PLUS:
+        pip_install(venv_python_exe, 'pip == 18.0')
+
+    # Smoke test — installing an sdist with pip that does not support invoking
+    # PEP 517 interface at all.
+    # In this scenario, pip will run `setup.py install` since `wheel` is not in
+    # the environment.
+    if IS_PYTHON310_PLUS:
+        # pip 18.0 is unavailable here, so exercise `setup.py` directly
+        # from the unpacked sdist instead.
+        tmp_dir_unpacked_sdist_root = tmp_path / 'unpacked-sdist'
+        tmp_dir_unpacked_sdist_path = tmp_dir_unpacked_sdist_root / EXPECTED_SDIST_NAME_BASE
+        with TarFile.gzopen(tmp_path_sdist_w_modern_tools) as sdist_fd:
+            sdist_fd.extractall(path=tmp_dir_unpacked_sdist_root)
+
+        pip_install(
+            venv_python_exe, 'setuptools',
+            env_vars={
+                'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
+            },
+        )
+        with _chdir_cm(tmp_dir_unpacked_sdist_path):
+            run_with_venv_python(
+                venv_python_exe, 'setup.py', 'sdist',
+                env_vars={'PATH': environ['PATH']},
+            )
+    else:
+        pip_install(
+            venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
+            env_vars={
+                'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
+            },
+        )
+
+    # Smoke test — installing an sdist with pip that does not support invoking
+    # PEP 517 interface at all.
+    # With `wheel` present, pip will run `setup.py bdist_wheel` and then,
+    # unpack the result.
+    pip_install(venv_python_exe, 'wheel')
+    if IS_PYTHON310_PLUS:
+        with _chdir_cm(tmp_dir_unpacked_sdist_path):
+            run_with_venv_python(
+                venv_python_exe, 'setup.py', 'bdist_wheel',
+                env_vars={'PATH': environ['PATH']},
+            )
+    else:
+        pip_install(
+            venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps',
+        )
+
+
+def test_dist_rebuilds_with_manpages_premutations(
+        venv_python_exe: Path, tmp_path: Path,
+) -> None:
+    """Test a series of sdist rebuilds under different conditions.
+
+    This check builds sdists right from the Git checkout with and without
+    the manpages. It also does this using different versions of the setuptools
+    PEP 517 build backend being pinned. Finally, it builds a wheel out of one
+    of the rebuilt sdists.
+    As intermediate assertions, this test makes simple smoke tests along
+    the way.
+    """
+    # NOTE(review): 'premutations' in the test name looks like a typo for
+    # NOTE(review): 'permutations'; left as-is since pytest selects tests
+    # NOTE(review): by name.
+    pip_install(venv_python_exe, 'build ~= 0.10.0')
+
+    # Test building an sdist without manpages from the Git checkout
+    tmp_dir_sdist_without_manpages = tmp_path / 'sdist-without-manpages'
+    wipe_generated_manpages()
+    build_dists(
+        venv_python_exe, '--sdist',
+        f'--outdir={tmp_dir_sdist_without_manpages!s}',
+        str(SRC_ROOT_DIR),
+        env_vars={
+            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
+        },
+    )
+    tmp_path_sdist_without_manpages = (
+        tmp_dir_sdist_without_manpages / EXPECTED_SDIST_NAME
+    )
+    assert tmp_path_sdist_without_manpages.exists()
+    assert not contains_man1_pages(tmp_path_sdist_without_manpages)
+    sdist_without_manpages_path = unpack_sdist(
+        tmp_path_sdist_without_manpages,
+        tmp_dir_sdist_without_manpages / 'src',
+    )
+
+    # Test building an sdist with manpages from the Git checkout
+    # and lowest supported build deps
+    wipe_generated_manpages()
+    tmp_dir_sdist_with_manpages = tmp_path / 'sdist-with-manpages'
+    build_dists(
+        venv_python_exe, '--sdist',
+        '--config-setting=--build-manpages',
+        f'--outdir={tmp_dir_sdist_with_manpages!s}',
+        str(SRC_ROOT_DIR),
+        env_vars={
+            'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
+        },
+    )
+    tmp_path_sdist_with_manpages = (
+        tmp_dir_sdist_with_manpages / EXPECTED_SDIST_NAME
+    )
+    assert tmp_path_sdist_with_manpages.exists()
+    assert contains_man1_pages(tmp_path_sdist_with_manpages)
+    sdist_with_manpages_path = unpack_sdist(
+        tmp_path_sdist_with_manpages,
+        tmp_dir_sdist_with_manpages / 'src',
+    )
+
+    # Test re-building an sdist with manpages from the
+    # sdist contents that does not include the manpages
+    tmp_dir_rebuilt_sdist = tmp_path / 'rebuilt-sdist'
+    build_dists(
+        venv_python_exe, '--sdist',
+        '--config-setting=--build-manpages',
+        f'--outdir={tmp_dir_rebuilt_sdist!s}',
+        str(sdist_without_manpages_path),
+        env_vars={
+            'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE),
+        },
+    )
+    tmp_path_rebuilt_sdist = tmp_dir_rebuilt_sdist / EXPECTED_SDIST_NAME
+    # Checking that the expected sdist got created
+    # from the previous unpacked sdist...
+    assert tmp_path_rebuilt_sdist.exists()
+    # NOTE: The following assertion is disabled due to the fact that, when
+    # NOTE: building an sdist from the original source checkout, the build
+    # NOTE: backend replaces itself with pure setuptools in the resulting
+    # NOTE: sdist, and the following rebuilds from that sdist are no longer
+    # NOTE: able to process the custom config settings that are implemented in
+    # NOTE: the in-tree build backend. It is expected that said
+    # NOTE: `pyproject.toml` mutation change will be reverted once all of the
+    # NOTE: supported `ansible-core` versions ship wheels, meaning that the
+    # NOTE: end-users won't be building the distribution from sdist on install.
+    # NOTE: Another case, when it can be reverted is declaring pip below v20
+    # NOTE: unsupported — it is the first version to support in-tree build
+    # NOTE: backends natively.
+    # assert contains_man1_pages(tmp_path_rebuilt_sdist)  # FIXME: See #80255
+    rebuilt_sdist_path = unpack_sdist(
+        tmp_path_rebuilt_sdist,
+        tmp_dir_rebuilt_sdist / 'src',
+    )
+    assert rebuilt_sdist_path.exists()
+    assert rebuilt_sdist_path.is_dir()
+    # Normalize setuptools-version-dependent metadata before comparing.
+    normalize_unpacked_rebuilt_sdist(rebuilt_sdist_path)
+    assert_dirs_equal(rebuilt_sdist_path, sdist_with_manpages_path)
+
+    # Test building a wheel from the rebuilt sdist with manpages contents
+    # and lowest supported build deps
+    tmp_dir_rebuilt_wheel = tmp_path / 'rebuilt-wheel'
+    build_dists(
+        venv_python_exe, '--wheel',
+        f'--outdir={tmp_dir_rebuilt_wheel!s}',
+        str(sdist_with_manpages_path),
+        env_vars={
+            'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE),
+        },
+    )
+    tmp_path_rebuilt_wheel = tmp_dir_rebuilt_wheel / EXPECTED_WHEEL_NAME
+    # Checking that the expected wheel got created...
+    assert tmp_path_rebuilt_wheel.exists()
+
+
+def test_pep660_editable_install_smoke(venv_python_exe: Path) -> None:
+    """Smoke-test PEP 660 editable install.
+
+    This verifies that the in-tree build backend wrapper
+    does not break any required interfaces.
+    """
+    pip_install(venv_python_exe, '-e', str(SRC_ROOT_DIR))
+
+    # `pip show` output is matched line-by-line, hence the exact
+    # `Name:` / `Version:` prefixes in the assertions below.
+    pip_show_cmd = (
+        str(venv_python_exe), '-m',
+        'pip', 'show', DIST_FILENAME_BASE,
+    )
+    installed_ansible_meta = check_output(
+        pip_show_cmd,
+        env={}, stderr=PIPE, text=True,
+    ).splitlines()
+    assert f'Name: {DIST_FILENAME_BASE}' in installed_ansible_meta
+    assert f'Version: {PKG_DIST_VERSION}' in installed_ansible_meta
+
+    # The runtime-importable version must match the packaging metadata.
+    pip_runtime_version_cmd = (
+        str(venv_python_exe), '-c',
+        'from ansible import __version__; print(__version__)',
+    )
+    runtime_ansible_version = check_output(
+        pip_runtime_version_cmd,
+        env={}, stderr=PIPE, text=True,
+    ).strip()
+    assert runtime_ansible_version == PKG_DIST_VERSION
diff --git a/test/integration/targets/copy/tasks/check_mode.yml b/test/integration/targets/copy/tasks/check_mode.yml
index 5b405cc4..9702e070 100644
--- a/test/integration/targets/copy/tasks/check_mode.yml
+++ b/test/integration/targets/copy/tasks/check_mode.yml
@@ -113,8 +113,7 @@
- check_mode_subdir_first is changed
- check_mode_trailing_slash_first is changed
- # TODO: This is a legitimate bug
- #- not check_mode_trailing_slash_first_stat.stat.exists
+ - not check_mode_trailing_slash_first_stat.stat.exists
- check_mode_trailing_slash_real is changed
- check_mode_trailing_slash_real_stat.stat.exists
- check_mode_trailing_slash_second is not changed
@@ -124,3 +123,41 @@
- check_mode_foo_real is changed
- check_mode_foo_real_stat.stat.exists
- check_mode_foo_second is not changed
+
+ - name: check_mode - Do a basic copy to setup next test (without check mode)
+ copy:
+ src: foo.txt
+ dest: "{{ remote_dir }}/foo-check_mode.txt"
+ mode: 0444
+
+ - name: check_mode - Copy the same src with a different mode (check mode)
+ copy:
+ src: foo.txt
+ dest: "{{ remote_dir }}/foo-check_mode.txt"
+ mode: 0666
+ check_mode: True
+ register: check_mode_file_attribute
+
+ - name: stat the file to make sure the mode was not updated in check mode
+ stat:
+ path: "{{ remote_dir }}/foo-check_mode.txt"
+ register: check_mode_file_attribute_stat
+
+ - name: check_mode - Copy the same src with a different mode (without check mode)
+ copy:
+ src: foo.txt
+ dest: "{{ remote_dir }}/foo-check_mode.txt"
+ mode: 0666
+ register: real_file_attribute
+
+ - name: stat the file to make sure the mode was updated without check mode
+ stat:
+ path: "{{ remote_dir }}/foo-check_mode.txt"
+ register: real_file_attribute_stat
+
+ - assert:
+ that:
+ - check_mode_file_attribute is changed
+ - real_file_attribute is changed
+ - "check_mode_file_attribute_stat.stat.mode == '0444'"
+ - "real_file_attribute_stat.stat.mode == '0666'"
diff --git a/test/integration/targets/entry_points/aliases b/test/integration/targets/entry_points/aliases
index 9d967564..7f0ffcf0 100644
--- a/test/integration/targets/entry_points/aliases
+++ b/test/integration/targets/entry_points/aliases
@@ -1,2 +1,3 @@
context/controller
shippable/posix/group4
+packaging
diff --git a/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml b/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml
new file mode 100644
index 00000000..e6d200c1
--- /dev/null
+++ b/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml
@@ -0,0 +1,7 @@
+argument_specs:
+ main:
+    short_description: The main entry point for role_with_argspec
+ options:
+ optional_int:
+ type: int
+ description: An integer value
diff --git a/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml b/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml
new file mode 100644
index 00000000..23f52ef5
--- /dev/null
+++ b/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml
@@ -0,0 +1 @@
+- debug: msg='Running role_with_argspec'
diff --git a/test/integration/targets/include_import/runme.sh b/test/integration/targets/include_import/runme.sh
index d384a12e..078f080b 100755
--- a/test/integration/targets/include_import/runme.sh
+++ b/test/integration/targets/include_import/runme.sh
@@ -121,6 +121,11 @@ ansible-playbook valid_include_keywords/playbook.yml "$@"
ansible-playbook tasks/test_allow_single_role_dup.yml 2>&1 | tee test_allow_single_role_dup.out
test "$(grep -c 'ok=3' test_allow_single_role_dup.out)" = 1
+# test templating public, allow_duplicates, and rolespec_validate
+ansible-playbook tasks/test_templating_IncludeRole_FA.yml 2>&1 | tee IncludeRole_FA_template.out
+test "$(grep -c 'ok=4' IncludeRole_FA_template.out)" = 1
+test "$(grep -c 'failed=0' IncludeRole_FA_template.out)" = 1
+
# https://github.com/ansible/ansible/issues/66764
ANSIBLE_HOST_PATTERN_MISMATCH=error ansible-playbook empty_group_warning/playbook.yml
diff --git a/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml b/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml
new file mode 100644
index 00000000..cb67a9bb
--- /dev/null
+++ b/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml
@@ -0,0 +1,28 @@
+---
+- name: test templating allow_duplicates, public, and rolespec_validate
+ hosts: localhost
+ gather_facts: false
+ tasks:
+ - name: prevent duplicate roles with a templated value
+ block:
+ - import_role:
+ name: dup_allowed_role
+ allow_duplicates: "{{ False | bool }}"
+ - import_role:
+ name: dup_allowed_role
+ allow_duplicates: "{{ False | bool }}"
+
+ - name: prevent leaky vars with a templated value
+ include_role:
+ name: role1
+ public: "{{ False | bool }}"
+ - assert:
+ that:
+ - where_am_i_defined is undefined
+
+ - name: skip role argspec validation with a templated value
+ include_role:
+ name: role_with_argspec
+ rolespec_validate: "{{ False | bool }}"
+ vars:
+ optional_int: wrong_type
diff --git a/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml b/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml
new file mode 100644
index 00000000..3d3b684a
--- /dev/null
+++ b/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml
@@ -0,0 +1,8 @@
+- hosts: localhost
+ gather_facts: false
+ tasks:
+ - include_role:
+ name: "{{ item }}"
+ loop:
+ - setup_test_user
+ - role-meta-inheritance
diff --git a/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml b/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml
new file mode 100644
index 00000000..b0af49fb
--- /dev/null
+++ b/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml
@@ -0,0 +1,4 @@
+dependencies:
+ - role: whoami
+ become: true
+ become_user: ansibletest0
diff --git a/test/integration/targets/keyword_inheritance/runme.sh b/test/integration/targets/keyword_inheritance/runme.sh
index 6b78a06d..1f13ef85 100755
--- a/test/integration/targets/keyword_inheritance/runme.sh
+++ b/test/integration/targets/keyword_inheritance/runme.sh
@@ -3,3 +3,5 @@
set -eux
ANSIBLE_ROLES_PATH=../ ansible-playbook -i ../../inventory test.yml "$@"
+
+ANSIBLE_ROLES_PATH=../ ansible-playbook -i ../../inventory dep_keyword_inheritance.yml "$@"
diff --git a/test/integration/targets/lookup_url/tasks/main.yml b/test/integration/targets/lookup_url/tasks/main.yml
index a7de5063..2fb227ad 100644
--- a/test/integration/targets/lookup_url/tasks/main.yml
+++ b/test/integration/targets/lookup_url/tasks/main.yml
@@ -1,11 +1,11 @@
- name: Test that retrieving a url works
set_fact:
- web_data: "{{ lookup('url', 'https://gist.githubusercontent.com/abadger/9858c22712f62a8effff/raw/43dd47ea691c90a5fa7827892c70241913351963/test') }}"
+ web_data: "{{ lookup('url', 'https://{{ httpbin_host }}/get?one') }}"
- name: Assert that the url was retrieved
assert:
that:
- - "'one' in web_data"
+ - "'one' in web_data.args"
- name: Test that retrieving a url with invalid cert fails
set_fact:
diff --git a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
index cfa73c60..6170f046 100644
--- a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
+++ b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1
@@ -1747,6 +1747,168 @@ test_no_log - Invoked with:
Remove-Item -LiteralPath $actual_tmpdir -Force -Recurse
}
+ "Module tmpdir with symlinks" = {
+ $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
+ New-Item -Path $remote_tmp -ItemType Directory > $null
+ Set-Variable -Name complex_args -Scope Global -Value @{
+ _ansible_remote_tmp = $remote_tmp.ToString()
+ }
+ $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
+
+ $actual_tmpdir = $m.Tmpdir
+
+ $dir1 = Join-Path $actual_tmpdir Dir1
+ $dir2 = Join-Path $actual_tmpdir Dir2
+ $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
+
+ $file1 = Join-Path $dir1 test.txt
+ $file2 = Join-Path $dir2 test.txt
+ $file3 = Join-Path $actual_tmpdir test.txt
+ Set-Content -LiteralPath $file1 ''
+ Set-Content -LiteralPath $file2 ''
+ Set-Content -LiteralPath $file3 ''
+
+ $outside_target = Join-Path -Path $tmpdir -ChildPath "moduleoutsidedir-$(Get-Random)"
+ $outside_file = Join-Path -Path $outside_target -ChildPath "file"
+ New-Item -Path $outside_target -ItemType Directory > $null
+ Set-Content -LiteralPath $outside_file ''
+
+ cmd.exe /c mklink /d "$dir1\missing-dir-link" "$actual_tmpdir\fake"
+ cmd.exe /c mklink /d "$dir1\good-dir-link" "$dir2"
+ cmd.exe /c mklink /d "$dir1\recursive-target-link" "$dir1"
+ cmd.exe /c mklink "$dir1\missing-file-link" "$actual_tmpdir\fake"
+ cmd.exe /c mklink "$dir1\good-file-link" "$dir2\test.txt"
+ cmd.exe /c mklink /d "$actual_tmpdir\outside-dir" $outside_target
+ cmd.exe /c mklink "$actual_tmpdir\outside-file" $outside_file
+
+ try {
+ $m.ExitJson()
+ }
+ catch [System.Management.Automation.RuntimeException] {
+ $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
+ }
+
+ $output.warnings.Count | Assert-Equal -Expected 0
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
+ (Test-Path -LiteralPath $outside_target -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $outside_file -PathType Leaf) | Assert-Equal -Expected $true
+
+ Remove-Item -LiteralPath $remote_tmp -Force -Recurse
+ }
+
+ "Module tmpdir with undeletable file" = {
+ $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
+ New-Item -Path $remote_tmp -ItemType Directory > $null
+ Set-Variable -Name complex_args -Scope Global -Value @{
+ _ansible_remote_tmp = $remote_tmp.ToString()
+ }
+ $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
+
+ $actual_tmpdir = $m.Tmpdir
+
+ $dir1 = Join-Path $actual_tmpdir Dir1
+ $dir2 = Join-Path $actual_tmpdir Dir2
+ $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
+
+ $file1 = Join-Path $dir1 test.txt
+ $file2 = Join-Path $dir2 test.txt
+ $file3 = Join-Path $actual_tmpdir test.txt
+ Set-Content -LiteralPath $file1 ''
+ Set-Content -LiteralPath $file2 ''
+ Set-Content -LiteralPath $file3 ''
+
+ $fs = [System.IO.File]::Open($file1, "Open", "Read", "None")
+ try {
+ $m.ExitJson()
+ }
+ catch [System.Management.Automation.RuntimeException] {
+ $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
+ }
+
+ $expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes"
+ $output.warnings.Count | Assert-Equal -Expected 1
+ $output.warnings[0] | Assert-Equal -Expected $expected_msg
+
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
+ # Test-Path tries to open the file in a way that fails if it's marked as deleted
+ (Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1
+ (Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false
+ (Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false
+
+ # Releasing the file handle releases the lock on the file but as the
+ # cleanup couldn't access the file to mark as delete on close it is
+ # still going to be present.
+ $fs.Dispose()
+ (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $true
+
+ Remove-Item -LiteralPath $remote_tmp -Force -Recurse
+ }
+
+ "Module tmpdir delete with locked handle" = {
+ $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)"
+ New-Item -Path $remote_tmp -ItemType Directory > $null
+ Set-Variable -Name complex_args -Scope Global -Value @{
+ _ansible_remote_tmp = $remote_tmp.ToString()
+ }
+ $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{})
+
+ $actual_tmpdir = $m.Tmpdir
+
+ $dir1 = Join-Path $actual_tmpdir Dir1
+ $dir2 = Join-Path $actual_tmpdir Dir2
+ $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null
+
+ $file1 = Join-Path $dir1 test.txt
+ $file2 = Join-Path $dir2 test.txt
+ $file3 = Join-Path $actual_tmpdir test.txt
+ Set-Content -LiteralPath $file1 ''
+ Set-Content -LiteralPath $file2 ''
+ Set-Content -LiteralPath $file3 ''
+
+ [System.IO.File]::SetAttributes($file1, "ReadOnly")
+ [System.IO.File]::SetAttributes($file2, "ReadOnly")
+ [System.IO.File]::SetAttributes($file3, "ReadOnly")
+ $fs = [System.IO.File]::Open($file1, "Open", "Read", "Delete")
+ try {
+ $m.ExitJson()
+ }
+ catch [System.Management.Automation.RuntimeException] {
+ $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output)
+ }
+
+ if ([System.Environment]::OSVersion.Version -lt [Version]'10.0') {
+ # Older hosts can only do delete on close. This means Dir1 and its
+ # file will still be present but Dir2 should be deleted.
+ $expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes"
+ $output.warnings.Count | Assert-Equal -Expected 1
+ $output.warnings[0] | Assert-Equal -Expected $expected_msg
+
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
+ # Test-Path tries to open the file in a way that fails if it's marked as deleted
+ (Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1
+ (Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false
+ (Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false
+
+ # Releasing the file handle releases the lock on the file deleting
+ # it. Unfortunately the parent dir will still be present
+ $fs.Dispose()
+ (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true
+ (Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $false
+ }
+ else {
+ # Server 2016+ can use the POSIX APIs which will delete it straight away
+ (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false
+ $output.warnings.Count | Assert-Equal -Expected 0
+
+ $fs.Dispose()
+ }
+
+ Remove-Item -LiteralPath $remote_tmp -Force -Recurse
+ }
+
"Invalid argument spec key" = {
$spec = @{
invalid = $true
diff --git a/test/integration/targets/template_jinja2_non_native/macro_override.yml b/test/integration/targets/template_jinja2_non_native/macro_override.yml
new file mode 100644
index 00000000..8a1cabd2
--- /dev/null
+++ b/test/integration/targets/template_jinja2_non_native/macro_override.yml
@@ -0,0 +1,15 @@
+- hosts: localhost
+ gather_facts: false
+ vars:
+ output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
+ tasks:
+ - template:
+ src: macro_override.j2
+ dest: "{{ output_dir }}/macro_override.out"
+
+ - assert:
+ that:
+ - "'foobar' not in data"
+ - "'\"foo\" \"bar\"' in data"
+ vars:
+ data: "{{ lookup('file', '{{ output_dir }}/macro_override.out') }}"
diff --git a/test/integration/targets/template_jinja2_non_native/runme.sh b/test/integration/targets/template_jinja2_non_native/runme.sh
index fe9d495a..c02d6b33 100755
--- a/test/integration/targets/template_jinja2_non_native/runme.sh
+++ b/test/integration/targets/template_jinja2_non_native/runme.sh
@@ -4,4 +4,6 @@ set -eux
export ANSIBLE_JINJA2_NATIVE=1
ansible-playbook 46169.yml -v "$@"
+python -m pip install "Jinja2>=3.1.0"
+ansible-playbook macro_override.yml -v "$@"
unset ANSIBLE_JINJA2_NATIVE
diff --git a/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2 b/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2
new file mode 100644
index 00000000..51908da0
--- /dev/null
+++ b/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2
@@ -0,0 +1,7 @@
+#jinja2: variable_start_string:'<<',variable_end_string:'>>'
+Use a jinja2 override to trigger creating and using an environment overlay.
+
+{% macro m() %}
+"foo" "bar"
+{% endmacro %}
+<< m() >>
diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml
index d821f286..7fa687b4 100644
--- a/test/integration/targets/uri/tasks/main.yml
+++ b/test/integration/targets/uri/tasks/main.yml
@@ -305,7 +305,7 @@
environment:
https_proxy: 'https://localhost:3456'
uri:
- url: 'https://httpbin.org/get'
+ url: 'https://{{ httpbin_host }}/get'
register: result
ignore_errors: true
@@ -318,7 +318,7 @@
environment:
https_proxy: 'https://localhost:3456'
uri:
- url: 'https://httpbin.org/get'
+ url: 'https://{{ httpbin_host }}/get'
use_proxy: no
# Ubuntu12.04 doesn't have python-urllib3, this makes handling required dependencies a pain across all variations
diff --git a/test/integration/targets/win_async_wrapper/tasks/main.yml b/test/integration/targets/win_async_wrapper/tasks/main.yml
index 91b45846..0fc64d8c 100644
--- a/test/integration/targets/win_async_wrapper/tasks/main.yml
+++ b/test/integration/targets/win_async_wrapper/tasks/main.yml
@@ -12,12 +12,12 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.started == 1
- asyncresult is started
- asyncresult.finished == 0
- asyncresult is not finished
- - asyncresult.results_file is search('\.ansible_async.+\d+\.\d+')
+ - asyncresult.results_file is search('\.ansible_async.+j\d+\.\d+')
# ensure that async is actually async- this test will fail if # hosts > forks or if the target host is VERY slow
- (lookup('pipe', 'date +%s') | int) - (start_timestamp | int) < 15
@@ -31,7 +31,7 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.finished == 1
- asyncresult is finished
- asyncresult is changed
@@ -69,7 +69,7 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.finished == 1
- asyncresult is finished
- asyncresult is changed
@@ -107,7 +107,7 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.finished == 1
- asyncresult is finished
- asyncresult is not changed
@@ -125,7 +125,7 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.finished == 1
- asyncresult is finished
- asyncresult is changed
@@ -143,7 +143,7 @@
- name: validate response
assert:
that:
- - asyncresult.ansible_job_id is match('\d+\.\d+')
+ - asyncresult.ansible_job_id is match('j\d+\.\d+')
- asyncresult.finished == 1
- asyncresult is finished
- asyncresult is not changed
@@ -231,7 +231,7 @@
# - item is finished
# - item.slept_sec == 3
# - item is changed
-# - item.ansible_job_id is match('\d+\.\d+')
+# - item.ansible_job_id is match('j\d+\.\d+')
# with_items: "{{ asyncout.results }}"
# this part of the test is flaky- Windows PIDs are reused aggressively, so this occasionally fails due to a new process with the same ID
diff --git a/test/lib/ansible_test/_internal/__init__.py b/test/lib/ansible_test/_internal/__init__.py
index d218b561..ee24a852 100644
--- a/test/lib/ansible_test/_internal/__init__.py
+++ b/test/lib/ansible_test/_internal/__init__.py
@@ -18,6 +18,7 @@ from .constants import (
from .util import (
ApplicationError,
HostConnectionError,
+ TimeoutExpiredError,
display,
report_locale,
)
@@ -109,6 +110,9 @@ def main(cli_args: t.Optional[list[str]] = None) -> None:
except ApplicationError as ex:
display.fatal('%s' % ex)
sys.exit(1)
+ except TimeoutExpiredError as ex:
+ display.fatal('%s' % ex)
+ sys.exit(1)
except KeyboardInterrupt:
sys.exit(2)
except BrokenPipeError:
diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py
index 9efcda26..9548d37c 100644
--- a/test/lib/ansible_test/_internal/ansible_util.py
+++ b/test/lib/ansible_test/_internal/ansible_util.py
@@ -114,27 +114,27 @@ def ansible_environment(args: CommonConfig, color: bool = True, ansible_config:
# standard path injection is not effective for ansible-connection, instead the location must be configured
# ansible-connection only requires the injector for code coverage
# the correct python interpreter is already selected using the sys.executable used to invoke ansible
- ansible.update(dict(
+ ansible.update(
ANSIBLE_CONNECTION_PATH=os.path.join(get_injector_path(), 'ansible-connection'),
- ))
+ )
if isinstance(args, PosixIntegrationConfig):
- ansible.update(dict(
+ ansible.update(
ANSIBLE_PYTHON_INTERPRETER='/set/ansible_python_interpreter/in/inventory', # force tests to set ansible_python_interpreter in inventory
- ))
+ )
env.update(ansible)
if args.debug:
- env.update(dict(
+ env.update(
ANSIBLE_DEBUG='true',
ANSIBLE_LOG_PATH=os.path.join(ResultType.LOGS.name, 'debug.log'),
- ))
+ )
if data_context().content.collection:
- env.update(dict(
+ env.update(
ANSIBLE_COLLECTIONS_PATH=data_context().content.collection.root,
- ))
+ )
if data_context().content.is_ansible:
env.update(configure_plugin_paths(args))
@@ -252,12 +252,14 @@ License: GPLv3+
class CollectionDetail:
"""Collection detail."""
+
def __init__(self) -> None:
self.version: t.Optional[str] = None
class CollectionDetailError(ApplicationError):
"""An error occurred retrieving collection detail."""
+
def __init__(self, reason: str) -> None:
super().__init__('Error collecting collection detail: %s' % reason)
self.reason = reason
diff --git a/test/lib/ansible_test/_internal/become.py b/test/lib/ansible_test/_internal/become.py
index e653959a..f8320b3b 100644
--- a/test/lib/ansible_test/_internal/become.py
+++ b/test/lib/ansible_test/_internal/become.py
@@ -11,6 +11,7 @@ from .util import (
class Become(metaclass=abc.ABCMeta):
"""Base class for become implementations."""
+
@classmethod
def name(cls) -> str:
"""The name of this plugin."""
@@ -28,6 +29,7 @@ class Become(metaclass=abc.ABCMeta):
class Doas(Become):
"""Become using 'doas'."""
+
@property
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
@@ -47,6 +49,7 @@ class Doas(Become):
class DoasSudo(Doas):
"""Become using 'doas' in ansible-test and then after bootstrapping use 'sudo' for other ansible commands."""
+
@classmethod
def name(cls) -> str:
"""The name of this plugin."""
@@ -60,6 +63,7 @@ class DoasSudo(Doas):
class Su(Become):
"""Become using 'su'."""
+
@property
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
@@ -77,6 +81,7 @@ class Su(Become):
class SuSudo(Su):
"""Become using 'su' in ansible-test and then after bootstrapping use 'sudo' for other ansible commands."""
+
@classmethod
def name(cls) -> str:
"""The name of this plugin."""
@@ -90,6 +95,7 @@ class SuSudo(Su):
class Sudo(Become):
"""Become using 'sudo'."""
+
@property
def method(self) -> str:
"""The name of the Ansible become plugin that is equivalent to this."""
diff --git a/test/lib/ansible_test/_internal/bootstrap.py b/test/lib/ansible_test/_internal/bootstrap.py
index 261ef59b..b0cfb601 100644
--- a/test/lib/ansible_test/_internal/bootstrap.py
+++ b/test/lib/ansible_test/_internal/bootstrap.py
@@ -26,6 +26,7 @@ from .core_ci import (
@dataclasses.dataclass
class Bootstrap:
"""Base class for bootstrapping systems."""
+
controller: bool
python_versions: list[str]
ssh_key: SshKey
@@ -65,6 +66,7 @@ class Bootstrap:
@dataclasses.dataclass
class BootstrapDocker(Bootstrap):
"""Bootstrap docker instances."""
+
def get_variables(self) -> dict[str, t.Union[str, list[str]]]:
"""The variables to template in the bootstrapping script."""
variables = super().get_variables()
@@ -80,6 +82,7 @@ class BootstrapDocker(Bootstrap):
@dataclasses.dataclass
class BootstrapRemote(Bootstrap):
"""Bootstrap remote instances."""
+
platform: str
platform_version: str
diff --git a/test/lib/ansible_test/_internal/cache.py b/test/lib/ansible_test/_internal/cache.py
index 3afe422f..d291dcee 100644
--- a/test/lib/ansible_test/_internal/cache.py
+++ b/test/lib/ansible_test/_internal/cache.py
@@ -13,6 +13,7 @@ TValue = t.TypeVar('TValue')
class CommonCache:
"""Common cache."""
+
def __init__(self, args: CommonConfig) -> None:
self.args = args
diff --git a/test/lib/ansible_test/_internal/cgroup.py b/test/lib/ansible_test/_internal/cgroup.py
index 977e359d..a08513a5 100644
--- a/test/lib/ansible_test/_internal/cgroup.py
+++ b/test/lib/ansible_test/_internal/cgroup.py
@@ -9,6 +9,7 @@ import re
class CGroupPath:
"""Linux cgroup path constants."""
+
ROOT = '/sys/fs/cgroup'
SYSTEMD = '/sys/fs/cgroup/systemd'
SYSTEMD_RELEASE_AGENT = '/sys/fs/cgroup/systemd/release_agent'
@@ -16,6 +17,7 @@ class CGroupPath:
class MountType:
"""Linux filesystem mount type constants."""
+
TMPFS = 'tmpfs'
CGROUP_V1 = 'cgroup'
CGROUP_V2 = 'cgroup2'
@@ -24,6 +26,7 @@ class MountType:
@dataclasses.dataclass(frozen=True)
class CGroupEntry:
"""A single cgroup entry parsed from '/proc/{pid}/cgroup' in the proc filesystem."""
+
id: int
subsystem: str
path: pathlib.PurePosixPath
@@ -46,7 +49,7 @@ class CGroupEntry:
return cls(
id=int(cid),
subsystem=subsystem.removeprefix('name='),
- path=pathlib.PurePosixPath(path)
+ path=pathlib.PurePosixPath(path),
)
@classmethod
@@ -58,6 +61,7 @@ class CGroupEntry:
@dataclasses.dataclass(frozen=True)
class MountEntry:
"""A single mount info entry parsed from '/proc/{pid}/mountinfo' in the proc filesystem."""
+
mount_id: int
parent_id: int
device_major: int
diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py
index 97e41dae..5e53b150 100644
--- a/test/lib/ansible_test/_internal/ci/__init__.py
+++ b/test/lib/ansible_test/_internal/ci/__init__.py
@@ -39,6 +39,7 @@ class ChangeDetectionNotSupported(ApplicationError):
class CIProvider(metaclass=abc.ABCMeta):
"""Base class for CI provider plugins."""
+
priority = 500
@staticmethod
@@ -61,8 +62,8 @@ class CIProvider(metaclass=abc.ABCMeta):
"""Return a resource prefix specific to this CI provider."""
@abc.abstractmethod
- def get_base_branch(self) -> str:
- """Return the base branch or an empty string."""
+ def get_base_commit(self, args: CommonConfig) -> str:
+ """Return the base commit or an empty string."""
@abc.abstractmethod
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
@@ -103,6 +104,7 @@ def get_ci_provider() -> CIProvider:
class AuthHelper(metaclass=abc.ABCMeta):
"""Public key based authentication helper for Ansible Core CI."""
+
def sign_request(self, request: dict[str, t.Any]) -> None:
"""Sign the given auth request and make the public key available."""
payload_bytes = to_bytes(json.dumps(request, sort_keys=True))
@@ -141,6 +143,7 @@ class AuthHelper(metaclass=abc.ABCMeta):
class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""Cryptography based public key based authentication helper for Ansible Core CI."""
+
def sign_bytes(self, payload_bytes: bytes) -> bytes:
"""Sign the given payload and return the signature, initializing a new key pair if required."""
# import cryptography here to avoid overhead and failures in environments which do not use/provide it
@@ -186,6 +189,7 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""OpenSSL based public key based authentication helper for Ansible Core CI."""
+
def sign_bytes(self, payload_bytes: bytes) -> bytes:
"""Sign the given payload and return the signature, initializing a new key pair if required."""
private_key_pem = self.initialize_private_key()
diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py
index 9170dfec..404f8056 100644
--- a/test/lib/ansible_test/_internal/ci/azp.py
+++ b/test/lib/ansible_test/_internal/ci/azp.py
@@ -40,9 +40,12 @@ CODE = 'azp'
class AzurePipelines(CIProvider):
"""CI provider implementation for Azure Pipelines."""
+
def __init__(self) -> None:
self.auth = AzurePipelinesAuthHelper()
+ self._changes: AzurePipelinesChanges | None = None
+
@staticmethod
def is_supported() -> bool:
"""Return True if this provider is supported in the current running environment."""
@@ -71,18 +74,20 @@ class AzurePipelines(CIProvider):
return prefix
- def get_base_branch(self) -> str:
- """Return the base branch or an empty string."""
- base_branch = os.environ.get('SYSTEM_PULLREQUEST_TARGETBRANCH') or os.environ.get('BUILD_SOURCEBRANCHNAME')
+ def get_base_commit(self, args: CommonConfig) -> str:
+ """Return the base commit or an empty string."""
+ return self._get_changes(args).base_commit or ''
- if base_branch:
- base_branch = 'origin/%s' % base_branch
+ def _get_changes(self, args: CommonConfig) -> AzurePipelinesChanges:
+ """Return an AzurePipelinesChanges instance, which will be created on first use."""
+ if not self._changes:
+ self._changes = AzurePipelinesChanges(args)
- return base_branch or ''
+ return self._changes
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
"""Initialize change detection."""
- result = AzurePipelinesChanges(args)
+ result = self._get_changes(args)
if result.is_pr:
job_type = 'pull request'
@@ -128,7 +133,7 @@ class AzurePipelines(CIProvider):
def get_git_details(self, args: CommonConfig) -> t.Optional[dict[str, t.Any]]:
"""Return details about git in the current environment."""
- changes = AzurePipelinesChanges(args)
+ changes = self._get_changes(args)
details = dict(
base_commit=changes.base_commit,
@@ -143,6 +148,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper):
Authentication helper for Azure Pipelines.
Based on cryptography since it is provided by the default Azure Pipelines environment.
"""
+
def publish_public_key(self, public_key_pem: str) -> None:
"""Publish the given public key."""
try:
@@ -162,6 +168,7 @@ class AzurePipelinesAuthHelper(CryptographyAuthHelper):
class AzurePipelinesChanges:
"""Change information for an Azure Pipelines build."""
+
def __init__(self, args: CommonConfig) -> None:
self.args = args
self.git = Git()
diff --git a/test/lib/ansible_test/_internal/ci/local.py b/test/lib/ansible_test/_internal/ci/local.py
index ec031944..4b9ab13e 100644
--- a/test/lib/ansible_test/_internal/ci/local.py
+++ b/test/lib/ansible_test/_internal/ci/local.py
@@ -36,6 +36,7 @@ CODE = '' # not really a CI provider, so use an empty string for the code
class Local(CIProvider):
"""CI provider implementation when not using CI."""
+
priority = 1000
@staticmethod
@@ -62,8 +63,8 @@ class Local(CIProvider):
return prefix
- def get_base_branch(self) -> str:
- """Return the base branch or an empty string."""
+ def get_base_commit(self, args: CommonConfig) -> str:
+ """Return the base commit or an empty string."""
return ''
def detect_changes(self, args: TestConfig) -> t.Optional[list[str]]:
@@ -149,6 +150,7 @@ class Local(CIProvider):
class InvalidBranch(ApplicationError):
"""Exception for invalid branch specification."""
+
def __init__(self, branch: str, reason: str) -> None:
message = 'Invalid branch: %s\n%s' % (branch, reason)
@@ -159,6 +161,7 @@ class InvalidBranch(ApplicationError):
class LocalChanges:
"""Change information for local work."""
+
def __init__(self, args: TestConfig) -> None:
self.args = args
self.git = Git()
diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py
index aacc2ca9..bca02403 100644
--- a/test/lib/ansible_test/_internal/classification/__init__.py
+++ b/test/lib/ansible_test/_internal/classification/__init__.py
@@ -176,6 +176,7 @@ def categorize_changes(args: TestConfig, paths: list[str], verbose_command: t.Op
class PathMapper:
"""Map file paths to test commands and targets."""
+
def __init__(self, args: TestConfig) -> None:
self.args = args
self.integration_all_target = get_integration_all_target(self.args)
@@ -661,21 +662,58 @@ class PathMapper:
def _classify_ansible(self, path: str) -> t.Optional[dict[str, str]]:
"""Return the classification for the given path using rules specific to Ansible."""
+ dirname = os.path.dirname(path)
+ filename = os.path.basename(path)
+ name, ext = os.path.splitext(filename)
+
+ minimal: dict[str, str] = {}
+
+ packaging = {
+ 'integration': 'packaging/',
+ }
+
+ # Early classification that needs to occur before common classification belongs here.
+
if path.startswith('test/units/compat/'):
return {
'units': 'test/units/',
}
+ if dirname == '.azure-pipelines/commands':
+ test_map = {
+ 'cloud.sh': 'integration:cloud/',
+ 'linux.sh': 'integration:all',
+ 'network.sh': 'network-integration:all',
+ 'remote.sh': 'integration:all',
+ 'sanity.sh': 'sanity:all',
+ 'units.sh': 'units:all',
+ 'windows.sh': 'windows-integration:all',
+ }
+
+ test_match = test_map.get(filename)
+
+ if test_match:
+ test_command, test_target = test_match.split(':')
+
+ return {
+ test_command: test_target,
+ }
+
+ cloud_target = f'cloud/{name}/'
+
+ if cloud_target in self.integration_targets_by_alias:
+ return {
+ 'integration': cloud_target,
+ }
+
+ # Classification common to both ansible and collections.
+
result = self._classify_common(path)
if result is not None:
return result
- dirname = os.path.dirname(path)
- filename = os.path.basename(path)
- name, ext = os.path.splitext(filename)
-
- minimal: dict[str, str] = {}
+ # Classification here is specific to ansible, and runs after common classification.
if path.startswith('bin/'):
return all_tests(self.args) # broad impact, run all tests
@@ -715,6 +753,9 @@ class PathMapper:
return minimal
if path.startswith('packaging/'):
+ if path.startswith('packaging/pep517_backend/'):
+ return packaging
+
return minimal
if path.startswith('test/ansible_test/'):
@@ -791,39 +832,6 @@ class PathMapper:
if path.startswith('test/support/'):
return all_tests(self.args) # test infrastructure, run all tests
- if path.startswith('test/utils/shippable/'):
- if dirname == 'test/utils/shippable':
- test_map = {
- 'cloud.sh': 'integration:cloud/',
- 'linux.sh': 'integration:all',
- 'network.sh': 'network-integration:all',
- 'remote.sh': 'integration:all',
- 'sanity.sh': 'sanity:all',
- 'units.sh': 'units:all',
- 'windows.sh': 'windows-integration:all',
- }
-
- test_match = test_map.get(filename)
-
- if test_match:
- test_command, test_target = test_match.split(':')
-
- return {
- test_command: test_target,
- }
-
- cloud_target = 'cloud/%s/' % name
-
- if cloud_target in self.integration_targets_by_alias:
- return {
- 'integration': cloud_target,
- }
-
- return all_tests(self.args) # test infrastructure, run all tests
-
- if path.startswith('test/utils/'):
- return minimal
-
if '/' not in path:
if path in (
'.gitattributes',
@@ -835,16 +843,17 @@ class PathMapper:
return minimal
if path in (
- 'setup.py',
+ 'MANIFEST.in',
+ 'pyproject.toml',
+ 'requirements.txt',
+ 'setup.cfg',
+ 'setup.py',
):
- return all_tests(self.args) # broad impact, run all tests
+ return packaging
if ext in (
- '.in',
'.md',
'.rst',
- '.toml',
- '.txt',
):
return minimal
diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py
index 77ffeacf..7036de1a 100644
--- a/test/lib/ansible_test/_internal/classification/python.py
+++ b/test/lib/ansible_test/_internal/classification/python.py
@@ -231,6 +231,7 @@ def relative_to_absolute(name: str, level: int, module: str, path: str, lineno:
class ModuleUtilFinder(ast.NodeVisitor):
"""AST visitor to find valid module_utils imports."""
+
def __init__(self, path: str, module_utils: set[str]) -> None:
self.path = path
self.module_utils = module_utils
diff --git a/test/lib/ansible_test/_internal/cli/actions.py b/test/lib/ansible_test/_internal/cli/actions.py
index 3359a848..9e1b7b44 100644
--- a/test/lib/ansible_test/_internal/cli/actions.py
+++ b/test/lib/ansible_test/_internal/cli/actions.py
@@ -22,6 +22,7 @@ from .parsers import (
class OriginControllerAction(CompositeAction):
"""Composite action parser for the controller when the only option is `origin`."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return OriginControllerParser()
@@ -29,6 +30,7 @@ class OriginControllerAction(CompositeAction):
class DelegatedControllerAction(CompositeAction):
"""Composite action parser for the controller when delegation is supported."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return DelegatedControllerParser()
@@ -36,6 +38,7 @@ class DelegatedControllerAction(CompositeAction):
class PosixTargetAction(CompositeAction):
"""Composite action parser for a POSIX target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return PosixTargetParser()
@@ -43,6 +46,7 @@ class PosixTargetAction(CompositeAction):
class WindowsTargetAction(CompositeAction):
"""Composite action parser for a Windows target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return WindowsTargetParser()
@@ -50,6 +54,7 @@ class WindowsTargetAction(CompositeAction):
class NetworkTargetAction(CompositeAction):
"""Composite action parser for a network target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return NetworkTargetParser()
@@ -57,6 +62,7 @@ class NetworkTargetAction(CompositeAction):
class SanityPythonTargetAction(CompositeAction):
"""Composite action parser for a sanity target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return SanityPythonTargetParser()
@@ -64,6 +70,7 @@ class SanityPythonTargetAction(CompositeAction):
class UnitsPythonTargetAction(CompositeAction):
"""Composite action parser for a units target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return UnitsPythonTargetParser()
@@ -71,6 +78,7 @@ class UnitsPythonTargetAction(CompositeAction):
class PosixSshTargetAction(CompositeAction):
"""Composite action parser for a POSIX SSH target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return PosixSshTargetParser()
@@ -78,6 +86,7 @@ class PosixSshTargetAction(CompositeAction):
class WindowsSshTargetAction(CompositeAction):
"""Composite action parser for a Windows SSH target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return WindowsSshTargetParser()
@@ -85,6 +94,7 @@ class WindowsSshTargetAction(CompositeAction):
class NetworkSshTargetAction(CompositeAction):
"""Composite action parser for a network SSH target."""
+
def create_parser(self) -> NamespaceParser:
"""Return a namespace parser to parse the argument associated with this action."""
return NetworkSshTargetParser()
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
index 540cf552..4ee845f7 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
@@ -34,6 +34,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
These registered completions, if provided, are used to filter the final completion results.
This works around a known bug: https://github.com/kislyuk/argcomplete/issues/221
"""
+
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
@@ -86,6 +87,7 @@ class RegisteredCompletionFinder(OptionCompletionFinder):
class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
"""Base class for actions that parse composite arguments."""
+
documentation_state: dict[t.Type[CompositeAction], DocumentationState] = {}
def __init__(
@@ -134,6 +136,7 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
class CompositeActionCompletionFinder(RegisteredCompletionFinder):
"""Completion finder with support for composite argument parsing."""
+
def get_completions(
self,
prefix: str,
@@ -255,7 +258,7 @@ def complete(
list_mode=True, # abuse list mode to enable preservation of the literal results
consumed='',
continuation='',
- matches=['completion', 'invalid']
+ matches=['completion', 'invalid'],
)
else:
answer = ex
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/actions.py b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
index 2bcf982c..7399fe9c 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/actions.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
@@ -8,6 +8,7 @@ import typing as t
class EnumAction(argparse.Action):
"""Parse an enum using the lowercase enum names."""
+
def __init__(self, **kwargs: t.Any) -> None:
self.enum_type: t.Type[enum.Enum] = kwargs.pop('type', None)
kwargs.setdefault('choices', tuple(e.name.lower() for e in self.enum_type))
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
index cf5776da..26abcf82 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
@@ -9,6 +9,7 @@ import typing as t
class Substitute:
"""Substitute for missing class which accepts all arguments."""
+
def __init__(self, *args, **kwargs) -> None:
pass
@@ -16,10 +17,19 @@ class Substitute:
try:
import argcomplete
- from argcomplete import (
- CompletionFinder,
- default_validator,
- )
+ try:
+ # argcomplete 3+
+ # see: https://github.com/kislyuk/argcomplete/commit/bd781cb08512b94966312377186ebc5550f46ae0
+ from argcomplete.finders import (
+ CompletionFinder,
+ default_validator,
+ )
+ except ImportError:
+ # argcomplete <3
+ from argcomplete import (
+ CompletionFinder,
+ default_validator,
+ )
warn = argcomplete.warn # pylint: disable=invalid-name
except ImportError:
@@ -35,6 +45,7 @@ class CompType(enum.Enum):
Bash COMP_TYPE argument completion types.
For documentation, see: https://www.gnu.org/software/bash/manual/html_node/Bash-Variables.html#index-COMP_005fTYPE
"""
+
COMPLETION = '\t'
"""
Standard completion, typically triggered by a single tab.
@@ -70,7 +81,13 @@ class CompType(enum.Enum):
def register_safe_action(action_type: t.Type[argparse.Action]) -> None:
"""Register the given action as a safe action for argcomplete to use during completion if it is not already registered."""
if argcomplete and action_type not in argcomplete.safe_actions:
- argcomplete.safe_actions += (action_type,)
+ if isinstance(argcomplete.safe_actions, set):
+ # argcomplete 3+
+ # see: https://github.com/kislyuk/argcomplete/commit/bd781cb08512b94966312377186ebc5550f46ae0
+ argcomplete.safe_actions.add(action_type)
+ else:
+ # argcomplete <3
+ argcomplete.safe_actions += (action_type,)
def get_comp_type() -> t.Optional[CompType]:
@@ -85,6 +102,7 @@ class OptionCompletionFinder(CompletionFinder):
Custom completion finder for argcomplete.
It provides support for running completion in list mode, which argcomplete natively handles the same as standard completion.
"""
+
enabled = bool(argcomplete)
def __init__(self, *args, validator=None, **kwargs) -> None:
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
index d07e03cb..00fa97e8 100644
--- a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
+++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
@@ -32,18 +32,21 @@ class Completion(Exception):
@dataclasses.dataclass
class CompletionUnavailable(Completion):
"""Argument completion unavailable."""
+
message: str = 'No completions available.'
@dataclasses.dataclass
class CompletionError(Completion):
"""Argument completion error."""
+
message: t.Optional[str] = None
@dataclasses.dataclass
class CompletionSuccess(Completion):
"""Successful argument completion result."""
+
list_mode: bool
consumed: str
continuation: str
@@ -72,6 +75,7 @@ class CompletionSuccess(Completion):
class ParserMode(enum.Enum):
"""Mode the parser is operating in."""
+
PARSE = enum.auto()
COMPLETE = enum.auto()
LIST = enum.auto()
@@ -84,6 +88,7 @@ class ParserError(Exception):
@dataclasses.dataclass
class ParserBoundary:
"""Boundary details for parsing composite input."""
+
delimiters: str
required: bool
match: t.Optional[str] = None
@@ -93,6 +98,7 @@ class ParserBoundary:
@dataclasses.dataclass
class ParserState:
"""State of the composite argument parser."""
+
mode: ParserMode
remainder: str = ''
consumed: str = ''
@@ -194,11 +200,13 @@ class ParserState:
@dataclasses.dataclass
class DocumentationState:
"""State of the composite argument parser's generated documentation."""
+
sections: dict[str, str] = dataclasses.field(default_factory=dict)
class Parser(metaclass=abc.ABCMeta):
"""Base class for all composite argument parsers."""
+
@abc.abstractmethod
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
@@ -210,6 +218,7 @@ class Parser(metaclass=abc.ABCMeta):
class MatchConditions(enum.Flag):
"""Acceptable condition(s) for matching user input to available choices."""
+
CHOICE = enum.auto()
"""Match any choice."""
ANY = enum.auto()
@@ -220,6 +229,7 @@ class MatchConditions(enum.Flag):
class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers which use a list of choices that can be generated during completion."""
+
def __init__(self, conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.conditions = conditions
@@ -275,6 +285,7 @@ class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
class ChoicesParser(DynamicChoicesParser):
"""Composite argument parser which relies on a static list of choices."""
+
def __init__(self, choices: list[str], conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.choices = choices
@@ -291,6 +302,7 @@ class ChoicesParser(DynamicChoicesParser):
class EnumValueChoicesParser(ChoicesParser):
"""Composite argument parser which relies on a static list of choices derived from the values of an enum."""
+
def __init__(self, enum_type: t.Type[enum.Enum], conditions: MatchConditions = MatchConditions.CHOICE) -> None:
self.enum_type = enum_type
@@ -304,6 +316,7 @@ class EnumValueChoicesParser(ChoicesParser):
class IntegerParser(DynamicChoicesParser):
"""Composite argument parser for integers."""
+
PATTERN = re.compile('^[1-9][0-9]*$')
def __init__(self, maximum: t.Optional[int] = None) -> None:
@@ -341,6 +354,7 @@ class IntegerParser(DynamicChoicesParser):
class BooleanParser(ChoicesParser):
"""Composite argument parser for boolean (yes/no) values."""
+
def __init__(self) -> None:
super().__init__(['yes', 'no'])
@@ -352,6 +366,7 @@ class BooleanParser(ChoicesParser):
class AnyParser(ChoicesParser):
"""Composite argument parser which accepts any input value."""
+
def __init__(self, nothing: bool = False, no_match_message: t.Optional[str] = None) -> None:
self.no_match_message = no_match_message
@@ -379,6 +394,7 @@ class AnyParser(ChoicesParser):
class RelativePathNameParser(DynamicChoicesParser):
"""Composite argument parser for relative path names."""
+
RELATIVE_NAMES = ['.', '..']
def __init__(self, choices: list[str]) -> None:
@@ -400,6 +416,7 @@ class RelativePathNameParser(DynamicChoicesParser):
class FileParser(Parser):
"""Composite argument parser for absolute or relative file paths."""
+
def parse(self, state: ParserState) -> str:
"""Parse the input from the given state and return the result."""
if state.mode == ParserMode.PARSE:
@@ -432,6 +449,7 @@ class FileParser(Parser):
class AbsolutePathParser(Parser):
"""Composite argument parser for absolute file paths. Paths are only verified for proper syntax, not for existence."""
+
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
path = ''
@@ -443,13 +461,14 @@ class AbsolutePathParser(Parser):
else:
path += ChoicesParser([PATH_DELIMITER]).parse(state)
- path += (boundary.match or '')
+ path += boundary.match or ''
return path
class NamespaceParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers that store their results in a namespace."""
+
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = state.current_namespace
@@ -496,6 +515,7 @@ class NamespaceParser(Parser, metaclass=abc.ABCMeta):
class NamespaceWrappedParser(NamespaceParser):
"""Composite argument parser that wraps a non-namespace parser and stores the result in a namespace."""
+
def __init__(self, dest: str, parser: Parser) -> None:
self._dest = dest
self.parser = parser
@@ -512,6 +532,7 @@ class NamespaceWrappedParser(NamespaceParser):
class KeyValueParser(Parser, metaclass=abc.ABCMeta):
"""Base class for key/value composite argument parsers."""
+
@abc.abstractmethod
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
@@ -538,6 +559,7 @@ class KeyValueParser(Parser, metaclass=abc.ABCMeta):
class PairParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers consisting of a left and right argument parser, with input separated by a delimiter."""
+
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = self.create_namespace()
@@ -577,6 +599,7 @@ class PairParser(Parser, metaclass=abc.ABCMeta):
class TypeParser(Parser, metaclass=abc.ABCMeta):
"""Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]: # pylint: disable=unused-argument
"""Return a dictionary of type names and type parsers."""
return self.get_stateless_parsers()
diff --git a/test/lib/ansible_test/_internal/cli/commands/env.py b/test/lib/ansible_test/_internal/cli/commands/env.py
index 0cd21145..8b56e4f1 100644
--- a/test/lib/ansible_test/_internal/cli/commands/env.py
+++ b/test/lib/ansible_test/_internal/cli/commands/env.py
@@ -55,7 +55,7 @@ def do_env(
env.add_argument(
'--timeout',
- type=int,
+ type=float,
metavar='MINUTES',
help='timeout for future ansible-test commands (0 clears)',
)
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/network.py b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
index a05985b5..a42ba919 100644
--- a/test/lib/ansible_test/_internal/cli/commands/integration/network.py
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
@@ -50,7 +50,8 @@ def do_network_integration(
parser.set_defaults(
func=command_network_integration,
targets_func=walk_network_integration_targets,
- config=NetworkIntegrationConfig)
+ config=NetworkIntegrationConfig,
+ )
network_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='network integration test arguments'))
diff --git a/test/lib/ansible_test/_internal/cli/commands/sanity.py b/test/lib/ansible_test/_internal/cli/commands/sanity.py
index 8b4a9ae5..c4f0c0a0 100644
--- a/test/lib/ansible_test/_internal/cli/commands/sanity.py
+++ b/test/lib/ansible_test/_internal/cli/commands/sanity.py
@@ -16,10 +16,6 @@ from ...target import (
walk_sanity_targets,
)
-from ...data import (
- data_context,
-)
-
from ..environments import (
CompositeActionCompletionFinder,
ControllerMode,
@@ -43,7 +39,8 @@ def do_sanity(
parser.set_defaults(
func=command_sanity,
targets_func=walk_sanity_targets,
- config=SanityConfig)
+ config=SanityConfig,
+ )
sanity = parser.add_argument_group(title='sanity test arguments')
@@ -81,17 +78,6 @@ def do_sanity(
help='enable optional errors',
)
- if data_context().content.is_ansible:
- sanity.add_argument(
- '--keep-git',
- action='store_true',
- help='transfer git related files to the remote host/container',
- )
- else:
- sanity.set_defaults(
- keep_git=False,
- )
-
sanity.add_argument(
'--lint',
action='store_true',
@@ -113,7 +99,7 @@ def do_sanity(
sanity.add_argument(
'--prime-venvs',
action='store_true',
- help='prepare virtual environments without running tests'
+ help='prepare virtual environments without running tests',
)
add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.SANITY) # sanity
diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py
index 93006d5c..19996faf 100644
--- a/test/lib/ansible_test/_internal/cli/compat.py
+++ b/test/lib/ansible_test/_internal/cli/compat.py
@@ -84,24 +84,28 @@ def get_option_name(name: str) -> str:
class PythonVersionUnsupportedError(ApplicationError):
"""A Python version was requested for a context which does not support that version."""
+
def __init__(self, context: str, version: str, versions: c.Iterable[str]) -> None:
super().__init__(f'Python {version} is not supported by environment `{context}`. Supported Python version(s) are: {", ".join(versions)}')
class PythonVersionUnspecifiedError(ApplicationError):
"""A Python version was not specified for a context which is unknown, thus the Python version is unknown."""
+
def __init__(self, context: str) -> None:
super().__init__(f'A Python version was not specified for environment `{context}`. Use the `--python` option to specify a Python version.')
class ControllerNotSupportedError(ApplicationError):
"""Option(s) were specified which do not provide support for the controller and would be ignored because they are irrelevant for the target."""
+
def __init__(self, context: str) -> None:
super().__init__(f'Environment `{context}` does not provide a Python version supported by the controller.')
class OptionsConflictError(ApplicationError):
"""Option(s) were specified which conflict with other options."""
+
def __init__(self, first: c.Iterable[str], second: c.Iterable[str]) -> None:
super().__init__(f'Options `{" ".join(first)}` cannot be combined with options `{" ".join(second)}`.')
@@ -109,6 +113,7 @@ class OptionsConflictError(ApplicationError):
@dataclasses.dataclass(frozen=True)
class LegacyHostOptions:
"""Legacy host options used prior to the availability of separate controller and target host configuration."""
+
python: t.Optional[str] = None
python_interpreter: t.Optional[str] = None
local: t.Optional[bool] = None
@@ -161,6 +166,7 @@ class LegacyHostOptions:
class TargetMode(enum.Enum):
"""Type of provisioning to use for the targets."""
+
WINDOWS_INTEGRATION = enum.auto() # windows-integration
NETWORK_INTEGRATION = enum.auto() # network-integration
POSIX_INTEGRATION = enum.auto() # integration
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py
index 5063715a..94cafae3 100644
--- a/test/lib/ansible_test/_internal/cli/environments.py
+++ b/test/lib/ansible_test/_internal/cli/environments.py
@@ -75,6 +75,7 @@ from ..ci import (
class ControllerMode(enum.Enum):
"""Type of provisioning to use for the controller."""
+
NO_DELEGATION = enum.auto()
ORIGIN = enum.auto()
DELEGATED = enum.auto()
@@ -252,7 +253,8 @@ def add_legacy_environment_options(
):
"""Add legacy options for controlling the test environment."""
environment: argparse.ArgumentParser = parser.add_argument_group( # type: ignore[assignment] # real type private
- title='environment arguments (mutually exclusive with "composite environment arguments" below)')
+ title='environment arguments (mutually exclusive with "composite environment arguments" below)',
+ )
add_environments_python(environment, target_mode)
add_environments_host(environment, controller_mode, target_mode)
@@ -383,7 +385,8 @@ def add_environment_venv(
environments_parser.add_argument(
'--venv-system-site-packages',
action='store_true',
- help='enable system site packages')
+ help='enable system site packages',
+ )
def add_global_docker(
diff --git a/test/lib/ansible_test/_internal/cli/parsers/__init__.py b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
index 1aedf630..93ead807 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/__init__.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
@@ -53,6 +53,7 @@ from .base_argument_parsers import (
class OriginControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is not supported."""
+
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
return dict(
@@ -71,6 +72,7 @@ class OriginControllerParser(ControllerNamespaceParser, TypeParser):
class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
"""Composite argument parser for the controller when delegation is supported."""
+
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: dict[str, Parser] = dict(
@@ -97,6 +99,7 @@ class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
class PosixTargetParser(TargetNamespaceParser, TypeParser):
"""Composite argument parser for a POSIX target."""
+
def get_stateless_parsers(self) -> dict[str, Parser]:
"""Return a dictionary of type names and type parsers."""
parsers: dict[str, Parser] = dict(
@@ -127,6 +130,7 @@ class PosixTargetParser(TargetNamespaceParser, TypeParser):
class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
"""Composite argument parser for a Windows target."""
+
@property
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
@@ -169,6 +173,7 @@ class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
"""Composite argument parser for a network target."""
+
@property
def allow_inventory(self) -> bool:
"""True if inventory is allowed, otherwise False."""
@@ -211,6 +216,7 @@ class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
class PythonTargetParser(TargetsNamespaceParser, Parser):
"""Composite argument parser for a Python target."""
+
def __init__(self, allow_venv: bool) -> None:
super().__init__()
@@ -249,18 +255,21 @@ class PythonTargetParser(TargetsNamespaceParser, Parser):
class SanityPythonTargetParser(PythonTargetParser):
"""Composite argument parser for a sanity Python target."""
+
def __init__(self) -> None:
super().__init__(allow_venv=False)
class UnitsPythonTargetParser(PythonTargetParser):
"""Composite argument parser for a units Python target."""
+
def __init__(self) -> None:
super().__init__(allow_venv=True)
class PosixSshTargetParser(PosixTargetParser):
"""Composite argument parser for a POSIX SSH target."""
+
@property
def option_name(self) -> str:
"""The option name used for this parser."""
@@ -269,6 +278,7 @@ class PosixSshTargetParser(PosixTargetParser):
class WindowsSshTargetParser(WindowsTargetParser):
"""Composite argument parser for a Windows SSH target."""
+
@property
def option_name(self) -> str:
"""The option name used for this parser."""
@@ -287,6 +297,7 @@ class WindowsSshTargetParser(WindowsTargetParser):
class NetworkSshTargetParser(NetworkTargetParser):
"""Composite argument parser for a network SSH target."""
+
@property
def option_name(self) -> str:
"""The option name used for this parser."""
diff --git a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
index aac7a694..d0124cf5 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
@@ -13,6 +13,7 @@ from ..argparsing.parsers import (
class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for controller namespace parsers."""
+
@property
def dest(self) -> str:
"""The name of the attribute where the value should be stored."""
@@ -28,6 +29,7 @@ class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for target namespace parsers involving a single target."""
+
@property
def option_name(self) -> str:
"""The option name used for this parser."""
@@ -51,6 +53,7 @@ class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
"""Base class for controller namespace parsers involving multiple targets."""
+
@property
def option_name(self) -> str:
"""The option name used for this parser."""
@@ -69,5 +72,6 @@ class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
class ControllerRequiredFirstError(CompletionError):
"""Exception raised when controller and target options are specified out-of-order."""
+
def __init__(self) -> None:
super().__init__('The `--controller` option must be specified before `--target` option(s).')
diff --git a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
index ee6f146c..a90a59ac 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
@@ -63,6 +63,7 @@ from .helpers import (
class OriginParser(Parser):
"""Composite argument parser for the origin."""
+
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = OriginConfig()
@@ -81,6 +82,7 @@ class OriginParser(Parser):
class ControllerParser(Parser):
"""Composite argument parser for the controller."""
+
def parse(self, state: ParserState) -> t.Any:
"""Parse the input from the given state and return the result."""
namespace = ControllerConfig()
@@ -99,6 +101,7 @@ class ControllerParser(Parser):
class DockerParser(PairParser):
"""Composite argument parser for a docker host."""
+
def __init__(self, controller: bool) -> None:
self.controller = controller
@@ -142,6 +145,7 @@ class DockerParser(PairParser):
class PosixRemoteParser(PairParser):
"""Composite argument parser for a POSIX remote host."""
+
def __init__(self, controller: bool) -> None:
self.controller = controller
@@ -184,6 +188,7 @@ class PosixRemoteParser(PairParser):
class WindowsRemoteParser(PairParser):
"""Composite argument parser for a Windows remote host."""
+
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return WindowsRemoteConfig()
@@ -217,6 +222,7 @@ class WindowsRemoteParser(PairParser):
class NetworkRemoteParser(PairParser):
"""Composite argument parser for a network remote host."""
+
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return NetworkRemoteConfig()
@@ -250,6 +256,7 @@ class NetworkRemoteParser(PairParser):
class WindowsInventoryParser(PairParser):
"""Composite argument parser for a Windows inventory."""
+
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return WindowsInventoryConfig()
@@ -269,6 +276,7 @@ class WindowsInventoryParser(PairParser):
class NetworkInventoryParser(PairParser):
"""Composite argument parser for a network inventory."""
+
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return NetworkInventoryConfig()
@@ -288,6 +296,7 @@ class NetworkInventoryParser(PairParser):
class PosixSshParser(PairParser):
"""Composite argument parser for a POSIX SSH host."""
+
def create_namespace(self) -> t.Any:
"""Create and return a namespace."""
return PosixSshConfig()
diff --git a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
index 049b71ee..a046e51a 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
@@ -52,6 +52,7 @@ from .helpers import (
class OriginKeyValueParser(KeyValueParser):
"""Composite argument parser for origin key/value pairs."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = CONTROLLER_PYTHON_VERSIONS
@@ -75,6 +76,7 @@ class OriginKeyValueParser(KeyValueParser):
class ControllerKeyValueParser(KeyValueParser):
"""Composite argument parser for controller key/value pairs."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
versions = get_controller_pythons(state.root_namespace.controller, False)
@@ -99,6 +101,7 @@ class ControllerKeyValueParser(KeyValueParser):
class DockerKeyValueParser(KeyValueParser):
"""Composite argument parser for docker key/value pairs."""
+
def __init__(self, image: str, controller: bool) -> None:
self.controller = controller
self.versions = get_docker_pythons(image, controller, False)
@@ -135,6 +138,7 @@ class DockerKeyValueParser(KeyValueParser):
class PosixRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for POSIX remote key/value pairs."""
+
def __init__(self, name: str, controller: bool) -> None:
self.controller = controller
self.versions = get_remote_pythons(name, controller, False)
@@ -167,6 +171,7 @@ class PosixRemoteKeyValueParser(KeyValueParser):
class WindowsRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for Windows remote key/value pairs."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
@@ -188,6 +193,7 @@ class WindowsRemoteKeyValueParser(KeyValueParser):
class NetworkRemoteKeyValueParser(KeyValueParser):
"""Composite argument parser for network remote key/value pairs."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
@@ -213,6 +219,7 @@ class NetworkRemoteKeyValueParser(KeyValueParser):
class PosixSshKeyValueParser(KeyValueParser):
"""Composite argument parser for POSIX SSH host key/value pairs."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return dict(
@@ -234,6 +241,7 @@ class PosixSshKeyValueParser(KeyValueParser):
class EmptyKeyValueParser(KeyValueParser):
"""Composite argument parser when a key/value parser is required but there are no keys available."""
+
def get_parsers(self, state: ParserState) -> dict[str, Parser]:
"""Return a dictionary of key names and value parsers."""
return {}
diff --git a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
index 9453b760..f416281e 100644
--- a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
+++ b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
@@ -60,12 +60,14 @@ class PythonParser(Parser):
Known docker/remote environments limit the available Python versions to configured values known to be valid.
The origin host and unknown environments assume all relevant Python versions are available.
"""
- def __init__(self,
- versions: c.Sequence[str],
- *,
- allow_default: bool,
- allow_venv: bool,
- ):
+
+ def __init__(
+ self,
+ versions: c.Sequence[str],
+ *,
+ allow_default: bool,
+ allow_venv: bool,
+ ):
version_choices = list(versions)
if allow_default:
@@ -134,6 +136,7 @@ class PythonParser(Parser):
class PlatformParser(ChoicesParser):
"""Composite argument parser for "{platform}/{version}" formatted choices."""
+
def __init__(self, choices: list[str]) -> None:
super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY)
@@ -152,6 +155,7 @@ class SshConnectionParser(Parser):
Composite argument parser for connecting to a host using SSH.
Format: user@host[:port]
"""
+
EXPECTED_FORMAT = '{user}@{host}[:{port}]'
def parse(self, state: ParserState) -> t.Any:
diff --git a/test/lib/ansible_test/_internal/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
index 139cf3c6..c4c5f09e 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
@@ -68,6 +68,7 @@ COVERAGE_OUTPUT_FILE_NAME = 'coverage'
class CoverageConfig(EnvironmentConfig):
"""Configuration for the coverage command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'coverage')
@@ -96,7 +97,7 @@ def initialize_coverage(args: CoverageConfig, host_state: HostState) -> coverage
def run_coverage(args: CoverageConfig, host_state: HostState, output_file: str, command: str, cmd: list[str]) -> None:
"""Run the coverage cli tool with the specified options."""
env = common_environment()
- env.update(dict(COVERAGE_FILE=output_file))
+ env.update(COVERAGE_FILE=output_file)
cmd = ['python', '-m', 'coverage.__main__', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd
@@ -340,6 +341,7 @@ def sanitize_filename(
class PathChecker:
"""Checks code coverage paths to verify they are valid and reports on the findings."""
+
def __init__(self, args: CoverageConfig, collection_search_re: t.Optional[t.Pattern] = None) -> None:
self.args = args
self.collection_search_re = collection_search_re
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
index 37859e8f..2029d7be 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
@@ -9,6 +9,7 @@ from .. import (
class CoverageAnalyzeConfig(CoverageConfig):
"""Configuration for the `coverage analyze` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
index e3782cee..0bbb2873 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
@@ -28,6 +28,7 @@ from . import (
class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets combine` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
index ba903878..93197543 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
@@ -26,6 +26,7 @@ from . import (
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
index 29a8ee5b..ccedae7d 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
@@ -31,6 +31,7 @@ from . import (
class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets filter` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
index 127b5b7f..0f0da5de 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
@@ -52,6 +52,7 @@ from . import (
class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets generate` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
index c1c77e75..0a756643 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
@@ -32,6 +32,7 @@ from . import (
class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets missing` command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py
index 66210c73..12cb54e2 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py
@@ -34,6 +34,7 @@ from ...executor import (
from ...data import (
data_context,
+ PayloadConfig,
)
from ...host_configs import (
@@ -82,9 +83,10 @@ def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -
pairs = [(path, os.path.relpath(path, data_context().content.root)) for path in exported_paths]
- def coverage_callback(files: list[tuple[str, str]]) -> None:
+ def coverage_callback(payload_config: PayloadConfig) -> None:
"""Add the coverage files to the payload file list."""
display.info('Including %d exported coverage file(s) in payload.' % len(pairs), verbosity=1)
+ files = payload_config.files
files.extend(pairs)
data_context().register_payload_callback(coverage_callback)
@@ -101,11 +103,13 @@ def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) -
class ExportedCoverageDataNotFound(ApplicationError):
"""Exception when no combined coverage data is present yet is required."""
+
def __init__(self) -> None:
super().__init__(
'Coverage data must be exported before processing with the `--docker` or `--remote` option.\n'
'Export coverage with `ansible-test coverage combine` using the `--export` option.\n'
- 'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)
+ 'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path
+ )
def _command_coverage_combine_python(args: CoverageCombineConfig, host_state: HostState) -> list[str]:
@@ -351,6 +355,7 @@ def get_coverage_group(args: CoverageCombineConfig, coverage_file: str) -> t.Opt
class CoverageCombineConfig(CoverageConfig):
"""Configuration for the coverage combine command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/coverage/report.py b/test/lib/ansible_test/_internal/commands/coverage/report.py
index fadc13f3..c0f40186 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/report.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/report.py
@@ -144,6 +144,7 @@ def _generate_powershell_output_report(args: CoverageReportConfig, coverage_file
class CoverageReportConfig(CoverageCombineConfig):
"""Configuration for the coverage report command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args)
diff --git a/test/lib/ansible_test/_internal/commands/env/__init__.py b/test/lib/ansible_test/_internal/commands/env/__init__.py
index 44f229f8..92d2c973 100644
--- a/test/lib/ansible_test/_internal/commands/env/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/env/__init__.py
@@ -42,16 +42,20 @@ from ...ci import (
get_ci_provider,
)
+from ...timeout import (
+ TimeoutDetail,
+)
+
class EnvConfig(CommonConfig):
"""Configuration for the `env` command."""
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'env')
- self.show = args.show
- self.dump = args.dump
- self.timeout = args.timeout
- self.list_files = args.list_files
+ self.show: bool = args.show
+ self.dump: bool = args.dump
+ self.timeout: int | float | None = args.timeout
+ self.list_files: bool = args.list_files
if not self.show and not self.dump and self.timeout is None and not self.list_files:
# default to --show if no options were given
@@ -85,7 +89,7 @@ def show_dump_env(args: EnvConfig) -> None:
),
git=get_ci_provider().get_git_details(args),
platform=dict(
- datetime=datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
+ datetime=datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ'),
platform=platform.platform(),
uname=platform.uname(),
),
@@ -124,25 +128,18 @@ def set_timeout(args: EnvConfig) -> None:
if args.timeout is None:
return
- if args.timeout:
- deadline = (datetime.datetime.utcnow() + datetime.timedelta(minutes=args.timeout)).strftime('%Y-%m-%dT%H:%M:%SZ')
+ timeout = TimeoutDetail.create(args.timeout)
- display.info('Setting a %d minute test timeout which will end at: %s' % (args.timeout, deadline), verbosity=1)
+ if timeout:
+ display.info(f'Setting a {timeout.duration} minute test timeout which will end at: {timeout.deadline}', verbosity=1)
else:
- deadline = None
-
display.info('Clearing existing test timeout.', verbosity=1)
if args.explain:
return
- if deadline:
- data = dict(
- duration=args.timeout,
- deadline=deadline,
- )
-
- write_json_file(TIMEOUT_PATH, data)
+ if timeout:
+ write_json_file(TIMEOUT_PATH, timeout.to_dict())
elif os.path.exists(TIMEOUT_PATH):
os.remove(TIMEOUT_PATH)
diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py
index 8864d2ee..5bd04407 100644
--- a/test/lib/ansible_test/_internal/commands/integration/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py
@@ -90,6 +90,7 @@ from .cloud import (
from ...data import (
data_context,
+ PayloadConfig,
)
from ...host_configs import (
@@ -214,11 +215,13 @@ def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None
if isinstance(args, PosixIntegrationConfig):
return
- def inventory_callback(files: list[tuple[str, str]]) -> None:
+ def inventory_callback(payload_config: PayloadConfig) -> None:
"""
Add the inventory file to the payload file list.
This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
"""
+ files = payload_config.files
+
inventory_path = get_inventory_relative_path(args)
inventory_tuple = inventory_path_src, inventory_path
@@ -311,7 +314,7 @@ def integration_test_environment(
directory_copies = [
(
os.path.join(integration_targets_relative_path, target.relative_path),
- os.path.join(temp_dir, integration_targets_relative_path, target.relative_path)
+ os.path.join(temp_dir, integration_targets_relative_path, target.relative_path),
)
for target in target_dependencies
]
@@ -354,12 +357,12 @@ def integration_test_config_file(
config_vars = (env_config.ansible_vars or {}).copy()
- config_vars.update(dict(
+ config_vars.update(
ansible_test=dict(
environment=env_config.env_vars,
module_defaults=env_config.module_defaults,
)
- ))
+ )
config_file = json.dumps(config_vars, indent=4, sort_keys=True)
@@ -563,7 +566,7 @@ def command_integration_filtered(
coverage_manager.teardown()
result_name = '%s-%s.json' % (
- args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
+ args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.now(tz=datetime.timezone.utc).replace(microsecond=0, tzinfo=None))))
data = dict(
targets=results,
@@ -612,10 +615,10 @@ def command_integration_script(
env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config, test_env)
cwd = os.path.join(test_env.targets_dir, target.relative_path)
- env.update(dict(
+ env.update(
# support use of adhoc ansible commands in collections without specifying the fully qualified collection name
ANSIBLE_PLAYBOOK_DIR=cwd,
- ))
+ )
if env_config and env_config.env_vars:
env.update(env_config.env_vars)
@@ -650,9 +653,9 @@ def command_integration_role(
if isinstance(args, WindowsIntegrationConfig):
hosts = 'windows'
gather_facts = False
- variables.update(dict(
+ variables.update(
win_output_dir=r'C:\ansible_testing',
- ))
+ )
elif isinstance(args, NetworkIntegrationConfig):
hosts = target.network_platform
gather_facts = False
@@ -697,10 +700,10 @@ def command_integration_role(
if env_config.ansible_vars:
variables.update(env_config.ansible_vars)
- play.update(dict(
+ play.update(
environment=env_config.env_vars,
module_defaults=env_config.module_defaults,
- ))
+ )
playbook = json.dumps([play], indent=4, sort_keys=True)
@@ -733,10 +736,10 @@ def command_integration_role(
env = integration_environment(args, target, test_dir, test_env.inventory_path, test_env.ansible_config, env_config, test_env)
cwd = test_env.integration_dir
- env.update(dict(
+ env.update(
# support use of adhoc ansible commands in collections without specifying the fully qualified collection name
ANSIBLE_PLAYBOOK_DIR=cwd,
- ))
+ )
if env_config and env_config.env_vars:
env.update(env_config.env_vars)
@@ -804,13 +807,13 @@ def integration_environment(
)
if args.debug_strategy:
- env.update(dict(ANSIBLE_STRATEGY='debug'))
+ env.update(ANSIBLE_STRATEGY='debug')
if 'non_local/' in target.aliases:
if args.coverage:
display.warning('Skipping coverage reporting on Ansible modules for non-local test: %s' % target.name)
- env.update(dict(ANSIBLE_TEST_REMOTE_INTERPRETER=''))
+ env.update(ANSIBLE_TEST_REMOTE_INTERPRETER='')
env.update(integration)
@@ -819,6 +822,7 @@ def integration_environment(
class IntegrationEnvironment:
"""Details about the integration environment."""
+
def __init__(self, test_dir: str, integration_dir: str, targets_dir: str, inventory_path: str, ansible_config: str, vars_file: str) -> None:
self.test_dir = test_dir
self.integration_dir = integration_dir
@@ -830,6 +834,7 @@ class IntegrationEnvironment:
class IntegrationCache(CommonCache):
"""Integration cache."""
+
@property
def integration_targets(self) -> list[IntegrationTarget]:
"""The list of integration test targets."""
@@ -897,9 +902,10 @@ If necessary, context can be controlled by adding entries to the "aliases" file
return exclude
-def command_integration_filter(args: TIntegrationConfig,
- targets: c.Iterable[TIntegrationTarget],
- ) -> tuple[HostState, tuple[TIntegrationTarget, ...]]:
+def command_integration_filter(
+ args: TIntegrationConfig,
+ targets: c.Iterable[TIntegrationTarget],
+) -> tuple[HostState, tuple[TIntegrationTarget, ...]]:
"""Filter the given integration test targets."""
targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
changes = get_changes_filter(args)
@@ -937,11 +943,13 @@ def command_integration_filter(args: TIntegrationConfig,
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
if os.path.exists(vars_file_src):
- def integration_config_callback(files: list[tuple[str, str]]) -> None:
+
+ def integration_config_callback(payload_config: PayloadConfig) -> None:
"""
Add the integration config vars file to the payload file list.
This will preserve the file during delegation even if the file is ignored by source control.
"""
+ files = payload_config.files
files.append((vars_file_src, data_context().content.integration_vars_path))
data_context().register_payload_callback(integration_config_callback)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
index 0c078b98..eac9265a 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
@@ -47,6 +47,7 @@ from ....ci import (
from ....data import (
data_context,
+ PayloadConfig,
)
from ....docker_util import (
@@ -169,7 +170,7 @@ def cloud_init(args: IntegrationConfig, targets: tuple[IntegrationTarget, ...])
if not args.explain and results:
result_name = '%s-%s.json' % (
- args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
+ args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.now(tz=datetime.timezone.utc).replace(microsecond=0, tzinfo=None))))
data = dict(
clouds=results,
@@ -180,6 +181,7 @@ def cloud_init(args: IntegrationConfig, targets: tuple[IntegrationTarget, ...])
class CloudBase(metaclass=abc.ABCMeta):
"""Base class for cloud plugins."""
+
_CONFIG_PATH = 'config_path'
_RESOURCE_PREFIX = 'resource_prefix'
_MANAGED = 'managed'
@@ -189,13 +191,14 @@ class CloudBase(metaclass=abc.ABCMeta):
self.args = args
self.platform = self.__module__.rsplit('.', 1)[-1]
- def config_callback(files: list[tuple[str, str]]) -> None:
+ def config_callback(payload_config: PayloadConfig) -> None:
"""Add the config file to the payload file list."""
if self.platform not in self.args.metadata.cloud_config:
return # platform was initialized, but not used -- such as being skipped due to all tests being disabled
if self._get_cloud_config(self._CONFIG_PATH, ''):
pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))
+ files = payload_config.files
if pair not in files:
display.info('Including %s config: %s -> %s' % (self.platform, pair[0], pair[1]), verbosity=3)
@@ -257,6 +260,7 @@ class CloudBase(metaclass=abc.ABCMeta):
class CloudProvider(CloudBase):
"""Base class for cloud provider plugins. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig, config_extension: str = '.ini') -> None:
super().__init__(args)
@@ -356,6 +360,7 @@ class CloudProvider(CloudBase):
class CloudEnvironment(CloudBase):
"""Base class for cloud environment plugins. Updates integration test environment after delegation."""
+
def setup_once(self) -> None:
"""Run setup if it has not already been run."""
if self.setup_executed:
@@ -377,12 +382,14 @@ class CloudEnvironment(CloudBase):
class CloudEnvironmentConfig:
"""Configuration for the environment."""
- def __init__(self,
- env_vars: t.Optional[dict[str, str]] = None,
- ansible_vars: t.Optional[dict[str, t.Any]] = None,
- module_defaults: t.Optional[dict[str, dict[str, t.Any]]] = None,
- callback_plugins: t.Optional[list[str]] = None,
- ):
+
+ def __init__(
+ self,
+ env_vars: t.Optional[dict[str, str]] = None,
+ ansible_vars: t.Optional[dict[str, t.Any]] = None,
+ module_defaults: t.Optional[dict[str, dict[str, t.Any]]] = None,
+ callback_plugins: t.Optional[list[str]] = None,
+ ):
self.env_vars = env_vars
self.ansible_vars = ansible_vars
self.module_defaults = module_defaults
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
index 007d383c..e8020ca9 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
@@ -21,6 +21,7 @@ from . import (
class ACMEProvider(CloudProvider):
"""ACME plugin. Sets up cloud resources for tests."""
+
DOCKER_SIMULATOR_NAME = 'acme-simulator'
def __init__(self, args: IntegrationConfig) -> None:
@@ -68,6 +69,7 @@ class ACMEProvider(CloudProvider):
class ACMEEnvironment(CloudEnvironment):
"""ACME environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
index 234f3112..470f3be5 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
@@ -37,6 +37,7 @@ from . import (
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -97,6 +98,7 @@ class AwsCloudProvider(CloudProvider):
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
index dc5136ac..4225f8f4 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
@@ -31,6 +31,7 @@ from . import (
class AzureCloudProvider(CloudProvider):
"""Azure cloud provider plugin. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -103,6 +104,7 @@ class AzureCloudProvider(CloudProvider):
class AzureCloudEnvironment(CloudEnvironment):
"""Azure cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = get_config(self.config_path)
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
index f453ef3e..cbc80997 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
@@ -25,6 +25,7 @@ from . import (
class CloudscaleCloudProvider(CloudProvider):
"""Cloudscale cloud provider plugin. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -39,6 +40,7 @@ class CloudscaleCloudProvider(CloudProvider):
class CloudscaleCloudEnvironment(CloudEnvironment):
"""Cloudscale cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
index 0037b423..8588df7d 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
@@ -35,6 +35,7 @@ from . import (
class CsCloudProvider(CloudProvider):
"""CloudStack cloud provider plugin. Sets up cloud resources before delegation."""
+
DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
def __init__(self, args: IntegrationConfig) -> None:
@@ -131,6 +132,7 @@ class CsCloudProvider(CloudProvider):
def _get_credentials(self, container_name: str) -> dict[str, t.Any]:
"""Wait for the CloudStack simulator to return credentials."""
+
def check(value) -> bool:
"""Return True if the given configuration is valid JSON, otherwise return False."""
# noinspection PyBroadException
@@ -148,6 +150,7 @@ class CsCloudProvider(CloudProvider):
class CsCloudEnvironment(CloudEnvironment):
"""CloudStack cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
index a46bf70e..1a15a98d 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
@@ -20,6 +20,7 @@ from . import (
class DigitalOceanCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -34,6 +35,7 @@ class DigitalOceanCloudProvider(CloudProvider):
class DigitalOceanCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
index c2413ee8..9e919cd8 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
@@ -21,6 +21,7 @@ from . import (
class ForemanProvider(CloudProvider):
"""Foreman plugin. Sets up Foreman stub server for tests."""
+
DOCKER_SIMULATOR_NAME = 'foreman-stub'
# Default image to run Foreman stub from.
@@ -82,6 +83,7 @@ class ForemanProvider(CloudProvider):
class ForemanEnvironment(CloudEnvironment):
"""Foreman environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
index e180a024..1391cd84 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
@@ -77,6 +77,7 @@ class GalaxyProvider(CloudProvider):
Galaxy plugin. Sets up pulp (ansible-galaxy) servers for tests.
The pulp source itself resides at: https://github.com/pulp/pulp-oci-images
"""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -143,6 +144,7 @@ class GalaxyProvider(CloudProvider):
class GalaxyEnvironment(CloudEnvironment):
"""Galaxy environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
pulp_user = str(self._get_cloud_config('PULP_USER'))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
index 28ffb7b6..e3cd1df2 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
@@ -22,6 +22,7 @@ from . import (
class GcpCloudProvider(CloudProvider):
"""GCP cloud provider plugin. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -39,6 +40,7 @@ class GcpCloudProvider(CloudProvider):
class GcpCloudEnvironment(CloudEnvironment):
"""GCP cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
index 4d75f221..04d6f7c6 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
@@ -29,6 +29,7 @@ from . import (
class HcloudCloudProvider(CloudProvider):
"""Hetzner Cloud provider plugin. Sets up cloud resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -83,6 +84,7 @@ class HcloudCloudProvider(CloudProvider):
class HcloudCloudEnvironment(CloudEnvironment):
"""Hetzner Cloud cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
index e250eed7..85065d6f 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
@@ -28,6 +28,7 @@ KRB5_PASSWORD_ENV = 'KRB5_PASSWORD'
class HttptesterProvider(CloudProvider):
"""HTTP Tester provider plugin. Sets up resources before delegation."""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -82,6 +83,7 @@ class HttptesterProvider(CloudProvider):
class HttptesterEnvironment(CloudEnvironment):
"""HTTP Tester environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
return CloudEnvironmentConfig(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
index df0ebb0e..5bed8340 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
@@ -21,6 +21,7 @@ from . import (
class NiosProvider(CloudProvider):
"""Nios plugin. Sets up NIOS mock server for tests."""
+
DOCKER_SIMULATOR_NAME = 'nios-simulator'
# Default image to run the nios simulator.
@@ -82,6 +83,7 @@ class NiosProvider(CloudProvider):
class NiosEnvironment(CloudEnvironment):
"""NIOS environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
index d005a3ca..836cb22c 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
@@ -16,6 +16,7 @@ from . import (
class OpenNebulaCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+
def setup(self) -> None:
"""Setup the cloud resource before delegation and register a cleanup callback."""
super().setup()
@@ -42,6 +43,7 @@ class OpenNebulaCloudProvider(CloudProvider):
class OpenNebulaCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
index da930c01..ddd434a8 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
@@ -30,6 +30,7 @@ from . import (
class OpenShiftCloudProvider(CloudProvider):
"""OpenShift cloud provider plugin. Sets up cloud resources before delegation."""
+
DOCKER_CONTAINER_NAME = 'openshift-origin'
def __init__(self, args: IntegrationConfig) -> None:
@@ -103,6 +104,7 @@ class OpenShiftCloudProvider(CloudProvider):
class OpenShiftCloudEnvironment(CloudEnvironment):
"""OpenShift cloud environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
index 04c2d89b..69df093e 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
@@ -20,6 +20,7 @@ from . import (
class ScalewayCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -34,6 +35,7 @@ class ScalewayCloudProvider(CloudProvider):
class ScalewayCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
@@ -41,7 +43,7 @@ class ScalewayCloudEnvironment(CloudEnvironment):
env_vars = dict(
SCW_API_KEY=parser.get('default', 'key'),
- SCW_ORG=parser.get('default', 'org')
+ SCW_ORG=parser.get('default', 'org'),
)
display.sensitive.add(env_vars['SCW_API_KEY'])
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
index df1651f9..242b0204 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
@@ -27,6 +27,7 @@ from . import (
class VcenterProvider(CloudProvider):
"""VMware vcenter/esx plugin. Sets up cloud resources for tests."""
+
DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
def __init__(self, args: IntegrationConfig) -> None:
@@ -92,6 +93,7 @@ class VcenterProvider(CloudProvider):
class VcenterEnvironment(CloudEnvironment):
"""VMware vcenter/esx environment plugin. Updates integration test environment after delegation."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
try:
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
index 1993cdab..57e4fca7 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
@@ -20,6 +20,7 @@ from . import (
class VultrCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
+
def __init__(self, args: IntegrationConfig) -> None:
super().__init__(args)
@@ -34,6 +35,7 @@ class VultrCloudProvider(CloudProvider):
class VultrCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
+
def get_environment_config(self) -> CloudEnvironmentConfig:
"""Return environment configuration for use in the test environment after delegation."""
parser = configparser.ConfigParser()
diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py
index 5a486e93..ed072052 100644
--- a/test/lib/ansible_test/_internal/commands/integration/coverage.py
+++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py
@@ -82,6 +82,7 @@ THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for configuring hosts for integration test code coverage."""
+
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
self.args = args
self.host_state = host_state
@@ -124,6 +125,7 @@ class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixCoverageHandler(CoverageHandler[PosixConfig]):
"""Configure integration test code coverage for POSIX hosts."""
+
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
super().__init__(args, host_state, inventory_path)
@@ -263,6 +265,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]):
class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
"""Configure integration test code coverage for Windows hosts."""
+
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
super().__init__(args, host_state, inventory_path)
@@ -334,6 +337,7 @@ class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
class CoverageManager:
"""Manager for code coverage configuration and state."""
+
def __init__(self, args: IntegrationConfig, host_state: HostState, inventory_path: str) -> None:
self.args = args
self.host_state = host_state
diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py
index be03d7f4..571c8163 100644
--- a/test/lib/ansible_test/_internal/commands/integration/filters.py
+++ b/test/lib/ansible_test/_internal/commands/integration/filters.py
@@ -47,6 +47,7 @@ THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for target filters."""
+
def __init__(self, args: IntegrationConfig, configs: list[THostConfig], controller: bool) -> None:
self.args = args
self.configs = configs
@@ -138,6 +139,7 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixTargetFilter(TargetFilter[TPosixConfig]):
"""Target filter for POSIX hosts."""
+
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@@ -151,6 +153,7 @@ class PosixTargetFilter(TargetFilter[TPosixConfig]):
class DockerTargetFilter(PosixTargetFilter[DockerConfig]):
"""Target filter for docker hosts."""
+
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
@@ -167,6 +170,7 @@ class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]):
class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
"""Target filter for remote Ansible Core CI managed hosts."""
+
def filter_profiles(self, profiles: list[THostProfile], target: IntegrationTarget) -> list[THostProfile]:
"""Filter the list of profiles, returning only those which are not skipped for the given target."""
profiles = super().filter_profiles(profiles, target)
@@ -224,6 +228,7 @@ class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
class OriginTargetFilter(PosixTargetFilter[OriginConfig]):
"""Target filter for localhost."""
+
def filter_targets(self, targets: list[IntegrationTarget], exclude: set[str]) -> None:
"""Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
super().filter_targets(targets, exclude)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index 00b30310..f399f2ad 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -160,6 +160,10 @@ def command_sanity(args: SanityConfig) -> None:
if args.skip_test:
tests = [target for target in tests if target.name not in args.skip_test]
+ if not args.host_path:
+ for test in tests:
+ test.origin_hook(args)
+
targets_use_pypi = any(isinstance(test, SanityMultipleVersion) and test.needs_pypi for test in tests) and not args.list_tests
host_state = prepare_profiles(args, targets_use_pypi=targets_use_pypi) # sanity
@@ -325,6 +329,7 @@ def collect_code_smell_tests() -> tuple[SanityTest, ...]:
class SanityIgnoreParser:
"""Parser for the consolidated sanity test ignore file."""
+
NO_CODE = '_'
def __init__(self, args: SanityConfig) -> None:
@@ -526,11 +531,13 @@ class SanityIgnoreParser:
class SanityIgnoreProcessor:
"""Processor for sanity test ignores for a single run of one sanity test."""
- def __init__(self,
- args: SanityConfig,
- test: SanityTest,
- python_version: t.Optional[str],
- ) -> None:
+
+ def __init__(
+ self,
+ args: SanityConfig,
+ test: SanityTest,
+ python_version: t.Optional[str],
+ ) -> None:
name = test.name
code = test.error_code
@@ -618,18 +625,21 @@ class SanityIgnoreProcessor:
class SanitySuccess(TestSuccess):
"""Sanity test success."""
+
def __init__(self, test: str, python_version: t.Optional[str] = None) -> None:
super().__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
"""Sanity test skipped."""
+
def __init__(self, test: str, python_version: t.Optional[str] = None) -> None:
super().__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
"""Sanity test failure."""
+
def __init__(
self,
test: str,
@@ -646,6 +656,7 @@ class SanityMessage(TestMessage):
class SanityTargets:
"""Sanity test target information."""
+
def __init__(self, targets: tuple[TestTarget, ...], include: tuple[TestTarget, ...]) -> None:
self.targets = targets
self.include = include
@@ -695,6 +706,7 @@ class SanityTargets:
class SanityTest(metaclass=abc.ABCMeta):
"""Sanity test base class."""
+
ansible_only = False
def __init__(self, name: t.Optional[str] = None) -> None:
@@ -757,6 +769,9 @@ class SanityTest(metaclass=abc.ABCMeta):
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return CONTROLLER_PYTHON_VERSIONS
+ def origin_hook(self, args: SanityConfig) -> None:
+ """This method is called on the origin, before the test runs or delegation occurs."""
+
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]: # pylint: disable=unused-argument
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if self.no_targets:
@@ -811,6 +826,7 @@ class SanityTest(metaclass=abc.ABCMeta):
class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on a single python version."""
+
@property
def require_libyaml(self) -> bool:
"""True if the test requires PyYAML to have libyaml support."""
@@ -827,6 +843,7 @@ class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta):
class SanityCodeSmellTest(SanitySingleVersion):
"""Sanity test script."""
+
def __init__(self, path) -> None:
name = os.path.splitext(os.path.basename(path))[0]
config_path = os.path.splitext(path)[0] + '.json'
@@ -1030,6 +1047,7 @@ class SanityCodeSmellTest(SanitySingleVersion):
class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which are idependent of the python version being used."""
+
@abc.abstractmethod
def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult:
"""Run the sanity test and return the result."""
@@ -1046,6 +1064,7 @@ class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on multiple python versions."""
+
@abc.abstractmethod
def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult:
"""Run the sanity test and return the result."""
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
index 6815f884..04080f60 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
@@ -48,6 +48,7 @@ from ...host_configs import (
class AnsibleDocTest(SanitySingleVersion):
"""Sanity test for ansible-doc."""
+
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type in DOCUMENTABLE_PLUGINS]
diff --git a/test/lib/ansible_test/_internal/commands/sanity/compile.py b/test/lib/ansible_test/_internal/commands/sanity/compile.py
index 45053383..a0f599f1 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/compile.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/compile.py
@@ -43,6 +43,7 @@ from ...host_configs import (
class CompileTest(SanityMultipleVersion):
"""Sanity test for proper python syntax."""
+
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ignores.py b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
index 6d9837d0..251f8326 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ignores.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
@@ -25,6 +25,7 @@ from ...config import (
class IgnoresTest(SanityVersionNeutral):
"""Sanity test for sanity test ignore entries."""
+
@property
def can_ignore(self) -> bool:
"""True if the test supports ignore entries."""
diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py
index 8511d7ac..b8083324 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/import.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/import.py
@@ -84,6 +84,7 @@ def _get_module_test(module_restrictions: bool) -> c.Callable[[str], bool]:
class ImportTest(SanityMultipleVersion):
"""Sanity test for proper import exception handling."""
+
def filter_targets(self, targets: list[TestTarget]) -> list[TestTarget]:
"""Return the given list of test targets, filtered to include only those relevant for the test."""
if data_context().content.is_ansible:
diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
index cb8ed12c..57ce1277 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py
@@ -58,6 +58,7 @@ from ...host_configs import (
class MypyTest(SanityMultipleVersion):
"""Sanity test which executes mypy."""
+
ansible_only = True
vendored_paths = (
@@ -221,7 +222,7 @@ class MypyTest(SanityMultipleVersion):
# Below are context specific arguments.
# They are primarily useful for listing individual 'ignore_missing_imports' entries instead of using a global ignore.
'--config-file', config_path,
- ]
+ ] # fmt: skip
cmd.extend(context_paths)
@@ -254,6 +255,7 @@ class MypyTest(SanityMultipleVersion):
@dataclasses.dataclass(frozen=True)
class MyPyContext:
"""Context details for a single run of mypy."""
+
name: str
paths: list[str]
python_versions: tuple[str, ...]
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pep8.py b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
index 5df9ace7..610dbd64 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pep8.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
@@ -43,6 +43,7 @@ from ...host_configs import (
class Pep8Test(SanitySingleVersion):
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
+
@property
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
@@ -66,7 +67,7 @@ class Pep8Test(SanitySingleVersion):
'--max-line-length', '160',
'--config', '/dev/null',
'--ignore', ','.join(sorted(current_ignore)),
- ] + paths
+ ] + paths # fmt: skip
if paths:
try:
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pslint.py b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
index 9136d51c..1694488d 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pslint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
@@ -45,6 +45,7 @@ from ...data import (
class PslintTest(SanityVersionNeutral):
"""Sanity test using PSScriptAnalyzer."""
+
@property
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
index 86f287ab..c089f834 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
@@ -18,6 +18,10 @@ from . import (
SANITY_ROOT,
)
+from ...io import (
+ make_dirs,
+)
+
from ...test import (
TestResult,
)
@@ -41,6 +45,7 @@ from ...ansible_util import (
get_collection_detail,
CollectionDetail,
CollectionDetailError,
+ ResultType,
)
from ...config import (
@@ -58,6 +63,7 @@ from ...host_configs import (
class PylintTest(SanitySingleVersion):
"""Sanity test using pylint."""
+
def __init__(self) -> None:
super().__init__()
self.optional_error_codes.update([
@@ -106,6 +112,7 @@ class PylintTest(SanitySingleVersion):
def filter_path(path_filter: str = None) -> c.Callable[[str], bool]:
"""Return a function that filters out paths which are not a subdirectory of the given path."""
+
def context_filter(path_to_filter: str) -> bool:
"""Return true if the given path matches, otherwise return False."""
return is_subdir(path_to_filter, path_filter)
@@ -149,19 +156,19 @@ class PylintTest(SanitySingleVersion):
except CollectionDetailError as ex:
display.warning('Skipping pylint collection version checks since collection detail loading failed: %s' % ex.reason)
- test_start = datetime.datetime.utcnow()
+ test_start = datetime.datetime.now(tz=datetime.timezone.utc)
for context, context_paths in sorted(contexts):
if not context_paths:
continue
- context_start = datetime.datetime.utcnow()
+ context_start = datetime.datetime.now(tz=datetime.timezone.utc)
messages += self.pylint(args, context, context_paths, plugin_dir, plugin_names, python, collection_detail)
- context_end = datetime.datetime.utcnow()
+ context_end = datetime.datetime.now(tz=datetime.timezone.utc)
context_times.append('%s: %d (%s)' % (context, len(context_paths), context_end - context_start))
- test_end = datetime.datetime.utcnow()
+ test_end = datetime.datetime.now(tz=datetime.timezone.utc)
for context_time in context_times:
display.info(context_time, verbosity=4)
@@ -227,7 +234,7 @@ class PylintTest(SanitySingleVersion):
'--rcfile', rcfile,
'--output-format', 'json',
'--load-plugins', ','.join(sorted(load_plugins)),
- ] + paths
+ ] + paths # fmt: skip
if data_context().content.collection:
cmd.extend(['--collection-name', data_context().content.collection.full_name])
@@ -246,6 +253,12 @@ class PylintTest(SanitySingleVersion):
# expose plugin paths for use in custom plugins
env.update(dict(('ANSIBLE_TEST_%s_PATH' % k.upper(), os.path.abspath(v) + os.path.sep) for k, v in data_context().content.plugin_paths.items()))
+ # Set PYLINTHOME to prevent pylint from checking for an obsolete directory, which can result in a test failure due to stderr output.
+ # See: https://github.com/PyCQA/pylint/blob/e6c6bf5dfd61511d64779f54264b27a368c43100/pylint/constants.py#L148
+ pylint_home = os.path.join(ResultType.TMP.path, 'pylint')
+ make_dirs(pylint_home)
+ env.update(PYLINTHOME=pylint_home)
+
if paths:
display.info('Checking %d file(s) in context "%s" with config: %s' % (len(paths), context, rcfile), verbosity=1)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
index 4f14a3a2..48f1b0b1 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
@@ -27,6 +27,7 @@ from ...data import (
class SanityDocsTest(SanityVersionNeutral):
"""Sanity test for documentation of sanity tests."""
+
ansible_only = True
@property
diff --git a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
index 7de0bdae..4576622c 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
@@ -44,6 +44,7 @@ from ...config import (
class ShellcheckTest(SanityVersionNeutral):
"""Sanity test using shellcheck."""
+
@property
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
@@ -68,7 +69,7 @@ class ShellcheckTest(SanityVersionNeutral):
'shellcheck',
'-e', ','.join(sorted(exclude)),
'--format', 'checkstyle',
- ] + paths
+ ] + paths # fmt: skip
try:
stdout, stderr = run_command(args, cmd, capture=True)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
index e1dacb7c..ab7dd93c 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
@@ -1,9 +1,12 @@
"""Sanity test using validate-modules."""
from __future__ import annotations
+import atexit
import collections
+import contextlib
import json
import os
+import tarfile
import typing as t
from . import (
@@ -16,6 +19,10 @@ from . import (
SANITY_ROOT,
)
+from ...io import (
+ make_dirs,
+)
+
from ...test import (
TestResult,
)
@@ -30,7 +37,9 @@ from ...util import (
)
from ...util_common import (
+ process_scoped_temporary_directory,
run_command,
+ ResultType,
)
from ...ansible_util import (
@@ -49,12 +58,21 @@ from ...ci import (
from ...data import (
data_context,
+ PayloadConfig,
)
from ...host_configs import (
PythonConfig,
)
+from ...git import (
+ Git,
+)
+
+from ...provider.source import (
+ SourceProvider as GitSourceProvider,
+)
+
class ValidateModulesTest(SanitySingleVersion):
"""Sanity test using validate-modules."""
@@ -115,7 +133,7 @@ class ValidateModulesTest(SanitySingleVersion):
os.path.join(SANITY_ROOT, 'validate-modules', 'validate.py'),
'--format', 'json',
'--arg-spec',
- ]
+ ] # fmt: skip
if data_context().content.collection:
cmd.extend(['--collection', data_context().content.collection.directory])
@@ -130,14 +148,17 @@ class ValidateModulesTest(SanitySingleVersion):
except CollectionDetailError as ex:
display.warning('Skipping validate-modules collection version checks since collection detail loading failed: %s' % ex.reason)
else:
- base_branch = args.base_branch or get_ci_provider().get_base_branch()
+ path = self.get_archive_path(args)
+
+ if os.path.exists(path):
+ temp_dir = process_scoped_temporary_directory(args)
+
+ with tarfile.open(path) as file:
+ file.extractall(temp_dir)
- if base_branch:
cmd.extend([
- '--base-branch', base_branch,
+ '--original-plugins', temp_dir,
])
- else:
- display.warning('Cannot perform module comparison against the base branch because the base branch was not detected.')
errors = []
@@ -188,3 +209,43 @@ class ValidateModulesTest(SanitySingleVersion):
return SanityFailure(self.name, messages=all_errors)
return SanitySuccess(self.name)
+
+ def origin_hook(self, args: SanityConfig) -> None:
+ """This method is called on the origin, before the test runs or delegation occurs."""
+ if not data_context().content.is_ansible:
+ return
+
+ if not isinstance(data_context().source_provider, GitSourceProvider):
+ display.warning('The validate-modules sanity test cannot compare against the base commit because git is not being used.')
+ return
+
+ base_commit = args.base_branch or get_ci_provider().get_base_commit(args)
+
+ if not base_commit:
+ display.warning('The validate-modules sanity test cannot compare against the base commit because it was not detected.')
+ return
+
+ path = self.get_archive_path(args)
+
+ def cleanup() -> None:
+ """Cleanup callback called when the process exits."""
+ with contextlib.suppress(FileNotFoundError):
+ os.unlink(path)
+
+ def git_callback(payload_config: PayloadConfig) -> None:
+ """Include the previous plugin content archive in the payload."""
+ files = payload_config.files
+ files.append((path, os.path.relpath(path, data_context().content.root)))
+
+ atexit.register(cleanup)
+ data_context().register_payload_callback(git_callback)
+
+ make_dirs(os.path.dirname(path))
+
+ git = Git()
+ git.run_git(['archive', '--output', path, base_commit, 'lib/ansible/modules/', 'lib/ansible/plugins/'])
+
+ @staticmethod
+ def get_archive_path(args: SanityConfig) -> str:
+ """Return the path to the original plugin content archive."""
+ return os.path.join(ResultType.TMP.path, f'validate-modules-{args.metadata.session_id}.tar')
diff --git a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
index a0d859f0..0af8d65e 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
@@ -47,6 +47,7 @@ from ...host_configs import (
class YamllintTest(SanitySingleVersion):
"""Sanity test using yamllint."""
+
@property
def error_code(self) -> t.Optional[str]:
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
diff --git a/test/lib/ansible_test/_internal/commands/shell/__init__.py b/test/lib/ansible_test/_internal/commands/shell/__init__.py
index 5e8c101a..4ddce297 100644
--- a/test/lib/ansible_test/_internal/commands/shell/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/shell/__init__.py
@@ -124,9 +124,11 @@ def command_shell(args: ShellConfig) -> None:
# 255 indicates SSH itself failed, rather than a command run on the remote host.
# In this case, report a host connection error so additional troubleshooting output is provided.
if not args.delegate and not args.host_path:
+
def callback() -> None:
"""Callback to run during error display."""
target_profile.on_target_failure() # when the controller is not delegated, report failures immediately
+
else:
callback = None
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
index f666d418..7d192e1b 100644
--- a/test/lib/ansible_test/_internal/commands/units/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -88,6 +88,7 @@ from ...host_profiles import (
class TestContext:
"""Contexts that unit tests run in based on the type of content."""
+
controller = 'controller'
modules = 'modules'
module_utils = 'module_utils'
@@ -255,14 +256,13 @@ def command_units(args: UnitsConfig) -> None:
'--forked',
'-r', 'a',
'-n', str(args.num_workers) if args.num_workers else 'auto',
- '--color',
- 'yes' if args.color else 'no',
+ '--color', 'yes' if args.color else 'no',
'-p', 'no:cacheprovider',
'-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest', 'config', config_name),
'--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)),
'--strict-markers', # added in pytest 4.5.0
'--rootdir', data_context().content.root,
- ]
+ ] # fmt:skip
if not data_context().content.collection:
cmd.append('--durations=25')
diff --git a/test/lib/ansible_test/_internal/compat/yaml.py b/test/lib/ansible_test/_internal/compat/yaml.py
index 4b471365..fc338e21 100644
--- a/test/lib/ansible_test/_internal/compat/yaml.py
+++ b/test/lib/ansible_test/_internal/compat/yaml.py
@@ -9,6 +9,7 @@ from functools import (
try:
import yaml as _yaml
+
YAML_IMPORT_ERROR = None
except ImportError as ex:
yaml_load = None # pylint: disable=invalid-name
diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py
index f443181c..31f89087 100644
--- a/test/lib/ansible_test/_internal/completion.py
+++ b/test/lib/ansible_test/_internal/completion.py
@@ -29,6 +29,7 @@ from .become import (
class CGroupVersion(enum.Enum):
"""The control group version(s) required by a container."""
+
NONE = 'none'
V1_ONLY = 'v1-only'
V2_ONLY = 'v2-only'
@@ -40,6 +41,7 @@ class CGroupVersion(enum.Enum):
class AuditMode(enum.Enum):
"""The audit requirements of a container."""
+
NONE = 'none'
REQUIRED = 'required'
@@ -50,6 +52,7 @@ class AuditMode(enum.Enum):
@dataclasses.dataclass(frozen=True)
class CompletionConfig(metaclass=abc.ABCMeta):
"""Base class for completion configuration."""
+
name: str
@property
@@ -61,6 +64,7 @@ class CompletionConfig(metaclass=abc.ABCMeta):
@dataclasses.dataclass(frozen=True)
class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
"""Base class for completion configuration of POSIX environments."""
+
@property
@abc.abstractmethod
def supported_pythons(self) -> list[str]:
@@ -85,6 +89,7 @@ class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass(frozen=True)
class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
"""Base class for completion configuration of Python environments."""
+
python: str = ''
python_dir: str = '/usr/bin'
@@ -103,6 +108,7 @@ class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass(frozen=True)
class RemoteCompletionConfig(CompletionConfig):
"""Base class for completion configuration of remote environments provisioned through Ansible Core CI."""
+
provider: t.Optional[str] = None
arch: t.Optional[str] = None
@@ -132,6 +138,7 @@ class RemoteCompletionConfig(CompletionConfig):
@dataclasses.dataclass(frozen=True)
class InventoryCompletionConfig(CompletionConfig):
"""Configuration for inventory files."""
+
def __init__(self) -> None:
super().__init__(name='inventory')
@@ -144,6 +151,7 @@ class InventoryCompletionConfig(CompletionConfig):
@dataclasses.dataclass(frozen=True)
class PosixSshCompletionConfig(PythonCompletionConfig):
"""Configuration for a POSIX host reachable over SSH."""
+
def __init__(self, user: str, host: str) -> None:
super().__init__(
name=f'{user}@{host}',
@@ -159,6 +167,7 @@ class PosixSshCompletionConfig(PythonCompletionConfig):
@dataclasses.dataclass(frozen=True)
class DockerCompletionConfig(PythonCompletionConfig):
"""Configuration for Docker containers."""
+
image: str = ''
seccomp: str = 'default'
cgroup: str = CGroupVersion.V1_V2.value
@@ -201,6 +210,7 @@ class DockerCompletionConfig(PythonCompletionConfig):
@dataclasses.dataclass(frozen=True)
class NetworkRemoteCompletionConfig(RemoteCompletionConfig):
"""Configuration for remote network platforms."""
+
collection: str = ''
connection: str = ''
placeholder: bool = False
@@ -213,6 +223,7 @@ class NetworkRemoteCompletionConfig(RemoteCompletionConfig):
@dataclasses.dataclass(frozen=True)
class PosixRemoteCompletionConfig(RemoteCompletionConfig, PythonCompletionConfig):
"""Configuration for remote POSIX platforms."""
+
become: t.Optional[str] = None
placeholder: bool = False
diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py
index 372c23ab..4e697933 100644
--- a/test/lib/ansible_test/_internal/config.py
+++ b/test/lib/ansible_test/_internal/config.py
@@ -24,6 +24,7 @@ from .metadata import (
from .data import (
data_context,
+ PayloadConfig,
)
from .host_configs import (
@@ -41,6 +42,7 @@ THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
class TerminateMode(enum.Enum):
"""When to terminate instances."""
+
ALWAYS = enum.auto()
NEVER = enum.auto()
SUCCESS = enum.auto()
@@ -52,6 +54,7 @@ class TerminateMode(enum.Enum):
@dataclasses.dataclass(frozen=True)
class ModulesConfig:
"""Configuration for modules."""
+
python_requires: str
python_versions: tuple[str, ...]
controller_only: bool
@@ -60,6 +63,7 @@ class ModulesConfig:
@dataclasses.dataclass(frozen=True)
class ContentConfig:
"""Configuration for all content."""
+
modules: ModulesConfig
python_versions: tuple[str, ...]
py2_support: bool
@@ -67,6 +71,7 @@ class ContentConfig:
class EnvironmentConfig(CommonConfig):
"""Configuration common to all commands which execute in an environment."""
+
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
@@ -114,7 +119,7 @@ class EnvironmentConfig(CommonConfig):
self.dev_systemd_debug: bool = args.dev_systemd_debug
self.dev_probe_cgroups: t.Optional[str] = args.dev_probe_cgroups
- def host_callback(files: list[tuple[str, str]]) -> None:
+ def host_callback(payload_config: PayloadConfig) -> None:
"""Add the host files to the payload file list."""
config = self
@@ -123,6 +128,8 @@ class EnvironmentConfig(CommonConfig):
state_path = os.path.join(config.host_path, 'state.dat')
config_path = os.path.join(config.host_path, 'config.dat')
+ files = payload_config.files
+
files.append((os.path.abspath(settings_path), settings_path))
files.append((os.path.abspath(state_path), state_path))
files.append((os.path.abspath(config_path), config_path))
@@ -196,6 +203,7 @@ class EnvironmentConfig(CommonConfig):
class TestConfig(EnvironmentConfig):
"""Configuration common to all test commands."""
+
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
@@ -225,9 +233,10 @@ class TestConfig(EnvironmentConfig):
if self.coverage_check:
self.coverage = True
- def metadata_callback(files: list[tuple[str, str]]) -> None:
+ def metadata_callback(payload_config: PayloadConfig) -> None:
"""Add the metadata file to the payload file list."""
config = self
+ files = payload_config.files
if config.metadata_path:
files.append((os.path.abspath(config.metadata_path), config.metadata_path))
@@ -237,6 +246,7 @@ class TestConfig(EnvironmentConfig):
class ShellConfig(EnvironmentConfig):
"""Configuration for the shell command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'shell')
@@ -250,6 +260,7 @@ class ShellConfig(EnvironmentConfig):
class SanityConfig(TestConfig):
"""Configuration for the sanity command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'sanity')
@@ -258,23 +269,14 @@ class SanityConfig(TestConfig):
self.list_tests: bool = args.list_tests
self.allow_disabled: bool = args.allow_disabled
self.enable_optional_errors: bool = args.enable_optional_errors
- self.keep_git: bool = args.keep_git
self.prime_venvs: bool = args.prime_venvs
self.display_stderr = self.lint or self.list_tests
- if self.keep_git:
- def git_callback(files: list[tuple[str, str]]) -> None:
- """Add files from the content root .git directory to the payload file list."""
- for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
- paths = [os.path.join(dirpath, filename) for filename in filenames]
- files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
-
- data_context().register_payload_callback(git_callback)
-
class IntegrationConfig(TestConfig):
"""Configuration for the integration command."""
+
def __init__(self, args: t.Any, command: str) -> None:
super().__init__(args, command)
@@ -319,18 +321,21 @@ TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig)
class PosixIntegrationConfig(IntegrationConfig):
"""Configuration for the posix integration command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
"""Configuration for the windows integration command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'windows-integration')
class NetworkIntegrationConfig(IntegrationConfig):
"""Configuration for the network integration command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'network-integration')
@@ -339,6 +344,7 @@ class NetworkIntegrationConfig(IntegrationConfig):
class UnitsConfig(TestConfig):
"""Configuration for the units command."""
+
def __init__(self, args: t.Any) -> None:
super().__init__(args, 'units')
diff --git a/test/lib/ansible_test/_internal/connections.py b/test/lib/ansible_test/_internal/connections.py
index 4823b1a4..84dc84b2 100644
--- a/test/lib/ansible_test/_internal/connections.py
+++ b/test/lib/ansible_test/_internal/connections.py
@@ -44,33 +44,37 @@ from .become import (
class Connection(metaclass=abc.ABCMeta):
"""Base class for connecting to a host."""
+
@abc.abstractmethod
- def run(self,
- command: list[str],
- capture: bool,
- interactive: bool = False,
- data: t.Optional[str] = None,
- stdin: t.Optional[t.IO[bytes]] = None,
- stdout: t.Optional[t.IO[bytes]] = None,
- output_stream: t.Optional[OutputStream] = None,
- ) -> tuple[t.Optional[str], t.Optional[str]]:
+ def run(
+ self,
+ command: list[str],
+ capture: bool,
+ interactive: bool = False,
+ data: t.Optional[str] = None,
+ stdin: t.Optional[t.IO[bytes]] = None,
+ stdout: t.Optional[t.IO[bytes]] = None,
+ output_stream: t.Optional[OutputStream] = None,
+ ) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
- def extract_archive(self,
- chdir: str,
- src: t.IO[bytes],
- ):
+ def extract_archive(
+ self,
+ chdir: str,
+ src: t.IO[bytes],
+ ):
"""Extract the given archive file stream in the specified directory."""
tar_cmd = ['tar', 'oxzf', '-', '-C', chdir]
retry(lambda: self.run(tar_cmd, stdin=src, capture=True))
- def create_archive(self,
- chdir: str,
- name: str,
- dst: t.IO[bytes],
- exclude: t.Optional[str] = None,
- ):
+ def create_archive(
+ self,
+ chdir: str,
+ name: str,
+ dst: t.IO[bytes],
+ exclude: t.Optional[str] = None,
+ ):
"""Create the specified archive file stream from the specified directory, including the given name and optionally excluding the given name."""
tar_cmd = ['tar', 'cf', '-', '-C', chdir]
gzip_cmd = ['gzip']
@@ -90,18 +94,20 @@ class Connection(metaclass=abc.ABCMeta):
class LocalConnection(Connection):
"""Connect to localhost."""
+
def __init__(self, args: EnvironmentConfig) -> None:
self.args = args
- def run(self,
- command: list[str],
- capture: bool,
- interactive: bool = False,
- data: t.Optional[str] = None,
- stdin: t.Optional[t.IO[bytes]] = None,
- stdout: t.Optional[t.IO[bytes]] = None,
- output_stream: t.Optional[OutputStream] = None,
- ) -> tuple[t.Optional[str], t.Optional[str]]:
+ def run(
+ self,
+ command: list[str],
+ capture: bool,
+ interactive: bool = False,
+ data: t.Optional[str] = None,
+ stdin: t.Optional[t.IO[bytes]] = None,
+ stdout: t.Optional[t.IO[bytes]] = None,
+ output_stream: t.Optional[OutputStream] = None,
+ ) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
return run_command(
args=self.args,
@@ -117,6 +123,7 @@ class LocalConnection(Connection):
class SshConnection(Connection):
"""Connect to a host using SSH."""
+
def __init__(self, args: EnvironmentConfig, settings: SshConnectionDetail, become: t.Optional[Become] = None) -> None:
self.args = args
self.settings = settings
@@ -136,15 +143,16 @@ class SshConnection(Connection):
self.options.extend(ssh_options_to_list(ssh_options))
- def run(self,
- command: list[str],
- capture: bool,
- interactive: bool = False,
- data: t.Optional[str] = None,
- stdin: t.Optional[t.IO[bytes]] = None,
- stdout: t.Optional[t.IO[bytes]] = None,
- output_stream: t.Optional[OutputStream] = None,
- ) -> tuple[t.Optional[str], t.Optional[str]]:
+ def run(
+ self,
+ command: list[str],
+ capture: bool,
+ interactive: bool = False,
+ data: t.Optional[str] = None,
+ stdin: t.Optional[t.IO[bytes]] = None,
+ stdout: t.Optional[t.IO[bytes]] = None,
+ output_stream: t.Optional[OutputStream] = None,
+ ) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
options = list(self.options)
@@ -213,20 +221,22 @@ class SshConnection(Connection):
class DockerConnection(Connection):
"""Connect to a host using Docker."""
+
def __init__(self, args: EnvironmentConfig, container_id: str, user: t.Optional[str] = None) -> None:
self.args = args
self.container_id = container_id
self.user: t.Optional[str] = user
- def run(self,
- command: list[str],
- capture: bool,
- interactive: bool = False,
- data: t.Optional[str] = None,
- stdin: t.Optional[t.IO[bytes]] = None,
- stdout: t.Optional[t.IO[bytes]] = None,
- output_stream: t.Optional[OutputStream] = None,
- ) -> tuple[t.Optional[str], t.Optional[str]]:
+ def run(
+ self,
+ command: list[str],
+ capture: bool,
+ interactive: bool = False,
+ data: t.Optional[str] = None,
+ stdin: t.Optional[t.IO[bytes]] = None,
+ stdout: t.Optional[t.IO[bytes]] = None,
+ output_stream: t.Optional[OutputStream] = None,
+ ) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return the result."""
options = []
diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py
index a581ecf2..bfc36434 100644
--- a/test/lib/ansible_test/_internal/containers.py
+++ b/test/lib/ansible_test/_internal/containers.py
@@ -95,6 +95,7 @@ support_containers_mutex = threading.Lock()
class HostType:
"""Enum representing the types of hosts involved in running tests."""
+
origin = 'origin'
control = 'control'
managed = 'managed'
@@ -102,6 +103,7 @@ class HostType:
class CleanupMode(enum.Enum):
"""How container cleanup should be handled."""
+
YES = enum.auto()
NO = enum.auto()
INFO = enum.auto()
@@ -146,7 +148,7 @@ def run_support_container(
if current_container_id:
publish_ports = False # publishing ports is pointless if already running in a docker container
- options = (options or [])
+ options = options or []
if start:
options.append('-dt') # the -t option is required to cause systemd in the container to log output to the console
@@ -378,6 +380,7 @@ def get_container_database(args: EnvironmentConfig) -> ContainerDatabase:
class ContainerAccess:
"""Information needed for one test host to access a single container supporting tests."""
+
def __init__(self, host_ip: str, names: list[str], ports: t.Optional[list[int]], forwards: t.Optional[dict[int, int]]) -> None:
# if forwards is set
# this is where forwards are sent (it is the host that provides an indirect connection to the containers on alternate ports)
@@ -437,6 +440,7 @@ class ContainerAccess:
class ContainerDatabase:
"""Database of running containers used to support tests."""
+
def __init__(self, data: dict[str, dict[str, dict[str, ContainerAccess]]]) -> None:
self.data = data
@@ -576,6 +580,7 @@ def create_container_database(args: EnvironmentConfig) -> ContainerDatabase:
class SupportContainerContext:
"""Context object for tracking information relating to access of support containers."""
+
def __init__(self, containers: ContainerDatabase, process: t.Optional[SshProcess]) -> None:
self.containers = containers
self.process = process
@@ -678,19 +683,21 @@ def create_support_container_context(
class ContainerDescriptor:
"""Information about a support container."""
- def __init__(self,
- image: str,
- context: str,
- name: str,
- container_id: str,
- ports: list[int],
- aliases: list[str],
- publish_ports: bool,
- running: bool,
- existing: bool,
- cleanup: CleanupMode,
- env: t.Optional[dict[str, str]],
- ) -> None:
+
+ def __init__(
+ self,
+ image: str,
+ context: str,
+ name: str,
+ container_id: str,
+ ports: list[int],
+ aliases: list[str],
+ publish_ports: bool,
+ running: bool,
+ existing: bool,
+ cleanup: CleanupMode,
+ env: t.Optional[dict[str, str]],
+ ) -> None:
self.image = image
self.context = context
self.name = name
@@ -757,23 +764,26 @@ class ContainerDescriptor:
class SupportContainer:
"""Information about a running support container available for use by tests."""
- def __init__(self,
- container: DockerInspect,
- container_ip: str,
- published_ports: dict[int, int],
- ) -> None:
+
+ def __init__(
+ self,
+ container: DockerInspect,
+ container_ip: str,
+ published_ports: dict[int, int],
+ ) -> None:
self.container = container
self.container_ip = container_ip
self.published_ports = published_ports
-def wait_for_file(args: EnvironmentConfig,
- container_name: str,
- path: str,
- sleep: int,
- tries: int,
- check: t.Optional[c.Callable[[str], bool]] = None,
- ) -> str:
+def wait_for_file(
+ args: EnvironmentConfig,
+ container_name: str,
+ path: str,
+ sleep: int,
+ tries: int,
+ check: t.Optional[c.Callable[[str], bool]] = None,
+) -> str:
"""Wait for the specified file to become available in the requested container and return its contents."""
display.info('Waiting for container "%s" to provide file: %s' % (container_name, path))
@@ -853,6 +863,7 @@ def create_container_hooks(
"""Clean up previously configured SSH port forwarding which was required by the specified target."""
cleanup_ssh_ports(args, control_connections, '%s_hosts_restore.yml' % control_type, control_state, target, HostType.control)
cleanup_ssh_ports(args, managed_connections, '%s_hosts_restore.yml' % managed_type, managed_state, target, HostType.managed)
+
else:
pre_target, post_target = None, None
diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py
index d62b9039..6e44b3d9 100644
--- a/test/lib/ansible_test/_internal/core_ci.py
+++ b/test/lib/ansible_test/_internal/core_ci.py
@@ -6,6 +6,7 @@ import dataclasses
import json
import os
import re
+import stat
import traceback
import uuid
import time
@@ -46,12 +47,14 @@ from .ci import (
from .data import (
data_context,
+ PayloadConfig,
)
@dataclasses.dataclass(frozen=True)
class Resource(metaclass=abc.ABCMeta):
"""Base class for Ansible Core CI resources."""
+
@abc.abstractmethod
def as_tuple(self) -> tuple[str, str, str, str]:
"""Return the resource as a tuple of platform, version, architecture and provider."""
@@ -69,6 +72,7 @@ class Resource(metaclass=abc.ABCMeta):
@dataclasses.dataclass(frozen=True)
class VmResource(Resource):
"""Details needed to request a VM from Ansible Core CI."""
+
platform: str
version: str
architecture: str
@@ -92,6 +96,7 @@ class VmResource(Resource):
@dataclasses.dataclass(frozen=True)
class CloudResource(Resource):
"""Details needed to request cloud credentials from Ansible Core CI."""
+
platform: str
def as_tuple(self) -> tuple[str, str, str, str]:
@@ -110,6 +115,7 @@ class CloudResource(Resource):
class AnsibleCoreCI:
"""Client for Ansible Core CI services."""
+
DEFAULT_ENDPOINT = 'https://ansible-core-ci.testing.ansible.com'
def __init__(
@@ -301,7 +307,7 @@ class AnsibleCoreCI:
)
)
- data.update(dict(auth=auth))
+ data.update(auth=auth)
headers = {
'Content-Type': 'application/json',
@@ -418,6 +424,7 @@ class AnsibleCoreCI:
class CoreHttpError(HttpError):
"""HTTP response as an error."""
+
def __init__(self, status: int, remote_message: str, remote_stack_trace: str) -> None:
super().__init__(status, f'{remote_message}{remote_stack_trace}')
@@ -427,6 +434,7 @@ class CoreHttpError(HttpError):
class SshKey:
"""Container for SSH key used to connect to remote instances."""
+
KEY_TYPE = 'rsa' # RSA is used to maintain compatibility with paramiko and EC2
KEY_NAME = f'id_{KEY_TYPE}'
PUB_NAME = f'{KEY_NAME}.pub'
@@ -441,14 +449,19 @@ class SshKey:
key, pub = key_pair
key_dst, pub_dst = self.get_in_tree_key_pair_paths()
- def ssh_key_callback(files: list[tuple[str, str]]) -> None:
+ def ssh_key_callback(payload_config: PayloadConfig) -> None:
"""
Add the SSH keys to the payload file list.
They are either outside the source tree or in the cache dir which is ignored by default.
"""
+ files = payload_config.files
+ permissions = payload_config.permissions
+
files.append((key, os.path.relpath(key_dst, data_context().content.root)))
files.append((pub, os.path.relpath(pub_dst, data_context().content.root)))
+ permissions[os.path.relpath(key_dst, data_context().content.root)] = stat.S_IRUSR | stat.S_IWUSR
+
data_context().register_payload_callback(ssh_key_callback)
self.key, self.pub = key, pub
@@ -525,14 +538,16 @@ class SshKey:
class InstanceConnection:
"""Container for remote instance status and connection details."""
- def __init__(self,
- running: bool,
- hostname: t.Optional[str] = None,
- port: t.Optional[int] = None,
- username: t.Optional[str] = None,
- password: t.Optional[str] = None,
- response_json: t.Optional[dict[str, t.Any]] = None,
- ) -> None:
+
+ def __init__(
+ self,
+ running: bool,
+ hostname: t.Optional[str] = None,
+ port: t.Optional[int] = None,
+ username: t.Optional[str] = None,
+ password: t.Optional[str] = None,
+ response_json: t.Optional[dict[str, t.Any]] = None,
+ ) -> None:
self.running = running
self.hostname = hostname
self.port = port
diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py
index 0f445059..0af1cac4 100644
--- a/test/lib/ansible_test/_internal/coverage_util.py
+++ b/test/lib/ansible_test/_internal/coverage_util.py
@@ -60,6 +60,7 @@ from .thread import (
@dataclasses.dataclass(frozen=True)
class CoverageVersion:
"""Details about a coverage version and its supported Python versions."""
+
coverage_version: str
schema_version: int
min_python: tuple[int, int]
@@ -81,6 +82,7 @@ CONTROLLER_COVERAGE_VERSION = COVERAGE_VERSIONS[0]
class CoverageError(ApplicationError):
"""Exception caused while attempting to read a coverage file."""
+
def __init__(self, path: str, message: str) -> None:
self.path = path
self.message = message
diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py
index 635b0c32..379ee7b0 100644
--- a/test/lib/ansible_test/_internal/data.py
+++ b/test/lib/ansible_test/_internal/data.py
@@ -50,8 +50,17 @@ from .provider.layout.unsupported import (
)
+@dataclasses.dataclass(frozen=True)
+class PayloadConfig:
+ """Configuration required to build a source tree payload for delegation."""
+
+ files: list[tuple[str, str]]
+ permissions: dict[str, int]
+
+
class DataContext:
"""Data context providing details about the current execution environment for ansible-test."""
+
def __init__(self) -> None:
content_path = os.environ.get('ANSIBLE_TEST_CONTENT_ROOT')
current_path = os.getcwd()
@@ -63,16 +72,17 @@ class DataContext:
self.__source_providers = source_providers
self.__ansible_source: t.Optional[tuple[tuple[str, str], ...]] = None
- self.payload_callbacks: list[c.Callable[[list[tuple[str, str]]], None]] = []
+ self.payload_callbacks: list[c.Callable[[PayloadConfig], None]] = []
if content_path:
- content = self.__create_content_layout(layout_providers, source_providers, content_path, False)
+ content, source_provider = self.__create_content_layout(layout_providers, source_providers, content_path, False)
elif ANSIBLE_SOURCE_ROOT and is_subdir(current_path, ANSIBLE_SOURCE_ROOT):
- content = self.__create_content_layout(layout_providers, source_providers, ANSIBLE_SOURCE_ROOT, False)
+ content, source_provider = self.__create_content_layout(layout_providers, source_providers, ANSIBLE_SOURCE_ROOT, False)
else:
- content = self.__create_content_layout(layout_providers, source_providers, current_path, True)
+ content, source_provider = self.__create_content_layout(layout_providers, source_providers, current_path, True)
self.content: ContentLayout = content
+ self.source_provider = source_provider
def create_collection_layouts(self) -> list[ContentLayout]:
"""
@@ -100,7 +110,7 @@ class DataContext:
if collection_path == os.path.join(collection.root, collection.directory):
collection_layout = layout
else:
- collection_layout = self.__create_content_layout(self.__layout_providers, self.__source_providers, collection_path, False)
+ collection_layout = self.__create_content_layout(self.__layout_providers, self.__source_providers, collection_path, False)[0]
file_count = len(collection_layout.all_files())
@@ -113,11 +123,12 @@ class DataContext:
return collections
@staticmethod
- def __create_content_layout(layout_providers: list[t.Type[LayoutProvider]],
- source_providers: list[t.Type[SourceProvider]],
- root: str,
- walk: bool,
- ) -> ContentLayout:
+ def __create_content_layout(
+ layout_providers: list[t.Type[LayoutProvider]],
+ source_providers: list[t.Type[SourceProvider]],
+ root: str,
+ walk: bool,
+ ) -> t.Tuple[ContentLayout, SourceProvider]:
"""Create a content layout using the given providers and root path."""
try:
layout_provider = find_path_provider(LayoutProvider, layout_providers, root, walk)
@@ -138,7 +149,7 @@ class DataContext:
layout = layout_provider.create(layout_provider.root, source_provider.get_paths(layout_provider.root))
- return layout
+ return layout, source_provider
def __create_ansible_source(self):
"""Return a tuple of Ansible source files with both absolute and relative paths."""
@@ -173,7 +184,7 @@ class DataContext:
return self.__ansible_source
- def register_payload_callback(self, callback: c.Callable[[list[tuple[str, str]]], None]) -> None:
+ def register_payload_callback(self, callback: c.Callable[[PayloadConfig], None]) -> None:
"""Register the given payload callback."""
self.payload_callbacks.append(callback)
@@ -239,6 +250,7 @@ def data_context() -> DataContext:
@dataclasses.dataclass(frozen=True)
class PluginInfo:
"""Information about an Ansible plugin."""
+
plugin_type: str
name: str
paths: list[str]
diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py
index 0f181a23..7114f2ab 100644
--- a/test/lib/ansible_test/_internal/delegation.py
+++ b/test/lib/ansible_test/_internal/delegation.py
@@ -177,7 +177,6 @@ def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: li
con.run(['mkdir', '-p'] + writable_dirs, capture=True)
con.run(['chmod', '777'] + writable_dirs, capture=True)
con.run(['chmod', '755', working_directory], capture=True)
- con.run(['chmod', '644', os.path.join(content_root, args.metadata_path)], capture=True)
con.run(['useradd', pytest_user, '--create-home'], capture=True)
con.run(insert_options(command, options + ['--requirements-mode', 'only']), capture=False)
@@ -347,7 +346,7 @@ def filter_options(
('--metadata', 1, args.metadata_path),
('--exclude', 1, exclude),
('--require', 1, require),
- ('--base-branch', 1, args.base_branch or get_ci_provider().get_base_branch()),
+ ('--base-branch', 1, False),
])
pass_through_args: list[str] = []
diff --git a/test/lib/ansible_test/_internal/dev/container_probe.py b/test/lib/ansible_test/_internal/dev/container_probe.py
index be22e01c..fcbfbe4f 100644
--- a/test/lib/ansible_test/_internal/dev/container_probe.py
+++ b/test/lib/ansible_test/_internal/dev/container_probe.py
@@ -45,6 +45,7 @@ from ..cgroup import (
class CGroupState(enum.Enum):
"""The expected state of a cgroup related mount point."""
+
HOST = enum.auto()
PRIVATE = enum.auto()
SHADOWED = enum.auto()
@@ -53,6 +54,7 @@ class CGroupState(enum.Enum):
@dataclasses.dataclass(frozen=True)
class CGroupMount:
"""Details on a cgroup mount point that is expected to be present in the container."""
+
path: str
type: t.Optional[str]
writable: t.Optional[bool]
diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py
index edaf6c5f..2ddc2ff9 100644
--- a/test/lib/ansible_test/_internal/diff.py
+++ b/test/lib/ansible_test/_internal/diff.py
@@ -18,6 +18,7 @@ def parse_diff(lines: list[str]) -> list[FileDiff]:
class FileDiff:
"""Parsed diff for a single file."""
+
def __init__(self, old_path: str, new_path: str) -> None:
self.old = DiffSide(old_path, new=False)
self.new = DiffSide(new_path, new=True)
@@ -36,6 +37,7 @@ class FileDiff:
class DiffSide:
"""Parsed diff for a single 'side' of a single file."""
+
def __init__(self, path: str, new: bool) -> None:
self.path = path
self.new = new
@@ -109,6 +111,7 @@ class DiffSide:
class DiffParser:
"""Parse diff lines."""
+
def __init__(self, lines: list[str]) -> None:
self.lines = lines
self.files: list[FileDiff] = []
diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py
index 6c38ddbd..06f383b5 100644
--- a/test/lib/ansible_test/_internal/docker_util.py
+++ b/test/lib/ansible_test/_internal/docker_util.py
@@ -243,6 +243,7 @@ def get_docker_info(args: CommonConfig) -> DockerInfo:
class SystemdControlGroupV1Status(enum.Enum):
"""The state of the cgroup v1 systemd hierarchy on the container host."""
+
SUBSYSTEM_MISSING = 'The systemd cgroup subsystem was not found.'
FILESYSTEM_NOT_MOUNTED = 'The "/sys/fs/cgroup/systemd" filesystem is not mounted.'
MOUNT_TYPE_NOT_CORRECT = 'The "/sys/fs/cgroup/systemd" mount type is not correct.'
@@ -252,6 +253,7 @@ class SystemdControlGroupV1Status(enum.Enum):
@dataclasses.dataclass(frozen=True)
class ContainerHostProperties:
"""Container host properties detected at run time."""
+
audit_code: str
max_open_files: int
loginuid: t.Optional[int]
@@ -411,7 +413,7 @@ def run_utility_container(
options = options + [
'--name', name,
'--rm',
- ]
+ ] # fmt: skip
if data:
options.append('-i')
@@ -423,6 +425,7 @@ def run_utility_container(
class DockerCommand:
"""Details about the available docker command."""
+
def __init__(self, command: str, executable: str, version: str) -> None:
self.command = command
self.executable = executable
@@ -720,6 +723,7 @@ class DockerError(Exception):
class ContainerNotFoundError(DockerError):
"""The container identified by `identifier` was not found."""
+
def __init__(self, identifier: str) -> None:
super().__init__('The container "%s" was not found.' % identifier)
@@ -728,6 +732,7 @@ class ContainerNotFoundError(DockerError):
class DockerInspect:
"""The results of `docker inspect` for a single container."""
+
def __init__(self, args: CommonConfig, inspection: dict[str, t.Any]) -> None:
self.args = args
self.inspection = inspection
@@ -847,6 +852,7 @@ def docker_network_disconnect(args: CommonConfig, container_id: str, network: st
class DockerImageInspect:
"""The results of `docker image inspect` for a single image."""
+
def __init__(self, args: CommonConfig, inspection: dict[str, t.Any]) -> None:
self.args = args
self.inspection = inspection
@@ -909,6 +915,7 @@ def docker_image_inspect(args: CommonConfig, image: str, always: bool = False) -
class DockerNetworkInspect:
"""The results of `docker network inspect` for a single network."""
+
def __init__(self, args: CommonConfig, inspection: dict[str, t.Any]) -> None:
self.args = args
self.inspection = inspection
@@ -961,8 +968,16 @@ def docker_exec(
if data or stdin or stdout:
options.append('-i')
- return docker_command(args, ['exec'] + options + [container_id] + cmd, capture=capture, stdin=stdin, stdout=stdout, interactive=interactive,
- output_stream=output_stream, data=data)
+ return docker_command(
+ args,
+ ['exec'] + options + [container_id] + cmd,
+ capture=capture,
+ stdin=stdin,
+ stdout=stdout,
+ interactive=interactive,
+ output_stream=output_stream,
+ data=data,
+ )
def docker_command(
@@ -983,8 +998,18 @@ def docker_command(
if command[0] == 'podman' and get_podman_remote():
command.append('--remote')
- return run_command(args, command + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, interactive=interactive, always=always,
- output_stream=output_stream, data=data)
+ return run_command(
+ args,
+ command + cmd,
+ env=env,
+ capture=capture,
+ stdin=stdin,
+ stdout=stdout,
+ interactive=interactive,
+ always=always,
+ output_stream=output_stream,
+ data=data,
+ )
def docker_environment() -> dict[str, str]:
diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py
index 0c94cf3b..d7d6f1a8 100644
--- a/test/lib/ansible_test/_internal/executor.py
+++ b/test/lib/ansible_test/_internal/executor.py
@@ -81,18 +81,21 @@ def detect_changes(args: TestConfig) -> t.Optional[list[str]]:
class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found."""
+
def __init__(self) -> None:
super().__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
"""Exception when changes detected, but no tests trigger as a result."""
+
def __init__(self) -> None:
super().__init__('No tests found for detected changes.')
class Delegate(Exception):
"""Trigger command delegation."""
+
def __init__(self, host_state: HostState, exclude: list[str] = None, require: list[str] = None) -> None:
super().__init__()
@@ -103,6 +106,7 @@ class Delegate(Exception):
class ListTargets(Exception):
"""List integration test targets instead of executing them."""
+
def __init__(self, target_names: list[str]) -> None:
super().__init__()
@@ -111,5 +115,6 @@ class ListTargets(Exception):
class AllTargetsSkipped(ApplicationWarning):
"""All targets skipped."""
+
def __init__(self) -> None:
super().__init__('All targets skipped.')
diff --git a/test/lib/ansible_test/_internal/git.py b/test/lib/ansible_test/_internal/git.py
index c1909f08..4685f1d2 100644
--- a/test/lib/ansible_test/_internal/git.py
+++ b/test/lib/ansible_test/_internal/git.py
@@ -12,6 +12,7 @@ from .util import (
class Git:
"""Wrapper around git command-line tools."""
+
def __init__(self, root: t.Optional[str] = None) -> None:
self.git = 'git'
self.root = root
diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py
index 48d5fd31..ddc4727c 100644
--- a/test/lib/ansible_test/_internal/host_configs.py
+++ b/test/lib/ansible_test/_internal/host_configs.py
@@ -48,6 +48,7 @@ from .util import (
@dataclasses.dataclass(frozen=True)
class OriginCompletionConfig(PosixCompletionConfig):
"""Pseudo completion config for the origin."""
+
def __init__(self) -> None:
super().__init__(name='origin')
@@ -73,6 +74,7 @@ class OriginCompletionConfig(PosixCompletionConfig):
@dataclasses.dataclass(frozen=True)
class HostContext:
"""Context used when getting and applying defaults for host configurations."""
+
controller_config: t.Optional['PosixConfig']
@property
@@ -84,6 +86,7 @@ class HostContext:
@dataclasses.dataclass
class HostConfig(metaclass=abc.ABCMeta):
"""Base class for host configuration."""
+
@abc.abstractmethod
def get_defaults(self, context: HostContext) -> CompletionConfig:
"""Return the default settings."""
@@ -104,6 +107,7 @@ class HostConfig(metaclass=abc.ABCMeta):
@dataclasses.dataclass
class PythonConfig(metaclass=abc.ABCMeta):
"""Configuration for Python."""
+
version: t.Optional[str] = None
path: t.Optional[str] = None
@@ -142,6 +146,7 @@ class PythonConfig(metaclass=abc.ABCMeta):
@dataclasses.dataclass
class NativePythonConfig(PythonConfig):
"""Configuration for native Python."""
+
@property
def is_managed(self) -> bool:
"""
@@ -154,6 +159,7 @@ class NativePythonConfig(PythonConfig):
@dataclasses.dataclass
class VirtualPythonConfig(PythonConfig):
"""Configuration for Python in a virtual environment."""
+
system_site_packages: t.Optional[bool] = None
def apply_defaults(self, context: HostContext, defaults: PosixCompletionConfig) -> None:
@@ -175,6 +181,7 @@ class VirtualPythonConfig(PythonConfig):
@dataclasses.dataclass
class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
"""Base class for POSIX host configuration."""
+
python: t.Optional[PythonConfig] = None
@property
@@ -199,6 +206,7 @@ class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
"""Base class for host configurations which support the controller."""
+
@abc.abstractmethod
def get_default_targets(self, context: HostContext) -> list[ControllerConfig]:
"""Return the default targets for this host config."""
@@ -207,6 +215,7 @@ class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
"""Base class for remote host configuration."""
+
name: t.Optional[str] = None
provider: t.Optional[str] = None
arch: t.Optional[str] = None
@@ -245,6 +254,7 @@ class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class PosixSshConfig(PosixConfig):
"""Configuration for a POSIX SSH host."""
+
user: t.Optional[str] = None
host: t.Optional[str] = None
port: t.Optional[int] = None
@@ -265,6 +275,7 @@ class PosixSshConfig(PosixConfig):
@dataclasses.dataclass
class InventoryConfig(HostConfig):
"""Configuration using inventory."""
+
path: t.Optional[str] = None
def get_defaults(self, context: HostContext) -> InventoryCompletionConfig:
@@ -279,6 +290,7 @@ class InventoryConfig(HostConfig):
@dataclasses.dataclass
class DockerConfig(ControllerHostConfig, PosixConfig):
"""Configuration for a docker host."""
+
name: t.Optional[str] = None
image: t.Optional[str] = None
memory: t.Optional[int] = None
@@ -343,6 +355,7 @@ class DockerConfig(ControllerHostConfig, PosixConfig):
@dataclasses.dataclass
class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
"""Configuration for a POSIX remote host."""
+
become: t.Optional[str] = None
def get_defaults(self, context: HostContext) -> PosixRemoteCompletionConfig:
@@ -385,6 +398,7 @@ class WindowsConfig(HostConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class WindowsRemoteConfig(RemoteConfig, WindowsConfig):
"""Configuration for a remote Windows host."""
+
def get_defaults(self, context: HostContext) -> WindowsRemoteCompletionConfig:
"""Return the default settings."""
return filter_completion(windows_completion()).get(self.name) or windows_completion().get(self.platform)
@@ -403,6 +417,7 @@ class NetworkConfig(HostConfig, metaclass=abc.ABCMeta):
@dataclasses.dataclass
class NetworkRemoteConfig(RemoteConfig, NetworkConfig):
"""Configuration for a remote network host."""
+
collection: t.Optional[str] = None
connection: t.Optional[str] = None
@@ -431,6 +446,7 @@ class NetworkInventoryConfig(InventoryConfig, NetworkConfig):
@dataclasses.dataclass
class OriginConfig(ControllerHostConfig, PosixConfig):
"""Configuration for the origin host."""
+
def get_defaults(self, context: HostContext) -> OriginCompletionConfig:
"""Return the default settings."""
return OriginCompletionConfig()
@@ -448,6 +464,7 @@ class OriginConfig(ControllerHostConfig, PosixConfig):
@dataclasses.dataclass
class ControllerConfig(PosixConfig):
"""Configuration for the controller host."""
+
controller: t.Optional[PosixConfig] = None
def get_defaults(self, context: HostContext) -> PosixCompletionConfig:
@@ -482,6 +499,7 @@ class ControllerConfig(PosixConfig):
class FallbackReason(enum.Enum):
"""Reason fallback was performed."""
+
ENVIRONMENT = enum.auto()
PYTHON = enum.auto()
@@ -489,6 +507,7 @@ class FallbackReason(enum.Enum):
@dataclasses.dataclass(frozen=True)
class FallbackDetail:
"""Details about controller fallback behavior."""
+
reason: FallbackReason
message: str
@@ -496,6 +515,7 @@ class FallbackDetail:
@dataclasses.dataclass(frozen=True)
class HostSettings:
"""Host settings for the controller and targets."""
+
controller: ControllerHostConfig
targets: list[HostConfig]
skipped_python_versions: list[str]
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py
index 0abc9961..a51eb693 100644
--- a/test/lib/ansible_test/_internal/host_profiles.py
+++ b/test/lib/ansible_test/_internal/host_profiles.py
@@ -139,6 +139,7 @@ TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
class ControlGroupError(ApplicationError):
"""Raised when the container host does not have the necessary cgroup support to run a container."""
+
def __init__(self, args: CommonConfig, reason: str) -> None:
engine = require_docker().command
dd_wsl2 = get_docker_info(args).docker_desktop_wsl2
@@ -181,6 +182,7 @@ NOTE: These changes must be applied each time the container host is rebooted.
@dataclasses.dataclass(frozen=True)
class Inventory:
"""Simple representation of an Ansible inventory."""
+
host_groups: dict[str, dict[str, dict[str, t.Union[str, int]]]]
extra_groups: t.Optional[dict[str, list[str]]] = None
@@ -226,12 +228,14 @@ class Inventory:
class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
"""Base class for host profiles."""
- def __init__(self,
- *,
- args: EnvironmentConfig,
- config: THostConfig,
- targets: t.Optional[list[HostConfig]],
- ) -> None:
+
+ def __init__(
+ self,
+ *,
+ args: EnvironmentConfig,
+ config: THostConfig,
+ targets: t.Optional[list[HostConfig]],
+ ) -> None:
self.args = args
self.config = config
self.controller = bool(targets)
@@ -272,6 +276,7 @@ class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
"""Base class for POSIX host profiles."""
+
@property
def python(self) -> PythonConfig:
"""
@@ -293,6 +298,7 @@ class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.ABCMeta):
"""Base class for profiles usable as a controller."""
+
@abc.abstractmethod
def get_origin_controller_connection(self) -> Connection:
"""Return a connection for accessing the host as a controller from the origin."""
@@ -304,6 +310,7 @@ class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.A
class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
"""Base class for profiles offering SSH connectivity."""
+
@abc.abstractmethod
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
@@ -311,6 +318,7 @@ class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
"""Base class for remote instance profiles."""
+
@property
def core_ci_state(self) -> t.Optional[dict[str, str]]:
"""The saved Ansible Core CI state."""
@@ -387,6 +395,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]):
"""Host profile for the controller as a target."""
+
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
@@ -409,6 +418,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
@dataclasses.dataclass(frozen=True)
class InitConfig:
"""Configuration details required to run the container init."""
+
options: list[str]
command: str
command_privileged: bool
@@ -1003,9 +1013,11 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do
display.info(last_error)
if not self.args.delegate and not self.args.host_path:
+
def callback() -> None:
"""Callback to run during error display."""
self.on_target_failure() # when the controller is not delegated, report failures immediately
+
else:
callback = None
@@ -1105,6 +1117,7 @@ class NetworkInventoryProfile(HostProfile[NetworkInventoryConfig]):
class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
"""Host profile for a network remote instance."""
+
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
@@ -1181,6 +1194,7 @@ class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
class OriginProfile(ControllerHostProfile[OriginConfig]):
"""Host profile for origin."""
+
def get_origin_controller_connection(self) -> LocalConnection:
"""Return a connection for accessing the host as a controller from the origin."""
return LocalConnection(self.args)
@@ -1192,6 +1206,7 @@ class OriginProfile(ControllerHostProfile[OriginConfig]):
class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile[PosixRemoteConfig]):
"""Host profile for a POSIX remote instance."""
+
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
@@ -1298,6 +1313,7 @@ class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile
class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]):
"""Host profile for a POSIX SSH instance."""
+
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
settings = SshConnectionDetail(
@@ -1314,6 +1330,7 @@ class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSs
class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
"""Host profile for a Windows inventory."""
+
def get_controller_target_connections(self) -> list[SshConnection]:
"""Return SSH connection(s) for accessing the host as a target from the controller."""
inventory = parse_inventory(self.args, self.config.path)
@@ -1338,6 +1355,7 @@ class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
"""Host profile for a Windows remote instance."""
+
def wait(self) -> None:
"""Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
self.wait_until_ready()
diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py
index ca514470..8b4154bf 100644
--- a/test/lib/ansible_test/_internal/http.py
+++ b/test/lib/ansible_test/_internal/http.py
@@ -22,6 +22,7 @@ from .util_common import (
class HttpClient:
"""Make HTTP requests via curl."""
+
def __init__(self, args: CommonConfig, always: bool = False, insecure: bool = False, proxy: t.Optional[str] = None) -> None:
self.args = args
self.always = always
@@ -113,6 +114,7 @@ class HttpClient:
class HttpResponse:
"""HTTP response from curl."""
+
def __init__(self, method: str, url: str, status_code: int, response: str) -> None:
self.method = method
self.url = url
@@ -129,6 +131,7 @@ class HttpResponse:
class HttpError(ApplicationError):
"""HTTP response as an error."""
+
def __init__(self, status: int, message: str) -> None:
super().__init__('%s: %s' % (status, message))
self.status = status
diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py
index 80d47699..eb745be4 100644
--- a/test/lib/ansible_test/_internal/io.py
+++ b/test/lib/ansible_test/_internal/io.py
@@ -34,12 +34,13 @@ def make_dirs(path: str) -> None:
os.makedirs(to_bytes(path), exist_ok=True)
-def write_json_file(path: str,
- content: t.Any,
- create_directories: bool = False,
- formatted: bool = True,
- encoder: t.Optional[t.Type[json.JSONEncoder]] = None,
- ) -> str:
+def write_json_file(
+ path: str,
+ content: t.Any,
+ create_directories: bool = False,
+ formatted: bool = True,
+ encoder: t.Optional[t.Type[json.JSONEncoder]] = None,
+) -> str:
"""Write the given json content to the specified path, optionally creating missing directories."""
text_content = json.dumps(content,
sort_keys=formatted,
@@ -80,6 +81,7 @@ def open_binary_file(path: str, mode: str = 'rb') -> t.IO[bytes]:
class SortedSetEncoder(json.JSONEncoder):
"""Encode sets as sorted lists."""
+
def default(self, o: t.Any) -> t.Any:
"""Return a serialized version of the `o` object."""
if isinstance(o, set):
diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py
index 3b958672..76c8878b 100644
--- a/test/lib/ansible_test/_internal/junit_xml.py
+++ b/test/lib/ansible_test/_internal/junit_xml.py
@@ -10,6 +10,7 @@ import datetime
import decimal
from xml.dom import minidom
+
# noinspection PyPep8Naming
from xml.etree import ElementTree as ET
@@ -17,6 +18,7 @@ from xml.etree import ElementTree as ET
@dataclasses.dataclass # type: ignore[misc] # https://github.com/python/mypy/issues/5374
class TestResult(metaclass=abc.ABCMeta):
"""Base class for the result of a test case."""
+
output: str | None = None
message: str | None = None
type: str | None = None
@@ -48,6 +50,7 @@ class TestResult(metaclass=abc.ABCMeta):
@dataclasses.dataclass
class TestFailure(TestResult):
"""Failure info for a test case."""
+
@property
def tag(self) -> str:
"""Tag name for the XML element created by this result type."""
@@ -57,6 +60,7 @@ class TestFailure(TestResult):
@dataclasses.dataclass
class TestError(TestResult):
"""Error info for a test case."""
+
@property
def tag(self) -> str:
"""Tag name for the XML element created by this result type."""
@@ -66,6 +70,7 @@ class TestError(TestResult):
@dataclasses.dataclass
class TestCase:
"""An individual test case."""
+
name: str
assertions: int | None = None
classname: str | None = None
@@ -127,6 +132,7 @@ class TestCase:
@dataclasses.dataclass
class TestSuite:
"""A collection of test cases."""
+
name: str
hostname: str | None = None
id: str | None = None
@@ -138,6 +144,10 @@ class TestSuite:
system_out: str | None = None
system_err: str | None = None
+ def __post_init__(self):
+ if self.timestamp and self.timestamp.tzinfo != datetime.timezone.utc:
+ raise ValueError(f'timestamp.tzinfo must be {datetime.timezone.utc!r}')
+
@property
def disabled(self) -> int:
"""The number of disabled test cases."""
@@ -181,7 +191,7 @@ class TestSuite:
skipped=self.skipped,
tests=self.tests,
time=self.time,
- timestamp=self.timestamp.isoformat(timespec='seconds') if self.timestamp else None,
+ timestamp=self.timestamp.replace(tzinfo=None).isoformat(timespec='seconds') if self.timestamp else None,
)
def get_xml_element(self) -> ET.Element:
@@ -205,6 +215,7 @@ class TestSuite:
@dataclasses.dataclass
class TestSuites:
"""A collection of test suites."""
+
name: str | None = None
suites: list[TestSuite] = dataclasses.field(default_factory=list)
diff --git a/test/lib/ansible_test/_internal/locale_util.py b/test/lib/ansible_test/_internal/locale_util.py
index 3fb74ad5..22ccce75 100644
--- a/test/lib/ansible_test/_internal/locale_util.py
+++ b/test/lib/ansible_test/_internal/locale_util.py
@@ -21,6 +21,7 @@ It was not needed in previous ansible-core releases since they do not verify the
class LocaleError(SystemExit):
"""Exception to raise when locale related errors occur."""
+
def __init__(self, message: str) -> None:
super().__init__(f'ERROR: {message}')
diff --git a/test/lib/ansible_test/_internal/metadata.py b/test/lib/ansible_test/_internal/metadata.py
index 94bbc34a..b8b598e8 100644
--- a/test/lib/ansible_test/_internal/metadata.py
+++ b/test/lib/ansible_test/_internal/metadata.py
@@ -4,6 +4,7 @@ import typing as t
from .util import (
display,
+ generate_name,
)
from .io import (
@@ -19,12 +20,14 @@ from .diff import (
class Metadata:
"""Metadata object for passing data to delegated tests."""
+
def __init__(self) -> None:
"""Initialize metadata."""
self.changes: dict[str, tuple[tuple[int, int], ...]] = {}
self.cloud_config: t.Optional[dict[str, dict[str, t.Union[int, str, bool]]]] = None
self.change_description: t.Optional[ChangeDescription] = None
self.ci_provider: t.Optional[str] = None
+ self.session_id = generate_name()
def populate_changes(self, diff: t.Optional[list[str]]) -> None:
"""Populate the changeset using the given diff."""
@@ -52,6 +55,7 @@ class Metadata:
cloud_config=self.cloud_config,
ci_provider=self.ci_provider,
change_description=self.change_description.to_dict(),
+ session_id=self.session_id,
)
def to_file(self, path: str) -> None:
@@ -76,12 +80,14 @@ class Metadata:
metadata.cloud_config = data['cloud_config']
metadata.ci_provider = data['ci_provider']
metadata.change_description = ChangeDescription.from_dict(data['change_description'])
+ metadata.session_id = data['session_id']
return metadata
class ChangeDescription:
"""Description of changes."""
+
def __init__(self) -> None:
self.command: str = ''
self.changed_paths: list[str] = []
diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py
index 94150cb4..10dde7b8 100644
--- a/test/lib/ansible_test/_internal/payload.py
+++ b/test/lib/ansible_test/_internal/payload.py
@@ -27,6 +27,7 @@ from .util import (
from .data import (
data_context,
+ PayloadConfig,
)
from .util_common import (
@@ -44,11 +45,74 @@ def create_payload(args: CommonConfig, dst_path: str) -> None:
return
files = list(data_context().ansible_source)
- filters = {}
+ permissions: dict[str, int] = {}
+ filters: dict[str, t.Callable[[tarfile.TarInfo], t.Optional[tarfile.TarInfo]]] = {}
+
+ # Exclude vendored files from the payload.
+ # They may not be compatible with the delegated environment.
+ files = [
+ (abs_path, rel_path) for abs_path, rel_path in files
+ if not rel_path.startswith('lib/ansible/_vendor/')
+ or rel_path == 'lib/ansible/_vendor/__init__.py'
+ ]
+
+ def apply_permissions(tar_info: tarfile.TarInfo, mode: int) -> t.Optional[tarfile.TarInfo]:
+ """
+ Apply the specified permissions to the given file.
+ Existing file type bits are preserved.
+ """
+ tar_info.mode &= ~(stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+ tar_info.mode |= mode
+
+ return tar_info
def make_executable(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
- """Make the given file executable."""
- tar_info.mode |= stat.S_IXUSR | stat.S_IXOTH | stat.S_IXGRP
+ """
+ Make the given file executable and readable by all, and writeable by the owner.
+ Existing file type bits are preserved.
+ This ensures consistency of test results when using unprivileged users.
+ """
+ return apply_permissions(
+ tar_info,
+ stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
+ stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
+ stat.S_IWUSR
+ ) # fmt: skip
+
+ def make_non_executable(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
+ """
+ Make the given file readable by all, and writeable by the owner.
+ Existing file type bits are preserved.
+ This ensures consistency of test results when using unprivileged users.
+ """
+ return apply_permissions(
+ tar_info,
+ stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
+ stat.S_IWUSR
+ ) # fmt: skip
+
+ def detect_permissions(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
+ """
+ Detect and apply the appropriate permissions for a file.
+ Existing file type bits are preserved.
+ This ensures consistency of test results when using unprivileged users.
+ """
+ if tar_info.path.startswith('ansible/'):
+ mode = permissions.get(os.path.relpath(tar_info.path, 'ansible'))
+ elif data_context().content.collection and is_subdir(tar_info.path, data_context().content.collection.directory):
+ mode = permissions.get(os.path.relpath(tar_info.path, data_context().content.collection.directory))
+ else:
+ mode = None
+
+ if mode:
+ tar_info = apply_permissions(tar_info, mode)
+ elif tar_info.mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
+ # If any execute bit is set, treat the file as executable.
+ # This ensures that sanity tests which check execute bits behave correctly.
+ tar_info = make_executable(tar_info)
+ else:
+ tar_info = make_non_executable(tar_info)
+
return tar_info
if not ANSIBLE_SOURCE_ROOT:
@@ -85,10 +149,15 @@ def create_payload(args: CommonConfig, dst_path: str) -> None:
# there are no extra files when testing ansible itself
extra_files = []
+ payload_config = PayloadConfig(
+ files=content_files,
+ permissions=permissions,
+ )
+
for callback in data_context().payload_callbacks:
# execute callbacks only on the content paths
# this is done before placing them in the appropriate subdirectory (see below)
- callback(content_files)
+ callback(payload_config)
# place ansible source files under the 'ansible' directory on the delegated host
files = [(src, os.path.join('ansible', dst)) for src, dst in files]
@@ -109,7 +178,7 @@ def create_payload(args: CommonConfig, dst_path: str) -> None:
with tarfile.open(dst_path, mode='w:gz', compresslevel=4, format=tarfile.GNU_FORMAT) as tar:
for src, dst in files:
display.info('%s -> %s' % (src, dst), verbosity=4)
- tar.add(src, dst, filter=filters.get(dst))
+ tar.add(src, dst, filter=filters.get(dst, detect_permissions))
duration = time.time() - start
payload_size_bytes = os.path.getsize(dst_path)
diff --git a/test/lib/ansible_test/_internal/provider/__init__.py b/test/lib/ansible_test/_internal/provider/__init__.py
index 61d7baf9..9b73ae72 100644
--- a/test/lib/ansible_test/_internal/provider/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/__init__.py
@@ -16,11 +16,12 @@ def get_path_provider_classes(provider_type: t.Type[TPathProvider]) -> list[t.Ty
return sorted(get_subclasses(provider_type), key=lambda subclass: (subclass.priority, subclass.__name__))
-def find_path_provider(provider_type: t.Type[TPathProvider],
- provider_classes: list[t.Type[TPathProvider]],
- path: str,
- walk: bool,
- ) -> TPathProvider:
+def find_path_provider(
+ provider_type: t.Type[TPathProvider],
+ provider_classes: list[t.Type[TPathProvider]],
+ path: str,
+ walk: bool,
+) -> TPathProvider:
"""Return the first found path provider of the given type for the given path."""
sequences = sorted(set(pc.sequence for pc in provider_classes if pc.sequence > 0))
@@ -48,6 +49,7 @@ def find_path_provider(provider_type: t.Type[TPathProvider],
class ProviderNotFoundForPath(ApplicationError):
"""Exception generated when a path based provider cannot be found for a given path."""
+
def __init__(self, provider_type: t.Type, path: str) -> None:
super().__init__('No %s found for path: %s' % (provider_type.__name__, path))
@@ -57,6 +59,7 @@ class ProviderNotFoundForPath(ApplicationError):
class PathProvider(metaclass=abc.ABCMeta):
"""Base class for provider plugins that are path based."""
+
sequence = 500
priority = 500
diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py
index aa6693f0..4eca05ce 100644
--- a/test/lib/ansible_test/_internal/provider/layout/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py
@@ -17,10 +17,12 @@ from .. import (
class Layout:
"""Description of content locations and helper methods to access content."""
- def __init__(self,
- root: str,
- paths: list[str],
- ) -> None:
+
+ def __init__(
+ self,
+ root: str,
+ paths: list[str],
+ ) -> None:
self.root = root
self.__paths = paths # contains both file paths and symlinked directory paths (ending with os.path.sep)
@@ -74,25 +76,27 @@ class Layout:
class ContentLayout(Layout):
"""Information about the current Ansible content being tested."""
- def __init__(self,
- root: str,
- paths: list[str],
- plugin_paths: dict[str, str],
- collection: t.Optional[CollectionDetail],
- test_path: str,
- results_path: str,
- sanity_path: str,
- sanity_messages: t.Optional[LayoutMessages],
- integration_path: str,
- integration_targets_path: str,
- integration_vars_path: str,
- integration_messages: t.Optional[LayoutMessages],
- unit_path: str,
- unit_module_path: str,
- unit_module_utils_path: str,
- unit_messages: t.Optional[LayoutMessages],
- unsupported: bool = False,
- ) -> None:
+
+ def __init__(
+ self,
+ root: str,
+ paths: list[str],
+ plugin_paths: dict[str, str],
+ collection: t.Optional[CollectionDetail],
+ test_path: str,
+ results_path: str,
+ sanity_path: str,
+ sanity_messages: t.Optional[LayoutMessages],
+ integration_path: str,
+ integration_targets_path: str,
+ integration_vars_path: str,
+ integration_messages: t.Optional[LayoutMessages],
+ unit_path: str,
+ unit_module_path: str,
+ unit_module_utils_path: str,
+ unit_messages: t.Optional[LayoutMessages],
+ unsupported: bool = False,
+ ) -> None:
super().__init__(root, paths)
self.plugin_paths = plugin_paths
@@ -150,6 +154,7 @@ class ContentLayout(Layout):
class LayoutMessages:
"""Messages generated during layout creation that should be deferred for later display."""
+
def __init__(self) -> None:
self.info: list[str] = []
self.warning: list[str] = []
@@ -158,11 +163,13 @@ class LayoutMessages:
class CollectionDetail:
"""Details about the layout of the current collection."""
- def __init__(self,
- name: str,
- namespace: str,
- root: str,
- ) -> None:
+
+ def __init__(
+ self,
+ name: str,
+ namespace: str,
+ root: str,
+ ) -> None:
self.name = name
self.namespace = namespace
self.root = root
@@ -173,6 +180,7 @@ class CollectionDetail:
class LayoutProvider(PathProvider):
"""Base class for layout providers."""
+
PLUGIN_TYPES = (
'action',
'become',
diff --git a/test/lib/ansible_test/_internal/provider/layout/ansible.py b/test/lib/ansible_test/_internal/provider/layout/ansible.py
index e8d01919..d2f8cc81 100644
--- a/test/lib/ansible_test/_internal/provider/layout/ansible.py
+++ b/test/lib/ansible_test/_internal/provider/layout/ansible.py
@@ -11,6 +11,7 @@ from . import (
class AnsibleLayout(LayoutProvider):
"""Layout provider for Ansible source."""
+
@staticmethod
def is_content_root(path: str) -> bool:
"""Return True if the given path is a content root for this provider."""
@@ -20,25 +21,26 @@ class AnsibleLayout(LayoutProvider):
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, os.path.join('lib/ansible/plugins', p)) for p in self.PLUGIN_TYPES)
- plugin_paths.update(dict(
+ plugin_paths.update(
modules='lib/ansible/modules',
module_utils='lib/ansible/module_utils',
- ))
-
- return ContentLayout(root,
- paths,
- plugin_paths=plugin_paths,
- collection=None,
- test_path='test',
- results_path='test/results',
- sanity_path='test/sanity',
- sanity_messages=None,
- integration_path='test/integration',
- integration_targets_path='test/integration/targets',
- integration_vars_path='test/integration/integration_config.yml',
- integration_messages=None,
- unit_path='test/units',
- unit_module_path='test/units/modules',
- unit_module_utils_path='test/units/module_utils',
- unit_messages=None,
- )
+ )
+
+ return ContentLayout(
+ root,
+ paths,
+ plugin_paths=plugin_paths,
+ collection=None,
+ test_path='test',
+ results_path='test/results',
+ sanity_path='test/sanity',
+ sanity_messages=None,
+ integration_path='test/integration',
+ integration_targets_path='test/integration/targets',
+ integration_vars_path='test/integration/integration_config.yml',
+ integration_messages=None,
+ unit_path='test/units',
+ unit_module_path='test/units/modules',
+ unit_module_utils_path='test/units/module_utils',
+ unit_messages=None,
+ )
diff --git a/test/lib/ansible_test/_internal/provider/layout/collection.py b/test/lib/ansible_test/_internal/provider/layout/collection.py
index 299d0bc6..d747f31f 100644
--- a/test/lib/ansible_test/_internal/provider/layout/collection.py
+++ b/test/lib/ansible_test/_internal/provider/layout/collection.py
@@ -17,6 +17,7 @@ from ...util import (
class CollectionLayout(LayoutProvider):
"""Layout provider for Ansible collections."""
+
@staticmethod
def is_content_root(path: str) -> bool:
"""Return True if the given path is a content root for this provider."""
@@ -52,28 +53,29 @@ class CollectionLayout(LayoutProvider):
integration_targets_path = self.__check_integration_path(paths, integration_messages)
self.__check_unit_path(paths, unit_messages)
- return ContentLayout(root,
- paths,
- plugin_paths=plugin_paths,
- collection=CollectionDetail(
- name=collection_name,
- namespace=collection_namespace,
- root=collection_root,
- ),
- test_path='tests',
- results_path='tests/output',
- sanity_path='tests/sanity',
- sanity_messages=sanity_messages,
- integration_path='tests/integration',
- integration_targets_path=integration_targets_path.rstrip(os.path.sep),
- integration_vars_path='tests/integration/integration_config.yml',
- integration_messages=integration_messages,
- unit_path='tests/unit',
- unit_module_path='tests/unit/plugins/modules',
- unit_module_utils_path='tests/unit/plugins/module_utils',
- unit_messages=unit_messages,
- unsupported=not (is_valid_identifier(collection_namespace) and is_valid_identifier(collection_name)),
- )
+ return ContentLayout(
+ root,
+ paths,
+ plugin_paths=plugin_paths,
+ collection=CollectionDetail(
+ name=collection_name,
+ namespace=collection_namespace,
+ root=collection_root,
+ ),
+ test_path='tests',
+ results_path='tests/output',
+ sanity_path='tests/sanity',
+ sanity_messages=sanity_messages,
+ integration_path='tests/integration',
+ integration_targets_path=integration_targets_path.rstrip(os.path.sep),
+ integration_vars_path='tests/integration/integration_config.yml',
+ integration_messages=integration_messages,
+ unit_path='tests/unit',
+ unit_module_path='tests/unit/plugins/modules',
+ unit_module_utils_path='tests/unit/plugins/module_utils',
+ unit_messages=unit_messages,
+ unsupported=not (is_valid_identifier(collection_namespace) and is_valid_identifier(collection_name)),
+ )
@staticmethod
def __check_test_path(paths: list[str], messages: LayoutMessages) -> None:
diff --git a/test/lib/ansible_test/_internal/provider/layout/unsupported.py b/test/lib/ansible_test/_internal/provider/layout/unsupported.py
index 16aa254c..e3d81e63 100644
--- a/test/lib/ansible_test/_internal/provider/layout/unsupported.py
+++ b/test/lib/ansible_test/_internal/provider/layout/unsupported.py
@@ -9,6 +9,7 @@ from . import (
class UnsupportedLayout(LayoutProvider):
"""Layout provider for an unsupported directory layout."""
+
sequence = 0 # disable automatic detection
@staticmethod
@@ -20,21 +21,22 @@ class UnsupportedLayout(LayoutProvider):
"""Create a Layout using the given root and paths."""
plugin_paths = dict((p, p) for p in self.PLUGIN_TYPES)
- return ContentLayout(root,
- paths,
- plugin_paths=plugin_paths,
- collection=None,
- test_path='',
- results_path='',
- sanity_path='',
- sanity_messages=None,
- integration_path='',
- integration_targets_path='',
- integration_vars_path='',
- integration_messages=None,
- unit_path='',
- unit_module_path='',
- unit_module_utils_path='',
- unit_messages=None,
- unsupported=True,
- )
+ return ContentLayout(
+ root,
+ paths,
+ plugin_paths=plugin_paths,
+ collection=None,
+ test_path='',
+ results_path='',
+ sanity_path='',
+ sanity_messages=None,
+ integration_path='',
+ integration_targets_path='',
+ integration_vars_path='',
+ integration_messages=None,
+ unit_path='',
+ unit_module_path='',
+ unit_module_utils_path='',
+ unit_messages=None,
+ unsupported=True,
+ )
diff --git a/test/lib/ansible_test/_internal/provider/source/__init__.py b/test/lib/ansible_test/_internal/provider/source/__init__.py
index aa8ca47f..68fe380f 100644
--- a/test/lib/ansible_test/_internal/provider/source/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/source/__init__.py
@@ -10,6 +10,7 @@ from .. import (
class SourceProvider(PathProvider):
"""Base class for source providers."""
+
@abc.abstractmethod
def get_paths(self, path: str) -> list[str]:
"""Return the list of available content paths under the given path."""
diff --git a/test/lib/ansible_test/_internal/provider/source/git.py b/test/lib/ansible_test/_internal/provider/source/git.py
index 37f16bfa..f8637edd 100644
--- a/test/lib/ansible_test/_internal/provider/source/git.py
+++ b/test/lib/ansible_test/_internal/provider/source/git.py
@@ -22,6 +22,7 @@ from . import (
class GitSource(SourceProvider):
"""Source provider for a content root managed by git version control."""
+
@staticmethod
def is_content_root(path: str) -> bool:
"""Return True if the given path is a content root for this provider."""
diff --git a/test/lib/ansible_test/_internal/provider/source/installed.py b/test/lib/ansible_test/_internal/provider/source/installed.py
index 6b821888..1e5a6ba2 100644
--- a/test/lib/ansible_test/_internal/provider/source/installed.py
+++ b/test/lib/ansible_test/_internal/provider/source/installed.py
@@ -10,6 +10,7 @@ from . import (
class InstalledSource(SourceProvider):
"""Source provider for content which has been installed."""
+
sequence = 0 # disable automatic detection
@staticmethod
diff --git a/test/lib/ansible_test/_internal/provider/source/unsupported.py b/test/lib/ansible_test/_internal/provider/source/unsupported.py
index e2f8953e..caa49941 100644
--- a/test/lib/ansible_test/_internal/provider/source/unsupported.py
+++ b/test/lib/ansible_test/_internal/provider/source/unsupported.py
@@ -8,6 +8,7 @@ from . import (
class UnsupportedSource(SourceProvider):
"""Source provider to use when the layout is unsupported."""
+
sequence = 0 # disable automatic detection
@staticmethod
diff --git a/test/lib/ansible_test/_internal/provider/source/unversioned.py b/test/lib/ansible_test/_internal/provider/source/unversioned.py
index d8eff5d1..699de889 100644
--- a/test/lib/ansible_test/_internal/provider/source/unversioned.py
+++ b/test/lib/ansible_test/_internal/provider/source/unversioned.py
@@ -18,6 +18,7 @@ from . import (
class UnversionedSource(SourceProvider):
"""Fallback source provider when no other provider matches the content root."""
+
sequence = 0 # disable automatic detection
@staticmethod
diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py
index 7547a302..e7f0fd31 100644
--- a/test/lib/ansible_test/_internal/provisioning.py
+++ b/test/lib/ansible_test/_internal/provisioning.py
@@ -55,6 +55,7 @@ class PrimeContainers(ApplicationError):
@dataclasses.dataclass(frozen=True)
class HostState:
"""State of hosts and profiles to be passed to ansible-test during delegation."""
+
controller_profile: ControllerHostProfile
target_profiles: list[HostProfile]
diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py
index e3733a5c..fc88b637 100644
--- a/test/lib/ansible_test/_internal/python_requirements.py
+++ b/test/lib/ansible_test/_internal/python_requirements.py
@@ -48,6 +48,7 @@ from .data import (
from .host_configs import (
PosixConfig,
PythonConfig,
+ VirtualPythonConfig,
)
from .connections import (
@@ -70,13 +71,14 @@ VIRTUALENV_VERSION = '16.7.12'
class PipUnavailableError(ApplicationError):
"""Exception raised when pip is not available."""
+
def __init__(self, python: PythonConfig) -> None:
super().__init__(f'Python {python.version} at "{python.path}" does not have pip available.')
@dataclasses.dataclass(frozen=True)
class PipCommand:
- """Base class for pip commands."""""
+ """Base class for pip commands."""
def serialize(self) -> tuple[str, dict[str, t.Any]]:
"""Return a serialized representation of this command."""
@@ -87,6 +89,7 @@ class PipCommand:
@dataclasses.dataclass(frozen=True)
class PipInstall(PipCommand):
"""Details required to perform a pip install."""
+
requirements: list[tuple[str, str]]
constraints: list[tuple[str, str]]
packages: list[str]
@@ -102,6 +105,7 @@ class PipInstall(PipCommand):
@dataclasses.dataclass(frozen=True)
class PipUninstall(PipCommand):
"""Details required to perform a pip uninstall."""
+
packages: list[str]
ignore_errors: bool
@@ -114,6 +118,7 @@ class PipVersion(PipCommand):
@dataclasses.dataclass(frozen=True)
class PipBootstrap(PipCommand):
"""Details required to bootstrap pip."""
+
pip_version: str
packages: list[str]
@@ -261,6 +266,20 @@ def run_pip(
connection = connection or LocalConnection(args)
script = prepare_pip_script(commands)
+ if isinstance(args, IntegrationConfig):
+ # Integration tests can involve two hosts (controller and target).
+ # The connection type can be used to disambiguate between the two.
+ context = " (controller)" if isinstance(connection, LocalConnection) else " (target)"
+ else:
+ context = ""
+
+ if isinstance(python, VirtualPythonConfig):
+ context += " [venv]"
+
+ # The interpreter path is not included below.
+ # It can be seen by running ansible-test with increased verbosity (showing all commands executed).
+ display.info(f'Installing requirements for Python {python.version}{context}')
+
if not args.explain:
try:
connection.run([python.path], data=script, capture=False)
diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py
index 840edf62..b2a26787 100644
--- a/test/lib/ansible_test/_internal/ssh.py
+++ b/test/lib/ansible_test/_internal/ssh.py
@@ -32,6 +32,7 @@ from .config import (
@dataclasses.dataclass
class SshConnectionDetail:
"""Information needed to establish an SSH connection to a host."""
+
name: str
host: str
port: t.Optional[int]
@@ -56,7 +57,7 @@ class SshConnectionDetail:
# See: https://www.openssh.com/txt/release-8.8
algorithms = '+ssh-rsa' # append the algorithm to the default list, requires OpenSSH 7.0 or later
- options.update(dict(
+ options.update(
# Host key signature algorithms that the client wants to use.
# Available options can be found with `ssh -Q HostKeyAlgorithms` or `ssh -Q key` on older clients.
# This option was updated in OpenSSH 7.0, released on 2015-08-11, to support the "+" prefix.
@@ -69,13 +70,14 @@ class SshConnectionDetail:
# This option is an alias for PubkeyAcceptedAlgorithms, which was added in OpenSSH 8.5.
# See: https://www.openssh.com/txt/release-8.5
PubkeyAcceptedKeyTypes=algorithms,
- ))
+ )
return options
class SshProcess:
"""Wrapper around an SSH process."""
+
def __init__(self, process: t.Optional[subprocess.Popen]) -> None:
self._process = process
self.pending_forwards: t.Optional[list[tuple[str, int]]] = None
@@ -161,7 +163,7 @@ def create_ssh_command(
'ssh',
'-n', # prevent reading from stdin
'-i', ssh.identity_file, # file from which the identity for public key authentication is read
- ]
+ ] # fmt: skip
if not command:
cmd.append('-N') # do not execute a remote command
diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py
index 80411483..a6fa3bf2 100644
--- a/test/lib/ansible_test/_internal/target.py
+++ b/test/lib/ansible_test/_internal/target.py
@@ -86,11 +86,12 @@ def walk_internal_targets(
return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name))
-def filter_targets(targets: c.Iterable[TCompletionTarget],
- patterns: list[str],
- include: bool = True,
- errors: bool = True,
- ) -> c.Iterable[TCompletionTarget]:
+def filter_targets(
+ targets: c.Iterable[TCompletionTarget],
+ patterns: list[str],
+ include: bool = True,
+ errors: bool = True,
+) -> c.Iterable[TCompletionTarget]:
"""Iterate over the given targets and filter them based on the supplied arguments."""
unmatched = set(patterns or ())
compiled_patterns = dict((p, re.compile('^%s$' % p)) for p in patterns) if patterns else None
@@ -401,6 +402,7 @@ def analyze_integration_target_dependencies(integration_targets: list[Integratio
class CompletionTarget(metaclass=abc.ABCMeta):
"""Command-line argument completion target base class."""
+
def __init__(self) -> None:
self.name = ''
self.path = ''
@@ -435,6 +437,7 @@ class CompletionTarget(metaclass=abc.ABCMeta):
class TestTarget(CompletionTarget):
"""Generic test target."""
+
def __init__(
self,
path: str,
@@ -476,6 +479,7 @@ class TestTarget(CompletionTarget):
class IntegrationTargetType(enum.Enum):
"""Type of integration test target."""
+
CONTROLLER = enum.auto()
TARGET = enum.auto()
UNKNOWN = enum.auto()
@@ -606,13 +610,9 @@ class IntegrationTarget(CompletionTarget):
if 'needs/httptester' in groups:
groups.append('cloud/httptester') # backwards compatibility for when it was not a cloud plugin
- if '_' in self.name:
- prefix = self.name[:self.name.find('_')]
- else:
- prefix = None
-
- if prefix in prefixes:
- group = prefixes[prefix]
+ for prefix, group in prefixes.items():
+ if not self.name.startswith(f'{prefix}_'):
+ continue
if group != prefix:
group = '%s/%s' % (group, prefix)
@@ -692,6 +692,7 @@ class IntegrationTarget(CompletionTarget):
class TargetPatternsNotMatched(ApplicationError):
"""One or more targets were not matched when a match was required."""
+
def __init__(self, patterns: set[str]) -> None:
self.patterns = sorted(patterns)
diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py
index 211635c5..c36d17e2 100644
--- a/test/lib/ansible_test/_internal/test.py
+++ b/test/lib/ansible_test/_internal/test.py
@@ -47,7 +47,7 @@ def calculate_confidence(path: str, line: int, metadata: Metadata) -> int:
return 0
# changes were made to the same file and line
- if any(r[0] <= line <= r[1] in r for r in ranges):
+ if any(r[0] <= line <= r[1] for r in ranges):
return 100
# changes were made to the same file and the line number is unknown
@@ -60,6 +60,7 @@ def calculate_confidence(path: str, line: int, metadata: Metadata) -> int:
class TestResult:
"""Base class for test results."""
+
def __init__(self, command: str, test: str, python_version: t.Optional[str] = None) -> None:
self.command = command
self.test = test
@@ -113,7 +114,7 @@ class TestResult:
junit_xml.TestSuite(
name='ansible-test',
cases=[test_case],
- timestamp=datetime.datetime.utcnow(),
+ timestamp=datetime.datetime.now(tz=datetime.timezone.utc),
),
],
)
@@ -128,7 +129,8 @@ class TestResult:
class TestTimeout(TestResult):
"""Test timeout."""
- def __init__(self, timeout_duration: int) -> None:
+
+ def __init__(self, timeout_duration: int | float) -> None:
super().__init__(command='timeout', test='')
self.timeout_duration = timeout_duration
@@ -151,13 +153,11 @@ One or more of the following situations may be responsible:
output += '\n\nConsult the console log for additional details on where the timeout occurred.'
- timestamp = datetime.datetime.utcnow()
-
suites = junit_xml.TestSuites(
suites=[
junit_xml.TestSuite(
name='ansible-test',
- timestamp=timestamp,
+ timestamp=datetime.datetime.now(tz=datetime.timezone.utc),
cases=[
junit_xml.TestCase(
name='timeout',
@@ -180,6 +180,7 @@ One or more of the following situations may be responsible:
class TestSuccess(TestResult):
"""Test success."""
+
def write_junit(self, args: TestConfig) -> None:
"""Write results to a junit XML file."""
test_case = junit_xml.TestCase(classname=self.command, name=self.name)
@@ -189,6 +190,7 @@ class TestSuccess(TestResult):
class TestSkipped(TestResult):
"""Test skipped."""
+
def __init__(self, command: str, test: str, python_version: t.Optional[str] = None) -> None:
super().__init__(command, test, python_version)
@@ -214,6 +216,7 @@ class TestSkipped(TestResult):
class TestFailure(TestResult):
"""Test failure."""
+
def __init__(
self,
command: str,
@@ -378,6 +381,7 @@ class TestFailure(TestResult):
class TestMessage:
"""Single test message for one file."""
+
def __init__(
self,
message: str,
diff --git a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py
index edaf1b5c..c4574377 100644
--- a/test/lib/ansible_test/_internal/thread.py
+++ b/test/lib/ansible_test/_internal/thread.py
@@ -15,6 +15,7 @@ TCallable = t.TypeVar('TCallable', bound=t.Callable[..., t.Any])
class WrappedThread(threading.Thread):
"""Wrapper around Thread which captures results and exceptions."""
+
def __init__(self, action: c.Callable[[], t.Any]) -> None:
super().__init__()
self._result: queue.Queue[t.Any] = queue.Queue()
diff --git a/test/lib/ansible_test/_internal/timeout.py b/test/lib/ansible_test/_internal/timeout.py
index 90ba5835..2c57d4cf 100644
--- a/test/lib/ansible_test/_internal/timeout.py
+++ b/test/lib/ansible_test/_internal/timeout.py
@@ -1,6 +1,7 @@
"""Timeout management for tests."""
from __future__ import annotations
+import dataclasses
import datetime
import functools
import os
@@ -19,7 +20,7 @@ from .config import (
from .util import (
display,
- ApplicationError,
+ TimeoutExpiredError,
)
from .thread import (
@@ -35,15 +36,56 @@ from .test import (
)
-def get_timeout() -> t.Optional[dict[str, t.Any]]:
- """Return details about the currently set timeout, if any, otherwise return None."""
- if not os.path.exists(TIMEOUT_PATH):
- return None
+@dataclasses.dataclass(frozen=True)
+class TimeoutDetail:
+ """Details required to enforce a timeout on test execution."""
+
+ _DEADLINE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # format used to maintain backwards compatibility with previous versions of ansible-test
+
+ deadline: datetime.datetime
+ duration: int | float # minutes
+
+ @property
+ def remaining(self) -> datetime.timedelta:
+ """The amount of time remaining before the timeout occurs. If the timeout has passed, this will be a negative duration."""
+ return self.deadline - datetime.datetime.now(tz=datetime.timezone.utc).replace(microsecond=0)
+
+ def to_dict(self) -> dict[str, t.Any]:
+ """Return timeout details as a dictionary suitable for JSON serialization."""
+ return dict(
+ deadline=self.deadline.strftime(self._DEADLINE_FORMAT),
+ duration=self.duration,
+ )
- data = read_json_file(TIMEOUT_PATH)
- data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
+ @staticmethod
+ def from_dict(value: dict[str, t.Any]) -> TimeoutDetail:
+ """Return a TimeoutDetail instance using the value previously returned by to_dict."""
+ return TimeoutDetail(
+ deadline=datetime.datetime.strptime(value['deadline'], TimeoutDetail._DEADLINE_FORMAT).replace(tzinfo=datetime.timezone.utc),
+ duration=value['duration'],
+ )
- return data
+ @staticmethod
+ def create(duration: int | float) -> TimeoutDetail | None:
+ """Return a new TimeoutDetail instance for the specified duration (in minutes), or None if the duration is zero."""
+ if not duration:
+ return None
+
+ if duration == int(duration):
+ duration = int(duration)
+
+ return TimeoutDetail(
+ deadline=datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0) + datetime.timedelta(seconds=int(duration * 60)),
+ duration=duration,
+ )
+
+
+def get_timeout() -> TimeoutDetail | None:
+ """Return details about the currently set timeout, if any, otherwise return None."""
+ try:
+ return TimeoutDetail.from_dict(read_json_file(TIMEOUT_PATH))
+ except FileNotFoundError:
+ return None
def configure_timeout(args: CommonConfig) -> None:
@@ -59,27 +101,22 @@ def configure_test_timeout(args: TestConfig) -> None:
if not timeout:
return
- timeout_start = datetime.datetime.utcnow()
- timeout_duration = timeout['duration']
- timeout_deadline = timeout['deadline']
- timeout_remaining = timeout_deadline - timeout_start
+ timeout_remaining = timeout.remaining
- test_timeout = TestTimeout(timeout_duration)
+ test_timeout = TestTimeout(timeout.duration)
if timeout_remaining <= datetime.timedelta():
test_timeout.write(args)
- raise ApplicationError('The %d minute test timeout expired %s ago at %s.' % (
- timeout_duration, timeout_remaining * -1, timeout_deadline))
+ raise TimeoutExpiredError(f'The {timeout.duration} minute test timeout expired {timeout_remaining * -1} ago at {timeout.deadline}.')
- display.info('The %d minute test timeout expires in %s at %s.' % (
- timeout_duration, timeout_remaining, timeout_deadline), verbosity=1)
+ display.info(f'The {timeout.duration} minute test timeout expires in {timeout_remaining} at {timeout.deadline}.', verbosity=1)
def timeout_handler(_dummy1: t.Any, _dummy2: t.Any) -> None:
"""Runs when SIGUSR1 is received."""
test_timeout.write(args)
- raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' % timeout_duration)
+ raise TimeoutExpiredError(f'Tests aborted after exceeding the {timeout.duration} minute time limit.')
def timeout_waiter(timeout_seconds: int) -> None:
"""Background thread which will kill the current process if the timeout elapses."""
@@ -88,6 +125,6 @@ def configure_test_timeout(args: TestConfig) -> None:
signal.signal(signal.SIGUSR1, timeout_handler)
- instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.seconds))
+ instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.total_seconds()))
instance.daemon = True
instance.start()
diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py
index ec485a2b..a5a9faba 100644
--- a/test/lib/ansible_test/_internal/util.py
+++ b/test/lib/ansible_test/_internal/util.py
@@ -23,10 +23,14 @@ import time
import functools
import shlex
import typing as t
+import warnings
from struct import unpack, pack
from termios import TIOCGWINSZ
+# CAUTION: Avoid third-party imports in this module whenever possible.
+# Any third-party imports occurring here will result in an error if they are vendored by ansible-core.
+
try:
from typing_extensions import TypeGuard # TypeGuard was added in Python 3.10
except ImportError:
@@ -129,6 +133,7 @@ class Architecture:
Normalized architecture names.
These are the architectures supported by ansible-test, such as when provisioning remote instances.
"""
+
X86_64 = 'x86_64'
AARCH64 = 'aarch64'
@@ -338,6 +343,17 @@ def get_ansible_version() -> str:
return ansible_version
+def _enable_vendoring() -> None:
+ """Enable support for loading Python packages vendored by ansible-core."""
+ # Load the vendoring code by file path, since ansible may not be in our sys.path.
+ # Convert warnings into errors, to avoid problems from surfacing later.
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings('error')
+
+ load_module(os.path.join(ANSIBLE_LIB_ROOT, '_vendor', '__init__.py'), 'ansible_vendor')
+
+
@cache
def get_available_python_versions() -> dict[str, str]:
"""Return a dictionary indicating which supported Python versions are available."""
@@ -553,6 +569,7 @@ def communicate_with_process(
class WriterThread(WrappedThread):
"""Thread to write data to stdin of a subprocess."""
+
def __init__(self, handle: t.IO[bytes], data: bytes) -> None:
super().__init__(self._run)
@@ -570,6 +587,7 @@ class WriterThread(WrappedThread):
class ReaderThread(WrappedThread, metaclass=abc.ABCMeta):
"""Thread to read stdout from a subprocess."""
+
def __init__(self, handle: t.IO[bytes], buffer: t.BinaryIO) -> None:
super().__init__(self._run)
@@ -584,6 +602,7 @@ class ReaderThread(WrappedThread, metaclass=abc.ABCMeta):
class CaptureThread(ReaderThread):
"""Thread to capture stdout from a subprocess into a buffer."""
+
def _run(self) -> None:
"""Workload to run on a thread."""
src = self.handle
@@ -598,6 +617,7 @@ class CaptureThread(ReaderThread):
class OutputThread(ReaderThread):
"""Thread to pass stdout from a subprocess to stdout."""
+
def _run(self) -> None:
"""Workload to run on a thread."""
src = self.handle
@@ -778,6 +798,7 @@ def generate_password() -> str:
class Display:
"""Manages color console output."""
+
clear = '\033[0m'
red = '\033[31m'
green = '\033[32m'
@@ -886,6 +907,7 @@ class Display:
class InternalError(Exception):
"""An unhandled internal error indicating a bug in the code."""
+
def __init__(self, message: str) -> None:
super().__init__(f'An internal error has occurred in ansible-test: {message}')
@@ -898,8 +920,13 @@ class ApplicationWarning(Exception):
"""General application warning which interrupts normal program flow."""
+class TimeoutExpiredError(SystemExit):
+ """Error raised when the test timeout has been reached or exceeded."""
+
+
class SubprocessError(ApplicationError):
"""Error resulting from failed subprocess execution."""
+
def __init__(
self,
cmd: list[str],
@@ -936,6 +963,7 @@ class SubprocessError(ApplicationError):
class MissingEnvironmentVariable(ApplicationError):
"""Error caused by missing environment variable."""
+
def __init__(self, name: str) -> None:
super().__init__('Missing environment variable: %s' % name)
@@ -948,6 +976,7 @@ class HostConnectionError(ApplicationError):
Raised by provisioning code when one or more provisioning threads raise this exception.
Also raised when an SSH connection fails for the shell command.
"""
+
def __init__(self, message: str, callback: t.Callable[[], None] = None) -> None:
super().__init__(message)
@@ -1144,3 +1173,5 @@ def type_guard(sequence: c.Sequence[t.Any], guard_type: t.Type[C]) -> TypeGuard[
display = Display() # pylint: disable=locally-disabled, invalid-name
+
+_enable_vendoring()
diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py
index 1dfc7f38..79ff6c03 100644
--- a/test/lib/ansible_test/_internal/util_common.py
+++ b/test/lib/ansible_test/_internal/util_common.py
@@ -66,6 +66,7 @@ CHECK_YAML_VERSIONS: dict[str, t.Any] = {}
class ShellScriptTemplate:
"""A simple substitution template for shell scripts."""
+
def __init__(self, template: str) -> None:
self.template = template
@@ -87,6 +88,7 @@ class ShellScriptTemplate:
class ResultType:
"""Test result type."""
+
BOT: ResultType = None
COVERAGE: ResultType = None
DATA: ResultType = None
@@ -128,6 +130,7 @@ ResultType._populate() # pylint: disable=protected-access
class CommonConfig:
"""Configuration common to all commands."""
+
def __init__(self, args: t.Any, command: str) -> None:
self.command = command
self.interactive = False
@@ -237,12 +240,13 @@ def named_temporary_file(args: CommonConfig, prefix: str, suffix: str, directory
yield tempfile_fd.name
-def write_json_test_results(category: ResultType,
- name: str,
- content: t.Union[list[t.Any], dict[str, t.Any]],
- formatted: bool = True,
- encoder: t.Optional[t.Type[json.JSONEncoder]] = None,
- ) -> None:
+def write_json_test_results(
+ category: ResultType,
+ name: str,
+ content: t.Union[list[t.Any], dict[str, t.Any]],
+ formatted: bool = True,
+ encoder: t.Optional[t.Type[json.JSONEncoder]] = None,
+) -> None:
"""Write the given json content to the specified test results path, creating directories as needed."""
path = os.path.join(category.path, name)
write_json_file(path, content, create_directories=True, formatted=formatted, encoder=encoder)
@@ -445,8 +449,21 @@ def run_command(
) -> tuple[t.Optional[str], t.Optional[str]]:
"""Run the specified command and return stdout and stderr as a tuple."""
explain = args.explain and not always
- return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd, explain=explain, stdin=stdin, stdout=stdout, interactive=interactive,
- output_stream=output_stream, cmd_verbosity=cmd_verbosity, str_errors=str_errors, error_callback=error_callback)
+ return raw_command(
+ cmd,
+ capture=capture,
+ env=env,
+ data=data,
+ cwd=cwd,
+ explain=explain,
+ stdin=stdin,
+ stdout=stdout,
+ interactive=interactive,
+ output_stream=output_stream,
+ cmd_verbosity=cmd_verbosity,
+ str_errors=str_errors,
+ error_callback=error_callback,
+ )
def yamlcheck(python: PythonConfig) -> t.Optional[bool]:
diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py
index ec498ed9..a83fc8b9 100644
--- a/test/lib/ansible_test/_internal/venv.py
+++ b/test/lib/ansible_test/_internal/venv.py
@@ -78,12 +78,13 @@ def get_virtual_python(
return virtual_environment_python
-def create_virtual_environment(args: EnvironmentConfig,
- python: PythonConfig,
- path: str,
- system_site_packages: bool = False,
- pip: bool = False,
- ) -> bool:
+def create_virtual_environment(
+ args: EnvironmentConfig,
+ python: PythonConfig,
+ path: str,
+ system_site_packages: bool = False,
+ pip: bool = False,
+) -> bool:
"""Create a virtual environment using venv or virtualenv for the requested Python version."""
if not os.path.exists(python.path):
# the requested python version could not be found
@@ -180,12 +181,13 @@ def get_python_real_prefix(python_path: str) -> t.Optional[str]:
return real_prefix
-def run_venv(args: EnvironmentConfig,
- run_python: str,
- system_site_packages: bool,
- pip: bool,
- path: str,
- ) -> bool:
+def run_venv(
+ args: EnvironmentConfig,
+ run_python: str,
+ system_site_packages: bool,
+ pip: bool,
+ path: str,
+) -> bool:
"""Create a virtual environment using the 'venv' module. Not available on Python 2.x."""
cmd = [run_python, '-m', 'venv']
@@ -210,13 +212,14 @@ def run_venv(args: EnvironmentConfig,
return True
-def run_virtualenv(args: EnvironmentConfig,
- run_python: str,
- env_python: str,
- system_site_packages: bool,
- pip: bool,
- path: str,
- ) -> bool:
+def run_virtualenv(
+ args: EnvironmentConfig,
+ run_python: str,
+ env_python: str,
+ system_site_packages: bool,
+ pip: bool,
+ path: str,
+) -> bool:
"""Create a virtual environment using the 'virtualenv' module."""
# always specify which interpreter to use to guarantee the desired interpreter is provided
# otherwise virtualenv may select a different interpreter than the one running virtualenv
diff --git a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
index 190e9529..55738f87 100644
--- a/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
+++ b/test/lib/ansible_test/_util/controller/sanity/mypy/ansible-test.ini
@@ -14,6 +14,9 @@ disable_error_code = misc
[mypy-argcomplete]
ignore_missing_imports = True
+[mypy-argcomplete.finders]
+ignore_missing_imports = True
+
[mypy-coverage]
ignore_missing_imports = True
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
index 270c9f44..25c61798 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
@@ -24,9 +24,7 @@ import datetime
import json
import os
import re
-import subprocess
import sys
-import tempfile
import traceback
import warnings
@@ -301,8 +299,8 @@ class ModuleValidator(Validator):
ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
- def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
- base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
+ def __init__(self, path, git_cache: GitCache, analyze_arg_spec=False, collection=None, collection_version=None,
+ reporter=None, routing=None, plugin_type='module'):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path
@@ -328,8 +326,8 @@ class ModuleValidator(Validator):
self.collection_version_str = collection_version
self.collection_version = SemanticVersion(collection_version)
- self.base_branch = base_branch
- self.git_cache = git_cache or GitCache()
+ self.git_cache = git_cache
+ self.base_module = self.git_cache.get_original_path(self.path)
self._python_module_override = False
@@ -341,11 +339,6 @@ class ModuleValidator(Validator):
except Exception:
self.ast = None
- if base_branch:
- self.base_module = self._get_base_file()
- else:
- self.base_module = None
-
def _create_version(self, v, collection_name=None):
if not v:
raise ValueError('Empty string is not a valid version')
@@ -368,13 +361,7 @@ class ModuleValidator(Validator):
return self
def __exit__(self, exc_type, exc_value, traceback):
- if not self.base_module:
- return
-
- try:
- os.remove(self.base_module)
- except Exception:
- pass
+ pass
@property
def object_name(self):
@@ -426,36 +413,9 @@ class ModuleValidator(Validator):
except AttributeError:
return False
- def _get_base_branch_module_path(self):
- """List all paths within lib/ansible/modules to try and match a moved module"""
- return self.git_cache.base_module_paths.get(self.object_name)
-
- def _has_alias(self):
- """Return true if the module has any aliases."""
- return self.object_name in self.git_cache.head_aliased_modules
-
- def _get_base_file(self):
- # In case of module moves, look for the original location
- base_path = self._get_base_branch_module_path()
- ext = os.path.splitext(base_path or self.path)[1]
-
- command = ['git', 'show', '%s:%s' % (self.base_branch, base_path or self.path)]
- p = subprocess.run(command, stdin=subprocess.DEVNULL, capture_output=True, check=False)
-
- if int(p.returncode) != 0:
- return None
-
- t = tempfile.NamedTemporaryFile(delete=False, suffix=ext)
- t.write(p.stdout)
- t.close()
-
- return t.name
-
- def _is_new_module(self):
- if self._has_alias():
- return False
-
- return not self.object_name.startswith('_') and bool(self.base_branch) and not bool(self.base_module)
+ def _is_new_module(self) -> bool | None:
+ """Return True if the content is new, False if it is not and None if the information is not available."""
+ return self.git_cache.is_new(self.path)
def _check_interpreter(self, powershell=False):
if powershell:
@@ -2050,7 +2010,7 @@ class ModuleValidator(Validator):
)
def _check_for_new_args(self, doc):
- if not self.base_branch or self._is_new_module():
+ if not self.base_module:
return
with CaptureStd():
@@ -2284,7 +2244,7 @@ class ModuleValidator(Validator):
# We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util
pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic'
if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_REJECTLIST:
- with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv:
+ with ModuleValidator(docs_path, git_cache=self.git_cache) as docs_mv:
docs = docs_mv._validate_docs()[1]
self._validate_ansible_module_call(docs)
@@ -2329,6 +2289,84 @@ class PythonPackageValidator(Validator):
)
+class GitCache(metaclass=abc.ABCMeta):
+ """Base class for access to original files."""
+ @abc.abstractmethod
+ def get_original_path(self, path: str) -> str | None:
+ """Return the path to the original version of the specified file, or None if there isn't one."""
+
+ @abc.abstractmethod
+ def is_new(self, path: str) -> bool | None:
+ """Return True if the content is new, False if it is not and None if the information is not available."""
+
+ @staticmethod
+ def create(original_plugins: str | None, plugin_type: str) -> GitCache:
+ return CoreGitCache(original_plugins, plugin_type) if original_plugins else NoOpGitCache()
+
+
+class CoreGitCache(GitCache):
+ """Provides access to original files when testing core."""
+ def __init__(self, original_plugins: str | None, plugin_type: str) -> None:
+ super().__init__()
+
+ self.original_plugins = original_plugins
+
+ rel_path = 'lib/ansible/modules/' if plugin_type == 'module' else f'lib/ansible/plugins/{plugin_type}/'
+ head_tree = self._find_files(rel_path)
+
+ head_aliased_modules = set()
+
+ for path in head_tree:
+ filename = os.path.basename(path)
+
+ if filename.startswith('_') and filename != '__init__.py':
+ if os.path.islink(path):
+ head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
+
+ self._head_aliased_modules = head_aliased_modules
+
+ def get_original_path(self, path: str) -> str | None:
+ """Return the path to the original version of the specified file, or None if there isn't one."""
+ path = os.path.join(self.original_plugins, path)
+
+ if not os.path.exists(path):
+ path = None
+
+ return path
+
+ def is_new(self, path: str) -> bool | None:
+ """Return True if the content is new, False if it is not and None if the information is not available."""
+ if os.path.basename(path).startswith('_'):
+ return False
+
+ if os.path.basename(path) in self._head_aliased_modules:
+ return False
+
+ return not self.get_original_path(path)
+
+ @staticmethod
+ def _find_files(path: str) -> list[str]:
+ """Return a list of files found in the specified directory."""
+ paths = []
+
+ for (dir_path, dir_names, file_names) in os.walk(path):
+ for file_name in file_names:
+ paths.append(os.path.join(dir_path, file_name))
+
+ return sorted(paths)
+
+
+class NoOpGitCache(GitCache):
+ """Provides a no-op interface for access to original files."""
+ def get_original_path(self, path: str) -> str | None:
+ """Return the path to the original version of the specified file, or None if there isn't one."""
+ return None
+
+ def is_new(self, path: str) -> bool | None:
+ """Return True if the content is new, False if it is not and None if the information is not available."""
+ return None
+
+
def re_compile(value):
"""
Argparse expects things to raise TypeError, re.compile raises an re.error
@@ -2354,8 +2392,6 @@ def run():
type=re_compile)
parser.add_argument('--arg-spec', help='Analyze module argument spec',
action='store_true', default=False)
- parser.add_argument('--base-branch', default=None,
- help='Used in determining if new options were added')
parser.add_argument('--format', choices=['json', 'plain'], default='plain',
help='Output format. Default: "%(default)s"')
parser.add_argument('--output', default='-',
@@ -2372,13 +2408,14 @@ def run():
parser.add_argument('--plugin-type',
default='module',
help='The plugin type to validate. Defaults to %(default)s')
+ parser.add_argument('--original-plugins')
args = parser.parse_args()
args.plugins = [m.rstrip('/') for m in args.plugins]
reporter = Reporter()
- git_cache = GitCache(args.base_branch, args.plugin_type)
+ git_cache = GitCache.create(args.original_plugins, args.plugin_type)
check_dirs = set()
@@ -2403,7 +2440,7 @@ def run():
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
- analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
+ analyze_arg_spec=args.arg_spec,
git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv1:
mv1.validate()
@@ -2428,7 +2465,7 @@ def run():
if ModuleValidator.is_on_rejectlist(path):
continue
with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
- analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
+ analyze_arg_spec=args.arg_spec,
git_cache=git_cache, reporter=reporter, routing=routing,
plugin_type=args.plugin_type) as mv2:
mv2.validate()
@@ -2444,75 +2481,6 @@ def run():
sys.exit(reporter.json(warnings=args.warnings, output=args.output))
-class GitCache:
- def __init__(self, base_branch, plugin_type):
- self.base_branch = base_branch
- self.plugin_type = plugin_type
-
- self.rel_path = 'lib/ansible/modules/'
- if plugin_type != 'module':
- self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
-
- if self.base_branch:
- self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
- else:
- self.base_tree = []
-
- try:
- self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
- except GitError as ex:
- if ex.status == 128:
- # fallback when there is no .git directory
- self.head_tree = self._get_module_files()
- else:
- raise
- except FileNotFoundError:
- # fallback when git is not installed
- self.head_tree = self._get_module_files()
-
- allowed_exts = ('.py', '.ps1')
- if plugin_type != 'module':
- allowed_exts = ('.py', )
- self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
-
- self.base_module_paths.pop('__init__.py', None)
-
- self.head_aliased_modules = set()
-
- for path in self.head_tree:
- filename = os.path.basename(path)
-
- if filename.startswith('_') and filename != '__init__.py':
- if os.path.islink(path):
- self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
-
- def _get_module_files(self):
- module_files = []
-
- for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
- for file_name in file_names:
- module_files.append(os.path.join(dir_path, file_name))
-
- return module_files
-
- @staticmethod
- def _git(args):
- cmd = ['git'] + args
- p = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, text=True, check=False)
-
- if p.returncode != 0:
- raise GitError(p.stderr, p.returncode)
-
- return p.stdout.splitlines()
-
-
-class GitError(Exception):
- def __init__(self, message, status):
- super(GitError, self).__init__(message)
-
- self.status = status
-
-
def main():
try:
run()
diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
index 732c122a..f2e82fbc 100644
--- a/test/lib/ansible_test/_util/target/setup/bootstrap.sh
+++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
@@ -148,6 +148,7 @@ bootstrap_remote_freebsd()
packages="
python${python_package_version}
py${python_package_version}-sqlite3
+ py${python_package_version}-setuptools
bash
curl
gtar
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
index 0c6e7613..fc894a77 100644
--- a/test/sanity/code-smell/package-data.py
+++ b/test/sanity/code-smell/package-data.py
@@ -29,9 +29,6 @@ def assemble_files_to_ship(complete_file_list):
'hacking/ticket_stubs/*',
'test/sanity/code-smell/botmeta.*',
'test/sanity/code-smell/release-names.*',
- 'test/utils/*',
- 'test/utils/*/*',
- 'test/utils/*/*/*',
'test/results/.tmp/*',
'test/results/.tmp/*/*',
'test/results/.tmp/*/*/*',
@@ -54,7 +51,6 @@ def assemble_files_to_ship(complete_file_list):
'hacking/report.py',
'hacking/return_skeleton_generator.py',
'hacking/test-module',
- 'test/support/README.md',
'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
'.cherry_picker.toml',
diff --git a/test/support/README.md b/test/support/README.md
new file mode 100644
index 00000000..850bc921
--- /dev/null
+++ b/test/support/README.md
@@ -0,0 +1,11 @@
+# IMPORTANT!
+
+Files under this directory are not actual plugins and modules used by Ansible
+and as such should **not be modified**. They are used for testing purposes
+only (and are temporary).
+
+In almost every case, pull requests affecting files under this directory
+will be closed.
+
+**You are likely looking for something under
+https://github.com/ansible-collections/ instead.**
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index 15207b2f..318bc10b 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -330,23 +330,34 @@ class TestRandomPassword(unittest.TestCase):
class TestParseContent(unittest.TestCase):
def test_empty_password_file(self):
- plaintext_password, salt = password._parse_content(u'')
+ plaintext_password, salt, ident = password._parse_content(u'')
self.assertEqual(plaintext_password, u'')
self.assertEqual(salt, None)
+ self.assertEqual(ident, None)
def test(self):
expected_content = u'12345678'
file_content = expected_content
- plaintext_password, salt = password._parse_content(file_content)
+ plaintext_password, salt, ident = password._parse_content(file_content)
self.assertEqual(plaintext_password, expected_content)
self.assertEqual(salt, None)
+ self.assertEqual(ident, None)
def test_with_salt(self):
expected_content = u'12345678 salt=87654321'
file_content = expected_content
- plaintext_password, salt = password._parse_content(file_content)
+ plaintext_password, salt, ident = password._parse_content(file_content)
self.assertEqual(plaintext_password, u'12345678')
self.assertEqual(salt, u'87654321')
+ self.assertEqual(ident, None)
+
+ def test_with_salt_and_ident(self):
+ expected_content = u'12345678 salt=87654321 ident=2a'
+ file_content = expected_content
+ plaintext_password, salt, ident = password._parse_content(file_content)
+ self.assertEqual(plaintext_password, u'12345678')
+ self.assertEqual(salt, u'87654321')
+ self.assertEqual(ident, u'2a')
class TestFormatContent(unittest.TestCase):