author | Lee Garrett <lgarrett@rocketjump.eu> | 2023-06-16 15:40:52 +0200
committer | Lee Garrett <lgarrett@rocketjump.eu> | 2023-06-16 15:40:52 +0200
commit | 8d9a6d9cdf440b0a9b254a8a4bf063c0cb6a6201 (patch)
tree | e25b4160deb15b08aaf0aea65fc8c7bbc01dea12 /test/integration/targets
parent | 3cda7ad4dd15b514ff660905294b5b6330ecfb6f (diff)
download | debian-ansible-core-8d9a6d9cdf440b0a9b254a8a4bf063c0cb6a6201.zip
New upstream version 2.14.6
Diffstat (limited to 'test/integration/targets')
37 files changed, 877 insertions, 24 deletions
diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
index f251a69f..79b7a704 100644
--- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
+++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/plugins/modules/randommodule.py
@@ -48,6 +48,18 @@ options:
           version: '2.0.0'
 extends_documentation_fragment:
   - testns.testcol2.module
+seealso:
+  - module: ansible.builtin.ping
+  - module: ansible.builtin.uri
+    description: Use this to fetch an URI
+  - module: testns.testcol.test
+  - module: testns.testcol.fakemodule
+    description: A fake module
+  - link: https://docs.ansible.com
+    name: Ansible docsite
+    description: See also the Ansible docsite.
+  - ref: foo_bar
+    description: Some foo bar.
 '''
 
 EXAMPLES = '''
diff --git a/test/integration/targets/ansible-doc/fix-urls.py b/test/integration/targets/ansible-doc/fix-urls.py
new file mode 100644
index 00000000..1379a4e4
--- /dev/null
+++ b/test/integration/targets/ansible-doc/fix-urls.py
@@ -0,0 +1,15 @@
+"""Unwrap URLs to docs.ansible.com and remove version"""
+
+import re
+import sys
+
+
+def main():
+    data = sys.stdin.read()
+    data = re.sub('(https://docs\\.ansible\\.com/[^ ]+)\n +([^ ]+)\n', '\\1\\2\n', data, flags=re.MULTILINE)
+    data = re.sub('https://docs\\.ansible\\.com/ansible(|-core)/(?:[^/]+)/', 'https://docs.ansible.com/ansible\\1/devel/', data)
+    sys.stdout.write(data)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/integration/targets/ansible-doc/randommodule-text.output b/test/integration/targets/ansible-doc/randommodule-text.output
index 51d7930a..602d66ec 100644
--- a/test/integration/targets/ansible-doc/randommodule-text.output
+++ b/test/integration/targets/ansible-doc/randommodule-text.output
@@ -65,6 +65,25 @@ OPTIONS (= is mandatory):
         type: str
 
 
+SEE ALSO:
+      * Module ansible.builtin.ping
+           The official documentation on the
+           ansible.builtin.ping module.
+           https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/ping_module.html
+      * Module ansible.builtin.uri
+           Use this to fetch an URI
+           https://docs.ansible.com/ansible-core/devel/collections/ansible/builtin/uri_module.html
+      * Module testns.testcol.test
+      * Module testns.testcol.fakemodule
+           A fake module
+      * Ansible docsite
+           See also the Ansible docsite.
+           https://docs.ansible.com
+      * Ansible documentation [foo_bar]
+           Some foo bar.
+           https://docs.ansible.com/ansible-core/devel/#stq=foo_bar&stp=1
+
+
 AUTHOR: Ansible Core Team
 
 EXAMPLES:
diff --git a/test/integration/targets/ansible-doc/randommodule.output b/test/integration/targets/ansible-doc/randommodule.output
index 25f46c36..cf036000 100644
--- a/test/integration/targets/ansible-doc/randommodule.output
+++ b/test/integration/targets/ansible-doc/randommodule.output
@@ -70,6 +70,31 @@
             "type": "str"
         }
     },
+    "seealso": [
+        {
+            "module": "ansible.builtin.ping"
+        },
+        {
+            "description": "Use this to fetch an URI",
+            "module": "ansible.builtin.uri"
+        },
+        {
+            "module": "testns.testcol.test"
+        },
+        {
+            "description": "A fake module",
+            "module": "testns.testcol.fakemodule"
+        },
+        {
+            "description": "See also the Ansible docsite.",
+            "link": "https://docs.ansible.com",
+            "name": "Ansible docsite"
+        },
+        {
+            "description": "Some foo bar.",
+            "ref": "foo_bar"
+        }
+    ],
     "short_description": "A random module",
     "version_added": "1.0.0",
     "version_added_collection": "testns.testcol"
diff --git a/test/integration/targets/ansible-doc/runme.sh b/test/integration/targets/ansible-doc/runme.sh
index 887d3c41..f51fa8a4 100755
--- a/test/integration/targets/ansible-doc/runme.sh
+++ b/test/integration/targets/ansible-doc/runme.sh
@@ -19,8 +19,8 @@ current_out="$(ansible-doc --playbook-dir ./ testns.testcol.fakemodule | sed '1
 expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.FAKEMODULE\).*(.*)$/\1/' fakemodule.output)"
 test "$current_out" == "$expected_out"
 
-# we use sed to strip the module path from the first line
-current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/')"
+# we use sed to strip the plugin path from the first line, and fix-urls.py to unbreak and replace URLs from stable-X branches
+current_out="$(ansible-doc --playbook-dir ./ testns.testcol.randommodule | sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' | python fix-urls.py)"
 expected_out="$(sed '1 s/\(^> TESTNS\.TESTCOL\.RANDOMMODULE\).*(.*)$/\1/' randommodule-text.output)"
 test "$current_out" == "$expected_out"
 
diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
index 8916faf5..cca83c7b 100644
--- a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
+++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml
@@ -5,7 +5,7 @@
     state: directory
 
 - name: install simple collection from first accessible server
-  command: ansible-galaxy collection install namespace1.name1 {{ galaxy_verbosity }}
+  command: ansible-galaxy collection install namespace1.name1 -vvvv
   environment:
     ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}/ansible_collections'
   register: from_first_good_server
@@ -30,6 +30,7 @@
     - install_normal_files.files[1].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
     - install_normal_files.files[2].path | basename in ['MANIFEST.json', 'FILES.json', 'README.md']
    - (install_normal_manifest.content | b64decode | from_json).collection_info.version == '1.0.9'
+    - 'from_first_good_server.stdout|regex_findall("has not signed namespace1\.name1")|length == 1'
 
 - name: Remove the collection
   file:
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something
new file mode 100644
index 00000000..b3065492
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/target-prefixes.something
@@ -0,0 +1,2 @@
+one-part
+two_part
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/one-part_test/aliases
diff --git a/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/integration/targets/ansible-test-integration-targets/ansible_collections/ns/col/tests/integration/targets/two_part_test/aliases
diff --git a/test/integration/targets/ansible-test-integration-targets/test.py b/test/integration/targets/ansible-test-integration-targets/test.py
index 443ed59d..8effb647 100755
--- a/test/integration/targets/ansible-test-integration-targets/test.py
+++ b/test/integration/targets/ansible-test-integration-targets/test.py
@@ -31,5 +31,17 @@ class OptionsTest(unittest.TestCase):
             raise Exception(f'{ex}:\n>>> Standard Output:\n{ex.stdout}\n>>> Standard Error:\n{ex.stderr}') from ex
 
 
+class PrefixesTest(unittest.TestCase):
+    def test_prefixes(self):
+        try:
+            command = ['ansible-test', 'integration', '--list-targets']
+
+            something = subprocess.run([*command, 'something/'], text=True, capture_output=True, check=True)
+
+            self.assertEqual(something.stdout.splitlines(), ['one-part_test', 'two_part_test'])
+        except subprocess.CalledProcessError as ex:
+            raise Exception(f'{ex}:\n>>> Standard Output:\n{ex.stdout}\n>>> Standard Error:\n{ex.stderr}') from ex
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/test/integration/targets/ansible-test-vendoring/aliases b/test/integration/targets/ansible-test-vendoring/aliases
new file mode 100644
index 00000000..09cbf4b8
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/aliases
@@ -0,0 +1,5 @@
+shippable/posix/group3 # runs in the distro test containers
+shippable/generic/group1 # runs in the default test container
+context/controller
+needs/target/collection
+destructive # adds and then removes packages into lib/ansible/_vendor/
diff --git a/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml b/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml
new file mode 100644
index 00000000..c73de69d
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/ansible_collections/ns/col/tests/config.yml
@@ -0,0 +1,4 @@
+# This config file is included to cause ansible-test to import the `packaging` module.
+
+modules:
+  python_requires: default
diff --git a/test/integration/targets/ansible-test-vendoring/runme.sh b/test/integration/targets/ansible-test-vendoring/runme.sh
new file mode 100755
index 00000000..fa6f652a
--- /dev/null
+++ b/test/integration/targets/ansible-test-vendoring/runme.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+set -eux
+
+# Run import sanity tests which require modifications to the source directory.
+ +vendor_dir="$(python -c 'import pathlib, ansible._vendor; print(pathlib.Path(ansible._vendor.__file__).parent)')" + +mkdir "${vendor_dir}/packaging/" # intended to fail if packaging is already present (to avoid deleting it later) + +cleanup() { + rm -rf "${vendor_dir}/packaging/" +} + +trap cleanup EXIT + +# Verify that packages installed in the vendor directory are loaded by ansible-test. +# This is done by injecting a broken `packaging` package, which should cause ansible-test to fail. + +echo 'raise Exception("intentional failure from ansible-test-vendoring integration test")' > "${vendor_dir}/packaging/__init__.py" + +if ansible-test sanity --test import --color --truncate 0 "${@}" > output.log 2>&1; then + echo "ansible-test did not exit with a non-zero status" + cat output.log + exit 1 +fi + +if ! grep '^Exception: intentional failure from ansible-test-vendoring integration test$' output.log; then + echo "ansible-test did not fail with the expected output" + cat output.log + exit 1 +fi + diff --git a/test/integration/targets/async/tasks/main.yml b/test/integration/targets/async/tasks/main.yml index 05c789e6..f5e5c992 100644 --- a/test/integration/targets/async/tasks/main.yml +++ b/test/integration/targets/async/tasks/main.yml @@ -122,7 +122,7 @@ - name: assert task failed correctly assert: that: - - async_result.ansible_job_id is match('\d+\.\d+') + - async_result.ansible_job_id is match('j\d+\.\d+') - async_result.finished == 1 - async_result is finished - async_result is not changed @@ -140,7 +140,7 @@ - name: validate response assert: that: - - async_result.ansible_job_id is match('\d+\.\d+') + - async_result.ansible_job_id is match('j\d+\.\d+') - async_result.finished == 1 - async_result is finished - async_result.changed == false @@ -159,7 +159,7 @@ - name: validate response assert: that: - - async_result.ansible_job_id is match('\d+\.\d+') + - async_result.ansible_job_id is match('j\d+\.\d+') - async_result.finished == 1 - async_result is finished - async_result.changed == true @@ -176,7 +176,7 @@ - name: validate response assert: that: - - async_result.ansible_job_id is match('\d+\.\d+') + - async_result.ansible_job_id is match('j\d+\.\d+') - async_result.finished == 1 - async_result is finished - async_result.changed == true diff --git a/test/integration/targets/async_fail/tasks/main.yml b/test/integration/targets/async_fail/tasks/main.yml index 40f72e10..24cea1d5 100644 --- a/test/integration/targets/async_fail/tasks/main.yml +++ b/test/integration/targets/async_fail/tasks/main.yml @@ -28,7 +28,7 @@ - name: validate that by the end of the retry interval, we succeeded assert: that: - - async_result.ansible_job_id is match('\d+\.\d+') + - async_result.ansible_job_id is match('j\d+\.\d+') - async_result.finished == 1 - async_result is finished - async_result is changed diff --git a/test/integration/targets/callback_default/runme.sh b/test/integration/targets/callback_default/runme.sh index 0ee4259d..a815132a 100755 --- a/test/integration/targets/callback_default/runme.sh +++ b/test/integration/targets/callback_default/runme.sh @@ -135,8 +135,8 @@ run_test default test.yml # Check for async output # NOTE: regex to match 1 or more digits works for both BSD and GNU grep ansible-playbook -i inventory test_async.yml 2>&1 | tee async_test.out -grep "ASYNC OK .* jid=[0-9]\{1,\}" async_test.out -grep "ASYNC FAILED .* jid=[0-9]\{1,\}" async_test.out +grep "ASYNC OK .* jid=j[0-9]\{1,\}" async_test.out +grep "ASYNC FAILED .* jid=j[0-9]\{1,\}" async_test.out rm -f async_test.out # Hide 
skipped diff --git a/test/integration/targets/canonical-pep517-self-packaging/aliases b/test/integration/targets/canonical-pep517-self-packaging/aliases new file mode 100644 index 00000000..4667aa4f --- /dev/null +++ b/test/integration/targets/canonical-pep517-self-packaging/aliases @@ -0,0 +1,3 @@ +shippable/posix/group3 +context/controller +packaging diff --git a/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt new file mode 100644 index 00000000..ea5d8084 --- /dev/null +++ b/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt @@ -0,0 +1,16 @@ +# Lowest supporting Python 3.9 and 3.10: +setuptools == 57.0.0; python_version == "3.9" or python_version == "3.10" + +# Lowest supporting Python 3.11: +setuptools == 60.0.0; python_version >= "3.11" + + +# An arbitrary old version that was released before Python 3.9.0: +wheel == 0.33.6 + +# Conditional dependencies: +docutils == 0.16 +Jinja2 == 3.0.0 +MarkupSafe == 2.0.0 +PyYAML == 5.3 +straight.plugin == 1.4.2 diff --git a/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt new file mode 100644 index 00000000..7f744afd --- /dev/null +++ b/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt @@ -0,0 +1,11 @@ +setuptools == 67.4.0 + +# Wheel-only build dependency +wheel == 0.38.4 + +# Conditional dependencies: +docutils == 0.19 +Jinja2 == 3.1.2 +MarkupSafe == 2.1.2 +PyYAML == 6.0 +straight.plugin == 1.5.0 # WARNING: v1.5.0 doesn't have a Git tag / src diff --git a/test/integration/targets/canonical-pep517-self-packaging/runme.sh b/test/integration/targets/canonical-pep517-self-packaging/runme.sh new file mode 100755 index 00000000..028348f8 --- /dev/null +++ b/test/integration/targets/canonical-pep517-self-packaging/runme.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +if [[ "${ANSIBLE_DEBUG}" == true ]] # `ansible-test` invoked with `--debug` +then + PYTEST_VERY_VERBOSE_FLAG=-vvvvv + SET_DEBUG_MODE=-x +else + ANSIBLE_DEBUG=false + PYTEST_VERY_VERBOSE_FLAG= + SET_DEBUG_MODE=+x +fi + + +set -eEuo pipefail + +source virtualenv.sh + +set "${SET_DEBUG_MODE}" + +export PIP_DISABLE_PIP_VERSION_CHECK=true +export PIP_NO_PYTHON_VERSION_WARNING=true +export PIP_NO_WARN_SCRIPT_LOCATION=true + +python -Im pip install 'pytest ~= 7.2.0' +python -Im pytest ${PYTEST_VERY_VERBOSE_FLAG} \ + --basetemp="${OUTPUT_DIR}/pytest-tmp" \ + --color=yes \ + --showlocals \ + -p no:forked \ + -p no:mock \ + -ra diff --git a/test/integration/targets/canonical-pep517-self-packaging/runme_test.py b/test/integration/targets/canonical-pep517-self-packaging/runme_test.py new file mode 100644 index 00000000..86b0f753 --- /dev/null +++ b/test/integration/targets/canonical-pep517-self-packaging/runme_test.py @@ -0,0 +1,385 @@ +"""Smoke tests for the in-tree PEP 517 backend.""" + +from __future__ import annotations + +from filecmp import dircmp +from os import chdir, environ, PathLike +from pathlib import Path +from shutil import rmtree +from subprocess import check_call, check_output, PIPE +from sys import executable as current_interpreter, version_info +from tarfile import TarFile +import typing as t + +try: + from contextlib import chdir as _chdir_cm +except ImportError: + from contextlib import contextmanager as _contextmanager + + @_contextmanager + def 
_chdir_cm(path: PathLike) -> t.Iterator[None]: + original_wd = Path.cwd() + chdir(path) + try: + yield + finally: + chdir(original_wd) + +import pytest + + +DIST_NAME = 'ansible_core' +DIST_FILENAME_BASE = 'ansible-core' +OUTPUT_DIR = Path(environ['OUTPUT_DIR']).resolve().absolute() +SRC_ROOT_DIR = OUTPUT_DIR.parents[3] +GENERATED_MANPAGES_SUBDIR = SRC_ROOT_DIR / 'docs' / 'man' / 'man1' +LOWEST_SUPPORTED_BUILD_DEPS_FILE = ( + Path(__file__).parent / 'minimum-build-constraints.txt' +).resolve().absolute() +MODERNISH_BUILD_DEPS_FILE = ( + Path(__file__).parent / 'modernish-build-constraints.txt' +).resolve().absolute() +RELEASE_MODULE = SRC_ROOT_DIR / 'lib' / 'ansible' / 'release.py' +VERSION_LINE_PREFIX = "__version__ = '" +PKG_DIST_VERSION = next( + line[len(VERSION_LINE_PREFIX):-1] + for line in RELEASE_MODULE.read_text().splitlines() + if line.startswith(VERSION_LINE_PREFIX) +) +EXPECTED_SDIST_NAME_BASE = f'{DIST_FILENAME_BASE}-{PKG_DIST_VERSION}' +EXPECTED_SDIST_NAME = f'{EXPECTED_SDIST_NAME_BASE}.tar.gz' +EXPECTED_WHEEL_NAME = f'{DIST_NAME}-{PKG_DIST_VERSION}-py3-none-any.whl' + +IS_PYTHON310_PLUS = version_info[:2] >= (3, 10) + + +def wipe_generated_manpages() -> None: + """Ensure man1 pages aren't present in the source checkout.""" + # Cleaning up the gitignored manpages... + if not GENERATED_MANPAGES_SUBDIR.exists(): + return + + rmtree(GENERATED_MANPAGES_SUBDIR) + # Removed the generated manpages... + + +def contains_man1_pages(sdist_tarball: Path) -> Path: + """Check if the man1 pages are present in given tarball.""" + with sdist_tarball.open(mode='rb') as tarball_fd: + with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball: + try: + tarball.getmember( + name=f'{EXPECTED_SDIST_NAME_BASE}/docs/man/man1', + ) + except KeyError: + return False + + return True + + +def unpack_sdist(sdist_tarball: Path, target_directory: Path) -> Path: + """Unarchive given tarball. + + :returns: Path of the package source checkout. 
+ """ + with sdist_tarball.open(mode='rb') as tarball_fd: + with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball: + tarball.extractall(path=target_directory) + return target_directory / EXPECTED_SDIST_NAME_BASE + + +def assert_dirs_equal(*dir_paths: t.List[Path]) -> None: + dir_comparison = dircmp(*dir_paths) + assert not dir_comparison.left_only + assert not dir_comparison.right_only + assert not dir_comparison.diff_files + assert not dir_comparison.funny_files + + +def normalize_unpacked_rebuilt_sdist(sdist_path: Path) -> None: + top_pkg_info_path = sdist_path / 'PKG-INFO' + nested_pkg_info_path = ( + sdist_path / 'lib' / f'{DIST_NAME}.egg-info' / 'PKG-INFO' + ) + entry_points_path = nested_pkg_info_path.parent / 'entry_points.txt' + + # setuptools v39 write out two trailing empty lines and an unknown platform + # while the recent don't + top_pkg_info_path.write_text( + top_pkg_info_path.read_text().replace( + 'Classifier: Development Status :: 5', + 'Platform: UNKNOWN\nClassifier: Development Status :: 5', + ) + '\n\n' + ) + nested_pkg_info_path.write_text( + nested_pkg_info_path.read_text().replace( + 'Classifier: Development Status :: 5', + 'Platform: UNKNOWN\nClassifier: Development Status :: 5', + ) + '\n\n' + ) + + # setuptools v39 write out one trailing empty line while the recent don't + entry_points_path.write_text(entry_points_path.read_text() + '\n') + + +@pytest.fixture +def venv_python_exe(tmp_path: Path) -> t.Iterator[Path]: + venv_path = tmp_path / 'pytest-managed-venv' + mkvenv_cmd = ( + current_interpreter, '-m', 'venv', str(venv_path), + ) + check_call(mkvenv_cmd, env={}, stderr=PIPE, stdout=PIPE) + yield venv_path / 'bin' / 'python' + rmtree(venv_path) + + +def run_with_venv_python( + python_exe: Path, *cli_args: t.Iterable[str], + env_vars: t.Dict[str, str] = None, +) -> str: + if env_vars is None: + env_vars = {} + full_cmd = str(python_exe), *cli_args + return check_output(full_cmd, env=env_vars, stderr=PIPE) + + +def build_dists( + python_exe: Path, *cli_args: t.Iterable[str], + env_vars: t.Dict[str, str], +) -> str: + return run_with_venv_python( + python_exe, '-m', 'build', + *cli_args, env_vars=env_vars, + ) + + +def pip_install( + python_exe: Path, *cli_args: t.Iterable[str], + env_vars: t.Dict[str, str] = None, +) -> str: + return run_with_venv_python( + python_exe, '-m', 'pip', 'install', + *cli_args, env_vars=env_vars, + ) + + +def test_installing_sdist_build_with_modern_deps_to_old_env( + venv_python_exe: Path, tmp_path: Path, +) -> None: + pip_install(venv_python_exe, 'build ~= 0.10.0') + tmp_dir_sdist_w_modern_tools = tmp_path / 'sdist-w-modern-tools' + build_dists( + venv_python_exe, '--sdist', + '--config-setting=--build-manpages', + f'--outdir={tmp_dir_sdist_w_modern_tools!s}', + str(SRC_ROOT_DIR), + env_vars={ + 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), + }, + ) + tmp_path_sdist_w_modern_tools = ( + tmp_dir_sdist_w_modern_tools / EXPECTED_SDIST_NAME + ) + + # Downgrading pip, because v20+ supports in-tree build backends + pip_install(venv_python_exe, 'pip ~= 19.3.1') + + # Smoke test — installing an sdist with pip that does not support + # in-tree build backends. + pip_install( + venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', + ) + + # Downgrading pip, because versions that support PEP 517 don't allow + # disabling it with `--no-use-pep517` when `build-backend` is set in + # the `[build-system]` section of `pyproject.toml`, considering this + # an explicit opt-in. 
+ if not IS_PYTHON310_PLUS: + pip_install(venv_python_exe, 'pip == 18.0') + + # Smoke test — installing an sdist with pip that does not support invoking + # PEP 517 interface at all. + # In this scenario, pip will run `setup.py install` since `wheel` is not in + # the environment. + if IS_PYTHON310_PLUS: + tmp_dir_unpacked_sdist_root = tmp_path / 'unpacked-sdist' + tmp_dir_unpacked_sdist_path = tmp_dir_unpacked_sdist_root / EXPECTED_SDIST_NAME_BASE + with TarFile.gzopen(tmp_path_sdist_w_modern_tools) as sdist_fd: + sdist_fd.extractall(path=tmp_dir_unpacked_sdist_root) + + pip_install( + venv_python_exe, 'setuptools', + env_vars={ + 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), + }, + ) + with _chdir_cm(tmp_dir_unpacked_sdist_path): + run_with_venv_python( + venv_python_exe, 'setup.py', 'sdist', + env_vars={'PATH': environ['PATH']}, + ) + else: + pip_install( + venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', + env_vars={ + 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), + }, + ) + + # Smoke test — installing an sdist with pip that does not support invoking + # PEP 517 interface at all. + # With `wheel` present, pip will run `setup.py bdist_wheel` and then, + # unpack the result. + pip_install(venv_python_exe, 'wheel') + if IS_PYTHON310_PLUS: + with _chdir_cm(tmp_dir_unpacked_sdist_path): + run_with_venv_python( + venv_python_exe, 'setup.py', 'bdist_wheel', + env_vars={'PATH': environ['PATH']}, + ) + else: + pip_install( + venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', + ) + + +def test_dist_rebuilds_with_manpages_premutations( + venv_python_exe: Path, tmp_path: Path, +) -> None: + """Test a series of sdist rebuilds under different conditions. + + This check builds sdists right from the Git checkout with and without + the manpages. It also does this using different versions of the setuptools + PEP 517 build backend being pinned. Finally, it builds a wheel out of one + of the rebuilt sdists. + As intermediate assertions, this test makes simple smoke tests along + the way. 
+ """ + pip_install(venv_python_exe, 'build ~= 0.10.0') + + # Test building an sdist without manpages from the Git checkout + tmp_dir_sdist_without_manpages = tmp_path / 'sdist-without-manpages' + wipe_generated_manpages() + build_dists( + venv_python_exe, '--sdist', + f'--outdir={tmp_dir_sdist_without_manpages!s}', + str(SRC_ROOT_DIR), + env_vars={ + 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), + }, + ) + tmp_path_sdist_without_manpages = ( + tmp_dir_sdist_without_manpages / EXPECTED_SDIST_NAME + ) + assert tmp_path_sdist_without_manpages.exists() + assert not contains_man1_pages(tmp_path_sdist_without_manpages) + sdist_without_manpages_path = unpack_sdist( + tmp_path_sdist_without_manpages, + tmp_dir_sdist_without_manpages / 'src', + ) + + # Test building an sdist with manpages from the Git checkout + # and lowest supported build deps + wipe_generated_manpages() + tmp_dir_sdist_with_manpages = tmp_path / 'sdist-with-manpages' + build_dists( + venv_python_exe, '--sdist', + '--config-setting=--build-manpages', + f'--outdir={tmp_dir_sdist_with_manpages!s}', + str(SRC_ROOT_DIR), + env_vars={ + 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), + }, + ) + tmp_path_sdist_with_manpages = ( + tmp_dir_sdist_with_manpages / EXPECTED_SDIST_NAME + ) + assert tmp_path_sdist_with_manpages.exists() + assert contains_man1_pages(tmp_path_sdist_with_manpages) + sdist_with_manpages_path = unpack_sdist( + tmp_path_sdist_with_manpages, + tmp_dir_sdist_with_manpages / 'src', + ) + + # Test re-building an sdist with manpages from the + # sdist contents that does not include the manpages + tmp_dir_rebuilt_sdist = tmp_path / 'rebuilt-sdist' + build_dists( + venv_python_exe, '--sdist', + '--config-setting=--build-manpages', + f'--outdir={tmp_dir_rebuilt_sdist!s}', + str(sdist_without_manpages_path), + env_vars={ + 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), + }, + ) + tmp_path_rebuilt_sdist = tmp_dir_rebuilt_sdist / EXPECTED_SDIST_NAME + # Checking that the expected sdist got created + # from the previous unpacked sdist... + assert tmp_path_rebuilt_sdist.exists() + # NOTE: The following assertion is disabled due to the fact that, when + # NOTE: building an sdist from the original source checkout, the build + # NOTE: backend replaces itself with pure setuptools in the resulting + # NOTE: sdist, and the following rebuilds from that sdist are no longer + # NOTE: able to process the custom config settings that are implemented in + # NOTE: the in-tree build backend. It is expected that said + # NOTE: `pyproject.toml` mutation change will be reverted once all of the + # NOTE: supported `ansible-core` versions ship wheels, meaning that the + # NOTE: end-users won't be building the distribution from sdist on install. + # NOTE: Another case, when it can be reverted is declaring pip below v20 + # NOTE: unsupported — it is the first version to support in-tree build + # NOTE: backends natively. 
+ # assert contains_man1_pages(tmp_path_rebuilt_sdist) # FIXME: See #80255 + rebuilt_sdist_path = unpack_sdist( + tmp_path_rebuilt_sdist, + tmp_dir_rebuilt_sdist / 'src', + ) + assert rebuilt_sdist_path.exists() + assert rebuilt_sdist_path.is_dir() + normalize_unpacked_rebuilt_sdist(rebuilt_sdist_path) + assert_dirs_equal(rebuilt_sdist_path, sdist_with_manpages_path) + + # Test building a wheel from the rebuilt sdist with manpages contents + # and lowest supported build deps + tmp_dir_rebuilt_wheel = tmp_path / 'rebuilt-wheel' + build_dists( + venv_python_exe, '--wheel', + f'--outdir={tmp_dir_rebuilt_wheel!s}', + str(sdist_with_manpages_path), + env_vars={ + 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), + }, + ) + tmp_path_rebuilt_wheel = tmp_dir_rebuilt_wheel / EXPECTED_WHEEL_NAME + # Checking that the expected wheel got created... + assert tmp_path_rebuilt_wheel.exists() + + +def test_pep660_editable_install_smoke(venv_python_exe: Path) -> None: + """Smoke-test PEP 660 editable install. + + This verifies that the in-tree build backend wrapper + does not break any required interfaces. + """ + pip_install(venv_python_exe, '-e', str(SRC_ROOT_DIR)) + + pip_show_cmd = ( + str(venv_python_exe), '-m', + 'pip', 'show', DIST_FILENAME_BASE, + ) + installed_ansible_meta = check_output( + pip_show_cmd, + env={}, stderr=PIPE, text=True, + ).splitlines() + assert f'Name: {DIST_FILENAME_BASE}' in installed_ansible_meta + assert f'Version: {PKG_DIST_VERSION}' in installed_ansible_meta + + pip_runtime_version_cmd = ( + str(venv_python_exe), '-c', + 'from ansible import __version__; print(__version__)', + ) + runtime_ansible_version = check_output( + pip_runtime_version_cmd, + env={}, stderr=PIPE, text=True, + ).strip() + assert runtime_ansible_version == PKG_DIST_VERSION diff --git a/test/integration/targets/copy/tasks/check_mode.yml b/test/integration/targets/copy/tasks/check_mode.yml index 5b405cc4..9702e070 100644 --- a/test/integration/targets/copy/tasks/check_mode.yml +++ b/test/integration/targets/copy/tasks/check_mode.yml @@ -113,8 +113,7 @@ - check_mode_subdir_first is changed - check_mode_trailing_slash_first is changed - # TODO: This is a legitimate bug - #- not check_mode_trailing_slash_first_stat.stat.exists + - not check_mode_trailing_slash_first_stat.stat.exists - check_mode_trailing_slash_real is changed - check_mode_trailing_slash_real_stat.stat.exists - check_mode_trailing_slash_second is not changed @@ -124,3 +123,41 @@ - check_mode_foo_real is changed - check_mode_foo_real_stat.stat.exists - check_mode_foo_second is not changed + + - name: check_mode - Do a basic copy to setup next test (without check mode) + copy: + src: foo.txt + dest: "{{ remote_dir }}/foo-check_mode.txt" + mode: 0444 + + - name: check_mode - Copy the same src with a different mode (check mode) + copy: + src: foo.txt + dest: "{{ remote_dir }}/foo-check_mode.txt" + mode: 0666 + check_mode: True + register: check_mode_file_attribute + + - name: stat the file to make sure the mode was not updated in check mode + stat: + path: "{{ remote_dir }}/foo-check_mode.txt" + register: check_mode_file_attribute_stat + + - name: check_mode - Copy the same src with a different mode (without check mode) + copy: + src: foo.txt + dest: "{{ remote_dir }}/foo-check_mode.txt" + mode: 0666 + register: real_file_attribute + + - name: stat the file to make sure the mode was updated without check mode + stat: + path: "{{ remote_dir }}/foo-check_mode.txt" + register: real_file_attribute_stat + + - assert: + that: + - 
check_mode_file_attribute is changed + - real_file_attribute is changed + - "check_mode_file_attribute_stat.stat.mode == '0444'" + - "real_file_attribute_stat.stat.mode == '0666'" diff --git a/test/integration/targets/entry_points/aliases b/test/integration/targets/entry_points/aliases index 9d967564..7f0ffcf0 100644 --- a/test/integration/targets/entry_points/aliases +++ b/test/integration/targets/entry_points/aliases @@ -1,2 +1,3 @@ context/controller shippable/posix/group4 +packaging diff --git a/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml b/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml new file mode 100644 index 00000000..e6d200c1 --- /dev/null +++ b/test/integration/targets/include_import/roles/role_with_argspec/meta/argument_specs.yml @@ -0,0 +1,7 @@ +argument_specs: + main: + short_description: The main entry point for dup_allowed_role + options: + optional_int: + type: int + description: An integer value diff --git a/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml b/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml new file mode 100644 index 00000000..23f52ef5 --- /dev/null +++ b/test/integration/targets/include_import/roles/role_with_argspec/tasks/main.yml @@ -0,0 +1 @@ +- debug: msg='Running role_with_argspec' diff --git a/test/integration/targets/include_import/runme.sh b/test/integration/targets/include_import/runme.sh index d384a12e..078f080b 100755 --- a/test/integration/targets/include_import/runme.sh +++ b/test/integration/targets/include_import/runme.sh @@ -121,6 +121,11 @@ ansible-playbook valid_include_keywords/playbook.yml "$@" ansible-playbook tasks/test_allow_single_role_dup.yml 2>&1 | tee test_allow_single_role_dup.out test "$(grep -c 'ok=3' test_allow_single_role_dup.out)" = 1 +# test templating public, allow_duplicates, and rolespec_validate +ansible-playbook tasks/test_templating_IncludeRole_FA.yml 2>&1 | tee IncludeRole_FA_template.out +test "$(grep -c 'ok=4' IncludeRole_FA_template.out)" = 1 +test "$(grep -c 'failed=0' IncludeRole_FA_template.out)" = 1 + # https://github.com/ansible/ansible/issues/66764 ANSIBLE_HOST_PATTERN_MISMATCH=error ansible-playbook empty_group_warning/playbook.yml diff --git a/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml b/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml new file mode 100644 index 00000000..cb67a9bb --- /dev/null +++ b/test/integration/targets/include_import/tasks/test_templating_IncludeRole_FA.yml @@ -0,0 +1,28 @@ +--- +- name: test templating allow_duplicates, public, and rolespec_validate + hosts: localhost + gather_facts: false + tasks: + - name: prevent duplicate roles with a templated value + block: + - import_role: + name: dup_allowed_role + allow_duplicates: "{{ False | bool }}" + - import_role: + name: dup_allowed_role + allow_duplicates: "{{ False | bool }}" + + - name: prevent leaky vars with a templated value + include_role: + name: role1 + public: "{{ False | bool }}" + - assert: + that: + - where_am_i_defined is undefined + + - name: skip role argspec validation with a templated value + include_role: + name: role_with_argspec + rolespec_validate: "{{ False | bool }}" + vars: + optional_int: wrong_type diff --git a/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml b/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml new file mode 100644 index 
00000000..3d3b684a --- /dev/null +++ b/test/integration/targets/keyword_inheritance/dep_keyword_inheritance.yml @@ -0,0 +1,8 @@ +- hosts: localhost + gather_facts: false + tasks: + - include_role: + name: "{{ item }}" + loop: + - setup_test_user + - role-meta-inheritance diff --git a/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml b/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml new file mode 100644 index 00000000..b0af49fb --- /dev/null +++ b/test/integration/targets/keyword_inheritance/roles/role-meta-inheritance/meta/main.yml @@ -0,0 +1,4 @@ +dependencies: + - role: whoami + become: true + become_user: ansibletest0 diff --git a/test/integration/targets/keyword_inheritance/runme.sh b/test/integration/targets/keyword_inheritance/runme.sh index 6b78a06d..1f13ef85 100755 --- a/test/integration/targets/keyword_inheritance/runme.sh +++ b/test/integration/targets/keyword_inheritance/runme.sh @@ -3,3 +3,5 @@ set -eux ANSIBLE_ROLES_PATH=../ ansible-playbook -i ../../inventory test.yml "$@" + +ANSIBLE_ROLES_PATH=../ ansible-playbook -i ../../inventory dep_keyword_inheritance.yml "$@" diff --git a/test/integration/targets/lookup_url/tasks/main.yml b/test/integration/targets/lookup_url/tasks/main.yml index a7de5063..2fb227ad 100644 --- a/test/integration/targets/lookup_url/tasks/main.yml +++ b/test/integration/targets/lookup_url/tasks/main.yml @@ -1,11 +1,11 @@ - name: Test that retrieving a url works set_fact: - web_data: "{{ lookup('url', 'https://gist.githubusercontent.com/abadger/9858c22712f62a8effff/raw/43dd47ea691c90a5fa7827892c70241913351963/test') }}" + web_data: "{{ lookup('url', 'https://{{ httpbin_host }}/get?one') }}" - name: Assert that the url was retrieved assert: that: - - "'one' in web_data" + - "'one' in web_data.args" - name: Test that retrieving a url with invalid cert fails set_fact: diff --git a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 index cfa73c60..6170f046 100644 --- a/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 +++ b/test/integration/targets/module_utils_Ansible.Basic/library/ansible_basic_tests.ps1 @@ -1747,6 +1747,168 @@ test_no_log - Invoked with: Remove-Item -LiteralPath $actual_tmpdir -Force -Recurse } + "Module tmpdir with symlinks" = { + $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)" + New-Item -Path $remote_tmp -ItemType Directory > $null + Set-Variable -Name complex_args -Scope Global -Value @{ + _ansible_remote_tmp = $remote_tmp.ToString() + } + $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{}) + + $actual_tmpdir = $m.Tmpdir + + $dir1 = Join-Path $actual_tmpdir Dir1 + $dir2 = Join-Path $actual_tmpdir Dir2 + $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null + + $file1 = Join-Path $dir1 test.txt + $file2 = Join-Path $dir2 test.txt + $file3 = Join-Path $actual_tmpdir test.txt + Set-Content -LiteralPath $file1 '' + Set-Content -LiteralPath $file2 '' + Set-Content -LiteralPath $file3 '' + + $outside_target = Join-Path -Path $tmpdir -ChildPath "moduleoutsidedir-$(Get-Random)" + $outside_file = Join-Path -Path $outside_target -ChildPath "file" + New-Item -Path $outside_target -ItemType Directory > $null + Set-Content -LiteralPath $outside_file '' + + cmd.exe /c mklink /d "$dir1\missing-dir-link" "$actual_tmpdir\fake" + cmd.exe /c mklink /d "$dir1\good-dir-link" "$dir2" + 
cmd.exe /c mklink /d "$dir1\recursive-target-link" "$dir1" + cmd.exe /c mklink "$dir1\missing-file-link" "$actual_tmpdir\fake" + cmd.exe /c mklink "$dir1\good-file-link" "$dir2\test.txt" + cmd.exe /c mklink /d "$actual_tmpdir\outside-dir" $outside_target + cmd.exe /c mklink "$actual_tmpdir\outside-file" $outside_file + + try { + $m.ExitJson() + } + catch [System.Management.Automation.RuntimeException] { + $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output) + } + + $output.warnings.Count | Assert-Equal -Expected 0 + (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false + (Test-Path -LiteralPath $outside_target -PathType Container) | Assert-Equal -Expected $true + (Test-Path -LiteralPath $outside_file -PathType Leaf) | Assert-Equal -Expected $true + + Remove-Item -LiteralPath $remote_tmp -Force -Recurse + } + + "Module tmpdir with undeletable file" = { + $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)" + New-Item -Path $remote_tmp -ItemType Directory > $null + Set-Variable -Name complex_args -Scope Global -Value @{ + _ansible_remote_tmp = $remote_tmp.ToString() + } + $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{}) + + $actual_tmpdir = $m.Tmpdir + + $dir1 = Join-Path $actual_tmpdir Dir1 + $dir2 = Join-Path $actual_tmpdir Dir2 + $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null + + $file1 = Join-Path $dir1 test.txt + $file2 = Join-Path $dir2 test.txt + $file3 = Join-Path $actual_tmpdir test.txt + Set-Content -LiteralPath $file1 '' + Set-Content -LiteralPath $file2 '' + Set-Content -LiteralPath $file3 '' + + $fs = [System.IO.File]::Open($file1, "Open", "Read", "None") + try { + $m.ExitJson() + } + catch [System.Management.Automation.RuntimeException] { + $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output) + } + + $expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes" + $output.warnings.Count | Assert-Equal -Expected 1 + $output.warnings[0] | Assert-Equal -Expected $expected_msg + + (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true + (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true + # Test-Path tries to open the file in a way that fails if it's marked as deleted + (Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1 + (Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false + (Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false + + # Releasing the file handle releases the lock on the file but as the + # cleanup couldn't access the file to mark as delete on close it is + # still going to be present. 
+ $fs.Dispose() + (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true + (Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $true + + Remove-Item -LiteralPath $remote_tmp -Force -Recurse + } + + "Module tmpdir delete with locked handle" = { + $remote_tmp = Join-Path -Path $tmpdir -ChildPath "moduletmpdir-$(Get-Random)" + New-Item -Path $remote_tmp -ItemType Directory > $null + Set-Variable -Name complex_args -Scope Global -Value @{ + _ansible_remote_tmp = $remote_tmp.ToString() + } + $m = [Ansible.Basic.AnsibleModule]::Create(@(), @{}) + + $actual_tmpdir = $m.Tmpdir + + $dir1 = Join-Path $actual_tmpdir Dir1 + $dir2 = Join-Path $actual_tmpdir Dir2 + $dir1, $dir2 | New-Item -Path { $_ } -ItemType Directory > $null + + $file1 = Join-Path $dir1 test.txt + $file2 = Join-Path $dir2 test.txt + $file3 = Join-Path $actual_tmpdir test.txt + Set-Content -LiteralPath $file1 '' + Set-Content -LiteralPath $file2 '' + Set-Content -LiteralPath $file3 '' + + [System.IO.File]::SetAttributes($file1, "ReadOnly") + [System.IO.File]::SetAttributes($file2, "ReadOnly") + [System.IO.File]::SetAttributes($file3, "ReadOnly") + $fs = [System.IO.File]::Open($file1, "Open", "Read", "Delete") + try { + $m.ExitJson() + } + catch [System.Management.Automation.RuntimeException] { + $output = [Ansible.Basic.AnsibleModule]::FromJson($_.Exception.InnerException.Output) + } + + if ([System.Environment]::OSVersion.Version -lt [Version]'10.0') { + # Older hosts can only do delete on close. This means Dir1 and its + # file will still be present but Dir2 should be deleted. + $expected_msg = "Failure cleaning temp path '$actual_tmpdir': IOException Directory contains files still open by other processes" + $output.warnings.Count | Assert-Equal -Expected 1 + $output.warnings[0] | Assert-Equal -Expected $expected_msg + + (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $true + (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true + # Test-Path tries to open the file in a way that fails if it's marked as deleted + (Get-ChildItem -LiteralPath $dir1 -File).Count | Assert-Equal -Expected 1 + (Test-Path -LiteralPath $dir2 -PathType Container) | Assert-Equal -Expected $false + (Test-Path -LiteralPath $file3 -PathType Leaf) | Assert-Equal -Expected $false + + # Releasing the file handle releases the lock on the file deleting + # it. 
Unfortunately the parent dir will still be present + $fs.Dispose() + (Test-Path -LiteralPath $dir1 -PathType Container) | Assert-Equal -Expected $true + (Test-Path -LiteralPath $file1 -PathType Leaf) | Assert-Equal -Expected $false + } + else { + # Server 2016+ can use the POSIX APIs which will delete it straight away + (Test-Path -LiteralPath $actual_tmpdir -PathType Container) | Assert-Equal -Expected $false + $output.warnings.Count | Assert-Equal -Expected 0 + + $fs.Dispose() + } + + Remove-Item -LiteralPath $remote_tmp -Force -Recurse + } + "Invalid argument spec key" = { $spec = @{ invalid = $true diff --git a/test/integration/targets/template_jinja2_non_native/macro_override.yml b/test/integration/targets/template_jinja2_non_native/macro_override.yml new file mode 100644 index 00000000..8a1cabd2 --- /dev/null +++ b/test/integration/targets/template_jinja2_non_native/macro_override.yml @@ -0,0 +1,15 @@ +- hosts: localhost + gather_facts: false + vars: + output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}" + tasks: + - template: + src: macro_override.j2 + dest: "{{ output_dir }}/macro_override.out" + + - assert: + that: + - "'foobar' not in data" + - "'\"foo\" \"bar\"' in data" + vars: + data: "{{ lookup('file', '{{ output_dir }}/macro_override.out') }}" diff --git a/test/integration/targets/template_jinja2_non_native/runme.sh b/test/integration/targets/template_jinja2_non_native/runme.sh index fe9d495a..c02d6b33 100755 --- a/test/integration/targets/template_jinja2_non_native/runme.sh +++ b/test/integration/targets/template_jinja2_non_native/runme.sh @@ -4,4 +4,6 @@ set -eux export ANSIBLE_JINJA2_NATIVE=1 ansible-playbook 46169.yml -v "$@" +python -m pip install "Jinja2>=3.1.0" +ansible-playbook macro_override.yml -v "$@" unset ANSIBLE_JINJA2_NATIVE diff --git a/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2 b/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2 new file mode 100644 index 00000000..51908da0 --- /dev/null +++ b/test/integration/targets/template_jinja2_non_native/templates/macro_override.j2 @@ -0,0 +1,7 @@ +#jinja2: variable_start_string:'<<',variable_end_string:'>>' +Use a jinja2 override to trigger creating and using an environment overlay. 
+ +{% macro m() %} +"foo" "bar" +{% endmacro %} +<< m() >> diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml index d821f286..7fa687b4 100644 --- a/test/integration/targets/uri/tasks/main.yml +++ b/test/integration/targets/uri/tasks/main.yml @@ -305,7 +305,7 @@ environment: https_proxy: 'https://localhost:3456' uri: - url: 'https://httpbin.org/get' + url: 'https://{{ httpbin_host }}/get' register: result ignore_errors: true @@ -318,7 +318,7 @@ environment: https_proxy: 'https://localhost:3456' uri: - url: 'https://httpbin.org/get' + url: 'https://{{ httpbin_host }}/get' use_proxy: no # Ubuntu12.04 doesn't have python-urllib3, this makes handling required dependencies a pain across all variations diff --git a/test/integration/targets/win_async_wrapper/tasks/main.yml b/test/integration/targets/win_async_wrapper/tasks/main.yml index 91b45846..0fc64d8c 100644 --- a/test/integration/targets/win_async_wrapper/tasks/main.yml +++ b/test/integration/targets/win_async_wrapper/tasks/main.yml @@ -12,12 +12,12 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.started == 1 - asyncresult is started - asyncresult.finished == 0 - asyncresult is not finished - - asyncresult.results_file is search('\.ansible_async.+\d+\.\d+') + - asyncresult.results_file is search('\.ansible_async.+j\d+\.\d+') # ensure that async is actually async- this test will fail if # hosts > forks or if the target host is VERY slow - (lookup('pipe', 'date +%s') | int) - (start_timestamp | int) < 15 @@ -31,7 +31,7 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.finished == 1 - asyncresult is finished - asyncresult is changed @@ -69,7 +69,7 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.finished == 1 - asyncresult is finished - asyncresult is changed @@ -107,7 +107,7 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.finished == 1 - asyncresult is finished - asyncresult is not changed @@ -125,7 +125,7 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.finished == 1 - asyncresult is finished - asyncresult is changed @@ -143,7 +143,7 @@ - name: validate response assert: that: - - asyncresult.ansible_job_id is match('\d+\.\d+') + - asyncresult.ansible_job_id is match('j\d+\.\d+') - asyncresult.finished == 1 - asyncresult is finished - asyncresult is not changed @@ -231,7 +231,7 @@ # - item is finished # - item.slept_sec == 3 # - item is changed -# - item.ansible_job_id is match('\d+\.\d+') +# - item.ansible_job_id is match('j\d+\.\d+') # with_items: "{{ asyncout.results }}" # this part of the test is flaky- Windows PIDs are reused aggressively, so this occasionally fails due to a new process with the same ID |