| author | Lee Garrett <lgarrett@rocketjump.eu> | 2023-08-18 23:02:31 +0200 |
|---|---|---|
| committer | Lee Garrett <lgarrett@rocketjump.eu> | 2023-08-18 23:02:31 +0200 |
| commit | bda5b278e6632ca997f58b67f99f159544a63a57 (patch) | |
| tree | be7ef8c02ea878b09fc026c0f3f1d7ffc6ec0394 /test | |
| parent | aff27d44d75c760b1288814b4948fc2c4a937d6e (diff) | |
| download | debian-ansible-core-bda5b278e6632ca997f58b67f99f159544a63a57.zip | |
New upstream version 2.14.9
Diffstat (limited to 'test')
51 files changed, 935 insertions, 1228 deletions
diff --git a/test/integration/targets/canonical-pep517-self-packaging/aliases b/test/integration/targets/canonical-pep517-self-packaging/aliases
deleted file mode 100644
index 4667aa4f..00000000
--- a/test/integration/targets/canonical-pep517-self-packaging/aliases
+++ /dev/null
@@ -1,3 +0,0 @@
-shippable/posix/group3
-context/controller
-packaging
diff --git a/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt
deleted file mode 100644
index 3ba47aeb..00000000
--- a/test/integration/targets/canonical-pep517-self-packaging/minimum-build-constraints.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-# Lowest supporting Python 3.9 and 3.10:
-setuptools == 57.0.0; python_version == "3.9" or python_version == "3.10"
-
-# Lowest supporting Python 3.11:
-setuptools == 60.0.0; python_version >= "3.11"
-
-
-# An arbitrary old version that was released before Python 3.9.0:
-wheel == 0.33.6
-
-# Conditional dependencies:
-docutils == 0.16
-Jinja2 == 3.0.0
-MarkupSafe == 2.0.0
-PyYAML == 5.3
diff --git a/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt b/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt
deleted file mode 100644
index 9b8e9d0a..00000000
--- a/test/integration/targets/canonical-pep517-self-packaging/modernish-build-constraints.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-setuptools == 67.4.0
-
-# Wheel-only build dependency
-wheel == 0.38.4
-
-# Conditional dependencies:
-docutils == 0.19
-Jinja2 == 3.1.2
-MarkupSafe == 2.1.2
-PyYAML == 6.0
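The two constraints files removed above pinned *build-time* dependencies through pip's `PIP_CONSTRAINT` environment variable rather than through install requirements. A minimal sketch of that pattern, assuming the `build` frontend (the `build` PyPI project) is installed and using illustrative paths:

```python
# Sketch: build an sdist with build dependencies pinned via PIP_CONSTRAINT.
# Assumes `python -m build` is available; paths here are illustrative.
import os
import subprocess
import sys


def build_sdist_with_constraints(src_dir: str, out_dir: str, constraints: str) -> None:
    """Run `python -m build --sdist` with pinned build dependencies."""
    env = dict(os.environ)
    env['PIP_CONSTRAINT'] = constraints  # honored by pip inside the isolated build environment
    subprocess.check_call(
        [sys.executable, '-m', 'build', '--sdist', f'--outdir={out_dir}', src_dir],
        env=env,
    )
```

This is the same mechanism the deleted `runme_test.py` further below drives through its `build_dists()` helper.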
diff --git a/test/integration/targets/canonical-pep517-self-packaging/runme.sh b/test/integration/targets/canonical-pep517-self-packaging/runme.sh
deleted file mode 100755
index 028348f8..00000000
--- a/test/integration/targets/canonical-pep517-self-packaging/runme.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-if [[ "${ANSIBLE_DEBUG}" == true ]]  # `ansible-test` invoked with `--debug`
-then
-    PYTEST_VERY_VERBOSE_FLAG=-vvvvv
-    SET_DEBUG_MODE=-x
-else
-    ANSIBLE_DEBUG=false
-    PYTEST_VERY_VERBOSE_FLAG=
-    SET_DEBUG_MODE=+x
-fi
-
-
-set -eEuo pipefail
-
-source virtualenv.sh
-
-set "${SET_DEBUG_MODE}"
-
-export PIP_DISABLE_PIP_VERSION_CHECK=true
-export PIP_NO_PYTHON_VERSION_WARNING=true
-export PIP_NO_WARN_SCRIPT_LOCATION=true
-
-python -Im pip install 'pytest ~= 7.2.0'
-python -Im pytest ${PYTEST_VERY_VERBOSE_FLAG} \
-    --basetemp="${OUTPUT_DIR}/pytest-tmp" \
-    --color=yes \
-    --showlocals \
-    -p no:forked \
-    -p no:mock \
-    -ra
- """ - with sdist_tarball.open(mode='rb') as tarball_fd: - with TarFile.gzopen(fileobj=tarball_fd, name=None) as tarball: - tarball.extractall(path=target_directory) - return target_directory / EXPECTED_SDIST_NAME_BASE - - -def assert_dirs_equal(*dir_paths: t.List[Path]) -> None: - dir_comparison = dircmp(*dir_paths) - assert not dir_comparison.left_only - assert not dir_comparison.right_only - assert not dir_comparison.diff_files - assert not dir_comparison.funny_files - - -def normalize_unpacked_rebuilt_sdist(sdist_path: Path) -> None: - top_pkg_info_path = sdist_path / 'PKG-INFO' - nested_pkg_info_path = ( - sdist_path / 'lib' / f'{DIST_NAME}.egg-info' / 'PKG-INFO' - ) - entry_points_path = nested_pkg_info_path.parent / 'entry_points.txt' - - # setuptools v39 write out two trailing empty lines and an unknown platform - # while the recent don't - top_pkg_info_path.write_text( - top_pkg_info_path.read_text().replace( - 'Classifier: Development Status :: 5', - 'Platform: UNKNOWN\nClassifier: Development Status :: 5', - ) + '\n\n' - ) - nested_pkg_info_path.write_text( - nested_pkg_info_path.read_text().replace( - 'Classifier: Development Status :: 5', - 'Platform: UNKNOWN\nClassifier: Development Status :: 5', - ) + '\n\n' - ) - - # setuptools v39 write out one trailing empty line while the recent don't - entry_points_path.write_text(entry_points_path.read_text() + '\n') - - -@pytest.fixture -def venv_python_exe(tmp_path: Path) -> t.Iterator[Path]: - venv_path = tmp_path / 'pytest-managed-venv' - mkvenv_cmd = ( - current_interpreter, '-m', 'venv', str(venv_path), - ) - check_call(mkvenv_cmd, env={}, stderr=PIPE, stdout=PIPE) - yield venv_path / 'bin' / 'python' - rmtree(venv_path) - - -def run_with_venv_python( - python_exe: Path, *cli_args: t.Iterable[str], - env_vars: t.Dict[str, str] = None, -) -> str: - if env_vars is None: - env_vars = {} - full_cmd = str(python_exe), *cli_args - return check_output(full_cmd, env=env_vars, stderr=PIPE) - - -def build_dists( - python_exe: Path, *cli_args: t.Iterable[str], - env_vars: t.Dict[str, str], -) -> str: - return run_with_venv_python( - python_exe, '-m', 'build', - *cli_args, env_vars=env_vars, - ) - - -def pip_install( - python_exe: Path, *cli_args: t.Iterable[str], - env_vars: t.Dict[str, str] = None, -) -> str: - return run_with_venv_python( - python_exe, '-m', 'pip', 'install', - *cli_args, env_vars=env_vars, - ) - - -def test_installing_sdist_build_with_modern_deps_to_old_env( - venv_python_exe: Path, tmp_path: Path, -) -> None: - pip_install(venv_python_exe, 'build ~= 0.10.0') - tmp_dir_sdist_w_modern_tools = tmp_path / 'sdist-w-modern-tools' - build_dists( - venv_python_exe, '--sdist', - '--config-setting=--build-manpages', - f'--outdir={tmp_dir_sdist_w_modern_tools!s}', - str(SRC_ROOT_DIR), - env_vars={ - 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), - }, - ) - tmp_path_sdist_w_modern_tools = ( - tmp_dir_sdist_w_modern_tools / EXPECTED_SDIST_NAME - ) - - # Downgrading pip, because v20+ supports in-tree build backends - pip_install(venv_python_exe, 'pip ~= 19.3.1') - - # Smoke test — installing an sdist with pip that does not support - # in-tree build backends. - pip_install( - venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', - ) - - # Downgrading pip, because versions that support PEP 517 don't allow - # disabling it with `--no-use-pep517` when `build-backend` is set in - # the `[build-system]` section of `pyproject.toml`, considering this - # an explicit opt-in. 
- if not IS_PYTHON310_PLUS: - pip_install(venv_python_exe, 'pip == 18.0') - - # Smoke test — installing an sdist with pip that does not support invoking - # PEP 517 interface at all. - # In this scenario, pip will run `setup.py install` since `wheel` is not in - # the environment. - if IS_PYTHON310_PLUS: - tmp_dir_unpacked_sdist_root = tmp_path / 'unpacked-sdist' - tmp_dir_unpacked_sdist_path = tmp_dir_unpacked_sdist_root / EXPECTED_SDIST_NAME_BASE - with TarFile.gzopen(tmp_path_sdist_w_modern_tools) as sdist_fd: - sdist_fd.extractall(path=tmp_dir_unpacked_sdist_root) - - pip_install( - venv_python_exe, 'setuptools', - env_vars={ - 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), - }, - ) - with _chdir_cm(tmp_dir_unpacked_sdist_path): - run_with_venv_python( - venv_python_exe, 'setup.py', 'sdist', - env_vars={'PATH': environ['PATH']}, - ) - else: - pip_install( - venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', - env_vars={ - 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), - }, - ) - - # Smoke test — installing an sdist with pip that does not support invoking - # PEP 517 interface at all. - # With `wheel` present, pip will run `setup.py bdist_wheel` and then, - # unpack the result. - pip_install(venv_python_exe, 'wheel') - if IS_PYTHON310_PLUS: - with _chdir_cm(tmp_dir_unpacked_sdist_path): - run_with_venv_python( - venv_python_exe, 'setup.py', 'bdist_wheel', - env_vars={'PATH': environ['PATH']}, - ) - else: - pip_install( - venv_python_exe, str(tmp_path_sdist_w_modern_tools), '--no-deps', - ) - - -def test_dist_rebuilds_with_manpages_premutations( - venv_python_exe: Path, tmp_path: Path, -) -> None: - """Test a series of sdist rebuilds under different conditions. - - This check builds sdists right from the Git checkout with and without - the manpages. It also does this using different versions of the setuptools - PEP 517 build backend being pinned. Finally, it builds a wheel out of one - of the rebuilt sdists. - As intermediate assertions, this test makes simple smoke tests along - the way. 
- """ - pip_install(venv_python_exe, 'build ~= 0.10.0') - - # Test building an sdist without manpages from the Git checkout - tmp_dir_sdist_without_manpages = tmp_path / 'sdist-without-manpages' - wipe_generated_manpages() - build_dists( - venv_python_exe, '--sdist', - f'--outdir={tmp_dir_sdist_without_manpages!s}', - str(SRC_ROOT_DIR), - env_vars={ - 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), - }, - ) - tmp_path_sdist_without_manpages = ( - tmp_dir_sdist_without_manpages / EXPECTED_SDIST_NAME - ) - assert tmp_path_sdist_without_manpages.exists() - assert not contains_man1_pages(tmp_path_sdist_without_manpages) - sdist_without_manpages_path = unpack_sdist( - tmp_path_sdist_without_manpages, - tmp_dir_sdist_without_manpages / 'src', - ) - - # Test building an sdist with manpages from the Git checkout - # and lowest supported build deps - wipe_generated_manpages() - tmp_dir_sdist_with_manpages = tmp_path / 'sdist-with-manpages' - build_dists( - venv_python_exe, '--sdist', - '--config-setting=--build-manpages', - f'--outdir={tmp_dir_sdist_with_manpages!s}', - str(SRC_ROOT_DIR), - env_vars={ - 'PIP_CONSTRAINT': str(LOWEST_SUPPORTED_BUILD_DEPS_FILE), - }, - ) - tmp_path_sdist_with_manpages = ( - tmp_dir_sdist_with_manpages / EXPECTED_SDIST_NAME - ) - assert tmp_path_sdist_with_manpages.exists() - assert contains_man1_pages(tmp_path_sdist_with_manpages) - sdist_with_manpages_path = unpack_sdist( - tmp_path_sdist_with_manpages, - tmp_dir_sdist_with_manpages / 'src', - ) - - # Test re-building an sdist with manpages from the - # sdist contents that does not include the manpages - tmp_dir_rebuilt_sdist = tmp_path / 'rebuilt-sdist' - build_dists( - venv_python_exe, '--sdist', - '--config-setting=--build-manpages', - f'--outdir={tmp_dir_rebuilt_sdist!s}', - str(sdist_without_manpages_path), - env_vars={ - 'PIP_CONSTRAINT': str(MODERNISH_BUILD_DEPS_FILE), - }, - ) - tmp_path_rebuilt_sdist = tmp_dir_rebuilt_sdist / EXPECTED_SDIST_NAME - # Checking that the expected sdist got created - # from the previous unpacked sdist... - assert tmp_path_rebuilt_sdist.exists() - # NOTE: The following assertion is disabled due to the fact that, when - # NOTE: building an sdist from the original source checkout, the build - # NOTE: backend replaces itself with pure setuptools in the resulting - # NOTE: sdist, and the following rebuilds from that sdist are no longer - # NOTE: able to process the custom config settings that are implemented in - # NOTE: the in-tree build backend. It is expected that said - # NOTE: `pyproject.toml` mutation change will be reverted once all of the - # NOTE: supported `ansible-core` versions ship wheels, meaning that the - # NOTE: end-users won't be building the distribution from sdist on install. - # NOTE: Another case, when it can be reverted is declaring pip below v20 - # NOTE: unsupported — it is the first version to support in-tree build - # NOTE: backends natively. 
diff --git a/test/integration/targets/packaging_cli-doc/aliases b/test/integration/targets/packaging_cli-doc/aliases
new file mode 100644
index 00000000..1d28bdb2
--- /dev/null
+++ b/test/integration/targets/packaging_cli-doc/aliases
@@ -0,0 +1,2 @@
+shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/packaging_cli-doc/runme.sh b/test/integration/targets/packaging_cli-doc/runme.sh
new file mode 100755
index 00000000..9218b0a4
--- /dev/null
+++ b/test/integration/targets/packaging_cli-doc/runme.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+set -eux
+
+source virtualenv.sh
+
+mkdir -p "${JUNIT_OUTPUT_DIR}"  # ensure paths relative to this path work
+
+cli_doc="${JUNIT_OUTPUT_DIR}/../../../packaging/cli-doc"
+build="${cli_doc}/build.py"
+template="template.j2"
+
+# Test `rst` command
+
+pip install jinja2
+
+rst_dir="${OUTPUT_DIR}/rst"
+
+python.py "${build}" rst --output-dir "${rst_dir}" && ./verify.py "${rst_dir}"
+python.py "${build}" rst --output-dir "${rst_dir}" --template "${template}" && ./verify.py "${rst_dir}"
+
+# Test `man` command (and the argcomplete code path)
+
+pip install docutils argcomplete
+
+man_dir="${OUTPUT_DIR}/man"
+
+python.py "${build}" man --output-dir "${man_dir}" && ./verify.py "${man_dir}"
+python.py "${build}" man --output-dir "${man_dir}" --template "${template}" && ./verify.py "${man_dir}"
+
+# Test `json` command
+
+python.py "${build}" json --output-file docs.json && ls -l docs.json
+
+# Ensure complete coverage of the main conditional
+
+echo "import sys; sys.path.insert(0, '${cli_doc}'); import build" > cover.py
+python.py cover.py
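The `--template` option exercised above points at the one-line Jinja2 template added below, whose entire body is `{{ version }}`. Rendering it presumably amounts to something like this sketch (the render call is illustrative, not lifted from `packaging/cli-doc/build.py`):

```python
# Illustrative only: rendering a template whose body is `{{ version }}`.
import jinja2

template = jinja2.Template('{{ version }}')
print(template.render(version='2.14.9'))  # -> 2.14.9
```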
diff --git a/test/integration/targets/packaging_cli-doc/template.j2 b/test/integration/targets/packaging_cli-doc/template.j2
new file mode 100644
index 00000000..697e7527
--- /dev/null
+++ b/test/integration/targets/packaging_cli-doc/template.j2
@@ -0,0 +1 @@
+{{ version }}
diff --git a/test/integration/targets/packaging_cli-doc/verify.py b/test/integration/targets/packaging_cli-doc/verify.py
new file mode 100755
index 00000000..7793fa8c
--- /dev/null
+++ b/test/integration/targets/packaging_cli-doc/verify.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+import os
+import pathlib
+import sys
+
+exclude_programs = {
+    'ansible-connection',
+    'ansible-test',
+}
+
+bin_dir = pathlib.Path(os.environ['JUNIT_OUTPUT_DIR']).parent.parent.parent / 'bin'
+programs = set(program.name for program in bin_dir.iterdir() if program.name not in exclude_programs)
+docs_dir = pathlib.Path(sys.argv[1])
+docs = set(path.with_suffix('').name for path in docs_dir.iterdir())
+
+print('\n'.join(sorted(docs)))
+
+missing = programs - docs
+extra = docs - programs
+
+if missing or extra:
+    raise RuntimeError(f'{missing=} {extra=}')
diff --git a/test/integration/targets/setup_cron/tasks/main.yml b/test/integration/targets/setup_cron/tasks/main.yml
index 730926d1..73cce2a2 100644
--- a/test/integration/targets/setup_cron/tasks/main.yml
+++ b/test/integration/targets/setup_cron/tasks/main.yml
@@ -1,3 +1,7 @@
+- name: Alpine is not supported due to lack of libfaketime
+  meta: end_host
+  when: ansible_distribution == 'Alpine'
+
 - name: Include distribution specific variables
   include_vars: "{{ lookup('first_found', search) }}"
   vars:
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index 59fa8701..580f0641 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,4 +1,6 @@
 # edit "sanity.ansible-doc.in" and generate with: hacking/update-sanity-requirements.py --test ansible-doc
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 Jinja2==3.1.2
 MarkupSafe==2.1.1
 packaging==21.3
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index 1b2b2527..1755a489 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1,4 +1,6 @@
 # edit "sanity.changelog.in" and generate with: hacking/update-sanity-requirements.py --test changelog
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 antsibull-changelog==0.16.0
 docutils==0.17.1
 packaging==21.3
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
index ef7b0061..93e147a5 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.plugin.txt
@@ -1,4 +1,6 @@
 # edit "sanity.import.plugin.in" and generate with: hacking/update-sanity-requirements.py --test import.plugin
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 Jinja2==3.1.2
 MarkupSafe==2.1.1
 PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
index e9645ea2..4fda120d 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.import.txt
@@ -1,2 +1,4 @@
 # edit "sanity.import.in" and generate with: hacking/update-sanity-requirements.py --test import
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index ba3a5028..51cc1ca3 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1,2 +1,4 @@
 # edit "sanity.integration-aliases.in" and generate with: hacking/update-sanity-requirements.py --test integration-aliases
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index a1c6a6a7..44d8b88c 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,4 +1,6 @@
 # edit "sanity.pylint.in" and generate with: hacking/update-sanity-requirements.py --test pylint
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 astroid==2.12.12
 dill==0.3.6
 isort==5.10.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index 3953b77c..b2b70567 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,3 +1,5 @@
 # edit "sanity.runtime-metadata.in" and generate with: hacking/update-sanity-requirements.py --test runtime-metadata
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 PyYAML==6.0
 voluptuous==0.13.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index e737f902..8a877bba 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,4 +1,6 @@
 # edit "sanity.validate-modules.in" and generate with: hacking/update-sanity-requirements.py --test validate-modules
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 Jinja2==3.1.2
 MarkupSafe==2.1.1
 PyYAML==6.0
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index fd013b55..dd401113 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,4 +1,6 @@
 # edit "sanity.yamllint.in" and generate with: hacking/update-sanity-requirements.py --test yamllint
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 pathspec==0.10.1
 PyYAML==6.0
 yamllint==1.28.0
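Each requirements file above gains the same pair of magic comments. As implemented later in this diff in `test/lib/ansible_test/_util/target/setup/requirements.py`, ansible-test scans requirements files for these comments and builds a wheel for the named requirement, with the given constraints applied, before the normal `pip install` runs. A condensed sketch of that flow, simplified to a single requirement per file:

```python
# Condensed sketch of the pre-build handling added in this commit
# (simplified from PreBuild/parse_pre_build_instructions in requirements.py).
import os
import subprocess
import sys
import tempfile

REQ_PREFIX = '# pre-build requirement: '
CON_PREFIX = '# pre-build constraint: '


def pre_build(requirements_text: str) -> None:
    """Build a wheel for the pre-build requirement under its pinned constraints."""
    requirement = None
    constraints = []

    for line in requirements_text.splitlines():
        if line.startswith(REQ_PREFIX):
            requirement = line[len(REQ_PREFIX):]
        elif line.startswith(CON_PREFIX):
            constraints.append(line[len(CON_PREFIX):])

    if not requirement:
        return

    with tempfile.TemporaryDirectory() as tmp:
        constraints_path = os.path.join(tmp, 'constraints.txt')

        with open(constraints_path, 'w') as file:
            file.write('\n'.join(constraints) + '\n')

        env = dict(os.environ, PIP_CONSTRAINT=constraints_path)
        # e.g. builds a PyYAML 6.0 wheel with Cython < 3.0 pinned
        subprocess.check_call([sys.executable, '-m', 'pip', 'wheel', requirement], env=env, cwd=tmp)
```

The reason for pinning `Cython < 3.0` here is that `pyyaml == 6.0` fails to build from source under Cython 3, so the wheel has to be produced before the ordinary install step runs.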
diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py
index 9548d37c..be88ccd8 100644
--- a/test/lib/ansible_test/_internal/ansible_util.py
+++ b/test/lib/ansible_test/_internal/ansible_util.py
@@ -231,7 +231,7 @@ def generate_egg_info(path: str) -> None:
     # minimal PKG-INFO stub following the format defined in PEP 241
     # required for older setuptools versions to avoid a traceback when importing pkg_resources from packages like cryptography
     # newer setuptools versions are happy with an empty directory
-    # including a stub here means we don't need to locate the existing file or have setup.py generate it when running from source
+    # including a stub here means we don't need to locate the existing file or run any tools to generate it when running from source
     pkg_info = '''
 Metadata-Version: 1.0
 Name: ansible
diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py
index bca02403..deda27ee 100644
--- a/test/lib/ansible_test/_internal/classification/__init__.py
+++ b/test/lib/ansible_test/_internal/classification/__init__.py
@@ -721,17 +721,6 @@ class PathMapper:
         if path.startswith('changelogs/'):
             return minimal

-        if path.startswith('docs/'):
-            return minimal
-
-        if path.startswith('examples/'):
-            if path == 'examples/scripts/ConfigureRemotingForAnsible.ps1':
-                return {
-                    'windows-integration': 'connection_winrm',
-                }
-
-            return minimal
-
         if path.startswith('hacking/'):
             return minimal

@@ -753,8 +742,12 @@
             return minimal

         if path.startswith('packaging/'):
-            if path.startswith('packaging/pep517_backend/'):
-                return packaging
+            packaging_target = f'packaging_{os.path.splitext(path.split(os.path.sep)[1])[0]}'
+
+            if packaging_target in self.integration_targets_by_name:
+                return {
+                    'integration': packaging_target,
+                }

             return minimal
diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py
index 7036de1a..c074d348 100644
--- a/test/lib/ansible_test/_internal/classification/python.py
+++ b/test/lib/ansible_test/_internal/classification/python.py
@@ -256,7 +256,6 @@ class ModuleUtilFinder(ast.NodeVisitor):
     # The mapping is a tuple consisting of a path pattern to match and a replacement path.
     # During analysis, any relative imports not covered here will result in warnings, which can be fixed by adding the appropriate entry.
     path_map = (
-        ('^hacking/build_library/build_ansible/', 'build_ansible/'),
         ('^lib/ansible/', 'ansible/'),
         ('^test/lib/ansible_test/_util/controller/sanity/validate-modules/', 'validate_modules/'),
         ('^test/units/', 'test/units/'),
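The next hunk changes how the sanity venv cache key is derived: instead of hashing the raw serialized pip commands, it hashes a normalized copy with pre-build comments stripped, so adding or editing those comments does not invalidate previously cached environments. The underlying cache-key pattern, shown in isolation:

```python
# The cache-key pattern used below: hash a canonical JSON dump of the
# normalized inputs and keep the first 8 hex digits as a directory name.
import hashlib
import json


def cache_key(commands: list) -> str:
    serialized = json.dumps(commands, indent=4)
    return hashlib.sha256(serialized.encode()).hexdigest()[:8]
```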
diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index f399f2ad..0bc68a21 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -71,6 +71,7 @@ from ...executor import (
 )

 from ...python_requirements import (
+    PipCommand,
     PipInstall,
     collect_requirements,
     run_pip,
@@ -1143,7 +1144,7 @@ def create_sanity_virtualenv(
     # The path to the virtual environment must be kept short to avoid the 127 character shebang length limit on Linux.
     # If the limit is exceeded, generated entry point scripts from pip installed packages will fail with syntax errors.
     virtualenv_install = json.dumps([command.serialize() for command in commands], indent=4)
-    virtualenv_hash = hashlib.sha256(to_bytes(virtualenv_install)).hexdigest()[:8]
+    virtualenv_hash = hash_pip_commands(commands)
     virtualenv_cache = os.path.join(os.path.expanduser('~/.ansible/test/venv'))
     virtualenv_path = os.path.join(virtualenv_cache, label, f'{python.version}', virtualenv_hash)
     virtualenv_marker = os.path.join(virtualenv_path, 'marker.txt')
@@ -1183,6 +1184,39 @@ def create_sanity_virtualenv(
     return virtualenv_python


+def hash_pip_commands(commands: list[PipCommand]) -> str:
+    """Return a short hash unique to the given list of pip commands, suitable for identifying the resulting sanity test environment."""
+    serialized_commands = json.dumps([make_pip_command_hashable(command) for command in commands], indent=4)
+
+    return hashlib.sha256(to_bytes(serialized_commands)).hexdigest()[:8]
+
+
+def make_pip_command_hashable(command: PipCommand) -> tuple[str, dict[str, t.Any]]:
+    """Return a serialized version of the given pip command that is suitable for hashing."""
+    if isinstance(command, PipInstall):
+        # The pre-build instructions for pip installs must be omitted, so they do not affect the hash.
+        # This allows the pre-build commands to be added without breaking sanity venv caching.
+        # It is safe to omit these from the hash since they only affect packages used during builds, not what is installed in the venv.
+        command = PipInstall(
+            requirements=[omit_pre_build_from_requirement(*req) for req in command.requirements],
+            constraints=list(command.constraints),
+            packages=list(command.packages),
+        )
+
+    return command.serialize()
+
+
+def omit_pre_build_from_requirement(path: str, requirements: str) -> tuple[str, str]:
+    """Return the given requirements with pre-build instructions omitted."""
+    lines = requirements.splitlines(keepends=True)
+
+    # CAUTION: This code must be kept in sync with the code which processes pre-build instructions in:
+    # test/lib/ansible_test/_util/target/setup/requirements.py
+    lines = [line for line in lines if not line.startswith('# pre-build ')]
+
+    return path, ''.join(lines)
+
+
 def check_sanity_virtualenv_yaml(python: VirtualPythonConfig) -> t.Optional[bool]:
     """Return True if PyYAML has libyaml support for the given sanity virtual environment, False if it does not and None if it was not found."""
     virtualenv_path = os.path.dirname(os.path.dirname(python.path))
diff --git a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
new file mode 100644
index 00000000..8f4fe8a4
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
@@ -0,0 +1,102 @@
+"""Sanity test for symlinks in the bin directory."""
+from __future__ import annotations
+
+import os
+
+from .
import ( + SanityVersionNeutral, + SanityMessage, + SanityFailure, + SanitySuccess, + SanityTargets, +) + +from ...constants import ( + __file__ as symlink_map_full_path, +) + +from ...test import ( + TestResult, +) + +from ...config import ( + SanityConfig, +) + +from ...data import ( + data_context, +) + +from ...payload import ( + ANSIBLE_BIN_SYMLINK_MAP, +) + +from ...util import ( + ANSIBLE_BIN_PATH, +) + + +class BinSymlinksTest(SanityVersionNeutral): + """Sanity test for symlinks in the bin directory.""" + + ansible_only = True + + @property + def can_ignore(self) -> bool: + """True if the test supports ignore entries.""" + return False + + @property + def no_targets(self) -> bool: + """True if the test does not use test targets. Mutually exclusive with all_targets.""" + return True + + def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: + bin_root = ANSIBLE_BIN_PATH + bin_names = os.listdir(bin_root) + bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names) + + errors: list[tuple[str, str]] = [] + + symlink_map_path = os.path.relpath(symlink_map_full_path, data_context().content.root) + + for bin_path in bin_paths: + if not os.path.islink(bin_path): + errors.append((bin_path, 'not a symbolic link')) + continue + + dest = os.readlink(bin_path) + + if not os.path.exists(bin_path): + errors.append((bin_path, 'points to non-existent path "%s"' % dest)) + continue + + if not os.path.isfile(bin_path): + errors.append((bin_path, 'points to non-file "%s"' % dest)) + continue + + map_dest = ANSIBLE_BIN_SYMLINK_MAP.get(os.path.basename(bin_path)) + + if not map_dest: + errors.append((bin_path, 'missing from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % symlink_map_path)) + continue + + if dest != map_dest: + errors.append((bin_path, 'points to "%s" instead of "%s" from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, map_dest, symlink_map_path))) + continue + + if not os.access(bin_path, os.X_OK): + errors.append((bin_path, 'points to non-executable file "%s"' % dest)) + continue + + for bin_name, dest in ANSIBLE_BIN_SYMLINK_MAP.items(): + if bin_name not in bin_names: + bin_path = os.path.join(bin_root, bin_name) + errors.append((bin_path, 'missing symlink to "%s" defined in ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, symlink_map_path))) + + messages = [SanityMessage(message=message, path=os.path.relpath(path, data_context().content.root), confidence=100) for path, message in errors] + + if errors: + return SanityFailure(self.name, messages=messages) + + return SanitySuccess(self.name) diff --git a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py new file mode 100644 index 00000000..32b70c24 --- /dev/null +++ b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py @@ -0,0 +1,430 @@ +"""Sanity test to check integration test aliases.""" +from __future__ import annotations + +import dataclasses +import json +import textwrap +import os +import re +import typing as t + +from . 
import ( + SanitySingleVersion, + SanityMessage, + SanityFailure, + SanitySuccess, + SanityTargets, + SANITY_ROOT, +) + +from ...test import ( + TestResult, +) + +from ...config import ( + SanityConfig, +) + +from ...target import ( + filter_targets, + walk_posix_integration_targets, + walk_windows_integration_targets, + walk_integration_targets, + walk_module_targets, + CompletionTarget, + IntegrationTargetType, +) + +from ..integration.cloud import ( + get_cloud_platforms, +) + +from ...io import ( + read_text_file, +) + +from ...util import ( + display, + raw_command, +) + +from ...util_common import ( + get_docs_url, + write_json_test_results, + ResultType, +) + +from ...host_configs import ( + PythonConfig, +) + + +class IntegrationAliasesTest(SanitySingleVersion): + """Sanity test to evaluate integration test aliases.""" + + CI_YML = '.azure-pipelines/azure-pipelines.yml' + TEST_ALIAS_PREFIX = 'shippable' # this will be changed at some point in the future + + DISABLED = 'disabled/' + UNSTABLE = 'unstable/' + UNSUPPORTED = 'unsupported/' + + EXPLAIN_URL = get_docs_url('https://docs.ansible.com/ansible-core/devel/dev_guide/testing/sanity/integration-aliases.html') + + TEMPLATE_DISABLED = """ + The following integration tests are **disabled** [[explain]({explain_url}#disabled)]: + + {tests} + + Consider fixing the integration tests before or alongside changes. + """ + + TEMPLATE_UNSTABLE = """ + The following integration tests are **unstable** [[explain]({explain_url}#unstable)]: + + {tests} + + Tests may need to be restarted due to failures unrelated to changes. + """ + + TEMPLATE_UNSUPPORTED = """ + The following integration tests are **unsupported** [[explain]({explain_url}#unsupported)]: + + {tests} + + Consider running the tests manually or extending test infrastructure to add support. + """ + + TEMPLATE_UNTESTED = """ + The following modules have **no integration tests** [[explain]({explain_url}#untested)]: + + {tests} + + Consider adding integration tests before or alongside changes. + """ + + ansible_only = True + + def __init__(self) -> None: + super().__init__() + + self._ci_config: dict[str, t.Any] = {} + self._ci_test_groups: dict[str, list[int]] = {} + + @property + def can_ignore(self) -> bool: + """True if the test supports ignore entries.""" + return False + + @property + def no_targets(self) -> bool: + """True if the test does not use test targets. 
Mutually exclusive with all_targets.""" + return True + + def load_ci_config(self, python: PythonConfig) -> dict[str, t.Any]: + """Load and return the CI YAML configuration.""" + if not self._ci_config: + self._ci_config = self.load_yaml(python, self.CI_YML) + + return self._ci_config + + @property + def ci_test_groups(self) -> dict[str, list[int]]: + """Return a dictionary of CI test names and their group(s).""" + if not self._ci_test_groups: + test_groups: dict[str, set[int]] = {} + + for stage in self._ci_config['stages']: + for job in stage['jobs']: + if job.get('template') != 'templates/matrix.yml': + continue + + parameters = job['parameters'] + + groups = parameters.get('groups', []) + test_format = parameters.get('testFormat', '{0}') + test_group_format = parameters.get('groupFormat', '{0}/{{1}}') + + for target in parameters['targets']: + test = target.get('test') or target.get('name') + + if groups: + tests_formatted = [test_group_format.format(test_format).format(test, group) for group in groups] + else: + tests_formatted = [test_format.format(test)] + + for test_formatted in tests_formatted: + parts = test_formatted.split('/') + key = parts[0] + + if key in ('sanity', 'units'): + continue + + try: + group = int(parts[-1]) + except ValueError: + continue + + if group < 1 or group > 99: + continue + + group_set = test_groups.setdefault(key, set()) + group_set.add(group) + + self._ci_test_groups = dict((key, sorted(value)) for key, value in test_groups.items()) + + return self._ci_test_groups + + def format_test_group_alias(self, name: str, fallback: str = '') -> str: + """Return a test group alias using the given name and fallback.""" + group_numbers = self.ci_test_groups.get(name, None) + + if group_numbers: + if min(group_numbers) != 1: + display.warning('Min test group "%s" in %s is %d instead of 1.' % (name, self.CI_YML, min(group_numbers)), unique=True) + + if max(group_numbers) != len(group_numbers): + display.warning('Max test group "%s" in %s is %d instead of %d.' 
% (name, self.CI_YML, max(group_numbers), len(group_numbers)), unique=True) + + if max(group_numbers) > 9: + alias = '%s/%s/group(%s)/' % (self.TEST_ALIAS_PREFIX, name, '|'.join(str(i) for i in range(min(group_numbers), max(group_numbers) + 1))) + elif len(group_numbers) > 1: + alias = '%s/%s/group[%d-%d]/' % (self.TEST_ALIAS_PREFIX, name, min(group_numbers), max(group_numbers)) + else: + alias = '%s/%s/group%d/' % (self.TEST_ALIAS_PREFIX, name, min(group_numbers)) + elif fallback: + alias = '%s/%s/group%d/' % (self.TEST_ALIAS_PREFIX, fallback, 1) + else: + raise Exception('cannot find test group "%s" in %s' % (name, self.CI_YML)) + + return alias + + def load_yaml(self, python: PythonConfig, path: str) -> dict[str, t.Any]: + """Load the specified YAML file and return the contents.""" + yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py') + return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0]) + + def test(self, args: SanityConfig, targets: SanityTargets, python: PythonConfig) -> TestResult: + if args.explain: + return SanitySuccess(self.name) + + if not os.path.isfile(self.CI_YML): + return SanityFailure(self.name, messages=[SanityMessage( + message='file missing', + path=self.CI_YML, + )]) + + results = Results( + comments=[], + labels={}, + ) + + self.load_ci_config(python) + self.check_changes(args, results) + + write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results.__dict__) + + messages = [] + + messages += self.check_posix_targets(args) + messages += self.check_windows_targets() + + if messages: + return SanityFailure(self.name, messages=messages) + + return SanitySuccess(self.name) + + def check_posix_targets(self, args: SanityConfig) -> list[SanityMessage]: + """Check POSIX integration test targets and return messages with any issues found.""" + posix_targets = tuple(walk_posix_integration_targets()) + + clouds = get_cloud_platforms(args, posix_targets) + cloud_targets = ['cloud/%s/' % cloud for cloud in clouds] + + all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], errors=False)) + invalid_cloud_targets = tuple(filter_targets(all_cloud_targets, cloud_targets, include=False, errors=False)) + + messages = [] + + for target in invalid_cloud_targets: + for alias in target.aliases: + if alias.startswith('cloud/') and alias != 'cloud/': + if any(alias.startswith(cloud_target) for cloud_target in cloud_targets): + continue + + messages.append(SanityMessage('invalid alias `%s`' % alias, '%s/aliases' % target.path)) + + messages += self.check_ci_group( + targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False, errors=False)), + find=self.format_test_group_alias('linux').replace('linux', 'posix'), + find_incidental=['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX], + ) + + messages += self.check_ci_group( + targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], errors=False)), + find=self.format_test_group_alias('generic'), + ) + + for cloud in clouds: + if cloud == 'httptester': + find = self.format_test_group_alias('linux').replace('linux', 'posix') + find_incidental = ['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX] + else: + find = self.format_test_group_alias(cloud, 'generic') + find_incidental = ['%s/%s/incidental/' % (self.TEST_ALIAS_PREFIX, cloud), '%s/cloud/incidental/' % self.TEST_ALIAS_PREFIX] + + messages += self.check_ci_group( + targets=tuple(filter_targets(posix_targets, 
['cloud/%s/' % cloud], errors=False)), + find=find, + find_incidental=find_incidental, + ) + + target_type_groups = { + IntegrationTargetType.TARGET: (1, 2), + IntegrationTargetType.CONTROLLER: (3, 4, 5), + IntegrationTargetType.CONFLICT: (), + IntegrationTargetType.UNKNOWN: (), + } + + for target in posix_targets: + if target.name == 'ansible-test-container': + continue # special test target which uses group 6 -- nothing else should be in that group + + if f'{self.TEST_ALIAS_PREFIX}/posix/' not in target.aliases: + continue + + found_groups = [alias for alias in target.aliases if re.search(f'^{self.TEST_ALIAS_PREFIX}/posix/group[0-9]+/$', alias)] + expected_groups = [f'{self.TEST_ALIAS_PREFIX}/posix/group{group}/' for group in target_type_groups[target.target_type]] + valid_groups = [group for group in found_groups if group in expected_groups] + invalid_groups = [group for group in found_groups if not any(group.startswith(expected_group) for expected_group in expected_groups)] + + if not valid_groups: + messages.append(SanityMessage(f'Target of type {target.target_type.name} must be in at least one of these groups: {", ".join(expected_groups)}', + f'{target.path}/aliases')) + + if invalid_groups: + messages.append(SanityMessage(f'Target of type {target.target_type.name} cannot be in these groups: {", ".join(invalid_groups)}', + f'{target.path}/aliases')) + + return messages + + def check_windows_targets(self) -> list[SanityMessage]: + """Check Windows integration test targets and return messages with any issues found.""" + windows_targets = tuple(walk_windows_integration_targets()) + + messages = [] + + messages += self.check_ci_group( + targets=windows_targets, + find=self.format_test_group_alias('windows'), + find_incidental=['%s/windows/incidental/' % self.TEST_ALIAS_PREFIX], + ) + + return messages + + def check_ci_group( + self, + targets: tuple[CompletionTarget, ...], + find: str, + find_incidental: t.Optional[list[str]] = None, + ) -> list[SanityMessage]: + """Check the CI groups set in the provided targets and return a list of messages with any issues found.""" + all_paths = set(target.path for target in targets) + supported_paths = set(target.path for target in filter_targets(targets, [find], errors=False)) + unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], errors=False)) + + if find_incidental: + incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, errors=False)) + else: + incidental_paths = set() + + unassigned_paths = all_paths - supported_paths - unsupported_paths - incidental_paths + conflicting_paths = supported_paths & unsupported_paths + + unassigned_message = 'missing alias `%s` or `%s`' % (find.strip('/'), self.UNSUPPORTED.strip('/')) + conflicting_message = 'conflicting alias `%s` and `%s`' % (find.strip('/'), self.UNSUPPORTED.strip('/')) + + messages = [] + + for path in unassigned_paths: + if path == 'test/integration/targets/ansible-test-container': + continue # special test target which uses group 6 -- nothing else should be in that group + + messages.append(SanityMessage(unassigned_message, '%s/aliases' % path)) + + for path in conflicting_paths: + messages.append(SanityMessage(conflicting_message, '%s/aliases' % path)) + + return messages + + def check_changes(self, args: SanityConfig, results: Results) -> None: + """Check changes and store results in the provided result dictionary.""" + integration_targets = list(walk_integration_targets()) + module_targets = 
list(walk_module_targets()) + + integration_targets_by_name = dict((target.name, target) for target in integration_targets) + module_names_by_path = dict((target.path, target.module) for target in module_targets) + + disabled_targets = [] + unstable_targets = [] + unsupported_targets = [] + + for command in [command for command in args.metadata.change_description.focused_command_targets if 'integration' in command]: + for target in args.metadata.change_description.focused_command_targets[command]: + if self.DISABLED in integration_targets_by_name[target].aliases: + disabled_targets.append(target) + elif self.UNSTABLE in integration_targets_by_name[target].aliases: + unstable_targets.append(target) + elif self.UNSUPPORTED in integration_targets_by_name[target].aliases: + unsupported_targets.append(target) + + untested_modules = [] + + for path in args.metadata.change_description.no_integration_paths: + module = module_names_by_path.get(path) + + if module: + untested_modules.append(module) + + comments = [ + self.format_comment(self.TEMPLATE_DISABLED, disabled_targets), + self.format_comment(self.TEMPLATE_UNSTABLE, unstable_targets), + self.format_comment(self.TEMPLATE_UNSUPPORTED, unsupported_targets), + self.format_comment(self.TEMPLATE_UNTESTED, untested_modules), + ] + + comments = [comment for comment in comments if comment] + + labels = dict( + needs_tests=bool(untested_modules), + disabled_tests=bool(disabled_targets), + unstable_tests=bool(unstable_targets), + unsupported_tests=bool(unsupported_targets), + ) + + results.comments += comments + results.labels.update(labels) + + def format_comment(self, template: str, targets: list[str]) -> t.Optional[str]: + """Format and return a comment based on the given template and targets, or None if there are no targets.""" + if not targets: + return None + + tests = '\n'.join('- %s' % target for target in targets) + + data = dict( + explain_url=self.EXPLAIN_URL, + tests=tests, + ) + + message = textwrap.dedent(template).strip().format(**data) + + return message + + +@dataclasses.dataclass +class Results: + """Check results.""" + + comments: list[str] + labels: dict[str, bool] diff --git a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py deleted file mode 100644 index 48f1b0b1..00000000 --- a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Sanity test for documentation of sanity tests.""" -from __future__ import annotations - -import os - -from . import ( - SanityVersionNeutral, - SanityMessage, - SanityFailure, - SanitySuccess, - SanityTargets, - sanity_get_tests, -) - -from ...test import ( - TestResult, -) - -from ...config import ( - SanityConfig, -) - -from ...data import ( - data_context, -) - - -class SanityDocsTest(SanityVersionNeutral): - """Sanity test for documentation of sanity tests.""" - - ansible_only = True - - @property - def can_ignore(self) -> bool: - """True if the test supports ignore entries.""" - return False - - @property - def no_targets(self) -> bool: - """True if the test does not use test targets. 
Mutually exclusive with all_targets.""" - return True - - def test(self, args: SanityConfig, targets: SanityTargets) -> TestResult: - sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity' - sanity_docs = set(part[0] for part in (os.path.splitext(os.path.basename(path)) for path in data_context().content.get_files(sanity_dir)) - if part[1] == '.rst') - sanity_tests = set(sanity_test.name for sanity_test in sanity_get_tests()) - - missing = sanity_tests - sanity_docs - - results = [] - - results += [SanityMessage( - message='missing docs for ansible-test sanity --test %s' % r, - path=os.path.join(sanity_dir, '%s.rst' % r), - ) for r in sorted(missing)] - - if results: - return SanityFailure(self.name, messages=results) - - return SanitySuccess(self.name) diff --git a/test/lib/ansible_test/_internal/provider/layout/ansible.py b/test/lib/ansible_test/_internal/provider/layout/ansible.py index 3ee818a5..3fad835a 100644 --- a/test/lib/ansible_test/_internal/provider/layout/ansible.py +++ b/test/lib/ansible_test/_internal/provider/layout/ansible.py @@ -20,7 +20,7 @@ class AnsibleLayout(LayoutProvider): @staticmethod def is_content_root(path: str) -> bool: """Return True if the given path is a content root for this provider.""" - return os.path.exists(os.path.join(path, 'setup.py')) and os.path.exists(os.path.join(path, 'bin/ansible-test')) + return os.path.isfile(os.path.join(path, 'pyproject.toml')) and os.path.isdir(os.path.join(path, 'test/lib/ansible_test')) def create(self, root: str, paths: list[str]) -> ContentLayout: """Create a Layout using the given root and paths.""" diff --git a/test/lib/ansible_test/_internal/provider/source/unversioned.py b/test/lib/ansible_test/_internal/provider/source/unversioned.py index 699de889..54831c99 100644 --- a/test/lib/ansible_test/_internal/provider/source/unversioned.py +++ b/test/lib/ansible_test/_internal/provider/source/unversioned.py @@ -48,9 +48,6 @@ class UnversionedSource(SourceProvider): 'tests': ( 'output', ), - 'docs/docsite': ( - '_build', - ), } kill_sub_file = { diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py index fc88b637..506b802c 100644 --- a/test/lib/ansible_test/_internal/python_requirements.py +++ b/test/lib/ansible_test/_internal/python_requirements.py @@ -251,6 +251,13 @@ def collect_requirements( # installed packages may have run-time dependencies on setuptools uninstall_packages.remove('setuptools') + # hack to allow the package-data sanity test to keep wheel in the venv + install_commands = [command for command in commands if isinstance(command, PipInstall)] + install_wheel = any(install.has_package('wheel') for install in install_commands) + + if install_wheel: + uninstall_packages.remove('wheel') + commands.extend(collect_uninstall(packages=uninstall_packages)) return commands diff --git a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 index 7cc86abd..c1cb91e4 100644 --- a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 +++ b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 @@ -15,7 +15,7 @@ # To run this script in Powershell: # # [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 -# $url = "https://raw.githubusercontent.com/ansible/ansible/devel/examples/scripts/ConfigureRemotingForAnsible.ps1" +# $url = 
"https://raw.githubusercontent.com/ansible/ansible/devel/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1" # $file = "$env:temp\ConfigureRemotingForAnsible.ps1" # # (New-Object -TypeName System.Net.WebClient).DownloadFile($url, $file) diff --git a/test/lib/ansible_test/_util/target/setup/requirements.py b/test/lib/ansible_test/_util/target/setup/requirements.py index 4fe9a6c5..b145fde5 100644 --- a/test/lib/ansible_test/_util/target/setup/requirements.py +++ b/test/lib/ansible_test/_util/target/setup/requirements.py @@ -134,6 +134,14 @@ def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None options.extend(packages) for path, content in requirements: + if path.split(os.sep)[0] in ('test', 'requirements'): + # Support for pre-build is currently limited to requirements embedded in ansible-test and those used by ansible-core. + # Requirements from ansible-core can be found in the 'test' and 'requirements' directories. + # This feature will probably be extended to support collections after further testing. + # Requirements from collections can be found in the 'tests' directory. + for pre_build in parse_pre_build_instructions(content): + pre_build.execute(pip) + write_text_file(os.path.join(tempdir, path), content, True) options.extend(['-r', path]) @@ -150,6 +158,61 @@ def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None remove_tree(tempdir) +class PreBuild: + """Parsed pre-build instructions.""" + + def __init__(self, requirement): # type: (str) -> None + self.requirement = requirement + self.constraints = [] # type: list[str] + + def execute(self, pip): # type: (str) -> None + """Execute these pre-build instructions.""" + tempdir = tempfile.mkdtemp(prefix='ansible-test-', suffix='-pre-build') + + try: + options = common_pip_options() + options.append(self.requirement) + + constraints = '\n'.join(self.constraints) + '\n' + constraints_path = os.path.join(tempdir, 'constraints.txt') + + write_text_file(constraints_path, constraints, True) + + env = common_pip_environment() + env.update(PIP_CONSTRAINT=constraints_path) + + command = [sys.executable, pip, 'wheel'] + options + + execute_command(command, env=env, cwd=tempdir) + finally: + remove_tree(tempdir) + + +def parse_pre_build_instructions(requirements): # type: (str) -> list[PreBuild] + """Parse the given pip requirements and return a list of extracted pre-build instructions.""" + # CAUTION: This code must be kept in sync with the sanity test hashing code in: + # test/lib/ansible_test/_internal/commands/sanity/__init__.py + + pre_build_prefix = '# pre-build ' + pre_build_requirement_prefix = pre_build_prefix + 'requirement: ' + pre_build_constraint_prefix = pre_build_prefix + 'constraint: ' + + lines = requirements.splitlines() + pre_build_lines = [line for line in lines if line.startswith(pre_build_prefix)] + + instructions = [] # type: list[PreBuild] + + for line in pre_build_lines: + if line.startswith(pre_build_requirement_prefix): + instructions.append(PreBuild(line[len(pre_build_requirement_prefix):])) + elif line.startswith(pre_build_constraint_prefix): + instructions[-1].constraints.append(line[len(pre_build_constraint_prefix):]) + else: + raise RuntimeError('Unsupported pre-build comment: ' + line) + + return instructions + + def uninstall(pip, options): # type: (str, t.Dict[str, t.Any]) -> None """Perform a pip uninstall.""" packages = options['packages'] diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py deleted 
file mode 100644 index fe678008..00000000 --- a/test/sanity/code-smell/configure-remoting-ps1.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -import os - - -def main(): - # required by external automated processes and should not be moved, renamed or converted to a symbolic link - original = 'examples/scripts/ConfigureRemotingForAnsible.ps1' - # required to be packaged with ansible-test and must match the original file, but cannot be a symbolic link - # the packaged version is needed to run tests when ansible-test has been installed - # keeping the packaged version identical to the original makes sure tests cover both files - packaged = 'test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1' - - copy_valid = False - - if os.path.isfile(original) and os.path.isfile(packaged): - with open(original, 'rb') as original_file: - original_content = original_file.read() - - with open(packaged, 'rb') as packaged_file: - packaged_content = packaged_file.read() - - if original_content == packaged_content: - copy_valid = True - - if not copy_valid: - print('%s: must be an exact copy of "%s"' % (packaged, original)) - - for path in [original, packaged]: - directory = path - - while True: - directory = os.path.dirname(directory) - - if not directory: - break - - if not os.path.isdir(directory): - print('%s: must be a directory' % directory) - - if os.path.islink(directory): - print('%s: cannot be a symbolic link' % directory) - - if not os.path.isfile(path): - print('%s: must be a file' % path) - - if os.path.islink(path): - print('%s: cannot be a symbolic link' % path) - - -if __name__ == '__main__': - main() diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt index 6ab26e34..338e3f38 100644 --- a/test/sanity/code-smell/deprecated-config.requirements.txt +++ b/test/sanity/code-smell/deprecated-config.requirements.txt @@ -1,4 +1,6 @@ # edit "deprecated-config.requirements.in" and generate with: hacking/update-sanity-requirements.py --test deprecated-config +# pre-build requirement: pyyaml == 6.0 +# pre-build constraint: Cython < 3.0 Jinja2==3.1.2 MarkupSafe==2.1.1 PyYAML==6.0 diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json deleted file mode 100644 index a43fa923..00000000 --- a/test/sanity/code-smell/docs-build.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "disabled": true, - "no_targets": true, - "output": "path-line-column-message" -} diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py deleted file mode 100644 index aaa69378..00000000 --- a/test/sanity/code-smell/docs-build.py +++ /dev/null @@ -1,152 +0,0 @@ -from __future__ import annotations - -import os -import re -import shutil -import subprocess -import sys -import tempfile - - -def main(): - base_dir = os.getcwd() + os.path.sep - docs_dir = os.path.abspath('docs/docsite') - - # TODO: Remove this temporary hack to constrain 'cryptography' when we have - # a better story for dealing with it. 
diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json
deleted file mode 100644
index a43fa923..00000000
--- a/test/sanity/code-smell/docs-build.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "disabled": true,
-    "no_targets": true,
-    "output": "path-line-column-message"
-}
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
deleted file mode 100644
index aaa69378..00000000
--- a/test/sanity/code-smell/docs-build.py
+++ /dev/null
@@ -1,152 +0,0 @@
-from __future__ import annotations
-
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-
-
-def main():
-    base_dir = os.getcwd() + os.path.sep
-    docs_dir = os.path.abspath('docs/docsite')
-
-    # TODO: Remove this temporary hack to constrain 'cryptography' when we have
-    # a better story for dealing with it.
-    tmpfd, tmp = tempfile.mkstemp()
-    requirements_txt = os.path.join(base_dir, 'requirements.txt')
-    shutil.copy2(requirements_txt, tmp)
-    lines = []
-    with open(requirements_txt, 'r') as f:
-        for line in f.readlines():
-            if line.strip() == 'cryptography':
-                line = 'cryptography < 3.4\n'
-            lines.append(line)
-
-    with open(requirements_txt, 'w') as f:
-        f.writelines(lines)
-
-    try:
-        cmd = ['make', 'core_singlehtmldocs']
-        sphinx = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, cwd=docs_dir, check=False, text=True)
-    finally:
-        shutil.move(tmp, requirements_txt)
-
-    stdout = sphinx.stdout
-    stderr = sphinx.stderr
-
-    if sphinx.returncode != 0:
-        sys.stderr.write("Command '%s' failed with status code: %d\n" % (' '.join(cmd), sphinx.returncode))
-
-        if stdout.strip():
-            stdout = simplify_stdout(stdout)
-
-            sys.stderr.write("--> Standard Output\n")
-            sys.stderr.write("%s\n" % stdout.strip())
-
-        if stderr.strip():
-            sys.stderr.write("--> Standard Error\n")
-            sys.stderr.write("%s\n" % stderr.strip())
-
-        sys.exit(1)
-
-    with open('docs/docsite/rst_warnings', 'r') as warnings_fd:
-        output = warnings_fd.read().strip()
-        lines = output.splitlines()
-
-    known_warnings = {
-        'block-quote-missing-blank-line': r'^Block quote ends without a blank line; unexpected unindent.$',
-        'literal-block-lex-error': r'^Could not lex literal_block as "[^"]*". Highlighting skipped.$',
-        'duplicate-label': r'^duplicate label ',
-        'undefined-label': r'undefined label: ',
-        'unknown-document': r'unknown document: ',
-        'toc-tree-missing-document': r'toctree contains reference to nonexisting document ',
-        'reference-target-not-found': r'[^ ]* reference target not found: ',
-        'not-in-toc-tree': r"document isn't included in any toctree$",
-        'unexpected-indentation': r'^Unexpected indentation.$',
-        'definition-list-missing-blank-line': r'^Definition list ends without a blank line; unexpected unindent.$',
-        'explicit-markup-missing-blank-line': r'Explicit markup ends without a blank line; unexpected unindent.$',
-        'toc-tree-glob-pattern-no-match': r"^toctree glob pattern '[^']*' didn't match any documents$",
-        'unknown-interpreted-text-role': '^Unknown interpreted text role "[^"]*".$',
-    }
-
-    for line in lines:
-        match = re.search('^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$', line)
-
-        if not match:
-            path = 'docs/docsite/rst/index.rst'
-            lineno = 0
-            column = 0
-            code = 'unknown'
-            message = line
-
-            # surface unknown lines while filtering out known lines to avoid excessive output
-            print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
-            continue
-
-        path = match.group('path')
-        lineno = int(match.group('line') or 0)
-        column = int(match.group('column') or 0)
-        level = match.group('level').lower()
-        message = match.group('message')
-
-        path = os.path.abspath(path)
-
-        if path.startswith(base_dir):
-            path = path[len(base_dir):]
-
-        if path.startswith('rst/'):
-            path = 'docs/docsite/' + path  # fix up paths reported relative to `docs/docsite/`
-
-        if level == 'warning':
-            code = 'warning'
-
-            for label, pattern in known_warnings.items():
-                if re.search(pattern, message):
-                    code = label
-                    break
-        else:
-            code = 'error'
-
-        print('%s:%d:%d: %s: %s' % (path, lineno, column, code, message))
-
-
-def simplify_stdout(value):
-    """Simplify output by omitting earlier 'rendering: ...' messages."""
-    lines = value.strip().splitlines()
-
-    rendering = []
-    keep = []
-
-    def truncate_rendering():
-        """Keep last rendering line (if any) with a message about omitted lines as needed."""
-        if not rendering:
-            return
-
-        notice = rendering[-1]
-
-        if len(rendering) > 1:
-            notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1)
-
-        keep.append(notice)
-        # Could change to rendering.clear() if we do not support python2
-        rendering[:] = []
-
-    for line in lines:
-        if line.startswith('rendering: '):
-            rendering.append(line)
-            continue
-
-        truncate_rendering()
-        keep.append(line)
-
-    truncate_rendering()
-
-    result = '\n'.join(keep)
-
-    return result
-
-
-if __name__ == '__main__':
-    main()
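The warning-classification loop in the deleted docs-build.py hinges on one regex. Applied to a typical Sphinx warning line (the sample path and message are made up; the pattern is verbatim from the code above):

    import re

    pattern = '^(?P<path>[^:]+):((?P<line>[0-9]+):)?((?P<column>[0-9]+):)? (?P<level>WARNING|ERROR): (?P<message>.*)$'
    sample = 'docs/docsite/rst/playbook_guide.rst:12: WARNING: undefined label: foo'

    match = re.search(pattern, sample)
    assert match

    # The optional line/column groups let the same pattern cover all of
    # Sphinx's path, path:line, and path:line:column report formats.
    print(match.group('path'), match.group('line'), match.group('level'), match.group('message'))
    # -> docs/docsite/rst/playbook_guide.rst 12 WARNING undefined label: foo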
messages.""" - lines = value.strip().splitlines() - - rendering = [] - keep = [] - - def truncate_rendering(): - """Keep last rendering line (if any) with a message about omitted lines as needed.""" - if not rendering: - return - - notice = rendering[-1] - - if len(rendering) > 1: - notice += ' (%d previous rendering line(s) omitted)' % (len(rendering) - 1) - - keep.append(notice) - # Could change to rendering.clear() if we do not support python2 - rendering[:] = [] - - for line in lines: - if line.startswith('rendering: '): - rendering.append(line) - continue - - truncate_rendering() - keep.append(line) - - truncate_rendering() - - result = '\n'.join(keep) - - return result - - -if __name__ == '__main__': - main() diff --git a/test/sanity/code-smell/docs-build.requirements.in b/test/sanity/code-smell/docs-build.requirements.in deleted file mode 100644 index 02c3bfc9..00000000 --- a/test/sanity/code-smell/docs-build.requirements.in +++ /dev/null @@ -1,9 +0,0 @@ -jinja2 -pyyaml -resolvelib < 0.9.0 -sphinx == 4.2.0 -sphinx-notfound-page -sphinx-ansible-theme -straight.plugin -rstcheck < 4 # match version used in other sanity tests -antsibull-docs == 1.7.0 # currently approved version diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt deleted file mode 100644 index 7e30a732..00000000 --- a/test/sanity/code-smell/docs-build.requirements.txt +++ /dev/null @@ -1,50 +0,0 @@ -# edit "docs-build.requirements.in" and generate with: hacking/update-sanity-requirements.py --test docs-build -aiofiles==22.1.0 -aiohttp==3.8.3 -aiosignal==1.2.0 -alabaster==0.7.12 -ansible-pygments==0.1.1 -antsibull-core==1.2.0 -antsibull-docs==1.7.0 -async-timeout==4.0.2 -asyncio-pool==0.6.0 -attrs==22.1.0 -Babel==2.10.3 -certifi==2022.9.14 -charset-normalizer==2.1.1 -docutils==0.17.1 -frozenlist==1.3.1 -idna==3.4 -imagesize==1.4.1 -Jinja2==3.1.2 -MarkupSafe==2.1.1 -multidict==6.0.2 -packaging==21.3 -perky==0.5.5 -pydantic==1.10.2 -Pygments==2.13.0 -pyparsing==3.0.9 -pytz==2022.2.1 -PyYAML==6.0 -requests==2.28.1 -resolvelib==0.8.1 -rstcheck==3.5.0 -semantic-version==2.10.0 -sh==1.14.3 -six==1.16.0 -snowballstemmer==2.2.0 -Sphinx==4.2.0 -sphinx-ansible-theme==0.9.1 -sphinx-notfound-page==0.8.3 -sphinx-rtd-theme==1.0.0 -sphinxcontrib-applehelp==1.0.2 -sphinxcontrib-devhelp==1.0.2 -sphinxcontrib-htmlhelp==2.0.0 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==1.0.3 -sphinxcontrib-serializinghtml==1.1.5 -straight.plugin==1.5.0 -Twiggy==0.5.1 -typing_extensions==4.3.0 -urllib3==1.26.12 -yarl==1.8.1 diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json index 0aa70a3c..f7ecd010 100644 --- a/test/sanity/code-smell/package-data.json +++ b/test/sanity/code-smell/package-data.json @@ -1,5 +1,6 @@ { "disabled": true, "all_targets": true, + "include_symlinks": true, "output": "path-message" } diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py index fc894a77..7a81b759 100644 --- a/test/sanity/code-smell/package-data.py +++ b/test/sanity/code-smell/package-data.py @@ -1,400 +1,184 @@ +"""Verify the contents of the built sdist and wheel.""" from __future__ import annotations import contextlib import fnmatch -import glob import os -import re +import pathlib import shutil import subprocess import sys import tarfile import tempfile +import typing as t +import zipfile +from ansible.release import __version__ -def assemble_files_to_ship(complete_file_list): - """ - This looks for all files 
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
index fc894a77..7a81b759 100644
--- a/test/sanity/code-smell/package-data.py
+++ b/test/sanity/code-smell/package-data.py
@@ -1,400 +1,184 @@
+"""Verify the contents of the built sdist and wheel."""
 from __future__ import annotations

 import contextlib
 import fnmatch
-import glob
 import os
-import re
+import pathlib
 import shutil
 import subprocess
 import sys
 import tarfile
 import tempfile
+import typing as t
+import zipfile

+from ansible.release import __version__

-def assemble_files_to_ship(complete_file_list):
-    """
-    This looks for all files which should be shipped in the sdist
-    """
-    # All files which are in the repository except these:
+
+def collect_sdist_files(complete_file_list: list[str]) -> list[str]:
+    """Return a list of files which should be present in the sdist."""
     ignore_patterns = (
-        # Developer-only tools
         '.azure-pipelines/*',
-        '.github/*',
-        '.github/*/*',
-        'changelogs/fragments/*',
-        'hacking/backport/*',
-        'hacking/azp/*',
-        'hacking/tests/*',
-        'hacking/ticket_stubs/*',
-        'test/sanity/code-smell/botmeta.*',
-        'test/sanity/code-smell/release-names.*',
-        'test/results/.tmp/*',
-        'test/results/.tmp/*/*',
-        'test/results/.tmp/*/*/*',
-        'test/results/.tmp/*/*/*/*',
-        'test/results/.tmp/*/*/*/*/*',
-        '.git*',
-    )
-    ignore_files = frozenset((
-        # Developer-only tools
-        'changelogs/config.yaml',
-        'hacking/README.md',
-        'hacking/ansible-profile',
-        'hacking/cgroup_perf_recap_graph.py',
-        'hacking/create_deprecated_issues.py',
-        'hacking/deprecated_issue_template.md',
-        'hacking/create_deprecation_bug_reports.py',
-        'hacking/fix_test_syntax.py',
-        'hacking/get_library.py',
-        'hacking/metadata-tool.py',
-        'hacking/report.py',
-        'hacking/return_skeleton_generator.py',
-        'hacking/test-module',
-        'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
-        'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
         '.cherry_picker.toml',
+        '.git*',
         '.mailmap',
-        # Generated as part of a build step
-        'docs/docsite/rst/conf.py',
-        'docs/docsite/rst/index.rst',
-        'docs/docsite/rst/dev_guide/index.rst',
-        # Possibly should be included
-        'examples/scripts/uptime.py',
-        'examples/scripts/my_test.py',
-        'examples/scripts/my_test_info.py',
-        'examples/scripts/my_test_facts.py',
-        'examples/DOCUMENTATION.yml',
-        'examples/play.yml',
-        'examples/hosts.yaml',
-        'examples/hosts.yml',
-        'examples/inventory_script_schema.json',
-        'examples/plugin_filters.yml',
-        'hacking/env-setup',
-        'hacking/env-setup.fish',
-        'MANIFEST',
-        'setup.cfg',
-        # docs for test files not included in sdist
-        'docs/docsite/rst/dev_guide/testing/sanity/bin-symlinks.rst',
-        'docs/docsite/rst/dev_guide/testing/sanity/botmeta.rst',
-        'docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst',
-        'docs/docsite/rst/dev_guide/testing/sanity/release-names.rst',
-    ))
-
-    # These files are generated and then intentionally added to the sdist
-
-    # Manpages
-    ignore_script = ('ansible-connection', 'ansible-test')
-    manpages = ['docs/man/man1/ansible.1']
-    for dirname, dummy, files in os.walk('bin'):
-        for filename in files:
-            if filename in ignore_script:
-                continue
-            manpages.append('docs/man/man1/%s.1' % filename)
-
-    # Misc
-    misc_generated_files = [
-        'PKG-INFO',
-    ]
+        'changelogs/README.md',
+        'changelogs/config.yaml',
+        'changelogs/fragments/*',
+        'hacking/*',
+    )

-    shipped_files = manpages + misc_generated_files
+    sdist_files = [path for path in complete_file_list if not any(fnmatch.fnmatch(path, ignore) for ignore in ignore_patterns)]

-    for path in complete_file_list:
-        if path not in ignore_files:
-            for ignore in ignore_patterns:
-                if fnmatch.fnmatch(path, ignore):
-                    break
-            else:
-                shipped_files.append(path)
+    egg_info = (
+        'PKG-INFO',
+        'SOURCES.txt',
+        'dependency_links.txt',
+        'entry_points.txt',
+        'not-zip-safe',
+        'requires.txt',
+        'top_level.txt',
+    )

-    return shipped_files
+    sdist_files.append('PKG-INFO')
+    sdist_files.extend(f'lib/ansible_core.egg-info/{name}' for name in egg_info)

+    return sdist_files
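The rewritten check derives the expected sdist listing from fnmatch patterns instead of the old hand-maintained per-file tables. A quick illustration of the same filter expression (file names invented):

    import fnmatch

    ignore_patterns = ('.git*', 'hacking/*', 'changelogs/fragments/*')
    repo_files = [
        '.gitignore',
        'changelogs/fragments/12345-fix.yml',
        'hacking/env-setup',
        'lib/ansible/__init__.py',
    ]

    # Same comprehension shape as collect_sdist_files() above: keep a path
    # only if no ignore pattern matches it.
    sdist_files = [
        path for path in repo_files
        if not any(fnmatch.fnmatch(path, ignore) for ignore in ignore_patterns)
    ]

    print(sdist_files)  # ['lib/ansible/__init__.py']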

-def assemble_files_to_install(complete_file_list):
-    """
-    This looks for all of the files which should show up in an installation of ansible
-    """
-    ignore_patterns = (
-        # Tests excluded from sdist
-        'test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py',
-        'test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py',
-    )
-    pkg_data_files = []
-    for path in complete_file_list:
+def collect_wheel_files(complete_file_list: list[str]) -> list[str]:
+    """Return a list of files which should be present in the wheel."""
+    wheel_files = []

+    for path in complete_file_list:
-        if path.startswith("lib/ansible"):
+        if path.startswith('lib/ansible/'):
             prefix = 'lib'
-        elif path.startswith("test/lib/ansible_test"):
+        elif path.startswith('test/lib/ansible_test/'):
             prefix = 'test/lib'
         else:
             continue

-        for ignore in ignore_patterns:
-            if fnmatch.fnmatch(path, ignore):
-                break
-        else:
-            pkg_data_files.append(os.path.relpath(path, prefix))
-
-    return pkg_data_files
+        wheel_files.append(os.path.relpath(path, prefix))

-
-@contextlib.contextmanager
-def clean_repository(file_list):
-    """Copy the repository to clean it of artifacts"""
-    # Create a tempdir that will be the clean repo
-    with tempfile.TemporaryDirectory() as repo_root:
-        directories = set((repo_root + os.path.sep,))
-
-        for filename in file_list:
-            # Determine if we need to create the directory
-            directory = os.path.dirname(filename)
-            dest_dir = os.path.join(repo_root, directory)
-            if dest_dir not in directories:
-                os.makedirs(dest_dir)
-
-            # Keep track of all the directories that now exist
-            path_components = directory.split(os.path.sep)
-            path = repo_root
-            for component in path_components:
-                path = os.path.join(path, component)
-                if path not in directories:
-                    directories.add(path)
-
-            # Copy the file
-            shutil.copy2(filename, dest_dir, follow_symlinks=False)
-
-        yield repo_root
-
-
-def create_sdist(tmp_dir):
-    """Create an sdist in the repository"""
-    create = subprocess.run(
-        ['make', 'snapshot', 'SDIST_DIR=%s' % tmp_dir],
-        stdin=subprocess.DEVNULL,
-        capture_output=True,
-        text=True,
-        check=False,
-    )
-    stderr = create.stderr
-
-    if create.returncode != 0:
-        raise Exception('make snapshot failed:\n%s' % stderr)
-
-    # Determine path to sdist
-    tmp_dir_files = os.listdir(tmp_dir)
-
-    if not tmp_dir_files:
-        raise Exception('sdist was not created in the temp dir')
-    elif len(tmp_dir_files) > 1:
-        raise Exception('Unexpected extra files in the temp dir')
-
-    return os.path.join(tmp_dir, tmp_dir_files[0])
+    dist_info = (
+        'COPYING',
+        'METADATA',
+        'RECORD',
+        'WHEEL',
+        'entry_points.txt',
+        'top_level.txt',
+    )

+    wheel_files.append(f'ansible_core-{__version__}.data/scripts/ansible-test')
+    wheel_files.extend(f'ansible_core-{__version__}.dist-info/{name}' for name in dist_info)

-def extract_sdist(sdist_path, tmp_dir):
-    """Untar the sdist"""
-    # Untar the sdist from the tmp_dir
-    with tarfile.open(os.path.join(tmp_dir, sdist_path), 'r|*') as sdist:
-        sdist.extractall(path=tmp_dir)
-
-    # Determine the sdist directory name
-    sdist_filename = os.path.basename(sdist_path)
-    tmp_dir_files = os.listdir(tmp_dir)
-    try:
-        tmp_dir_files.remove(sdist_filename)
-    except ValueError:
-        # Unexpectedly could not find the original sdist in the temp dir
-        raise
-
-    if len(tmp_dir_files) > 1:
-        raise Exception('Unexpected extra files in the temp dir')
-    elif len(tmp_dir_files) < 1:
-        raise Exception('sdist extraction did not occur in the temp dir')
-
-    return os.path.join(tmp_dir, tmp_dir_files[0])
+    return wheel_files

+
+@contextlib.contextmanager
+def clean_repository(complete_file_list: list[str]) -> t.Generator[str, None, None]:
+    """Copy the files to a temporary directory and yield the path."""
+    directories = sorted(set(os.path.dirname(path) for path in complete_file_list))
+    directories.remove('')
+
+    with tempfile.TemporaryDirectory() as temp_dir:
+        for directory in directories:
+            os.makedirs(os.path.join(temp_dir, directory))
+
+        for path in complete_file_list:
+            shutil.copy2(path, os.path.join(temp_dir, path), follow_symlinks=False)
+
+        yield temp_dir
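The prefix stripping in collect_wheel_files() maps repository paths onto the locations they occupy inside the wheel. For example (paths shown are typical repository files, used here only for illustration):

    import os

    # Repository path -> path inside the wheel, via the same relpath call.
    print(os.path.relpath('lib/ansible/cli/adhoc.py', 'lib'))
    # -> ansible/cli/adhoc.py

    print(os.path.relpath('test/lib/ansible_test/__init__.py', 'test/lib'))
    # -> ansible_test/__init__.py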

-def install_sdist(tmp_dir, sdist_dir):
-    """Install the extracted sdist into the temporary directory"""
-    install = subprocess.run(
-        ['python', 'setup.py', 'install', '--root=%s' % tmp_dir],
+def build(source_dir: str, tmp_dir: str) -> tuple[pathlib.Path, pathlib.Path]:
+    """Create a sdist and wheel."""
+    create = subprocess.run(
+        [sys.executable, '-m', 'build', '--no-isolation', '--outdir', tmp_dir],
         stdin=subprocess.DEVNULL,
         capture_output=True,
         text=True,
-        cwd=os.path.join(tmp_dir, sdist_dir),
         check=False,
+        cwd=source_dir,
     )
-    stdout, stderr = install.stdout, install.stderr
-
-    if install.returncode != 0:
-        raise Exception('sdist install failed:\n%s' % stderr)
-
-    # Determine the prefix for the installed files
-    match = re.search('^copying .* -> (%s/.*?/(?:site|dist)-packages)/ansible$' %
-                      tmp_dir, stdout, flags=re.M)
-
-    return match.group(1)
-
-
-def check_sdist_contains_expected(sdist_dir, to_ship_files):
-    """Check that the files we expect to ship are present in the sdist"""
-    results = []
-    for filename in to_ship_files:
-        path = os.path.join(sdist_dir, filename)
-        if not os.path.exists(path):
-            results.append('%s: File was not added to sdist' % filename)
-
-    # Also changelog
-    changelog_files = glob.glob(os.path.join(sdist_dir, 'changelogs/CHANGELOG-v2.[0-9]*.rst'))
-    if not changelog_files:
-        results.append('changelogs/CHANGELOG-v2.*.rst: Changelog file was not added to the sdist')
-    elif len(changelog_files) > 1:
-        results.append('changelogs/CHANGELOG-v2.*.rst: Too many changelog files: %s'
-                       % changelog_files)
-
-    return results
-
-
-def check_sdist_files_are_wanted(sdist_dir, to_ship_files):
-    """Check that all files in the sdist are desired"""
-    results = []
-    for dirname, dummy, files in os.walk(sdist_dir):
-        dirname = os.path.relpath(dirname, start=sdist_dir)
-        if dirname == '.':
-            dirname = ''
-
-        for filename in files:
-            if filename == 'setup.cfg':
-                continue
-
-            path = os.path.join(dirname, filename)
-            if path not in to_ship_files:
-
-                if fnmatch.fnmatch(path, 'changelogs/CHANGELOG-v2.[0-9]*.rst'):
-                    # changelog files are expected
-                    continue
-
-                if fnmatch.fnmatch(path, 'lib/ansible_core.egg-info/*'):
-                    continue
+    if create.returncode != 0:
+        raise RuntimeError(f'build failed:\n{create.stderr}\n{create.stdout}')

-                # FIXME: ansible-test doesn't pass the paths of symlinks to us so we aren't
-                # checking those
-                if not os.path.islink(os.path.join(sdist_dir, path)):
-                    results.append('%s: File in sdist was not in the repository' % path)
+    tmp_dir_files = list(pathlib.Path(tmp_dir).iterdir())

-    return results
+    if len(tmp_dir_files) != 2:
+        raise RuntimeError(f'build resulted in {len(tmp_dir_files)} items instead of 2')

+    sdist_path = [path for path in tmp_dir_files if path.suffix == '.gz'][0]
+    wheel_path = [path for path in tmp_dir_files if path.suffix == '.whl'][0]

+    return sdist_path, wheel_path
+
+
+def list_sdist(path: pathlib.Path) -> list[str]:
+    """Return a list of the files in the sdist."""
+    item: tarfile.TarInfo
+
+    with tarfile.open(path) as sdist:
+        paths = ['/'.join(pathlib.Path(item.path).parts[1:]) for item in sdist.getmembers() if not item.isdir()]
+
+    return paths
+
+
+def list_wheel(path: pathlib.Path) -> list[str]:
+    """Return a list of the files in the wheel."""
+    with zipfile.ZipFile(path) as wheel:
+        paths = [item.filename for item in wheel.filelist if not item.is_dir()]
+
+    return paths
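list_sdist() drops the top-level 'ansible-core-<version>/' directory that every sdist member carries, so the listing can be compared directly against repository paths. A toy illustration (the member name is a typical example, not read from a real tarball):

    import pathlib

    member = 'ansible-core-2.14.9/lib/ansible/__init__.py'

    # parts[1:] discards the leading 'ansible-core-2.14.9' component.
    print('/'.join(pathlib.Path(member).parts[1:]))
    # -> lib/ansible/__init__.py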

-def check_installed_contains_expected(install_dir, to_install_files):
-    """Check that all the files we expect to be installed are"""
-    results = []
-    for filename in to_install_files:
-        path = os.path.join(install_dir, filename)
-        if not os.path.exists(path):
-            results.append('%s: File not installed' % os.path.join('lib', filename))
-
-    return results
-
-
-EGG_RE = re.compile('ansible[^/]+\\.egg-info/(PKG-INFO|SOURCES.txt|'
-                    'dependency_links.txt|not-zip-safe|requires.txt|top_level.txt|entry_points.txt)$')
-
-
-def check_installed_files_are_wanted(install_dir, to_install_files):
-    """Check that all installed files were desired"""
-    results = []
-    for dirname, dummy, files in os.walk(install_dir):
-        dirname = os.path.relpath(dirname, start=install_dir)
-        if dirname == '.':
-            dirname = ''
-
-        for filename in files:
-            # If this is a byte code cache, look for the python file's name
-            directory = dirname
-            if filename.endswith('.pyc') or filename.endswith('.pyo'):
-                # Remove the trailing "o" or "c"
-                filename = filename[:-1]
-
-                if directory.endswith('%s__pycache__' % os.path.sep):
-                    # Python3 byte code cache, look for the basename of
-                    # __pycache__/__init__.cpython-36.py
-                    segments = filename.rsplit('.', 2)
-                    if len(segments) >= 3:
-                        filename = '.'.join((segments[0], segments[2]))
-                        directory = os.path.dirname(directory)
-
-            path = os.path.join(directory, filename)
-
-            # Test that the file was listed for installation
-            if path not in to_install_files:
-                # FIXME: ansible-test doesn't pass the paths of symlinks to us so we
-                # aren't checking those
-                if not os.path.islink(os.path.join(install_dir, path)):
-                    if not EGG_RE.match(path):
-                        results.append('%s: File was installed but was not supposed to be' % path)
-
-    return results
+def check_files(source: str, expected: list[str], actual: list[str]) -> list[str]:
+    """Verify the expected files exist and no extra files exist."""
+    missing = sorted(set(expected) - set(actual))
+    extra = sorted(set(actual) - set(expected))
+
+    errors = (
+        [f'{path}: missing from {source}' for path in missing]
+        + [f'{path}: unexpected in {source}' for path in extra]
+    )
+
+    return errors
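check_files() reports both directions of drift as flat path-message lines, matching the "path-message" output format declared in package-data.json. A sketch of its behavior on hypothetical listings:

    expected = ['PKG-INFO', 'lib/ansible/__init__.py']
    actual = ['PKG-INFO', 'lib/ansible/junk.pyc']

    missing = sorted(set(expected) - set(actual))  # ['lib/ansible/__init__.py']
    extra = sorted(set(actual) - set(expected))    # ['lib/ansible/junk.pyc']

    for path in missing:
        print(f'{path}: missing from sdist')
    for path in extra:
        print(f'{path}: unexpected in sdist')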

-def _find_symlinks():
-    symlink_list = []
-    for dirname, directories, filenames in os.walk('.'):
-        for filename in filenames:
-            path = os.path.join(dirname, filename)
-            # Strip off "./" from the front
-            path = path[2:]
-            if os.path.islink(path):
-                symlink_list.append(path)
-
-    return symlink_list
-
-
-def main():
-    """All of the files in the repository"""
-    complete_file_list = []
-    for path in sys.argv[1:] or sys.stdin.read().splitlines():
-        complete_file_list.append(path)
+def main() -> None:
+    """Main program entry point."""
+    complete_file_list = sys.argv[1:] or sys.stdin.read().splitlines()

-    # ansible-test isn't currently passing symlinks to us so construct those ourselves for now
-    for filename in _find_symlinks():
-        if filename not in complete_file_list:
-            # For some reason ansible-test is passing us lib/ansible/module_utils/ansible_release.py
-            # which is a symlink even though it doesn't pass any others
-            complete_file_list.append(filename)
+    errors = []

-    # We may run this after docs sanity tests so get a clean repository to run in
+    # Limit visible files to those reported by ansible-test.
+    # This avoids including files which are not committed to git.
     with clean_repository(complete_file_list) as clean_repo_dir:
-        os.chdir(clean_repo_dir)
+        if __version__.endswith('.dev0'):
+            # Make sure a changelog exists for this version when testing from devel.
+            # When testing from a stable branch the changelog will already exist.
+            major_minor_version = '.'.join(__version__.split('.')[:2])
+            changelog_path = f'changelogs/CHANGELOG-v{major_minor_version}.rst'
+            pathlib.Path(clean_repo_dir, changelog_path).touch()
+            complete_file_list.append(changelog_path)

-        to_ship_files = assemble_files_to_ship(complete_file_list)
-        to_install_files = assemble_files_to_install(complete_file_list)
+        expected_sdist_files = collect_sdist_files(complete_file_list)
+        expected_wheel_files = collect_wheel_files(complete_file_list)

-        results = []
         with tempfile.TemporaryDirectory() as tmp_dir:
-            sdist_path = create_sdist(tmp_dir)
-            sdist_dir = extract_sdist(sdist_path, tmp_dir)
-
-            # Check that the files that are supposed to be in the sdist are there
-            results.extend(check_sdist_contains_expected(sdist_dir, to_ship_files))
-
-            # Check that the files that are in the sdist are in the repository
-            results.extend(check_sdist_files_are_wanted(sdist_dir, to_ship_files))
-
-            # install the sdist
-            install_dir = install_sdist(tmp_dir, sdist_dir)
+            sdist_path, wheel_path = build(clean_repo_dir, tmp_dir)

-            # Check that the files that are supposed to be installed are there
-            results.extend(check_installed_contains_expected(install_dir, to_install_files))
+            actual_sdist_files = list_sdist(sdist_path)
+            actual_wheel_files = list_wheel(wheel_path)

-            # Check that the files that are installed are supposed to be installed
-            results.extend(check_installed_files_are_wanted(install_dir, to_install_files))
+            errors.extend(check_files('sdist', expected_sdist_files, actual_sdist_files))
+            errors.extend(check_files('wheel', expected_wheel_files, actual_wheel_files))

-    for message in results:
-        print(message)
+    for error in errors:
+        print(error)


 if __name__ == '__main__':
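The dev-branch special case in main() synthesizes the changelog entry the sdist check expects. The version-to-filename mapping it relies on, reduced to a snippet (the version string is an illustrative devel value, not read from ansible.release):

    version = '2.14.9.dev0'  # illustrative value for a devel checkout

    # Keep only the major.minor components to name the changelog file.
    major_minor_version = '.'.join(version.split('.')[:2])
    print(f'changelogs/CHANGELOG-v{major_minor_version}.rst')
    # -> changelogs/CHANGELOG-v2.14.rst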
diff --git a/test/sanity/code-smell/package-data.requirements.in b/test/sanity/code-smell/package-data.requirements.in
index 6b58f755..3162feb6 100644
--- a/test/sanity/code-smell/package-data.requirements.in
+++ b/test/sanity/code-smell/package-data.requirements.in
@@ -1,7 +1,8 @@
-docutils < 0.18 # match version required by sphinx in the docs-build sanity test
+build # required to build sdist
+wheel # required to build wheel
 jinja2
 pyyaml # ansible-core requirement
 resolvelib < 0.9.0
 rstcheck < 4 # match version used in other sanity tests
-straight.plugin
 antsibull-changelog
+setuptools == 45.2.0 # minimum supported setuptools
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
index 94ad68fd..b66079d0 100644
--- a/test/sanity/code-smell/package-data.requirements.txt
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -1,12 +1,18 @@
 # edit "package-data.requirements.in" and generate with: hacking/update-sanity-requirements.py --test package-data
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
 antsibull-changelog==0.16.0
+build==0.10.0
 docutils==0.17.1
 Jinja2==3.1.2
 MarkupSafe==2.1.1
 packaging==21.3
+pyproject_hooks==1.0.0
 pyparsing==3.0.9
 PyYAML==6.0
 resolvelib==0.8.1
 rstcheck==3.5.0
 semantic-version==2.10.0
-straight.plugin==1.5.0
+setuptools==45.2.0
+tomli==2.0.1
+wheel==0.41.0
diff --git a/test/sanity/code-smell/configure-remoting-ps1.json b/test/sanity/code-smell/release-names.json
index 593b765d..593b765d 100644
--- a/test/sanity/code-smell/configure-remoting-ps1.json
+++ b/test/sanity/code-smell/release-names.json
diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py
new file mode 100644
index 00000000..81d90d81
--- /dev/null
+++ b/test/sanity/code-smell/release-names.py
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+# (c) 2019, Ansible Project
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+"""
+Test that the release name is present in the list of used up release names
+"""
+
+
+from __future__ import annotations
+
+from yaml import safe_load
+
+from ansible.release import __codename__
+
+
+def main():
+    """Entrypoint to the script"""
+
+    with open('.github/RELEASE_NAMES.yml') as f:
+        releases = safe_load(f.read())
+
+    # Why this format? The file's sole purpose is to be read by a human when they need to know
+    # which release names have already been used. So:
+    # 1) It's easier for a human to find the release names when there's one on each line
+    # 2) It helps keep other people from using the file and then asking for new features in it
+    for name in (r.split(maxsplit=1)[1] for r in releases):
+        if __codename__ == name:
+            break
+    else:
+        print('.github/RELEASE_NAMES.yml: Current codename was not present in the file')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/sanity/code-smell/release-names.requirements.in b/test/sanity/code-smell/release-names.requirements.in
new file mode 100644
index 00000000..c3726e8b
--- /dev/null
+++ b/test/sanity/code-smell/release-names.requirements.in
@@ -0,0 +1 @@
+pyyaml
diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt
new file mode 100644
index 00000000..bb6a130c
--- /dev/null
+++ b/test/sanity/code-smell/release-names.requirements.txt
@@ -0,0 +1,4 @@
+# edit "release-names.requirements.in" and generate with: hacking/update-sanity-requirements.py --test release-names
+# pre-build requirement: pyyaml == 6.0
+# pre-build constraint: Cython < 3.0
+PyYAML==6.0
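The release-names check walks a YAML list whose entries pair a version with its codename, splitting each entry once on whitespace. A sketch with made-up entries (the real list lives in .github/RELEASE_NAMES.yml and is not shown in this diff):

    from yaml import safe_load

    document = """
    - 2.13 Example Codename One
    - 2.14 Example Codename Two
    """

    releases = safe_load(document)

    # Same split as the test above: drop the version, keep the codename.
    names = [r.split(maxsplit=1)[1] for r in releases]
    print(names)  # ['Example Codename One', 'Example Codename Two']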
diff --git a/test/sanity/code-smell/rstcheck.json b/test/sanity/code-smell/rstcheck.json
deleted file mode 100644
index 870c19ff..00000000
--- a/test/sanity/code-smell/rstcheck.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-    "output": "path-line-column-message",
-    "extensions": [
-        ".rst"
-    ]
-}
diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py
deleted file mode 100644
index 99917ca8..00000000
--- a/test/sanity/code-smell/rstcheck.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""Sanity test using rstcheck and sphinx."""
-from __future__ import annotations
-
-import re
-import subprocess
-import sys
-
-
-def main():
-    paths = sys.argv[1:] or sys.stdin.read().splitlines()
-
-    encoding = 'utf-8'
-
-    ignore_substitutions = (
-        'br',
-    )
-
-    cmd = [
-        sys.executable,
-        '-m', 'rstcheck',
-        '--report', 'warning',
-        '--ignore-substitutions', ','.join(ignore_substitutions),
-    ] + paths
-
-    process = subprocess.run(cmd,
-                             stdin=subprocess.DEVNULL,
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE,
-                             check=False,
-                             )
-
-    if process.stdout:
-        raise Exception(process.stdout)
-
-    pattern = re.compile(r'^(?P<path>[^:]*):(?P<line>[0-9]+): \((?P<level>INFO|WARNING|ERROR|SEVERE)/[0-4]\) (?P<message>.*)$')
-
-    results = parse_to_list_of_dict(pattern, process.stderr.decode(encoding))
-
-    for result in results:
-        print('%s:%s:%s: %s' % (result['path'], result['line'], 0, result['message']))
-
-
-def parse_to_list_of_dict(pattern, value):
-    matched = []
-    unmatched = []
-
-    for line in value.splitlines():
-        match = re.search(pattern, line)
-
-        if match:
-            matched.append(match.groupdict())
-        else:
-            unmatched.append(line)
-
-    if unmatched:
-        raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched)))
-
-    return matched
-
-
-if __name__ == '__main__':
-    main()
diff --git a/test/sanity/code-smell/rstcheck.requirements.in b/test/sanity/code-smell/rstcheck.requirements.in
deleted file mode 100644
index 5b93841d..00000000
--- a/test/sanity/code-smell/rstcheck.requirements.in
+++ /dev/null
@@ -1,3 +0,0 @@
-sphinx == 4.2.0 # required for full rstcheck functionality, installed first to get the correct docutils version
-rstcheck < 4 # match version used in other sanity tests
-jinja2 # ansible-core requirement
diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt
deleted file mode 100644
index 81d5c4f0..00000000
--- a/test/sanity/code-smell/rstcheck.requirements.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-# edit "rstcheck.requirements.in" and generate with: hacking/update-sanity-requirements.py --test rstcheck
-alabaster==0.7.12
-Babel==2.10.3
-certifi==2022.9.14
-charset-normalizer==2.1.1
-docutils==0.17.1
-idna==3.4
-imagesize==1.4.1
-Jinja2==3.1.2
-MarkupSafe==2.1.1
-packaging==21.3
-Pygments==2.13.0
-pyparsing==3.0.9
-pytz==2022.2.1
-requests==2.28.1
-rstcheck==3.5.0
-snowballstemmer==2.2.0
-Sphinx==4.2.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-urllib3==1.26.12
diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt
index 660628fc..869522b1 100644
--- a/test/sanity/ignore.txt
+++ b/test/sanity/ignore.txt
@@ -1,9 +1,4 @@
 .azure-pipelines/scripts/publish-codecov.py replace-urlopen
-docs/docsite/rst/dev_guide/testing/sanity/no-smart-quotes.rst no-smart-quotes
-docs/docsite/rst/locales/ja/LC_MESSAGES/dev_guide.po no-smart-quotes # Translation of the no-smart-quotes rule
-examples/scripts/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
-examples/scripts/upgrade_to_ps3.ps1 pslint:PSCustomUseLiteralPath
-examples/scripts/upgrade_to_ps3.ps1 pslint:PSUseApprovedVerbs
 lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
 lib/ansible/config/base.yml no-unwanted-files
 lib/ansible/executor/playbook_executor.py pylint:disallowed-name
@@ -71,6 +66,8 @@ lib/ansible/module_utils/compat/selinux.py import-3.6!skip # pass/fail depends o
 lib/ansible/module_utils/compat/selinux.py import-3.7!skip # pass/fail depends on presence of libselinux.so
 lib/ansible/module_utils/compat/selinux.py import-3.8!skip # pass/fail depends on presence of libselinux.so
 lib/ansible/module_utils/compat/selinux.py import-3.9!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.10!skip # pass/fail depends on presence of libselinux.so
+lib/ansible/module_utils/compat/selinux.py import-3.11!skip # pass/fail depends on presence of libselinux.so
 lib/ansible/module_utils/distro/_distro.py future-import-boilerplate # ignore bundled
 lib/ansible/module_utils/distro/_distro.py metaclass-boilerplate # ignore bundled
 lib/ansible/module_utils/distro/_distro.py no-assert
diff --git a/test/units/module_utils/urls/test_channel_binding.py b/test/units/module_utils/urls/test_channel_binding.py
index ea9cd01c..a08e9e43 100644
--- a/test/units/module_utils/urls/test_channel_binding.py
+++ b/test/units/module_utils/urls/test_channel_binding.py
@@ -9,9 +9,33 @@ import base64
 import os.path
 import pytest

+from ansible.module_utils.compat.version import LooseVersion
 from ansible.module_utils import urls


+# cryptography < 41.0.0 does not associate the algorithm with the cert,
+# so module_utils falls back to cryptography.hazmat.primitives.hashes.SHA256
+rsa_pss_sha512 = (
+    b'\x85\x85\x19\xB9\xE1\x0F\x23\xE2'
+    b'\x1D\x2C\xE9\xD5\x47\x2A\xAB\xCE'
+    b'\x42\x0F\xD1\x00\x75\x9C\x53\xA1'
+    b'\x7B\xB9\x79\x86\xB2\x59\x61\x27'
+)
+
+if urls.HAS_CRYPTOGRAPHY:
+    import cryptography
+
+    if LooseVersion(cryptography.__version__) >= LooseVersion('41.0.0'):
+        rsa_pss_sha512 = (
+            b"K\x8c\xa5\xf5y\x89A\xa0\xaf'\xeb"
+            b"\x00\xeb\xccUz6z\xe0l\x035\xa3h"
+            b"\xfc\xa6 k\xda]\xba\x88\xf8m\xf3"
+            b"\x98\xd2\xd2wW\x87w\xa4\x0e\x14"
+            b"\t\xd4]\xb9\xa29\xe2h\x1b\x9f"
+            b"\xe6\x04\x00\xec\x7fc\x83\xd7b"
+        )
+
+
 @pytest.mark.skipif(not urls.HAS_CRYPTOGRAPHY, reason='Requires cryptography to be installed')
 @pytest.mark.parametrize('certificate, expected', [
     ('rsa_md5.pem', b'\x23\x34\xB8\x47\x6C\xBF\x4E\x6D'
@@ -44,10 +68,7 @@ from ansible.module_utils import urls
                     b'\xC2\xDC\xBB\x89\x8D\x84\x47\x4E'
                     b'\x58\x9C\xD7\xC2\x7A\xDB\xEF\x8B'
                     b'\xD9\xC0\xC0\x68\xAF\x9C\x36\x6D'),
-    ('rsa-pss_sha512.pem', b'\x85\x85\x19\xB9\xE1\x0F\x23\xE2'
-                           b'\x1D\x2C\xE9\xD5\x47\x2A\xAB\xCE'
-                           b'\x42\x0F\xD1\x00\x75\x9C\x53\xA1'
-                           b'\x7B\xB9\x79\x86\xB2\x59\x61\x27'),
+    ('rsa-pss_sha512.pem', rsa_pss_sha512),
    ('ecdsa_sha256.pem', b'\xFE\xCF\x1B\x25\x85\x44\x99\x90'
                         b'\xD9\xE3\xB2\xC9\x2D\x3F\x59\x7E'
                         b'\xC8\x35\x4E\x12\x4E\xDA\x75\x1D'
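The version gate above is a common pattern for pinning a test expectation to a dependency behavior change: cryptography 41.0.0 started associating the certificate's own signature hash (SHA-512 here) with rsa-pss certificates, while older releases fell back to SHA-256, so the expected channel-binding fingerprint differs by version. Reduced to a skeleton (fingerprint bytes elided; assumes cryptography is installed):

    from ansible.module_utils.compat.version import LooseVersion

    import cryptography

    # Pick the expected hash algorithm the same way the test picks
    # its expected fingerprint value.
    if LooseVersion(cryptography.__version__) >= LooseVersion('41.0.0'):
        expected_algorithm = 'sha512'  # cert's own algorithm is used
    else:
        expected_algorithm = 'sha256'  # legacy SHA-256 fallback

    print(expected_algorithm)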