36 files changed, 332 insertions, 73 deletions
@@ -1,6 +1,6 @@ Metadata-Version: 1.2 Name: ansible-core -Version: 2.12.3 +Version: 2.12.4 Summary: Radically simple IT automation Home-page: https://ansible.com/ Author: Ansible, Inc. diff --git a/changelogs/CHANGELOG-v2.12.rst b/changelogs/CHANGELOG-v2.12.rst index 71250127..80ff7123 100644 --- a/changelogs/CHANGELOG-v2.12.rst +++ b/changelogs/CHANGELOG-v2.12.rst @@ -5,6 +5,29 @@ ansible-core 2.12 "Dazed and Confused" Release Notes .. contents:: Topics +v2.12.4 +======= + +Release Summary +--------------- + +| Release Date: 2022-03-28 +| `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + +Bugfixes +-------- + +- Add a YAML representer for ``NativeJinjaText`` +- Add a YAML representer for ``NativeJinjaUnsafeText`` +- AnsiballZ - Ensure we use the full python package in the module cache filename to avoid a case where ``collections:`` is used to execute a module via short name, where the short name duplicates another module from ``ansible.builtin`` or another collection that was executed previously. +- Fix collection filter/test plugin redirects (https://github.com/ansible/ansible/issues/77192). +- ansible-galaxy collection verify - display files/directories not included in the FILES.json as modified content. +- ansible-test - Fix ``windows-integration`` and ``network-integration`` when used with the ``--docker`` option and user-provided inventory. +- extend timeout for ansible-galaxy when communicating with the galaxy server api, and apply it to all interactions with the api +- first_found - fix to allow for spaces in file names (https://github.com/ansible/ansible/issues/77136) +- unarchive - the ``io_buffer_size`` option added in 2.12 was not accepted by the module (https://github.com/ansible/ansible/pull/77271). + v2.12.3 ======= diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 16b9241e..e5db2029 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1344,3 +1344,51 @@ releases: - ssh_use_right_host.yml - v2.12.3rc1_summary.yaml release_date: '2022-02-23' + 2.12.4: + changes: + release_summary: '| Release Date: 2022-03-28 + + | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + ' + codename: Dazed and Confused + fragments: + - v2.12.4_summary.yaml + release_date: '2022-03-28' + 2.12.4rc1: + changes: + bugfixes: + - Add a YAML representer for ``NativeJinjaText`` + - Add a YAML representer for ``NativeJinjaUnsafeText`` + - AnsiballZ - Ensure we use the full python package in the module cache filename + to avoid a case where ``collections:`` is used to execute a module via short + name, where the short name duplicates another module from ``ansible.builtin`` + or another collection that was executed previously. + - Fix collection filter/test plugin redirects (https://github.com/ansible/ansible/issues/77192). + - ansible-galaxy collection verify - display files/directories not included + in the FILES.json as modified content. + - ansible-test - Fix ``windows-integration`` and ``network-integration`` when + used with the ``--docker`` option and user-provided inventory. + - extend timeout for ansible-galaxy when communicating with the galaxy server + api, and apply it to all interactions with the api + - first_found - fix to allow for spaces in file names (https://github.com/ansible/ansible/issues/77136) + - unarchive - the ``io_buffer_size`` option added in 2.12 was not accepted by + the module (https://github.com/ansible/ansible/pull/77271). 
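The first two bugfixes above register YAML representers for ``NativeJinjaText`` and ``NativeJinjaUnsafeText`` on ``AnsibleDumper``. As a minimal standalone PyYAML sketch of why that registration is needed (the ``NativeText`` class and ``Dumper`` subclass below are illustrative stand-ins, not Ansible's actual types)::

    import yaml
    from yaml.representer import SafeRepresenter

    class NativeText(str):          # hypothetical stand-in for NativeJinjaText
        pass

    class Dumper(yaml.SafeDumper):  # stand-in for AnsibleDumper
        pass

    try:
        # SafeDumper looks representers up by exact type, so a plain str
        # subclass is rejected even though it is "just a string".
        yaml.dump({'msg': NativeText('hello')}, Dumper=Dumper)
    except yaml.representer.RepresenterError as exc:
        print('without representer:', exc)

    # The 2.12.4 fix is the equivalent of this registration, done for
    # NativeJinjaText and NativeJinjaUnsafeText on AnsibleDumper.
    Dumper.add_representer(NativeText, SafeRepresenter.represent_str)
    print(yaml.dump({'msg': NativeText('hello')}, Dumper=Dumper))

Ansible's ``parsing/yaml/dumper.py`` applies the same pattern with its own ``represent_unicode`` helper, as shown later in this diff.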
+ release_summary: '| Release Date: 2022-03-21 + + | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + ' + codename: Dazed and Confused + fragments: + - 76690-fix-ansible-galaxy-collection-verify-modified-content.yaml + - 77136-first_found-spaces-in-names.yml + - 77210-fix-collection-filter-test-redirects.yml + - 77271-unarchive.yml + - ansible-test-delegation-inventory.yaml + - ansible_galaxy_timeout.yml + - fqn-module-cache.yml + - nativejinjatext-yaml-representer.yml + - nativejinjaunsafetext-yaml-representer.yml + - v2.12.4rc1_summary.yaml + release_date: '2022-03-21' diff --git a/docs/docsite/rst/installation_guide/intro_installation.rst b/docs/docsite/rst/installation_guide/intro_installation.rst index 7ec8f6d9..ce360ce1 100644 --- a/docs/docsite/rst/installation_guide/intro_installation.rst +++ b/docs/docsite/rst/installation_guide/intro_installation.rst @@ -85,6 +85,7 @@ Ansible also distributes a minimalist object called ``ansible-core`` (or ``ansib You can choose any of the following ways to install ``ansible-core``: * Install ``ansible-core`` (version 2.11 and greater) or ``ansible-base`` (version 2.10) with ``pip``. +* Install ``ansible-core`` (version 2.11 and greater) RPM package with ``dnf``. * Install ``ansible-core`` from source from the ansible/ansible GitHub repository to access the development (``devel``) version to develop or test the latest features. .. note:: @@ -241,10 +242,6 @@ On CentOS: RPMs for currently supported versions of RHEL and CentOS are also available from `EPEL <https://fedoraproject.org/wiki/EPEL>`_. -.. note:: - - Since Ansible 2.10 for RHEL is not available at this time, continue to use Ansible 2.9. - Ansible can manage older operating systems that contain Python 2.6 or higher. .. _from_apt: diff --git a/docs/man/man1/ansible-config.1 b/docs/man/man1/ansible-config.1 index 95a7fcfb..c10b447a 100644 --- a/docs/man/man1/ansible-config.1 +++ b/docs/man/man1/ansible-config.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-CONFIG 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-CONFIG 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-config \- View ansible configuration. . diff --git a/docs/man/man1/ansible-console.1 b/docs/man/man1/ansible-console.1 index a66e8fa1..ebc61205 100644 --- a/docs/man/man1/ansible-console.1 +++ b/docs/man/man1/ansible-console.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-CONSOLE 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-CONSOLE 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-console \- REPL console for executing Ansible tasks. . diff --git a/docs/man/man1/ansible-doc.1 b/docs/man/man1/ansible-doc.1 index 5142e598..53c7c98a 100644 --- a/docs/man/man1/ansible-doc.1 +++ b/docs/man/man1/ansible-doc.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-DOC 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-DOC 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-doc \- plugin documentation tool . diff --git a/docs/man/man1/ansible-galaxy.1 b/docs/man/man1/ansible-galaxy.1 index 0b4c0f0e..67fd5128 100644 --- a/docs/man/man1/ansible-galaxy.1 +++ b/docs/man/man1/ansible-galaxy.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . 
-.TH ANSIBLE-GALAXY 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-GALAXY 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-galaxy \- Perform various Role and Collection related operations. . diff --git a/docs/man/man1/ansible-inventory.1 b/docs/man/man1/ansible-inventory.1 index a6724b4f..69cdaccf 100644 --- a/docs/man/man1/ansible-inventory.1 +++ b/docs/man/man1/ansible-inventory.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-INVENTORY 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-INVENTORY 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-inventory \- None . diff --git a/docs/man/man1/ansible-playbook.1 b/docs/man/man1/ansible-playbook.1 index 1521cc43..b8e1cdde 100644 --- a/docs/man/man1/ansible-playbook.1 +++ b/docs/man/man1/ansible-playbook.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-PLAYBOOK 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-PLAYBOOK 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-playbook \- Runs Ansible playbooks, executing the defined tasks on the targeted hosts. . diff --git a/docs/man/man1/ansible-pull.1 b/docs/man/man1/ansible-pull.1 index 5eb87f23..522ac194 100644 --- a/docs/man/man1/ansible-pull.1 +++ b/docs/man/man1/ansible-pull.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-PULL 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-PULL 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-pull \- pulls playbooks from a VCS repo and executes them for the local host . diff --git a/docs/man/man1/ansible-vault.1 b/docs/man/man1/ansible-vault.1 index 431b1516..da04c000 100644 --- a/docs/man/man1/ansible-vault.1 +++ b/docs/man/man1/ansible-vault.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE-VAULT 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE-VAULT 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible-vault \- encryption/decryption utility for Ansible data files . diff --git a/docs/man/man1/ansible.1 b/docs/man/man1/ansible.1 index 6ee36c5a..98845e32 100644 --- a/docs/man/man1/ansible.1 +++ b/docs/man/man1/ansible.1 @@ -1,6 +1,6 @@ .\" Man page generated from reStructuredText. . -.TH ANSIBLE 1 "" "Ansible 2.12.3" "System administration commands" +.TH ANSIBLE 1 "" "Ansible 2.12.4" "System administration commands" .SH NAME ansible \- Define and run a single task 'playbook' against a set of hosts . 
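The first code change below (``lib/ansible/executor/module_common.py``) keys the AnsiballZ payload cache by ``remote_module_fqn`` instead of the short ``module_name``, matching the AnsiballZ bugfix described in the changelog above. A minimal sketch of the collision the old key allowed, with a plain dict standing in for the on-disk cache (the helper names are hypothetical)::

    cache = {}  # stand-in for files named "<key>-<compression>" under DEFAULT_LOCAL_TMP

    def build_payload(fqcn):
        # Pretend the payload embeds the module it was built from.
        return 'payload-for-%s' % fqcn

    def cached_payload(key_name, fqcn, compression='ZIP_DEFLATED'):
        key = '%s-%s' % (key_name, compression)
        return cache.setdefault(key, build_payload(fqcn))

    # Pre-2.12.4: the key is the short name, so running ansible.builtin.ping
    # first also answers a later duplicate.name.ping lookup with a stale payload.
    print(cached_payload('ping', 'ansible.builtin.ping'))  # payload-for-ansible.builtin.ping
    print(cached_payload('ping', 'duplicate.name.ping'))   # payload-for-ansible.builtin.ping (stale)

    cache.clear()

    # 2.12.4: the key is the fully qualified name, so each module gets its own entry.
    print(cached_payload('ansible.builtin.ping', 'ansible.builtin.ping'))  # payload-for-ansible.builtin.ping
    print(cached_payload('duplicate.name.ping', 'duplicate.name.ping'))    # payload-for-duplicate.name.ping

The ``ansiballz_dupe`` integration test added later in this diff asserts exactly this: ``ping`` and ``duplicate.name.ping`` must resolve to different modules within a single run.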
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py index c0760fa3..953c1604 100644 --- a/lib/ansible/executor/module_common.py +++ b/lib/ansible/executor/module_common.py @@ -1161,7 +1161,7 @@ def _find_module_utils(module_name, b_module_data, module_path, module_args, tas compression_method = zipfile.ZIP_STORED lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache') - cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression)) + cached_module_filename = os.path.join(lookup_path, "%s-%s" % (remote_module_fqn, module_compression)) zipdata = None # Optimization -- don't lock if the module has already been cached diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py index 5bd37149..3becc67d 100644 --- a/lib/ansible/galaxy/api.py +++ b/lib/ansible/galaxy/api.py @@ -258,6 +258,7 @@ class GalaxyAPI: available_api_versions=None, clear_response_cache=False, no_cache=True, priority=float('inf'), + timeout=60, ): self.galaxy = galaxy self.name = name @@ -266,6 +267,7 @@ class GalaxyAPI: self.token = token self.api_server = url self.validate_certs = validate_certs + self.timeout = timeout self._available_api_versions = available_api_versions or {} self._priority = priority @@ -377,7 +379,7 @@ class GalaxyAPI: try: display.vvvv("Calling Galaxy at %s" % url) resp = open_url(to_native(url), data=args, validate_certs=self.validate_certs, headers=headers, - method=method, timeout=20, http_agent=user_agent(), follow_redirects='safe') + method=method, timeout=self.timeout, http_agent=user_agent(), follow_redirects='safe') except HTTPError as e: raise GalaxyError(e, error_context_msg) except Exception as e: @@ -435,7 +437,7 @@ class GalaxyAPI: """ url = _urljoin(self.api_server, self.available_api_versions['v1'], "tokens") + '/' args = urlencode({"github_token": github_token}) - resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST", http_agent=user_agent()) + resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST", http_agent=user_agent(), timeout=self.timeout) data = json.loads(to_text(resp.read(), errors='surrogate_or_strict')) return data diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index 436436e6..9634f602 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -220,11 +220,46 @@ def verify_local_collection( _verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content) file_manifest = get_json_from_validation_source(file_manifest_filename) + collection_dirs = set() + collection_files = { + os.path.join(b_collection_path, b'MANIFEST.json'), + os.path.join(b_collection_path, b'FILES.json'), + } + # Use the file manifest to verify individual file checksums for manifest_data in file_manifest['files']: + name = manifest_data['name'] + if manifest_data['ftype'] == 'file': + collection_files.add( + os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict')) + ) expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']] - _verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content) + _verify_file_hash(b_collection_path, name, expected_hash, modified_content) + + if manifest_data['ftype'] == 'dir': + collection_dirs.add( + os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict')) + ) + + # Find any paths not in the FILES.json + for root, dirs, 
files in os.walk(b_collection_path): + for name in files: + full_path = os.path.join(root, name) + path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict') + + if full_path not in collection_files: + modified_content.append( + ModifiedContent(filename=path, expected='the file does not exist', installed='the file exists') + ) + for name in dirs: + full_path = os.path.join(root, name) + path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict') + + if full_path not in collection_dirs: + modified_content.append( + ModifiedContent(filename=path, expected='the directory does not exist', installed='the directory exists') + ) if modified_content: result.success = False diff --git a/lib/ansible/galaxy/collection/concrete_artifact_manager.py b/lib/ansible/galaxy/collection/concrete_artifact_manager.py index 7406b85b..73b392f4 100644 --- a/lib/ansible/galaxy/collection/concrete_artifact_manager.py +++ b/lib/ansible/galaxy/collection/concrete_artifact_manager.py @@ -66,8 +66,8 @@ class ConcreteArtifactsManager: * retrieving the metadata out of the downloaded artifacts """ - def __init__(self, b_working_directory, validate_certs=True): - # type: (bytes, bool) -> None + def __init__(self, b_working_directory, validate_certs=True, timeout=60): + # type: (bytes, bool, int) -> None """Initialize ConcreteArtifactsManager caches and costraints.""" self._validate_certs = validate_certs # type: bool self._artifact_cache = {} # type: Dict[bytes, bytes] @@ -75,6 +75,7 @@ class ConcreteArtifactsManager: self._artifact_meta_cache = {} # type: Dict[bytes, Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]] self._galaxy_collection_cache = {} # type: Dict[Union[Candidate, Requirement], Tuple[str, str, GalaxyToken]] self._b_working_directory = b_working_directory # type: bytes + self.timeout = timeout # type: int def get_galaxy_artifact_path(self, collection): # type: (Union[Candidate, Requirement]) -> bytes @@ -169,6 +170,7 @@ class ConcreteArtifactsManager: self._b_working_directory, expected_hash=None, # NOTE: URLs don't support checksums validate_certs=self._validate_certs, + timeout=self.timeout ) except URLError as err: raise_from( @@ -389,8 +391,8 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path): # FIXME: use random subdirs while preserving the file names -def _download_file(url, b_path, expected_hash, validate_certs, token=None): - # type: (str, bytes, Optional[str], bool, GalaxyToken) -> bytes +def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60): + # type: (str, bytes, Optional[str], bool, GalaxyToken, int) -> bytes # ^ NOTE: used in download and verify_collections ^ b_tarball_name = to_bytes( url.rsplit('/', 1)[1], errors='surrogate_or_strict', @@ -412,6 +414,7 @@ def _download_file(url, b_path, expected_hash, validate_certs, token=None): validate_certs=validate_certs, headers=None if token is None else token.headers(), unredirected_headers=['Authorization'], http_agent=user_agent(), + timeout=timeout ) with open(b_file_path, 'wb') as download_file: # type: BinaryIO diff --git a/lib/ansible/modules/unarchive.py b/lib/ansible/modules/unarchive.py index 0b265943..3eba055a 100644 --- a/lib/ansible/modules/unarchive.py +++ b/lib/ansible/modules/unarchive.py @@ -53,7 +53,7 @@ options: description: - Size of the volatile memory buffer that is used for extracting files from the archive in bytes. 
type: int - default: 64 KiB + default: 65536 version_added: "2.12" list_files: description: @@ -305,7 +305,7 @@ class ZipArchive(object): self.file_args = file_args self.opts = module.params['extra_opts'] self.module = module - self.io_buffer_size = module.params.get("io_buffer_size", 64 * 1024) + self.io_buffer_size = module.params["io_buffer_size"] self.excludes = module.params['exclude'] self.includes = [] self.include_files = self.module.params['include'] @@ -977,6 +977,13 @@ def main(): include=dict(type='list', elements='str', default=[]), extra_opts=dict(type='list', elements='str', default=[]), validate_certs=dict(type='bool', default=True), + io_buffer_size=dict(type='int', default=64 * 1024), + + # Options that are for the action plugin, but ignored by the module itself. + # We have them here so that the sanity tests pass without ignores, which + # reduces the likelihood of further bugs added. + copy=dict(type='bool', default=True), + decrypt=dict(type='bool', default=True), ), add_file_common_args=True, # check-mode only works for zip files, we cover that later diff --git a/lib/ansible/parsing/yaml/dumper.py b/lib/ansible/parsing/yaml/dumper.py index 65d35781..8701bb81 100644 --- a/lib/ansible/parsing/yaml/dumper.py +++ b/lib/ansible/parsing/yaml/dumper.py @@ -24,7 +24,7 @@ import yaml from ansible.module_utils.six import text_type, binary_type from ansible.module_utils.common.yaml import SafeDumper from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping, AnsibleVaultEncryptedUnicode -from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes +from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText from ansible.template import AnsibleUndefined from ansible.vars.hostvars import HostVars, HostVarsVars from ansible.vars.manager import VarsWithSources @@ -110,3 +110,13 @@ AnsibleDumper.add_representer( AnsibleUndefined, represent_undefined, ) + +AnsibleDumper.add_representer( + NativeJinjaUnsafeText, + represent_unicode, +) + +AnsibleDumper.add_representer( + NativeJinjaText, + represent_unicode, +) diff --git a/lib/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py index 5d54959e..8a37a689 100644 --- a/lib/ansible/plugins/lookup/first_found.py +++ b/lib/ansible/plugins/lookup/first_found.py @@ -134,6 +134,7 @@ RETURN = """ elements: path """ import os +import re from jinja2.exceptions import UndefinedError @@ -144,18 +145,13 @@ from ansible.plugins.lookup import LookupBase def _split_on(terms, spliters=','): - - # TODO: fix as it does not allow spaces in names termlist = [] if isinstance(terms, string_types): - for spliter in spliters: - terms = terms.replace(spliter, ' ') - termlist = terms.split(' ') + termlist = re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms) else: # added since options will already listify for t in terms: termlist.extend(_split_on(t, spliters)) - return termlist diff --git a/lib/ansible/release.py b/lib/ansible/release.py index 078520dc..61b91c1c 100644 --- a/lib/ansible/release.py +++ b/lib/ansible/release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.12.3' +__version__ = '2.12.4' __author__ = 'Ansible, Inc.' 
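The ``first_found`` change above replaces the old replace-every-splitter-with-a-space-then-split approach with a regex split on the splitter characters only, so file names containing spaces survive intact. A standalone sketch of the behavioural difference (both functions mirror the before/after code in the hunk)::

    import re

    def split_old(terms, spliters=','):
        # pre-2.12.4: turn every splitter into a space, then split on spaces,
        # which also breaks apart file names that legitimately contain spaces
        for spliter in spliters:
            terms = terms.replace(spliter, ' ')
        return terms.split(' ')

    def split_new(terms, spliters=','):
        # 2.12.4: split only on the splitter characters themselves
        return re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms)

    terms = 'vars file spaces.yml,other.yml'
    print(split_old(terms))  # ['vars', 'file', 'spaces.yml', 'other.yml']
    print(split_new(terms))  # ['vars file spaces.yml', 'other.yml']

The new integration test later in this diff looks up ``vars file spaces.yml`` through ``with_first_found`` to cover exactly this case.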
__codename__ = 'Dazed and Confused' diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py index 3c27587c..741275d1 100644 --- a/lib/ansible/template/__init__.py +++ b/lib/ansible/template/__init__.py @@ -482,6 +482,7 @@ class JinjaPluginIntercept(MutableMapping): # FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's # aren't supposed to change during a run def __getitem__(self, key): + original_key = key self._load_ansible_plugins() try: @@ -496,56 +497,61 @@ class JinjaPluginIntercept(MutableMapping): if func: return func - # didn't find it in the pre-built Jinja env, assume it's a former builtin and follow the normal routing path - leaf_key = key - key = 'ansible.builtin.' + key - else: - leaf_key = key.split('.')[-1] + key, leaf_key = get_fqcr_and_name(key) + seen = set() + + while True: + if key in seen: + raise TemplateSyntaxError( + 'recursive collection redirect found for %r' % original_key, + 0 + ) + seen.add(key) - acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname) + acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname) - if not acr: - raise KeyError('invalid plugin name: {0}'.format(key)) + if not acr: + raise KeyError('invalid plugin name: {0}'.format(key)) - ts = _get_collection_metadata(acr.collection) + ts = _get_collection_metadata(acr.collection) - # TODO: implement support for collection-backed redirect (currently only builtin) - # TODO: implement cycle detection (unified across collection redir as well) + # TODO: implement cycle detection (unified across collection redir as well) - routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {}) + routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {}) - deprecation_entry = routing_entry.get('deprecation') - if deprecation_entry: - warning_text = deprecation_entry.get('warning_text') - removal_date = deprecation_entry.get('removal_date') - removal_version = deprecation_entry.get('removal_version') + deprecation_entry = routing_entry.get('deprecation') + if deprecation_entry: + warning_text = deprecation_entry.get('warning_text') + removal_date = deprecation_entry.get('removal_date') + removal_version = deprecation_entry.get('removal_version') - if not warning_text: - warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key) + if not warning_text: + warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key) - display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection) + display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection) - tombstone_entry = routing_entry.get('tombstone') + tombstone_entry = routing_entry.get('tombstone') - if tombstone_entry: - warning_text = tombstone_entry.get('warning_text') - removal_date = tombstone_entry.get('removal_date') - removal_version = tombstone_entry.get('removal_version') + if tombstone_entry: + warning_text = tombstone_entry.get('warning_text') + removal_date = tombstone_entry.get('removal_date') + removal_version = tombstone_entry.get('removal_version') - if not warning_text: - warning_text = '{0} "{1}" has been removed'.format(self._dirname, key) + if not warning_text: + warning_text = '{0} "{1}" has been removed'.format(self._dirname, key) - exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date, - collection_name=acr.collection, removed=True) + exc_msg = 
display.get_deprecation_message(warning_text, version=removal_version, date=removal_date, + collection_name=acr.collection, removed=True) - raise AnsiblePluginRemovedError(exc_msg) + raise AnsiblePluginRemovedError(exc_msg) - redirect_fqcr = routing_entry.get('redirect', None) - if redirect_fqcr: - acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname) - display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource)) - key = redirect_fqcr - # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs) + redirect = routing_entry.get('redirect', None) + if redirect: + next_key, leaf_key = get_fqcr_and_name(redirect, collection=acr.collection) + display.vvv('redirecting (type: {0}) {1}.{2} to {3}'.format(self._dirname, acr.collection, acr.resource, next_key)) + key = next_key + else: + break func = self._collection_jinja_func_cache.get(key) @@ -620,6 +626,17 @@ class JinjaPluginIntercept(MutableMapping): return len(self._delegatee) +def get_fqcr_and_name(resource, collection='ansible.builtin'): + if '.' not in resource: + name = resource + fqcr = collection + '.' + resource + else: + name = resource.split('.')[-1] + fqcr = resource + + return fqcr, name + + class AnsibleEnvironment(Environment): ''' Our custom environment, which simply allows us to override the class-level diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml index 8bf39577..815a67f5 100644 --- a/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml +++ b/test/integration/targets/ansible-galaxy-collection/tasks/verify.yml @@ -275,6 +275,16 @@ - name: append a newline to a module to modify the checksum shell: "echo '' >> {{ module_path }}" +- name: create a new module file + file: + path: '{{ galaxy_dir }}/ansible_collections/ansible_test/verify/plugins/modules/test_new_file.py' + state: touch + +- name: create a new directory + file: + path: '{{ galaxy_dir }}/ansible_collections/ansible_test/verify/plugins/modules/test_new_dir' + state: directory + - name: verify modified collection locally-only (should fail) command: ansible-galaxy collection verify --offline ansible_test.verify register: verify @@ -287,3 +297,5 @@ - verify.rc != 0 - "'Collection ansible_test.verify contains modified content in the following files:' in verify.stdout" - "'plugins/modules/test_module.py' in verify.stdout" + - "'plugins/modules/test_new_file.py' in verify.stdout" + - "'plugins/modules/test_new_dir' in verify.stdout" diff --git a/test/integration/targets/collections/ansiballz_dupe/collections/ansible_collections/duplicate/name/plugins/modules/ping.py b/test/integration/targets/collections/ansiballz_dupe/collections/ansible_collections/duplicate/name/plugins/modules/ping.py new file mode 100644 index 00000000..d0fdba76 --- /dev/null +++ b/test/integration/targets/collections/ansiballz_dupe/collections/ansible_collections/duplicate/name/plugins/modules/ping.py @@ -0,0 +1,3 @@ +#!/usr/bin/python +from ansible.module_utils.basic import AnsibleModule +AnsibleModule({}).exit_json(ping='duplicate.name.pong') diff --git a/test/integration/targets/collections/ansiballz_dupe/test_ansiballz_cache_dupe_shortname.yml b/test/integration/targets/collections/ansiballz_dupe/test_ansiballz_cache_dupe_shortname.yml new file mode 100644 index 00000000..25526246 --- /dev/null +++ 
b/test/integration/targets/collections/ansiballz_dupe/test_ansiballz_cache_dupe_shortname.yml @@ -0,0 +1,15 @@ +- hosts: localhost + gather_facts: false + tasks: + - ping: + register: result1 + + - ping: + collections: + - duplicate.name + register: result2 + + - assert: + that: + - result1.ping == 'pong' + - result2.ping == 'duplicate.name.pong' diff --git a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testredirect/meta/runtime.yml b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testredirect/meta/runtime.yml index da8e4901..2dcf456e 100644 --- a/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testredirect/meta/runtime.yml +++ b/test/integration/targets/collections/collection_root_user/ansible_collections/testns/testredirect/meta/runtime.yml @@ -2,3 +2,20 @@ plugin_routing: modules: ping: redirect: testns.testcoll.ping + filter: + multi_redirect_filter: + redirect: testns.testredirect.redirect_filter1 + deprecation: + warning_text: deprecation1 + redirect_filter1: + redirect: redirect_filter2 + deprecation: + warning_text: deprecation2 + redirect_filter2: + redirect: testns.testcoll.testfilter + deprecation: + warning_text: deprecation3 + dead_end: + redirect: bad_redirect + recursive_redirect: + redirect: recursive_redirect diff --git a/test/integration/targets/collections/runme.sh b/test/integration/targets/collections/runme.sh index 5a5261bb..5f11abeb 100755 --- a/test/integration/targets/collections/runme.sh +++ b/test/integration/targets/collections/runme.sh @@ -65,6 +65,16 @@ else ansible-playbook -i "${INVENTORY_PATH}" collection_root_user/ansible_collections/testns/testcoll/playbooks/default_collection_playbook.yml "$@" fi +# test redirects and warnings for filter redirects +echo "testing redirect and deprecation display" +ANSIBLE_DEPRECATION_WARNINGS=yes ansible localhost -m debug -a msg='{{ "data" | testns.testredirect.multi_redirect_filter }}' -vvvvv 2>&1 | tee out.txt +cat out.txt + +test "$(grep out.txt -ce 'deprecation1' -ce 'deprecation2' -ce 'deprecation3')" == 3 +grep out.txt -e 'redirecting (type: filter) testns.testredirect.multi_redirect_filter to testns.testredirect.redirect_filter1' +grep out.txt -e 'redirecting (type: filter) testns.testredirect.redirect_filter1 to testns.testredirect.redirect_filter2' +grep out.txt -e 'redirecting (type: filter) testns.testredirect.redirect_filter2 to testns.testcoll.testfilter' + echo "--- validating collections support in playbooks/roles" # run test playbooks ansible-playbook -i "${INVENTORY_PATH}" -v "${TEST_PLAYBOOK}" "$@" @@ -100,6 +110,9 @@ ansible-playbook inventory_test.yml -i a.statichost.yml -i redirected.statichost # test plugin loader redirect_list ansible-playbook test_redirect_list.yml -v "$@" +# test ansiballz cache dupe +ansible-playbook ansiballz_dupe/test_ansiballz_cache_dupe_shortname.yml -v "$@" + # test adjacent with --playbook-dir export ANSIBLE_COLLECTIONS_PATH='' ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED=1 ansible-inventory --list --export --playbook-dir=. 
-v "$@" diff --git a/test/integration/targets/collections/test_collection_meta.yml b/test/integration/targets/collections/test_collection_meta.yml index 22a00b21..b682d220 100644 --- a/test/integration/targets/collections/test_collection_meta.yml +++ b/test/integration/targets/collections/test_collection_meta.yml @@ -21,6 +21,25 @@ # redirect filter - assert: that: ('yes' | formerly_core_filter) == True + # redirect filter (multiple levels) + - assert: + that: ('data' | testns.testredirect.multi_redirect_filter) == 'data_via_testfilter_from_userdir' + # invalid filter redirect + - debug: msg="{{ 'data' | testns.testredirect.dead_end }}" + ignore_errors: yes + register: redirect_failure + - assert: + that: + - redirect_failure is failed + - '"no filter named ''testns.testredirect.dead_end''" in (redirect_failure.msg | lower)' + # recursive filter redirect + - debug: msg="{{ 'data' | testns.testredirect.recursive_redirect }}" + ignore_errors: yes + register: redirect_failure + - assert: + that: + - redirect_failure is failed + - '"recursive collection redirect found for ''testns.testredirect.recursive_redirect''" in redirect_failure.msg' # legacy filter should mask redirected - assert: that: ('' | formerly_core_masked_filter) == 'hello from overridden formerly_core_masked_filter' diff --git a/test/integration/targets/lookup_first_found/files/vars file spaces.yml b/test/integration/targets/lookup_first_found/files/vars file spaces.yml new file mode 100644 index 00000000..790bc26c --- /dev/null +++ b/test/integration/targets/lookup_first_found/files/vars file spaces.yml @@ -0,0 +1 @@ +foo: 1 diff --git a/test/integration/targets/lookup_first_found/tasks/main.yml b/test/integration/targets/lookup_first_found/tasks/main.yml index e85f4f27..9aeaf1d1 100644 --- a/test/integration/targets/lookup_first_found/tasks/main.yml +++ b/test/integration/targets/lookup_first_found/tasks/main.yml @@ -84,3 +84,13 @@ assert: that: - "hatethisformat == '/etc/hosts'" + +- name: test spaces in names + include_vars: "{{ item }}" + with_first_found: + - files: + - "{{ role_path + '/files/vars file spaces.yml' }}" + +- assert: + that: + - foo is defined diff --git a/test/integration/targets/setup_pexpect/tasks/main.yml b/test/integration/targets/setup_pexpect/tasks/main.yml index 690fe441..84b7bd1c 100644 --- a/test/integration/targets/setup_pexpect/tasks/main.yml +++ b/test/integration/targets/setup_pexpect/tasks/main.yml @@ -3,8 +3,17 @@ src: constraints.txt dest: "{{ remote_tmp_dir }}/pexpect-constraints.txt" +- name: Install pexpect with --user + pip: + name: pexpect + extra_args: '--user --constraint "{{ remote_tmp_dir }}/pexpect-constraints.txt"' + state: present + ignore_errors: yes # fails when inside a virtual environment + register: pip_user + - name: Install pexpect pip: name: pexpect extra_args: '--constraint "{{ remote_tmp_dir }}/pexpect-constraints.txt"' state: present + when: pip_user is failed diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py index 839b24ae..a9a49aa1 100644 --- a/test/lib/ansible_test/_internal/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py @@ -92,6 +92,7 @@ from ...data import ( ) from ...host_configs import ( + InventoryConfig, OriginConfig, ) @@ -183,6 +184,18 @@ def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> display.warning('Use of "ansible_ssh_private_key_file" in inventory with the --docker or 
--remote option is unsupported and will likely fail.') +def get_inventory_absolute_path(args: IntegrationConfig, target: InventoryConfig) -> str: + """Return the absolute inventory path used for the given integration configuration or target inventory config (if provided).""" + path = target.path or os.path.basename(get_inventory_relative_path(args)) + + if args.host_path: + path = os.path.join(data_context().content.root, path) # post-delegation, path is relative to the content root + else: + path = os.path.join(data_context().content.root, data_context().content.integration_path, path) + + return path + + def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str """Return the inventory path used for the given integration configuration relative to the content root.""" inventory_names = { diff --git a/test/lib/ansible_test/_internal/commands/integration/network.py b/test/lib/ansible_test/_internal/commands/integration/network.py index f9953144..778384f4 100644 --- a/test/lib/ansible_test/_internal/commands/integration/network.py +++ b/test/lib/ansible_test/_internal/commands/integration/network.py @@ -23,6 +23,7 @@ from ...config import ( from . import ( command_integration_filter, command_integration_filtered, + get_inventory_absolute_path, get_inventory_relative_path, check_inventory, delegate_inventory, @@ -46,8 +47,11 @@ def command_network_integration(args): # type: (NetworkIntegrationConfig) -> No template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template' if issubclass(args.target_type, NetworkInventoryConfig): - inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, - args.only_target(NetworkInventoryConfig).path or os.path.basename(inventory_relative_path)) + target = args.only_target(NetworkInventoryConfig) + inventory_path = get_inventory_absolute_path(args, target) + + if args.delegate or not target.path: + target.path = inventory_relative_path else: inventory_path = os.path.join(data_context().content.root, inventory_relative_path) diff --git a/test/lib/ansible_test/_internal/commands/integration/windows.py b/test/lib/ansible_test/_internal/commands/integration/windows.py index f6b44942..d14ae11b 100644 --- a/test/lib/ansible_test/_internal/commands/integration/windows.py +++ b/test/lib/ansible_test/_internal/commands/integration/windows.py @@ -34,6 +34,7 @@ from ...host_configs import ( from . 
import ( command_integration_filter, command_integration_filtered, + get_inventory_absolute_path, get_inventory_relative_path, check_inventory, delegate_inventory, @@ -52,8 +53,11 @@ def command_windows_integration(args): # type: (WindowsIntegrationConfig) -> No template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template' if issubclass(args.target_type, WindowsInventoryConfig): - inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, - args.only_target(WindowsInventoryConfig).path or os.path.basename(inventory_relative_path)) + target = args.only_target(WindowsInventoryConfig) + inventory_path = get_inventory_absolute_path(args, target) + + if args.delegate or not target.path: + target.path = inventory_relative_path else: inventory_path = os.path.join(data_context().content.root, inventory_relative_path) diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt index 058331e1..1dba93b0 100644 --- a/test/sanity/ignore.txt +++ b/test/sanity/ignore.txt @@ -101,7 +101,6 @@ lib/ansible/modules/stat.py validate-modules:undocumented-parameter lib/ansible/modules/systemd.py validate-modules:parameter-invalid lib/ansible/modules/systemd.py validate-modules:return-syntax-error lib/ansible/modules/sysvinit.py validate-modules:return-syntax-error -lib/ansible/modules/unarchive.py validate-modules:nonexistent-parameter-documented lib/ansible/modules/uri.py pylint:disallowed-name lib/ansible/modules/uri.py validate-modules:doc-required-mismatch lib/ansible/modules/user.py validate-modules:doc-default-does-not-match-spec diff --git a/test/units/modules/test_unarchive.py b/test/units/modules/test_unarchive.py index c3300372..3e7a58c9 100644 --- a/test/units/modules/test_unarchive.py +++ b/test/units/modules/test_unarchive.py @@ -52,6 +52,7 @@ class TestCaseZipArchive: "extra_opts": "", "exclude": "", "include": "", + "io_buffer_size": 65536, } z = ZipArchive( @@ -74,6 +75,7 @@ class TestCaseTgzArchive: "extra_opts": "", "exclude": "", "include": "", + "io_buffer_size": 65536, } fake_ansible_module.check_mode = False |
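The ``lib/ansible/template/__init__.py`` change earlier in this diff resolves filter/test redirects in a loop, tracking every name it has visited and raising once a name repeats; the ``dead_end`` and ``recursive_redirect`` entries added to ``testns.testredirect``'s ``runtime.yml`` exercise the failure paths. A minimal dict-based sketch of that cycle-detection technique (the routing table below is illustrative, not a real collection)::

    class RecursiveRedirectError(Exception):
        pass

    def resolve(name, routing):
        """Follow redirects in ``routing`` until a terminal name is reached."""
        seen = set()
        while True:
            if name in seen:
                raise RecursiveRedirectError('recursive collection redirect found for %r' % name)
            seen.add(name)
            redirect = routing.get(name, {}).get('redirect')
            if redirect is None:
                return name
            name = redirect

    routing = {
        'testns.testredirect.multi_redirect_filter': {'redirect': 'testns.testredirect.redirect_filter1'},
        'testns.testredirect.redirect_filter1': {'redirect': 'testns.testredirect.redirect_filter2'},
        'testns.testredirect.redirect_filter2': {'redirect': 'testns.testcoll.testfilter'},
        'testns.testredirect.recursive_redirect': {'redirect': 'testns.testredirect.recursive_redirect'},
    }

    print(resolve('testns.testredirect.multi_redirect_filter', routing))  # testns.testcoll.testfilter
    try:
        resolve('testns.testredirect.recursive_redirect', routing)
    except RecursiveRedirectError as exc:
        print(exc)

In the actual implementation, ``get_fqcr_and_name()`` also expands a bare redirect target relative to the redirecting collection before the next lookup, which this sketch omits.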