author      Lee Garrett <lgarrett@rocketjump.eu>   2023-10-20 16:36:43 +0200
committer   Lee Garrett <lgarrett@rocketjump.eu>   2023-10-20 16:36:43 +0200
commit      aa50cf5e169fcddf7e7b049117cdea5e305f6645 (patch)
tree        29bff81a20cd2be84df506a6e431050c66700432
parent      2c596bd3b9cdb541ea70fc0dc584da466b84c794 (diff)
download    debian-ansible-core-aa50cf5e169fcddf7e7b049117cdea5e305f6645.zip
New upstream version 2.14.11
20 files changed, 330 insertions, 120 deletions
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ansible-core
-Version: 2.14.10
+Version: 2.14.11
 Summary: Radically simple IT automation
 Home-page: https://ansible.com/
 Author: Ansible, Inc.
diff --git a/bin/ansible-galaxy b/bin/ansible-galaxy
index 7732c79a..536964e2 100755
--- a/bin/ansible-galaxy
+++ b/bin/ansible-galaxy
@@ -1229,6 +1229,9 @@ class GalaxyCLI(CLI):
 
                 if remote_data:
                     role_info.update(remote_data)
+                else:
+                    data = u"- the role %s was not found" % role
+                    break
 
             elif context.CLIARGS['offline'] and not gr._exists:
                 data = u"- the role %s was not found" % role
diff --git a/changelogs/CHANGELOG-v2.14.rst b/changelogs/CHANGELOG-v2.14.rst
index da0b3266..54c5c51a 100644
--- a/changelogs/CHANGELOG-v2.14.rst
+++ b/changelogs/CHANGELOG-v2.14.rst
@@ -5,6 +5,34 @@ ansible-core 2.14 "C'mon Everybody" Release Notes
 
 .. contents:: Topics
 
+v2.14.11
+========
+
+Release Summary
+---------------
+
+| Release Date: 2023-10-09
+| `Porting Guide <https://docs.ansible.com/ansible-core/2.14/porting_guides/porting_guide_core_2.14.html>`__
+
+
+Minor Changes
+-------------
+
+- ansible-galaxy dependency resolution messages have changed the unexplained 'virtual' collection for the specific type ('scm', 'dir', etc) that is more user friendly
+
+Security Fixes
+--------------
+
+- ansible-galaxy - Prevent roles from using symlinks to overwrite files outside of the installation directory (CVE-2023-5115)
+
+Bugfixes
+--------
+
+- PluginLoader - fix Jinja plugin performance issues (https://github.com/ansible/ansible/issues/79652)
+- ansible-galaxy error on dependency resolution will not error itself due to 'virtual' collections not having a name/namespace.
+- ansible-galaxy info - fix reporting no role found when lookup_role_by_name returns None.
+- winrm - Better handle send input failures when communicating with hosts under load
+
 v2.14.10
 ========
diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index 484c4100..3bde3c50 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -896,6 +896,48 @@ releases:
       - freebsd-12.3-replacement.yml
       - tarfile_extract_warn.yml
     release_date: '2023-09-05'
+  2.14.11:
+    changes:
+      release_summary: '| Release Date: 2023-10-09
+
+        | `Porting Guide <https://docs.ansible.com/ansible-core/2.14/porting_guides/porting_guide_core_2.14.html>`__
+
+        '
+    codename: C'mon Everybody
+    fragments:
+    - 2.14.11_summary.yaml
+    release_date: '2023-10-09'
+  2.14.11rc1:
+    changes:
+      bugfixes:
+      - PluginLoader - fix Jinja plugin performance issues (https://github.com/ansible/ansible/issues/79652)
+      - ansible-galaxy error on dependency resolution will not error itself due to
+        'virtual' collections not having a name/namespace.
+      - ansible-galaxy info - fix reporting no role found when lookup_role_by_name
+        returns None.
+      - winrm - Better handle send input failures when communicating with hosts under
+        load
+      minor_changes:
+      - ansible-galaxy dependency resolution messages have changed the unexplained
+        'virtual' collection for the specific type ('scm', 'dir', etc) that is more
+        user friendly
+      release_summary: '| Release Date: 2023-10-03
+
+        | `Porting Guide <https://docs.ansible.com/ansible-core/2.14/porting_guides/porting_guide_core_2.14.html>`__
+
+        '
+      security_fixes:
+      - ansible-galaxy - Prevent roles from using symlinks to overwrite files outside
+        of the installation directory (CVE-2023-5115)
+    codename: C'mon Everybody
+    fragments:
+    - 2.14.11rc1_summary.yaml
+    - cve-2023-5115.yml
+    - fix-ansible-galaxy-info-no-role-found.yml
+    - galaxy_dep_res_msgs.yml
+    - jinja_plugin_cache_cleanup.yml
+    - winrm-send-input.yml
+    release_date: '2023-10-03'
   2.14.1rc1:
     changes:
      bugfixes:
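The changelog entries above summarize what ships in 2.14.11; the code hunks that follow implement them. As a quick, hedged aside (not part of this commit), the presence of these fixes can be checked at runtime against `ansible.release.__version__`, which the commit bumps further down:

```python
# Minimal sketch (not part of this commit): check whether the running
# ansible-core already carries the 2.14.11 fixes, including CVE-2023-5115.
import re

from ansible.release import __version__


def at_least(version, minimum=(2, 14, 11)):
    # compare only the leading numeric components, e.g. '2.14.11' or '2.14.11rc1'
    parts = tuple(int(x) for x in re.findall(r'\d+', version)[:3])
    return parts >= minimum


if __name__ == '__main__':
    print(f"ansible-core {__version__}: 2.14.11 fixes present: {at_least(__version__)}")
```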
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index 7732c79a..536964e2 100755
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -1229,6 +1229,9 @@ class GalaxyCLI(CLI):
 
                 if remote_data:
                     role_info.update(remote_data)
+                else:
+                    data = u"- the role %s was not found" % role
+                    break
 
             elif context.CLIARGS['offline'] and not gr._exists:
                 data = u"- the role %s was not found" % role
diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py
index 75aec751..84444d82 100644
--- a/lib/ansible/galaxy/collection/__init__.py
+++ b/lib/ansible/galaxy/collection/__init__.py
@@ -545,7 +545,7 @@ def download_collections(
     for fqcn, concrete_coll_pin in dep_map.copy().items():  # FIXME: move into the provider
         if concrete_coll_pin.is_virtual:
             display.display(
-                'Virtual collection {coll!s} is not downloadable'.
+                '{coll!s} is not downloadable'.
                 format(coll=to_text(concrete_coll_pin)),
             )
             continue
@@ -744,7 +744,7 @@ def install_collections(
     for fqcn, concrete_coll_pin in dependency_map.items():
         if concrete_coll_pin.is_virtual:
             display.vvvv(
-                "'{coll!s}' is virtual, skipping.".
+                "Encountered {coll!s}, skipping.".
                 format(coll=to_text(concrete_coll_pin)),
             )
             continue
@@ -1804,8 +1804,7 @@ def _resolve_depenency_map(
         )
     except CollectionDependencyInconsistentCandidate as dep_exc:
         parents = [
-            "%s.%s:%s" % (p.namespace, p.name, p.ver)
-            for p in dep_exc.criterion.iter_parent()
+            str(p) for p in dep_exc.criterion.iter_parent()
             if p is not None
         ]
diff --git a/lib/ansible/galaxy/dependency_resolution/dataclasses.py b/lib/ansible/galaxy/dependency_resolution/dataclasses.py
index 32acabdf..35b65054 100644
--- a/lib/ansible/galaxy/dependency_resolution/dataclasses.py
+++ b/lib/ansible/galaxy/dependency_resolution/dataclasses.py
@@ -440,8 +440,8 @@ class _ComputedReqKindsMixin:
     def __unicode__(self):
         if self.fqcn is None:
             return (
-                u'"virtual collection Git repo"' if self.is_scm
-                else u'"virtual collection namespace"'
+                f'{self.type} collection from a Git repo' if self.is_scm
+                else f'{self.type} collection from a namespace'
             )
 
         return (
@@ -481,14 +481,14 @@ class _ComputedReqKindsMixin:
     @property
     def namespace(self):
         if self.is_virtual:
-            raise TypeError('Virtual collections do not have a namespace')
+            raise TypeError(f'{self.type} collections do not have a namespace')
 
         return self._get_separate_ns_n_name()[0]
 
     @property
     def name(self):
         if self.is_virtual:
-            raise TypeError('Virtual collections do not have a name')
+            raise TypeError(f'{self.type} collections do not have a name')
 
         return self._get_separate_ns_n_name()[-1]
diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
index 0915adfa..9eb6e7b4 100644
--- a/lib/ansible/galaxy/role.py
+++ b/lib/ansible/galaxy/role.py
@@ -394,18 +394,36 @@ class GalaxyRole(object):
                         # bits that might be in the file for security purposes
                         # and drop any containing directory, as mentioned above
                         if member.isreg() or member.issym():
-                            n_member_name = to_native(member.name)
-                            n_archive_parent_dir = to_native(archive_parent_dir)
-                            n_parts = n_member_name.replace(n_archive_parent_dir, "", 1).split(os.sep)
-                            n_final_parts = []
-                            for n_part in n_parts:
-                                # TODO if the condition triggers it produces a broken installation.
-                                # It will create the parent directory as an empty file and will
-                                # explode if the directory contains valid files.
-                                # Leaving this as is since the whole module needs a rewrite.
-                                if n_part != '..' and not n_part.startswith('~') and '$' not in n_part:
+                            for attr in ('name', 'linkname'):
+                                attr_value = getattr(member, attr, None)
+                                if not attr_value:
+                                    continue
+                                n_attr_value = to_native(attr_value)
+                                n_archive_parent_dir = to_native(archive_parent_dir)
+                                n_parts = n_attr_value.replace(n_archive_parent_dir, "", 1).split(os.sep)
+                                n_final_parts = []
+                                for n_part in n_parts:
+                                    # TODO if the condition triggers it produces a broken installation.
+                                    # It will create the parent directory as an empty file and will
+                                    # explode if the directory contains valid files.
+                                    # Leaving this as is since the whole module needs a rewrite.
+                                    #
+                                    # Check if we have any files with illegal names,
+                                    # and display a warning if so. This could help users
+                                    # to debug a broken installation.
+                                    if not n_part:
+                                        continue
+                                    if n_part == '..':
+                                        display.warning(f"Illegal filename '{n_part}': '..' is not allowed")
+                                        continue
+                                    if n_part.startswith('~'):
+                                        display.warning(f"Illegal filename '{n_part}': names cannot start with '~'")
+                                        continue
+                                    if '$' in n_part:
+                                        display.warning(f"Illegal filename '{n_part}': names cannot contain '$'")
+                                        continue
                                     n_final_parts.append(n_part)
-                            member.name = os.path.join(*n_final_parts)
+                                setattr(member, attr, os.path.join(*n_final_parts))
 
                         if _check_working_data_filter():
                             # deprecated: description='extract fallback without filter' python_version='3.11'
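The role.py hunk above is the CVE-2023-5115 fix: path sanitization is now applied to a tar member's `linkname` as well as its `name`, so a symlink inside a role archive can no longer point outside the install directory. A standalone sketch of the same idea (a hypothetical helper, not ansible's implementation, which sanitizes path components instead) is to resolve every candidate path against the destination before extracting:

```python
# Standalone sketch (hypothetical helper, not ansible's implementation):
# flag tar members whose name or linkname would resolve outside dest_dir.
import os
import tarfile


def unsafe_members(archive_path, dest_dir):
    dest_dir = os.path.realpath(dest_dir)
    with tarfile.open(archive_path) as tar:
        for member in tar.getmembers():
            for candidate in (member.name, member.linkname):
                if not candidate:
                    continue  # linkname is empty for regular files
                resolved = os.path.realpath(os.path.join(dest_dir, candidate))
                if os.path.commonpath([dest_dir, resolved]) != dest_dir:
                    yield member.name, resolved


if __name__ == '__main__':
    for name, target in unsafe_members('dangerous.tar', '/tmp/roles/dangerous'):
        print(f"refusing {name!r}: it would write to {target!r}")
```

The integration test added at the bottom of this commit (create-role-archive.py plus dir-traversal.yml) exercises exactly this scenario.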
is not allowed") + continue + if n_part.startswith('~'): + display.warning(f"Illegal filename '{n_part}': names cannot start with '~'") + continue + if '$' in n_part: + display.warning(f"Illegal filename '{n_part}': names cannot contain '$'") + continue n_final_parts.append(n_part) - member.name = os.path.join(*n_final_parts) + setattr(member, attr, os.path.join(*n_final_parts)) if _check_working_data_filter(): # deprecated: description='extract fallback without filter' python_version='3.11' diff --git a/lib/ansible/module_utils/ansible_release.py b/lib/ansible/module_utils/ansible_release.py index 591e60b7..6f0f794f 100644 --- a/lib/ansible/module_utils/ansible_release.py +++ b/lib/ansible/module_utils/ansible_release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.14.10' +__version__ = '2.14.11' __author__ = 'Ansible, Inc.' __codename__ = "C'mon Everybody" diff --git a/lib/ansible/plugins/connection/winrm.py b/lib/ansible/plugins/connection/winrm.py index 13c80ec5..69dbd663 100644 --- a/lib/ansible/plugins/connection/winrm.py +++ b/lib/ansible/plugins/connection/winrm.py @@ -149,6 +149,7 @@ import json import tempfile import shlex import subprocess +import time from inspect import getfullargspec from urllib.parse import urlunsplit @@ -176,6 +177,7 @@ from ansible.utils.display import Display try: import winrm from winrm import Response + from winrm.exceptions import WinRMError, WinRMOperationTimeoutError from winrm.protocol import Protocol import requests.exceptions HAS_WINRM = True @@ -470,6 +472,43 @@ class Connection(ConnectionBase): else: raise AnsibleError('No transport found for WinRM connection') + def _winrm_write_stdin(self, command_id, stdin_iterator): + for (data, is_last) in stdin_iterator: + for attempt in range(1, 4): + try: + self._winrm_send_input(self.protocol, self.shell_id, command_id, data, eof=is_last) + + except WinRMOperationTimeoutError: + # A WSMan OperationTimeout can be received for a Send + # operation when the server is under severe load. On manual + # testing the input is still processed and it's safe to + # continue. As the calling method still tries to wait for + # the proc to end if this failed it shouldn't hurt to just + # treat this as a warning. + display.warning( + "WSMan OperationTimeout during send input, attempting to continue. " + "If this continues to occur, try increasing the connection_timeout " + "value for this host." + ) + if not is_last: + time.sleep(5) + + except WinRMError as e: + # Error 170 == ERROR_BUSY. This could be the result of a + # timed out Send from above still being processed on the + # server. Add a 5 second delay and try up to 3 times before + # fully giving up. + # pywinrm does not expose the internal WSMan fault details + # through an actual object but embeds it as a repr. 
+ if attempt == 3 or "'wsmanfault_code': '170'" not in str(e): + raise + + display.warning(f"WSMan send failed on attempt {attempt} as the command is busy, trying to send data again") + time.sleep(5) + continue + + break + def _winrm_send_input(self, protocol, shell_id, command_id, stdin, eof=False): rq = {'env:Envelope': protocol._get_soap_header( resource_uri='http://schemas.microsoft.com/wbem/wsman/1/windows/shell/cmd', @@ -499,8 +538,7 @@ class Connection(ConnectionBase): try: if stdin_iterator: - for (data, is_last) in stdin_iterator: - self._winrm_send_input(self.protocol, self.shell_id, command_id, data, eof=is_last) + self._winrm_write_stdin(command_id, stdin_iterator) except Exception as ex: display.warning("ERROR DURING WINRM SEND INPUT - attempting to recover: %s %s" diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py index 845fdcd0..8b7fbfce 100644 --- a/lib/ansible/plugins/loader.py +++ b/lib/ansible/plugins/loader.py @@ -17,6 +17,10 @@ import warnings from collections import defaultdict, namedtuple from traceback import format_exc + +from .filter import AnsibleJinja2Filter +from .test import AnsibleJinja2Test + from ansible import __version__ as ansible_version from ansible import constants as C from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError @@ -1065,28 +1069,17 @@ class Jinja2Loader(PluginLoader): We need to do a few things differently in the base class because of file == plugin assumptions and dedupe logic. """ - def __init__(self, class_name, package, config, subdir, aliases=None, required_base_class=None): - + def __init__(self, class_name, package, config, subdir, plugin_wrapper_type, aliases=None, required_base_class=None): super(Jinja2Loader, self).__init__(class_name, package, config, subdir, aliases=aliases, required_base_class=required_base_class) - self._loaded_j2_file_maps = [] + self._plugin_wrapper_type = plugin_wrapper_type + self._cached_non_collection_wrappers = {} def _clear_caches(self): super(Jinja2Loader, self)._clear_caches() - self._loaded_j2_file_maps = [] + self._cached_non_collection_wrappers = {} def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None): - - # TODO: handle collection plugin find, see 'get_with_context' - # this can really 'find plugin file' - plugin = super(Jinja2Loader, self).find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases, - collection_list=collection_list) - - # if not found, try loading all non collection plugins and see if this in there - if not plugin: - all_plugins = self.all() - plugin = all_plugins.get(name, None) - - return plugin + raise NotImplementedError('find_plugin is not supported on Jinja2Loader') @property def method_map_name(self): @@ -1120,8 +1113,7 @@ class Jinja2Loader(PluginLoader): for func_name, func in plugin_map: fq_name = '.'.join((collection, func_name)) full = '.'.join((full_name, func_name)) - pclass = self._load_jinja2_class() - plugin = pclass(func) + plugin = self._plugin_wrapper_type(func) if plugin in plugins: continue self._update_object(plugin, full, plugin_path, resolved=fq_name) @@ -1129,27 +1121,28 @@ class Jinja2Loader(PluginLoader): return plugins + # FUTURE: now that the resulting plugins are closer, refactor base class method with some extra + # hooks so we can avoid all the duplicated plugin metadata logic, and also cache the collection results properly here def 
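The new `_winrm_write_stdin` wraps each send in a small retry loop: a successful send reaches the trailing `break`, an `ERROR_BUSY` (wsmanfault 170) sleeps five seconds and `continue`s for up to three attempts, and an `OperationTimeoutError` is downgraded to a warning because the input is usually processed anyway. A generic sketch of the same retry shape, with a stand-in `send()` and stand-in exception types (not pywinrm APIs):

```python
# Generic sketch of the retry shape used above; send(), BusyError and
# TimeoutWarningError are stand-ins, not pywinrm APIs.
import time


class BusyError(Exception):
    pass


class TimeoutWarningError(Exception):
    pass


def send_with_retries(send, chunks, attempts=3, delay=5):
    for data, is_last in chunks:
        for attempt in range(1, attempts + 1):
            try:
                send(data, eof=is_last)
            except TimeoutWarningError:
                # tolerated: the server usually processed the input anyway
                print("warning: send timed out, continuing")
                if not is_last:
                    time.sleep(delay)
            except BusyError:
                if attempt == attempts:
                    raise
                time.sleep(delay)
                continue  # retry the same chunk
            break  # success (or tolerated timeout): move on to the next chunk
```

As the warning text suggests, persistent timeouts are better addressed by raising the connection timeout for the host, which the winrm connection plugin exposes as its `connection_timeout` option (usually set via the `ansible_winrm_connection_timeout` host variable; check the plugin documentation for your version).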
diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py
index 845fdcd0..8b7fbfce 100644
--- a/lib/ansible/plugins/loader.py
+++ b/lib/ansible/plugins/loader.py
@@ -17,6 +17,10 @@ import warnings
 
 from collections import defaultdict, namedtuple
 from traceback import format_exc
+
+from .filter import AnsibleJinja2Filter
+from .test import AnsibleJinja2Test
+
 from ansible import __version__ as ansible_version
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError
@@ -1065,28 +1069,17 @@ class Jinja2Loader(PluginLoader):
     We need to do a few things differently in the base class because of file == plugin
     assumptions and dedupe logic.
     """
-    def __init__(self, class_name, package, config, subdir, aliases=None, required_base_class=None):
-
+    def __init__(self, class_name, package, config, subdir, plugin_wrapper_type, aliases=None, required_base_class=None):
         super(Jinja2Loader, self).__init__(class_name, package, config, subdir, aliases=aliases, required_base_class=required_base_class)
-        self._loaded_j2_file_maps = []
+        self._plugin_wrapper_type = plugin_wrapper_type
+        self._cached_non_collection_wrappers = {}
 
     def _clear_caches(self):
         super(Jinja2Loader, self)._clear_caches()
-        self._loaded_j2_file_maps = []
+        self._cached_non_collection_wrappers = {}
 
     def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
-
-        # TODO: handle collection plugin find, see 'get_with_context'
-        # this can really 'find plugin file'
-        plugin = super(Jinja2Loader, self).find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases,
-                                                       collection_list=collection_list)
-
-        # if not found, try loading all non collection plugins and see if this in there
-        if not plugin:
-            all_plugins = self.all()
-            plugin = all_plugins.get(name, None)
-
-        return plugin
+        raise NotImplementedError('find_plugin is not supported on Jinja2Loader')
 
     @property
     def method_map_name(self):
@@ -1120,8 +1113,7 @@ class Jinja2Loader(PluginLoader):
             for func_name, func in plugin_map:
                 fq_name = '.'.join((collection, func_name))
                 full = '.'.join((full_name, func_name))
-                pclass = self._load_jinja2_class()
-                plugin = pclass(func)
+                plugin = self._plugin_wrapper_type(func)
                 if plugin in plugins:
                     continue
                 self._update_object(plugin, full, plugin_path, resolved=fq_name)
@@ -1129,27 +1121,28 @@ class Jinja2Loader(PluginLoader):
 
         return plugins
 
+    # FUTURE: now that the resulting plugins are closer, refactor base class method with some extra
+    # hooks so we can avoid all the duplicated plugin metadata logic, and also cache the collection results properly here
     def get_with_context(self, name, *args, **kwargs):
-
-        # found_in_cache = True
-        class_only = kwargs.pop('class_only', False)  # just pop it, dont want to pass through
-        collection_list = kwargs.pop('collection_list', None)
+        # pop N/A kwargs to avoid passthrough to parent methods
+        kwargs.pop('class_only', False)
+        kwargs.pop('collection_list', None)
 
         context = PluginLoadContext()
 
         # avoid collection path for legacy
         name = name.removeprefix('ansible.legacy.')
 
-        if '.' not in name:
-            # Filter/tests must always be FQCN except builtin and legacy
-            for known_plugin in self.all(*args, **kwargs):
-                if known_plugin.matches_name([name]):
-                    context.resolved = True
-                    context.plugin_resolved_name = name
-                    context.plugin_resolved_path = known_plugin._original_path
-                    context.plugin_resolved_collection = 'ansible.builtin' if known_plugin.ansible_name.startswith('ansible.builtin.') else ''
-                    context._resolved_fqcn = known_plugin.ansible_name
-                    return get_with_context_result(known_plugin, context)
+        self._ensure_non_collection_wrappers(*args, **kwargs)
+
+        # check for stuff loaded via legacy/builtin paths first
+        if known_plugin := self._cached_non_collection_wrappers.get(name):
+            context.resolved = True
+            context.plugin_resolved_name = name
+            context.plugin_resolved_path = known_plugin._original_path
+            context.plugin_resolved_collection = 'ansible.builtin' if known_plugin.ansible_name.startswith('ansible.builtin.') else ''
+            context._resolved_fqcn = known_plugin.ansible_name
+            return get_with_context_result(known_plugin, context)
 
         plugin = None
         key, leaf_key = get_fqcr_and_name(name)
@@ -1235,14 +1228,10 @@ class Jinja2Loader(PluginLoader):
                     # use 'parent' loader class to find files, but cannot return this as it can contain
                     # multiple plugins per file
                     plugin_impl = super(Jinja2Loader, self).get_with_context(module_name, *args, **kwargs)
-                except Exception as e:
-                    raise KeyError(to_native(e))
-
-                try:
                     method_map = getattr(plugin_impl.object, self.method_map_name)
                     plugin_map = method_map().items()
                 except Exception as e:
-                    display.warning("Skipping %s plugins in '%s' as it seems to be invalid: %r" % (self.type, to_text(plugin_impl.object._original_path), e))
+                    display.warning(f"Skipping {self.type} plugins in {module_name}'; an error occurred while loading: {e}")
                     continue
 
                 for func_name, func in plugin_map:
@@ -1251,11 +1240,11 @@ class Jinja2Loader(PluginLoader):
                     # TODO: load anyways into CACHE so we only match each at end of loop
                     #  the files themseves should already be cached by base class caching of modules(python)
                     if key in (func_name, fq_name):
-                        pclass = self._load_jinja2_class()
-                        plugin = pclass(func)
+                        plugin = self._plugin_wrapper_type(func)
                         if plugin:
                             context = plugin_impl.plugin_load_context
                             self._update_object(plugin, src_name, plugin_impl.object._original_path, resolved=fq_name)
+                            # FIXME: once we start caching these results, we'll be missing functions that would have loaded later
                             break  # go to next file as it can override if dupe (dont break both loops)
 
             except AnsiblePluginRemovedError as apre:
@@ -1270,8 +1259,7 @@ class Jinja2Loader(PluginLoader):
         return get_with_context_result(plugin, context)
 
     def all(self, *args, **kwargs):
-
-        # inputs, we ignore 'dedupe' we always do, used in base class to find files for this one
+        kwargs.pop('_dedupe', None)
         path_only = kwargs.pop('path_only', False)
         class_only = kwargs.pop('class_only', False)  # basically ignored for test/filters since they are functions
 
         if path_only and class_only:
             raise AnsibleError('Do not set both path_only and class_only when calling PluginLoader.all()')
 
-        found = set()
+        self._ensure_non_collection_wrappers(*args, **kwargs)
+        if path_only:
+            yield from (w._original_path for w in self._cached_non_collection_wrappers.values())
+        else:
+            yield from (w for w in self._cached_non_collection_wrappers.values())
+
+    def _ensure_non_collection_wrappers(self, *args, **kwargs):
+        if self._cached_non_collection_wrappers:
+            return
+
         # get plugins from files in configured paths (multiple in each)
-        for p_map in self._j2_all_file_maps(*args, **kwargs):
+        for p_map in super(Jinja2Loader, self).all(*args, **kwargs):
+            is_builtin = p_map.ansible_name.startswith('ansible.builtin.')
 
             # p_map is really object from file with class that holds multiple plugins
             plugins_list = getattr(p_map, self.method_map_name)
@@ -1292,57 +1290,35 @@ class Jinja2Loader(PluginLoader):
                 continue
 
             for plugin_name in plugins.keys():
-                if plugin_name in _PLUGIN_FILTERS[self.package]:
-                    display.debug("%s skipped due to a defined plugin filter" % plugin_name)
+                if '.' in plugin_name:
+                    display.debug(f'{plugin_name} skipped in {p_map._original_path}; Jinja plugin short names may not contain "."')
                     continue
 
-                if plugin_name in found:
-                    display.debug("%s skipped as duplicate" % plugin_name)
+                if plugin_name in _PLUGIN_FILTERS[self.package]:
+                    display.debug("%s skipped due to a defined plugin filter" % plugin_name)
                     continue
 
-                if path_only:
-                    result = p_map._original_path
-                else:
-                    # loader class is for the file with multiple plugins, but each plugin now has it's own class
-                    pclass = self._load_jinja2_class()
-                    result = pclass(plugins[plugin_name])  # if bad plugin, let exception rise
-                found.add(plugin_name)
-                fqcn = plugin_name
-                collection = '.'.join(p_map.ansible_name.split('.')[:2]) if p_map.ansible_name.count('.') >= 2 else ''
-                if not plugin_name.startswith(collection):
-                    fqcn = f"{collection}.{plugin_name}"
-
-                self._update_object(result, plugin_name, p_map._original_path, resolved=fqcn)
-                yield result
-
-    def _load_jinja2_class(self):
-        """ override the normal method of plugin classname as these are used in the generic funciton
-            to access the 'multimap' of filter/tests to function, this is a 'singular' plugin for
-            each entry.
-        """
-        class_name = 'AnsibleJinja2%s' % get_plugin_class(self.class_name).capitalize()
-        module = __import__(self.package, fromlist=[class_name])
-
-        return getattr(module, class_name)
+                # the plugin class returned by the loader may host multiple Jinja plugins, but we wrap each plugin in
+                # its own surrogate wrapper instance here to ease the bookkeeping...
+                wrapper = self._plugin_wrapper_type(plugins[plugin_name])
+                fqcn = plugin_name
+                collection = '.'.join(p_map.ansible_name.split('.')[:2]) if p_map.ansible_name.count('.') >= 2 else ''
+                if not plugin_name.startswith(collection):
+                    fqcn = f"{collection}.{plugin_name}"
 
-    def _j2_all_file_maps(self, *args, **kwargs):
-        """
-        * Unlike other plugin types, file != plugin, a file can contain multiple plugins (of same type).
-          This is why we do not deduplicate ansible file names at this point, we mostly care about
-          the names of the actual jinja2 plugins which are inside of our files.
-        * This method will NOT fetch collection plugin files, only those that would be expected under 'ansible.builtin/legacy'.
-        """
-        # populate cache if needed
-        if not self._loaded_j2_file_maps:
+                self._update_object(wrapper, plugin_name, p_map._original_path, resolved=fqcn)
 
-            # We don't deduplicate ansible file names.
-            # Instead, calling code deduplicates jinja2 plugin names when loading each file.
-            kwargs['_dedupe'] = False
+                target_names = {plugin_name, fqcn}
+                if is_builtin:
+                    target_names.add(f'ansible.builtin.{plugin_name}')
 
-            # To match correct precedence, call base class' all() to get a list of files,
-            self._loaded_j2_file_maps = list(super(Jinja2Loader, self).all(*args, **kwargs))
+                for target_name in target_names:
+                    if existing_plugin := self._cached_non_collection_wrappers.get(target_name):
+                        display.debug(f'Jinja plugin {target_name} from {p_map._original_path} skipped; '
+                                      f'shadowed by plugin from {existing_plugin._original_path})')
+                        continue
 
-        return self._loaded_j2_file_maps
+                    self._cached_non_collection_wrappers[target_name] = wrapper
 
 
 def get_fqcr_and_name(resource, collection='ansible.builtin'):
@@ -1551,13 +1527,15 @@ filter_loader = Jinja2Loader(
     'ansible.plugins.filter',
     C.DEFAULT_FILTER_PLUGIN_PATH,
     'filter_plugins',
+    AnsibleJinja2Filter
 )
 
 test_loader = Jinja2Loader(
     'TestModule',
     'ansible.plugins.test',
     C.DEFAULT_TEST_PLUGIN_PATH,
-    'test_plugins'
+    'test_plugins',
+    AnsibleJinja2Test
 )
 
 strategy_loader = PluginLoader(
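This loader.py rework is the main part of the fix for the Jinja plugin performance issue (https://github.com/ansible/ansible/issues/79652): instead of re-walking the configured plugin paths on every lookup, `Jinja2Loader` now builds all legacy/builtin filter and test wrappers once and keeps them in `_cached_non_collection_wrappers`, keyed by every name a template might use (short name, `<collection>.<name>`, and an `ansible.builtin.` alias), with earlier registrations shadowing later ones. A generic sketch of that alias-keyed, build-once registry (hypothetical names, not the ansible API):

```python
# Generic sketch (hypothetical names) of an alias-keyed, build-once registry:
# each plugin is registered under all of its names, and the first registration
# for a name wins, mirroring the shadowing behaviour of the loader above.
class AliasRegistry:
    def __init__(self):
        self._wrappers = {}

    def populate(self, plugins):
        """plugins: iterable of (short_name, fqcn, callable)."""
        if self._wrappers:
            return  # already built; later lookups are plain dictionary hits
        for short_name, fqcn, func in plugins:
            for alias in {short_name, fqcn}:
                self._wrappers.setdefault(alias, func)

    def get(self, name):
        return self._wrappers.get(name)


registry = AliasRegistry()
registry.populate([('bool', 'ansible.builtin.bool', bool)])
assert registry.get('bool') is registry.get('ansible.builtin.bool')
```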
diff --git a/lib/ansible/release.py b/lib/ansible/release.py
index 591e60b7..6f0f794f 100644
--- a/lib/ansible/release.py
+++ b/lib/ansible/release.py
@@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
-__version__ = '2.14.10'
+__version__ = '2.14.11'
 __author__ = 'Ansible, Inc.'
 __codename__ = "C'mon Everybody"
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 17bee104..baa85ed7 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -412,11 +412,11 @@ class JinjaPluginIntercept(MutableMapping):
 
         self._pluginloader = pluginloader
 
-        # cache of resolved plugins
+        # Jinja environment's mapping of known names (initially just J2 builtins)
        self._delegatee = delegatee
 
-        # track loaded plugins here as cache above includes 'jinja2' filters but ours should override
-        self._loaded_builtins = set()
+        # our names take precedence over Jinja's, but let things we've tried to resolve skip the pluginloader
+        self._seen_it = set()
 
     def __getitem__(self, key):
 
@@ -424,7 +424,10 @@ class JinjaPluginIntercept(MutableMapping):
             raise ValueError('key must be a string, got %s instead' % type(key))
 
         original_exc = None
-        if key not in self._loaded_builtins:
+        if key not in self._seen_it:
+            # this looks too early to set this- it isn't. Setting it here keeps requests for Jinja builtins from
+            # going through the pluginloader more than once, which is extremely slow for something that won't ever succeed.
+            self._seen_it.add(key)
             plugin = None
             try:
                 plugin = self._pluginloader.get(key)
@@ -438,12 +441,12 @@ class JinjaPluginIntercept(MutableMapping):
             if plugin:
                 # set in filter cache and avoid expensive plugin load
                 self._delegatee[key] = plugin.j2_function
-                self._loaded_builtins.add(key)
 
         # raise template syntax error if we could not find ours or jinja2 one
         try:
             func = self._delegatee[key]
         except KeyError as e:
+            self._seen_it.remove(key)
             raise TemplateSyntaxError('Could not load "%s": %s' % (key, to_native(original_exc or e)), 0)
 
         # if i do have func and it is a filter, it nees wrapping
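The `_seen_it` change is the other half of the Jinja performance fix: a template that uses a Jinja2 built-in such as `default` or `join` no longer sends that name through the plugin loader on every lookup; the key is remembered up front and only forgotten again if resolution ultimately fails. The pattern is ordinary negative caching; a minimal generic sketch (stand-in names, not ansible's classes):

```python
# Minimal sketch of the negative-caching pattern above (stand-in names):
# remember every key already tried so repeated misses stay cheap, but forget
# a key again if the lookup fails, so a later retry can still succeed.
class CachingLookup:
    def __init__(self, expensive_lookup, fallback):
        self._expensive_lookup = expensive_lookup   # e.g. a plugin loader
        self._fallback = fallback                   # e.g. Jinja's own builtins dict
        self._seen_it = set()

    def __getitem__(self, key):
        if key not in self._seen_it:
            self._seen_it.add(key)
            found = self._expensive_lookup(key)
            if found is not None:
                self._fallback[key] = found
        try:
            return self._fallback[key]
        except KeyError:
            self._seen_it.discard(key)
            raise
```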
diff --git a/lib/ansible_core.egg-info/PKG-INFO b/lib/ansible_core.egg-info/PKG-INFO
index 6ed4167b..e994e044 100644
--- a/lib/ansible_core.egg-info/PKG-INFO
+++ b/lib/ansible_core.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ansible-core
-Version: 2.14.10
+Version: 2.14.11
 Summary: Radically simple IT automation
 Home-page: https://ansible.com/
 Author: Ansible, Inc.
diff --git a/lib/ansible_core.egg-info/SOURCES.txt b/lib/ansible_core.egg-info/SOURCES.txt
index 83bc701d..f4109a34 100644
--- a/lib/ansible_core.egg-info/SOURCES.txt
+++ b/lib/ansible_core.egg-info/SOURCES.txt
@@ -874,7 +874,9 @@ test/integration/targets/ansible-galaxy-collection/tasks/verify.yml
 test/integration/targets/ansible-galaxy-collection/templates/ansible.cfg.j2
 test/integration/targets/ansible-galaxy-collection/vars/main.yml
 test/integration/targets/ansible-galaxy-role/aliases
+test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
 test/integration/targets/ansible-galaxy-role/meta/main.yml
+test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
 test/integration/targets/ansible-galaxy-role/tasks/main.yml
 test/integration/targets/ansible-galaxy/files/testserver.py
 test/integration/targets/ansible-inventory/aliases
diff --git a/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py b/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
new file mode 100755
index 00000000..cfd908c1
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-role/files/create-role-archive.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""Create a role archive which overwrites an arbitrary file."""
+
+import argparse
+import pathlib
+import tarfile
+import tempfile
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('archive', type=pathlib.Path, help='archive to create')
+    parser.add_argument('content', type=pathlib.Path, help='content to write')
+    parser.add_argument('target', type=pathlib.Path, help='file to overwrite')
+
+    args = parser.parse_args()
+
+    create_archive(args.archive, args.content, args.target)
+
+
+def create_archive(archive_path: pathlib.Path, content_path: pathlib.Path, target_path: pathlib.Path) -> None:
+    with (
+        tarfile.open(name=archive_path, mode='w') as role_archive,
+        tempfile.TemporaryDirectory() as temp_dir_name,
+    ):
+        temp_dir_path = pathlib.Path(temp_dir_name)
+
+        meta_main_path = temp_dir_path / 'meta' / 'main.yml'
+        meta_main_path.parent.mkdir()
+        meta_main_path.write_text('')
+
+        symlink_path = temp_dir_path / 'symlink'
+        symlink_path.symlink_to(target_path)
+
+        role_archive.add(meta_main_path)
+        role_archive.add(symlink_path)
+
+        content_tarinfo = role_archive.gettarinfo(content_path, str(symlink_path))
+
+        with content_path.open('rb') as content_file:
+            role_archive.addfile(content_tarinfo, content_file)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml b/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
new file mode 100644
index 00000000..c70e8998
--- /dev/null
+++ b/test/integration/targets/ansible-galaxy-role/tasks/dir-traversal.yml
@@ -0,0 +1,44 @@
+- name: create test directories
+  file:
+    path: '{{ remote_tmp_dir }}/dir-traversal/{{ item }}'
+    state: directory
+  loop:
+    - source
+    - target
+    - roles
+
+- name: create test content
+  copy:
+    dest: '{{ remote_tmp_dir }}/dir-traversal/source/content.txt'
+    content: |
+      some content to write
+
+- name: build dangerous dir traversal role
+  script:
+    chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
+    cmd: create-role-archive.py dangerous.tar content.txt {{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt
+    executable: '{{ ansible_playbook_python }}'
+
+- name: install dangerous role
+  command:
+    cmd: ansible-galaxy role install --roles-path '{{ remote_tmp_dir }}/dir-traversal/roles' dangerous.tar
+    chdir: '{{ remote_tmp_dir }}/dir-traversal/source'
+  ignore_errors: true
+  register: galaxy_install_dangerous
+
+- name: check for overwritten file
+  stat:
+    path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
+  register: dangerous_overwrite_stat
+
+- name: get overwritten content
+  slurp:
+    path: '{{ remote_tmp_dir }}/dir-traversal/target/target-file-to-overwrite.txt'
+  register: dangerous_overwrite_content
+  when: dangerous_overwrite_stat.stat.exists
+
+- assert:
+    that:
+      - dangerous_overwrite_content.content|default('')|b64decode == ''
+      - not dangerous_overwrite_stat.stat.exists
+      - galaxy_install_dangerous is failed
diff --git a/test/integration/targets/ansible-galaxy-role/tasks/main.yml b/test/integration/targets/ansible-galaxy-role/tasks/main.yml
index 03d0b3c2..b39df11c 100644
--- a/test/integration/targets/ansible-galaxy-role/tasks/main.yml
+++ b/test/integration/targets/ansible-galaxy-role/tasks/main.yml
@@ -59,3 +59,5 @@
 
 - name: Uninstall invalid role
   command: ansible-galaxy role remove invalid-testrole
+
+- import_tasks: dir-traversal.yml
diff --git a/test/integration/targets/collections/posix.yml b/test/integration/targets/collections/posix.yml
index 903fb4ff..4db20003 100644
--- a/test/integration/targets/collections/posix.yml
+++ b/test/integration/targets/collections/posix.yml
@@ -151,8 +151,8 @@
 
     - assert:
         that:
-          - |
-            'This is a broken filter plugin.' in result.msg
+          # FUTURE: ensure that the warning was also issued with the actual failure details
+          - result is failed
 
     - debug:
         msg: "{{ 'foo'|missing.collection.filter }}"
diff --git a/test/integration/targets/win_fetch/tasks/main.yml b/test/integration/targets/win_fetch/tasks/main.yml
index 78b6fa02..b5818352 100644
--- a/test/integration/targets/win_fetch/tasks/main.yml
+++ b/test/integration/targets/win_fetch/tasks/main.yml
@@ -176,8 +176,13 @@
     - "fetch_missing_implicit.msg"
     - "fetch_missing_implicit is not changed"
 
+- name: create empty temp directory
+  win_file:
+    path: '{{ remote_tmp_dir }}\fetch_tmp'
+    state: directory
+
 - name: attempt to fetch a directory
-  fetch: src="C:\\Windows" dest={{ host_output_dir }}
+  fetch: src="{{ remote_tmp_dir }}\\fetch_tmp" dest={{ host_output_dir }}
   register: fetch_dir
   ignore_errors: true