commit     c854267a3b7e70668beced393c6b8f2f5f64ed58 (patch)
tree       155f8e898e97770ec1d7e6df8addc5923e52c6f7 /lib
parent     39552c97f0bcb60e41b82424bed22eb25e790c9a (diff)
author     Lee Garrett <lgarrett@rocketjump.eu>  2022-03-29 17:09:09 +0200
committer  Lee Garrett <lgarrett@rocketjump.eu>  2022-06-13 23:10:52 +0200
download   debian-ansible-core-c854267a3b7e70668beced393c6b8f2f5f64ed58.zip
New upstream version 2.12.4
Diffstat (limited to 'lib')
-rw-r--r--  lib/ansible/executor/module_common.py                       |  2
-rw-r--r--  lib/ansible/galaxy/api.py                                    |  6
-rw-r--r--  lib/ansible/galaxy/collection/__init__.py                    | 37
-rw-r--r--  lib/ansible/galaxy/collection/concrete_artifact_manager.py   | 11
-rw-r--r--  lib/ansible/modules/unarchive.py                             | 11
-rw-r--r--  lib/ansible/parsing/yaml/dumper.py                           | 12
-rw-r--r--  lib/ansible/plugins/lookup/first_found.py                    |  8
-rw-r--r--  lib/ansible/release.py                                       |  2
-rw-r--r--  lib/ansible/template/__init__.py                             | 89
9 files changed, 124 insertions(+), 54 deletions(-)
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index c0760fa3..953c1604 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -1161,7 +1161,7 @@ def _find_module_utils(module_name, b_module_data, module_path, module_args, tas
compression_method = zipfile.ZIP_STORED
lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache')
- cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))
+ cached_module_filename = os.path.join(lookup_path, "%s-%s" % (remote_module_fqn, module_compression))
zipdata = None
# Optimization -- don't lock if the module has already been cached
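This change fixes the AnsiballZ cache key: keying on the short module name meant two modules sharing a basename (say, ping from two different collections) collided in the cache and could serve each other's stale payloads. A minimal illustration of the collision, with made-up paths and module names:

import os

lookup_path = '/tmp/ansible-local/ansiballz_cache'   # illustrative path
module_compression = 'ZIP_DEFLATED'

for fqn in ('ansible.builtin.ping', 'my.collection.ping'):
    short_name = fqn.rsplit('.', 1)[-1]
    old_key = os.path.join(lookup_path, '%s-%s' % (short_name, module_compression))
    new_key = os.path.join(lookup_path, '%s-%s' % (fqn, module_compression))
    print(old_key, '->', new_key)

# old_key is '/tmp/.../ping-ZIP_DEFLATED' for both modules, so whichever is
# cached first wins; new_key is unique per fully-qualified collection name.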
diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
index 5bd37149..3becc67d 100644
--- a/lib/ansible/galaxy/api.py
+++ b/lib/ansible/galaxy/api.py
@@ -258,6 +258,7 @@ class GalaxyAPI:
available_api_versions=None,
clear_response_cache=False, no_cache=True,
priority=float('inf'),
+ timeout=60,
):
self.galaxy = galaxy
self.name = name
@@ -266,6 +267,7 @@ class GalaxyAPI:
self.token = token
self.api_server = url
self.validate_certs = validate_certs
+ self.timeout = timeout
self._available_api_versions = available_api_versions or {}
self._priority = priority
@@ -377,7 +379,7 @@ class GalaxyAPI:
try:
display.vvvv("Calling Galaxy at %s" % url)
resp = open_url(to_native(url), data=args, validate_certs=self.validate_certs, headers=headers,
- method=method, timeout=20, http_agent=user_agent(), follow_redirects='safe')
+ method=method, timeout=self.timeout, http_agent=user_agent(), follow_redirects='safe')
except HTTPError as e:
raise GalaxyError(e, error_context_msg)
except Exception as e:
@@ -435,7 +437,7 @@ class GalaxyAPI:
"""
url = _urljoin(self.api_server, self.available_api_versions['v1'], "tokens") + '/'
args = urlencode({"github_token": github_token})
- resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST", http_agent=user_agent())
+ resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST", http_agent=user_agent(), timeout=self.timeout)
data = json.loads(to_text(resp.read(), errors='surrogate_or_strict'))
return data
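With this change GalaxyAPI accepts a per-server timeout (defaulting to 60 seconds) instead of the 20-second value previously hardcoded in _call_galaxy, and the v1 token request, which previously passed no timeout at all, now honors it too. A minimal sketch of the resulting behavior, assuming ansible.module_utils.urls.open_url; the wrapper name is illustrative, not from the source:

from ansible.module_utils.urls import open_url

def call_galaxy(url, timeout=60):
    # A slow or unresponsive Galaxy server now fails after `timeout`
    # seconds rather than after a one-size-fits-all 20.
    resp = open_url(url, timeout=timeout)
    return resp.read()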
diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py
index 436436e6..9634f602 100644
--- a/lib/ansible/galaxy/collection/__init__.py
+++ b/lib/ansible/galaxy/collection/__init__.py
@@ -220,11 +220,46 @@ def verify_local_collection(
_verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content)
file_manifest = get_json_from_validation_source(file_manifest_filename)
+ collection_dirs = set()
+ collection_files = {
+ os.path.join(b_collection_path, b'MANIFEST.json'),
+ os.path.join(b_collection_path, b'FILES.json'),
+ }
+
# Use the file manifest to verify individual file checksums
for manifest_data in file_manifest['files']:
+ name = manifest_data['name']
+
if manifest_data['ftype'] == 'file':
+ collection_files.add(
+ os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict'))
+ )
expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']]
- _verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content)
+ _verify_file_hash(b_collection_path, name, expected_hash, modified_content)
+
+ if manifest_data['ftype'] == 'dir':
+ collection_dirs.add(
+ os.path.join(b_collection_path, to_bytes(name, errors='surrogate_or_strict'))
+ )
+
+ # Find any paths not in the FILES.json
+ for root, dirs, files in os.walk(b_collection_path):
+ for name in files:
+ full_path = os.path.join(root, name)
+ path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict')
+
+ if full_path not in collection_files:
+ modified_content.append(
+ ModifiedContent(filename=path, expected='the file does not exist', installed='the file exists')
+ )
+ for name in dirs:
+ full_path = os.path.join(root, name)
+ path = to_text(full_path[len(b_collection_path) + 1::], errors='surrogate_or_strict')
+
+ if full_path not in collection_dirs:
+ modified_content.append(
+ ModifiedContent(filename=path, expected='the directory does not exist', installed='the directory exists')
+ )
if modified_content:
result.success = False
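The new walk catches the inverse problem of checksum verification: paths that exist on disk but are absent from FILES.json. A self-contained sketch of the same technique, where manifest_files and manifest_dirs stand in for the sets built above from the manifest:

import os

def find_unlisted_paths(root, manifest_files, manifest_dirs):
    unlisted = []
    for dirpath, dirnames, filenames in os.walk(root):
        for fname in filenames:
            full = os.path.join(dirpath, fname)
            if full not in manifest_files:
                unlisted.append(('file', os.path.relpath(full, root)))
        for dname in dirnames:
            full = os.path.join(dirpath, dname)
            if full not in manifest_dirs:
                unlisted.append(('dir', os.path.relpath(full, root)))
    return unlisted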
diff --git a/lib/ansible/galaxy/collection/concrete_artifact_manager.py b/lib/ansible/galaxy/collection/concrete_artifact_manager.py
index 7406b85b..73b392f4 100644
--- a/lib/ansible/galaxy/collection/concrete_artifact_manager.py
+++ b/lib/ansible/galaxy/collection/concrete_artifact_manager.py
@@ -66,8 +66,8 @@ class ConcreteArtifactsManager:
* retrieving the metadata out of the downloaded artifacts
"""
- def __init__(self, b_working_directory, validate_certs=True):
- # type: (bytes, bool) -> None
+ def __init__(self, b_working_directory, validate_certs=True, timeout=60):
+ # type: (bytes, bool, int) -> None
"""Initialize ConcreteArtifactsManager caches and costraints."""
self._validate_certs = validate_certs # type: bool
self._artifact_cache = {} # type: Dict[bytes, bytes]
@@ -75,6 +75,7 @@ class ConcreteArtifactsManager:
self._artifact_meta_cache = {} # type: Dict[bytes, Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]]
self._galaxy_collection_cache = {} # type: Dict[Union[Candidate, Requirement], Tuple[str, str, GalaxyToken]]
self._b_working_directory = b_working_directory # type: bytes
+ self.timeout = timeout # type: int
def get_galaxy_artifact_path(self, collection):
# type: (Union[Candidate, Requirement]) -> bytes
@@ -169,6 +170,7 @@ class ConcreteArtifactsManager:
self._b_working_directory,
expected_hash=None, # NOTE: URLs don't support checksums
validate_certs=self._validate_certs,
+ timeout=self.timeout
)
except URLError as err:
raise_from(
@@ -389,8 +391,8 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):
# FIXME: use random subdirs while preserving the file names
-def _download_file(url, b_path, expected_hash, validate_certs, token=None):
- # type: (str, bytes, Optional[str], bool, GalaxyToken) -> bytes
+def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60):
+ # type: (str, bytes, Optional[str], bool, GalaxyToken, int) -> bytes
# ^ NOTE: used in download and verify_collections ^
b_tarball_name = to_bytes(
url.rsplit('/', 1)[1], errors='surrogate_or_strict',
@@ -412,6 +414,7 @@ def _download_file(url, b_path, expected_hash, validate_certs, token=None):
validate_certs=validate_certs,
headers=None if token is None else token.headers(),
unredirected_headers=['Authorization'], http_agent=user_agent(),
+ timeout=timeout
)
with open(b_file_path, 'wb') as download_file: # type: BinaryIO
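_download_file streams the artifact to disk and, when a checksum is expected, verifies it; the change threads the manager's timeout down to that call. A standalone sketch of the download-then-verify pattern using only the standard library (not ansible-core's implementation, which goes through open_url):

import hashlib
import shutil
from urllib.request import urlopen

def download_and_verify(url, dest_path, expected_sha256=None, timeout=60):
    # Stream the response to disk; the timeout bounds the connection
    # and individual socket reads, not the total transfer time.
    with urlopen(url, timeout=timeout) as resp, open(dest_path, 'wb') as f:
        shutil.copyfileobj(resp, f)
    if expected_sha256 is not None:
        digest = hashlib.sha256()
        with open(dest_path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                digest.update(chunk)
        if digest.hexdigest() != expected_sha256:
            raise ValueError('checksum mismatch for %s' % url)
    return dest_path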
diff --git a/lib/ansible/modules/unarchive.py b/lib/ansible/modules/unarchive.py
index 0b265943..3eba055a 100644
--- a/lib/ansible/modules/unarchive.py
+++ b/lib/ansible/modules/unarchive.py
@@ -53,7 +53,7 @@ options:
description:
- Size of the volatile memory buffer that is used for extracting files from the archive in bytes.
type: int
- default: 64 KiB
+ default: 65536
version_added: "2.12"
list_files:
description:
@@ -305,7 +305,7 @@ class ZipArchive(object):
self.file_args = file_args
self.opts = module.params['extra_opts']
self.module = module
- self.io_buffer_size = module.params.get("io_buffer_size", 64 * 1024)
+ self.io_buffer_size = module.params["io_buffer_size"]
self.excludes = module.params['exclude']
self.includes = []
self.include_files = self.module.params['include']
@@ -977,6 +977,13 @@ def main():
include=dict(type='list', elements='str', default=[]),
extra_opts=dict(type='list', elements='str', default=[]),
validate_certs=dict(type='bool', default=True),
+ io_buffer_size=dict(type='int', default=64 * 1024),
+
+ # Options that are for the action plugin, but ignored by the module itself.
+ # We have them here so that the sanity tests pass without ignores, which
+ # reduces the likelihood of further bugs being introduced.
+ copy=dict(type='bool', default=True),
+ decrypt=dict(type='bool', default=True),
),
add_file_common_args=True,
# check-mode only works for zip files, we cover that later
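The root cause here: io_buffer_size was documented but never declared in the module's argument_spec, so a user-supplied value would be rejected as an unsupported parameter and the code always fell back to .get()'s default. Once declared with a default, validation guarantees the key is present, so plain indexing is safe. A minimal plain-Python illustration of that guarantee (a sketch, not AnsibleModule itself):

ARGUMENT_SPEC = {'io_buffer_size': {'type': 'int', 'default': 64 * 1024}}

def validate(user_params):
    # Fill in declared defaults, as argument-spec validation does.
    params = dict(user_params)
    for name, spec in ARGUMENT_SPEC.items():
        params.setdefault(name, spec.get('default'))
    return params

params = validate({})                      # user supplied nothing
assert params['io_buffer_size'] == 65536   # default is always present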
diff --git a/lib/ansible/parsing/yaml/dumper.py b/lib/ansible/parsing/yaml/dumper.py
index 65d35781..8701bb81 100644
--- a/lib/ansible/parsing/yaml/dumper.py
+++ b/lib/ansible/parsing/yaml/dumper.py
@@ -24,7 +24,7 @@ import yaml
from ansible.module_utils.six import text_type, binary_type
from ansible.module_utils.common.yaml import SafeDumper
from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping, AnsibleVaultEncryptedUnicode
-from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes
+from ansible.utils.unsafe_proxy import AnsibleUnsafeText, AnsibleUnsafeBytes, NativeJinjaUnsafeText, NativeJinjaText
from ansible.template import AnsibleUndefined
from ansible.vars.hostvars import HostVars, HostVarsVars
from ansible.vars.manager import VarsWithSources
@@ -110,3 +110,13 @@ AnsibleDumper.add_representer(
AnsibleUndefined,
represent_undefined,
)
+
+AnsibleDumper.add_representer(
+ NativeJinjaUnsafeText,
+ represent_unicode,
+)
+
+AnsibleDumper.add_representer(
+ NativeJinjaText,
+ represent_unicode,
+)
diff --git a/lib/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py
index 5d54959e..8a37a689 100644
--- a/lib/ansible/plugins/lookup/first_found.py
+++ b/lib/ansible/plugins/lookup/first_found.py
@@ -134,6 +134,7 @@ RETURN = """
elements: path
"""
import os
+import re
from jinja2.exceptions import UndefinedError
@@ -144,18 +145,13 @@ from ansible.plugins.lookup import LookupBase
def _split_on(terms, spliters=','):
-
- # TODO: fix as it does not allow spaces in names
termlist = []
if isinstance(terms, string_types):
- for spliter in spliters:
- terms = terms.replace(spliter, ' ')
- termlist = terms.split(' ')
+ termlist = re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms)
else:
# added since options will already listify
for t in terms:
termlist.extend(_split_on(t, spliters))
-
return termlist
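The rewrite resolves the TODO in the deleted lines: replacing every delimiter with a space and then splitting on spaces also broke apart file names that contain spaces. Splitting directly on a character class of the (escaped) delimiters leaves embedded spaces alone. A standalone demonstration:

import re

def split_on(terms, spliters=','):
    # Split only on the listed delimiter characters, nothing else.
    return re.split(r'[%s]' % ''.join(map(re.escape, spliters)), terms)

print(split_on('my file.yml,other.yml'))
# new behavior: ['my file.yml', 'other.yml']
# old behavior: ['my', 'file.yml', 'other.yml']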
diff --git a/lib/ansible/release.py b/lib/ansible/release.py
index 078520dc..61b91c1c 100644
--- a/lib/ansible/release.py
+++ b/lib/ansible/release.py
@@ -19,6 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-__version__ = '2.12.3'
+__version__ = '2.12.4'
__author__ = 'Ansible, Inc.'
__codename__ = 'Dazed and Confused'
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 3c27587c..741275d1 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -482,6 +482,7 @@ class JinjaPluginIntercept(MutableMapping):
# FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's
# aren't supposed to change during a run
def __getitem__(self, key):
+ original_key = key
self._load_ansible_plugins()
try:
@@ -496,56 +497,61 @@ class JinjaPluginIntercept(MutableMapping):
if func:
return func
- # didn't find it in the pre-built Jinja env, assume it's a former builtin and follow the normal routing path
- leaf_key = key
- key = 'ansible.builtin.' + key
- else:
- leaf_key = key.split('.')[-1]
+ key, leaf_key = get_fqcr_and_name(key)
+ seen = set()
+
+ while True:
+ if key in seen:
+ raise TemplateSyntaxError(
+ 'recursive collection redirect found for %r' % original_key,
+ 0
+ )
+ seen.add(key)
- acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
+ acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
- if not acr:
- raise KeyError('invalid plugin name: {0}'.format(key))
+ if not acr:
+ raise KeyError('invalid plugin name: {0}'.format(key))
- ts = _get_collection_metadata(acr.collection)
+ ts = _get_collection_metadata(acr.collection)
- # TODO: implement support for collection-backed redirect (currently only builtin)
- # TODO: implement cycle detection (unified across collection redir as well)
+ # TODO: implement cycle detection (unified across collection redir as well)
- routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {})
+ routing_entry = ts.get('plugin_routing', {}).get(self._dirname, {}).get(leaf_key, {})
- deprecation_entry = routing_entry.get('deprecation')
- if deprecation_entry:
- warning_text = deprecation_entry.get('warning_text')
- removal_date = deprecation_entry.get('removal_date')
- removal_version = deprecation_entry.get('removal_version')
+ deprecation_entry = routing_entry.get('deprecation')
+ if deprecation_entry:
+ warning_text = deprecation_entry.get('warning_text')
+ removal_date = deprecation_entry.get('removal_date')
+ removal_version = deprecation_entry.get('removal_version')
- if not warning_text:
- warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key)
+ if not warning_text:
+ warning_text = '{0} "{1}" is deprecated'.format(self._dirname, key)
- display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection)
+ display.deprecated(warning_text, version=removal_version, date=removal_date, collection_name=acr.collection)
- tombstone_entry = routing_entry.get('tombstone')
+ tombstone_entry = routing_entry.get('tombstone')
- if tombstone_entry:
- warning_text = tombstone_entry.get('warning_text')
- removal_date = tombstone_entry.get('removal_date')
- removal_version = tombstone_entry.get('removal_version')
+ if tombstone_entry:
+ warning_text = tombstone_entry.get('warning_text')
+ removal_date = tombstone_entry.get('removal_date')
+ removal_version = tombstone_entry.get('removal_version')
- if not warning_text:
- warning_text = '{0} "{1}" has been removed'.format(self._dirname, key)
+ if not warning_text:
+ warning_text = '{0} "{1}" has been removed'.format(self._dirname, key)
- exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date,
- collection_name=acr.collection, removed=True)
+ exc_msg = display.get_deprecation_message(warning_text, version=removal_version, date=removal_date,
+ collection_name=acr.collection, removed=True)
- raise AnsiblePluginRemovedError(exc_msg)
+ raise AnsiblePluginRemovedError(exc_msg)
- redirect_fqcr = routing_entry.get('redirect', None)
- if redirect_fqcr:
- acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname)
- display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource))
- key = redirect_fqcr
- # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs)
+ redirect = routing_entry.get('redirect', None)
+ if redirect:
+ next_key, leaf_key = get_fqcr_and_name(redirect, collection=acr.collection)
+ display.vvv('redirecting (type: {0}) {1}.{2} to {3}'.format(self._dirname, acr.collection, acr.resource, next_key))
+ key = next_key
+ else:
+ break
func = self._collection_jinja_func_cache.get(key)
@@ -620,6 +626,17 @@ class JinjaPluginIntercept(MutableMapping):
return len(self._delegatee)
+def get_fqcr_and_name(resource, collection='ansible.builtin'):
+ if '.' not in resource:
+ name = resource
+ fqcr = collection + '.' + resource
+ else:
+ name = resource.split('.')[-1]
+ fqcr = resource
+
+ return fqcr, name
+
+
class AnsibleEnvironment(Environment):
'''
Our custom environment, which simply allows us to override the class-level
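The rewritten __getitem__ replaces the single-step, builtin-only redirect with a loop that can follow collection-to-collection redirects and aborts on the first repeated key instead of spinning forever. A generic sketch of that loop shape, where routing stands in for the collections' plugin_routing tables and the names are illustrative:

def resolve(key, routing):
    seen = set()
    while True:
        if key in seen:
            raise RuntimeError('recursive redirect found for %r' % key)
        seen.add(key)
        redirect = routing.get(key)
        if redirect is None:
            return key          # no further redirect: resolved
        key = redirect

routing = {'a.b.old': 'a.b.new', 'a.b.new': 'a.b.old'}
# resolve('ns.coll.x', routing)  -> 'ns.coll.x' (no redirect entry)
# resolve('a.b.old', routing)    -> RuntimeError: recursive redirect found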