Diffstat (limited to 'lib/ansible/module_utils')
-rw-r--r--  lib/ansible/module_utils/_text.py | 1
-rw-r--r--  lib/ansible/module_utils/ansible_release.py | 4
-rw-r--r--  lib/ansible/module_utils/basic.py | 225
-rw-r--r--  lib/ansible/module_utils/common/_collections_compat.py | 56
-rw-r--r--  lib/ansible/module_utils/common/collections.py | 2
-rw-r--r--  lib/ansible/module_utils/common/dict_transformations.py | 2
-rw-r--r--  lib/ansible/module_utils/common/file.py | 109
-rw-r--r--  lib/ansible/module_utils/common/json.py | 4
-rw-r--r--  lib/ansible/module_utils/common/locale.py | 2
-rw-r--r--  lib/ansible/module_utils/common/parameters.py | 5
-rw-r--r--  lib/ansible/module_utils/common/respawn.py | 11
-rw-r--r--  lib/ansible/module_utils/common/text/converters.py | 17
-rw-r--r--  lib/ansible/module_utils/common/text/formatters.py | 2
-rw-r--r--  lib/ansible/module_utils/common/validation.py | 4
-rw-r--r--  lib/ansible/module_utils/common/yaml.py | 8
-rw-r--r--  lib/ansible/module_utils/compat/_selectors2.py | 10
-rw-r--r--  lib/ansible/module_utils/compat/importlib.py | 2
-rw-r--r--  lib/ansible/module_utils/compat/paramiko.py | 4
-rw-r--r--  lib/ansible/module_utils/compat/selectors.py | 3
-rw-r--r--  lib/ansible/module_utils/compat/selinux.py | 2
-rw-r--r--  lib/ansible/module_utils/compat/typing.py | 4
-rw-r--r--  lib/ansible/module_utils/connection.py | 2
-rw-r--r--  lib/ansible/module_utils/distro/_distro.py | 151
-rw-r--r--  lib/ansible/module_utils/facts/hardware/linux.py | 58
-rw-r--r--  lib/ansible/module_utils/facts/hardware/openbsd.py | 4
-rw-r--r--  lib/ansible/module_utils/facts/hardware/sunos.py | 4
-rw-r--r--  lib/ansible/module_utils/facts/network/fc_wwn.py | 10
-rw-r--r--  lib/ansible/module_utils/facts/network/iscsi.py | 1
-rw-r--r--  lib/ansible/module_utils/facts/network/linux.py | 40
-rw-r--r--  lib/ansible/module_utils/facts/network/nvme.py | 1
-rw-r--r--  lib/ansible/module_utils/facts/other/facter.py | 23
-rw-r--r--  lib/ansible/module_utils/facts/sysctl.py | 2
-rw-r--r--  lib/ansible/module_utils/facts/system/caps.py | 1
-rw-r--r--  lib/ansible/module_utils/facts/system/date_time.py | 4
-rw-r--r--  lib/ansible/module_utils/facts/system/distribution.py | 2
-rw-r--r--  lib/ansible/module_utils/facts/system/local.py | 8
-rw-r--r--  lib/ansible/module_utils/facts/system/pkg_mgr.py | 88
-rw-r--r--  lib/ansible/module_utils/facts/system/service_mgr.py | 6
-rw-r--r--  lib/ansible/module_utils/json_utils.py | 2
-rw-r--r--  lib/ansible/module_utils/parsing/convert_bool.py | 2
-rw-r--r--  lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 | 20
-rw-r--r--  lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Backup.psm1 | 2
-rw-r--r--  lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 | 10
-rw-r--r--  lib/ansible/module_utils/pycompat24.py | 40
-rw-r--r--  lib/ansible/module_utils/service.py | 15
-rw-r--r--  lib/ansible/module_utils/urls.py | 108
-rw-r--r--  lib/ansible/module_utils/yumdnf.py | 2
47 files changed, 495 insertions, 588 deletions
diff --git a/lib/ansible/module_utils/_text.py b/lib/ansible/module_utils/_text.py
index 6cd77217..f30a5e97 100644
--- a/lib/ansible/module_utils/_text.py
+++ b/lib/ansible/module_utils/_text.py
@@ -8,6 +8,7 @@ __metaclass__ = type
"""
# Backwards compat for people still calling it from this package
+# pylint: disable=unused-import
import codecs
from ansible.module_utils.six import PY3, text_type, binary_type
diff --git a/lib/ansible/module_utils/ansible_release.py b/lib/ansible/module_utils/ansible_release.py
index 5fc1bde1..f8530dc9 100644
--- a/lib/ansible/module_utils/ansible_release.py
+++ b/lib/ansible/module_utils/ansible_release.py
@@ -19,6 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-__version__ = '2.14.13'
+__version__ = '2.16.5'
__author__ = 'Ansible, Inc.'
-__codename__ = "C'mon Everybody"
+__codename__ = "All My Love"
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 67be9240..19ca0aaf 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -5,28 +5,20 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-FILE_ATTRIBUTES = {
- 'A': 'noatime',
- 'a': 'append',
- 'c': 'compressed',
- 'C': 'nocow',
- 'd': 'nodump',
- 'D': 'dirsync',
- 'e': 'extents',
- 'E': 'encrypted',
- 'h': 'blocksize',
- 'i': 'immutable',
- 'I': 'indexed',
- 'j': 'journalled',
- 'N': 'inline',
- 's': 'zero',
- 'S': 'synchronous',
- 't': 'notail',
- 'T': 'blockroot',
- 'u': 'undelete',
- 'X': 'compressedraw',
- 'Z': 'compresseddirty',
-}
+import sys
+
+# Used for determining if the system is running a new enough python version
+# and should only restrict on our documented minimum versions
+_PY3_MIN = sys.version_info >= (3, 6)
+_PY2_MIN = (2, 7) <= sys.version_info < (3,)
+_PY_MIN = _PY3_MIN or _PY2_MIN
+
+if not _PY_MIN:
+ print(
+ '\n{"failed": true, '
+ '"msg": "ansible-core requires a minimum of Python2 version 2.7 or Python3 version 3.6. Current version: %s"}' % ''.join(sys.version.splitlines())
+ )
+ sys.exit(1)
# Ansible modules can be written in any language.
# The functions available here can be used to do many common tasks,
@@ -49,7 +41,6 @@ import shutil
import signal
import stat
import subprocess
-import sys
import tempfile
import time
import traceback
@@ -101,43 +92,49 @@ from ansible.module_utils.common.text.formatters import (
SIZE_RANGES,
)
+import hashlib
+
+
+def _get_available_hash_algorithms():
+ """Return a dictionary of available hash function names and their associated function."""
+ try:
+ # Algorithms available in Python 2.7.9+ and Python 3.2+
+ # https://docs.python.org/2.7/library/hashlib.html#hashlib.algorithms_available
+ # https://docs.python.org/3.2/library/hashlib.html#hashlib.algorithms_available
+ algorithm_names = hashlib.algorithms_available
+ except AttributeError:
+ # Algorithms in Python 2.7.x (used only for Python 2.7.0 through 2.7.8)
+ # https://docs.python.org/2.7/library/hashlib.html#hashlib.hashlib.algorithms
+ algorithm_names = set(hashlib.algorithms)
+
+ algorithms = {}
+
+ for algorithm_name in algorithm_names:
+ algorithm_func = getattr(hashlib, algorithm_name, None)
+
+ if algorithm_func:
+ try:
+ # Make sure the algorithm is actually available for use.
+ # Not all algorithms listed as available are actually usable.
+ # For example, md5 is not available in FIPS mode.
+ algorithm_func()
+ except Exception:
+ pass
+ else:
+ algorithms[algorithm_name] = algorithm_func
+
+ return algorithms
+
+
+AVAILABLE_HASH_ALGORITHMS = _get_available_hash_algorithms()
+
try:
from ansible.module_utils.common._json_compat import json
except ImportError as e:
print('\n{{"msg": "Error: ansible requires the stdlib json: {0}", "failed": true}}'.format(to_native(e)))
sys.exit(1)
-
-AVAILABLE_HASH_ALGORITHMS = dict()
-try:
- import hashlib
-
- # python 2.7.9+ and 2.7.0+
- for attribute in ('available_algorithms', 'algorithms'):
- algorithms = getattr(hashlib, attribute, None)
- if algorithms:
- break
- if algorithms is None:
- # python 2.5+
- algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
- for algorithm in algorithms:
- AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
-
- # we may have been able to import md5 but it could still not be available
- try:
- hashlib.md5()
- except ValueError:
- AVAILABLE_HASH_ALGORITHMS.pop('md5', None)
-except Exception:
- import sha
- AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
- try:
- import md5
- AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
- except Exception:
- pass
-
-from ansible.module_utils.common._collections_compat import (
+from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Mapping, MutableMapping,
Sequence, MutableSequence,
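
The probe in _get_available_hash_algorithms() keeps only constructors that can actually be instantiated, so digests disabled at runtime (e.g. md5 under FIPS) drop out of AVAILABLE_HASH_ALGORITHMS. A minimal consumer sketch; the checksum() helper below is hypothetical and not part of this change:

    # Hypothetical consumer of AVAILABLE_HASH_ALGORITHMS: checksum a file with
    # the first preferred algorithm the host actually supports.
    from ansible.module_utils.basic import AVAILABLE_HASH_ALGORITHMS

    def checksum(path, preferred=('sha256', 'sha1')):
        for name in preferred:
            if name in AVAILABLE_HASH_ALGORITHMS:
                digest = AVAILABLE_HASH_ALGORITHMS[name]()
                with open(path, 'rb') as f:
                    for chunk in iter(lambda: f.read(65536), b''):
                        digest.update(chunk)
                return name, digest.hexdigest()
        raise ValueError('no preferred hash algorithm is available on this host')
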
@@ -152,6 +149,7 @@ from ansible.module_utils.common.file import (
is_executable,
format_attributes,
get_flags_from_attributes,
+ FILE_ATTRIBUTES,
)
from ansible.module_utils.common.sys_info import (
get_distribution,
@@ -203,14 +201,14 @@ imap = map
try:
# Python 2
- unicode # type: ignore[has-type] # pylint: disable=used-before-assignment
+ unicode # type: ignore[used-before-def] # pylint: disable=used-before-assignment
except NameError:
# Python 3
unicode = text_type
try:
# Python 2
- basestring # type: ignore[has-type] # pylint: disable=used-before-assignment
+ basestring # type: ignore[used-before-def,has-type] # pylint: disable=used-before-assignment
except NameError:
# Python 3
basestring = string_types
@@ -245,20 +243,8 @@ PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Used for parsing symbolic file perms
MODE_OPERATOR_RE = re.compile(r'[+=-]')
-USERS_RE = re.compile(r'[^ugo]')
-PERMS_RE = re.compile(r'[^rwxXstugo]')
-
-# Used for determining if the system is running a new enough python version
-# and should only restrict on our documented minimum versions
-_PY3_MIN = sys.version_info >= (3, 5)
-_PY2_MIN = (2, 7) <= sys.version_info < (3,)
-_PY_MIN = _PY3_MIN or _PY2_MIN
-if not _PY_MIN:
- print(
- '\n{"failed": true, '
- '"msg": "ansible-core requires a minimum of Python2 version 2.7 or Python3 version 3.5. Current version: %s"}' % ''.join(sys.version.splitlines())
- )
- sys.exit(1)
+USERS_RE = re.compile(r'^[ugo]+$')
+PERMS_RE = re.compile(r'^[rwxXstugo]*$')
#
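
With the regexes rewritten as anchored whitelists, the checks further down flip from "matched an illegal character" to "did not fully match". A small sketch of the new semantics (inputs are illustrative):

    # Illustrative check with the new whitelist regexes.
    import re

    USERS_RE = re.compile(r'^[ugo]+$')        # one or more of u, g, o
    PERMS_RE = re.compile(r'^[rwxXstugo]*$')  # zero or more permission characters

    assert USERS_RE.match('ug')        # valid user list
    assert not USERS_RE.match('uz')    # 'z' is rejected -> ValueError upstream
    assert PERMS_RE.match('rwX')       # valid permission string
    assert not PERMS_RE.match('rwq')   # 'q' is rejected -> ValueError upstream
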
@@ -1055,18 +1041,18 @@ class AnsibleModule(object):
# Check if there are illegal characters in the user list
# They can end up in 'users' because they are not split
- if USERS_RE.match(users):
+ if not USERS_RE.match(users):
raise ValueError("bad symbolic permission for mode: %s" % mode)
# Now we have two list of equal length, one contains the requested
# permissions and one with the corresponding operators.
for idx, perms in enumerate(permlist):
# Check if there are illegal characters in the permissions
- if PERMS_RE.match(perms):
+ if not PERMS_RE.match(perms):
raise ValueError("bad symbolic permission for mode: %s" % mode)
for user in users:
- mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask)
+ mode_to_apply = cls._get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, new_mode)
new_mode = cls._apply_operation_to_mode(user, opers[idx], mode_to_apply, new_mode)
return new_mode
@@ -1091,9 +1077,9 @@ class AnsibleModule(object):
return new_mode
@staticmethod
- def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask):
- prev_mode = stat.S_IMODE(path_stat.st_mode)
-
+ def _get_octal_mode_from_symbolic_perms(path_stat, user, perms, use_umask, prev_mode=None):
+ if prev_mode is None:
+ prev_mode = stat.S_IMODE(path_stat.st_mode)
is_directory = stat.S_ISDIR(path_stat.st_mode)
has_x_permissions = (prev_mode & EXEC_PERM_BITS) > 0
apply_X_permission = is_directory or has_x_permissions
@@ -1503,7 +1489,19 @@ class AnsibleModule(object):
if deprecations:
kwargs['deprecations'] = deprecations
+ # preserve bools/none from no_log
+ # TODO: once python version on target high enough, dict comprh
+ preserved = {}
+ for k, v in kwargs.items():
+ if v is None or isinstance(v, bool):
+ preserved[k] = v
+
+ # strip no_log collisions
kwargs = remove_values(kwargs, self.no_log_values)
+
+ # return preserved
+ kwargs.update(preserved)
+
print('\n%s' % self.jsonify(kwargs))
def exit_json(self, **kwargs):
@@ -1707,14 +1705,6 @@ class AnsibleModule(object):
tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=b'.ansible_tmp', dir=b_dest_dir, suffix=b_suffix)
except (OSError, IOError) as e:
error_msg = 'The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), to_native(e))
- except TypeError:
- # We expect that this is happening because python3.4.x and
- # below can't handle byte strings in mkstemp().
- # Traceback would end in something like:
- # file = _os.path.join(dir, pre + name + suf)
- # TypeError: can't concat bytes to str
- error_msg = ('Failed creating tmp file for atomic move. This usually happens when using Python3 less than Python3.5. '
- 'Please use Python2.x or Python3.5 or greater.')
finally:
if error_msg:
if unsafe_writes:
@@ -1844,6 +1834,14 @@ class AnsibleModule(object):
'''
Execute a command, returns rc, stdout, and stderr.
+ The mechanism of this method for reading stdout and stderr differs from
+ that of CPython subprocess.Popen.communicate, in that this method will
+ stop reading once the spawned command has exited and stdout and stderr
+ have been consumed, as opposed to waiting until stdout/stderr are
+ closed. This can be an important distinction, when taken into account
+ that a forked or backgrounded process may hold stdout or stderr open
+ for longer than the spawned command.
+
:arg args: is the command to run
* If args is a list, the command will be run with shell=False.
* If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
@@ -2023,53 +2021,64 @@ class AnsibleModule(object):
if before_communicate_callback:
before_communicate_callback(cmd)
- # the communication logic here is essentially taken from that
- # of the _communicate() function in ssh.py
-
stdout = b''
stderr = b''
- try:
- selector = selectors.DefaultSelector()
- except (IOError, OSError):
- # Failed to detect default selector for the given platform
- # Select PollSelector which is supported by major platforms
+
+ # Mirror the CPython subprocess logic and preference for the selector to use.
+ # poll/select have the advantage of not requiring any extra file
+ # descriptor, contrarily to epoll/kqueue (also, they require a single
+ # syscall).
+ if hasattr(selectors, 'PollSelector'):
selector = selectors.PollSelector()
+ else:
+ selector = selectors.SelectSelector()
+
+ if data:
+ if not binary_data:
+ data += '\n'
+ if isinstance(data, text_type):
+ data = to_bytes(data)
selector.register(cmd.stdout, selectors.EVENT_READ)
selector.register(cmd.stderr, selectors.EVENT_READ)
+
if os.name == 'posix':
fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_SETFL, fcntl.fcntl(cmd.stderr.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK)
if data:
- if not binary_data:
- data += '\n'
- if isinstance(data, text_type):
- data = to_bytes(data)
cmd.stdin.write(data)
cmd.stdin.close()
while True:
+ # A timeout of 1 is both a little short and a little long.
+ # With None we could deadlock, with a lower value we would
+ # waste cycles. As it is, this is a mild inconvenience if
+ # we need to exit, and likely doesn't waste too many cycles
events = selector.select(1)
+ stdout_changed = False
for key, event in events:
- b_chunk = key.fileobj.read()
- if b_chunk == b(''):
+ b_chunk = key.fileobj.read(32768)
+ if not b_chunk:
selector.unregister(key.fileobj)
- if key.fileobj == cmd.stdout:
+ elif key.fileobj == cmd.stdout:
stdout += b_chunk
+ stdout_changed = True
elif key.fileobj == cmd.stderr:
stderr += b_chunk
- # if we're checking for prompts, do it now
- if prompt_re:
- if prompt_re.search(stdout) and not data:
- if encoding:
- stdout = to_native(stdout, encoding=encoding, errors=errors)
- return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
- # only break out if no pipes are left to read or
- # the pipes are completely read and
- # the process is terminated
+
+ # if we're checking for prompts, do it now, but only if stdout
+ # actually changed since the last loop
+ if prompt_re and stdout_changed and prompt_re.search(stdout) and not data:
+ if encoding:
+ stdout = to_native(stdout, encoding=encoding, errors=errors)
+ return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
+
+ # break out if no pipes are left to read or the pipes are completely read
+ # and the process is terminated
if (not events or not selector.get_map()) and cmd.poll() is not None:
break
+
# No pipes are left to read but process is not yet terminated
# Only then it is safe to wait for the process to be finished
# NOTE: Actually cmd.poll() is always None here if no selectors are left
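
The rewritten loop mirrors CPython's selector preference (poll, then select), switches the pipes to non-blocking, and reads bounded 32 KiB chunks instead of unbounded read(). A standalone, simplified sketch of the same pattern, POSIX-only and not ansible's actual run_command:

    # Simplified sketch of the selector-based read loop (POSIX only).
    import fcntl
    import os
    import selectors
    import subprocess

    def run_and_capture(args):
        proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if hasattr(selectors, 'PollSelector'):
            selector = selectors.PollSelector()
        else:
            selector = selectors.SelectSelector()
        for pipe in (proc.stdout, proc.stderr):
            # non-blocking pipes so read(32768) returns whatever is currently buffered
            flags = fcntl.fcntl(pipe.fileno(), fcntl.F_GETFL)
            fcntl.fcntl(pipe.fileno(), fcntl.F_SETFL, flags | os.O_NONBLOCK)
            selector.register(pipe, selectors.EVENT_READ)
        stdout = stderr = b''
        while True:
            events = selector.select(1)
            for key, dummy in events:
                chunk = key.fileobj.read(32768)
                if not chunk:
                    selector.unregister(key.fileobj)  # EOF on this pipe
                elif key.fileobj is proc.stdout:
                    stdout += chunk
                else:
                    stderr += chunk
            # stop once both pipes are drained and the child has exited
            if (not events or not selector.get_map()) and proc.poll() is not None:
                break
        selector.close()
        return proc.wait(), stdout, stderr
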
diff --git a/lib/ansible/module_utils/common/_collections_compat.py b/lib/ansible/module_utils/common/_collections_compat.py
index 3412408f..f0f8f0d0 100644
--- a/lib/ansible/module_utils/common/_collections_compat.py
+++ b/lib/ansible/module_utils/common/_collections_compat.py
@@ -2,45 +2,27 @@
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Collections ABC import shim.
-This module is intended only for internal use.
-It will go away once the bundled copy of six includes equivalent functionality.
-Third parties should not use this.
+Use `ansible.module_utils.six.moves.collections_abc` instead, which has been available since ansible-core 2.11.
+This module exists only for backwards compatibility.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-try:
- """Python 3.3+ branch."""
- from collections.abc import (
- MappingView,
- ItemsView,
- KeysView,
- ValuesView,
- Mapping, MutableMapping,
- Sequence, MutableSequence,
- Set, MutableSet,
- Container,
- Hashable,
- Sized,
- Callable,
- Iterable,
- Iterator,
- )
-except ImportError:
- """Use old lib location under 2.6-3.2."""
- from collections import ( # type: ignore[no-redef,attr-defined] # pylint: disable=deprecated-class
- MappingView,
- ItemsView,
- KeysView,
- ValuesView,
- Mapping, MutableMapping,
- Sequence, MutableSequence,
- Set, MutableSet,
- Container,
- Hashable,
- Sized,
- Callable,
- Iterable,
- Iterator,
- )
+# Although this was originally intended for internal use only, it has wide adoption in collections.
+# This is due in part to sanity tests previously recommending its use over `collections` imports.
+from ansible.module_utils.six.moves.collections_abc import ( # pylint: disable=unused-import
+ MappingView,
+ ItemsView,
+ KeysView,
+ ValuesView,
+ Mapping, MutableMapping,
+ Sequence, MutableSequence,
+ Set, MutableSet,
+ Container,
+ Hashable,
+ Sized,
+ Callable,
+ Iterable,
+ Iterator,
+)
diff --git a/lib/ansible/module_utils/common/collections.py b/lib/ansible/module_utils/common/collections.py
index fdb91081..06f08a82 100644
--- a/lib/ansible/module_utils/common/collections.py
+++ b/lib/ansible/module_utils/common/collections.py
@@ -8,7 +8,7 @@ __metaclass__ = type
from ansible.module_utils.six import binary_type, text_type
-from ansible.module_utils.common._collections_compat import Hashable, Mapping, MutableMapping, Sequence
+from ansible.module_utils.six.moves.collections_abc import Hashable, Mapping, MutableMapping, Sequence # pylint: disable=unused-import
class ImmutableDict(Hashable, Mapping):
diff --git a/lib/ansible/module_utils/common/dict_transformations.py b/lib/ansible/module_utils/common/dict_transformations.py
index ffd0645f..9ee7878f 100644
--- a/lib/ansible/module_utils/common/dict_transformations.py
+++ b/lib/ansible/module_utils/common/dict_transformations.py
@@ -10,7 +10,7 @@ __metaclass__ = type
import re
from copy import deepcopy
-from ansible.module_utils.common._collections_compat import MutableMapping
+from ansible.module_utils.six.moves.collections_abc import MutableMapping
def camel_dict_to_snake_dict(camel_dict, reversible=False, ignore_list=()):
diff --git a/lib/ansible/module_utils/common/file.py b/lib/ansible/module_utils/common/file.py
index 1e836607..72b0d2cf 100644
--- a/lib/ansible/module_utils/common/file.py
+++ b/lib/ansible/module_utils/common/file.py
@@ -4,25 +4,12 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import errno
import os
import stat
import re
-import pwd
-import grp
-import time
-import shutil
-import traceback
-import fcntl
-import sys
-
-from contextlib import contextmanager
-from ansible.module_utils._text import to_bytes, to_native, to_text
-from ansible.module_utils.six import b, binary_type
-from ansible.module_utils.common.warnings import deprecate
try:
- import selinux
+ import selinux # pylint: disable=unused-import
HAVE_SELINUX = True
except ImportError:
HAVE_SELINUX = False
@@ -109,97 +96,3 @@ def get_file_arg_spec():
attributes=dict(aliases=['attr']),
)
return arg_spec
-
-
-class LockTimeout(Exception):
- pass
-
-
-class FileLock:
- '''
- Currently FileLock is implemented via fcntl.flock on a lock file, however this
- behaviour may change in the future. Avoid mixing lock types fcntl.flock,
- fcntl.lockf and module_utils.common.file.FileLock as it will certainly cause
- unwanted and/or unexpected behaviour
- '''
- def __init__(self):
- deprecate("FileLock is not reliable and has never been used in core for that reason. There is no current alternative that works across POSIX targets",
- version='2.16')
- self.lockfd = None
-
- @contextmanager
- def lock_file(self, path, tmpdir, lock_timeout=None):
- '''
- Context for lock acquisition
- '''
- try:
- self.set_lock(path, tmpdir, lock_timeout)
- yield
- finally:
- self.unlock()
-
- def set_lock(self, path, tmpdir, lock_timeout=None):
- '''
- Create a lock file based on path with flock to prevent other processes
- using given path.
- Please note that currently file locking only works when it's executed by
- the same user, I.E single user scenarios
-
- :kw path: Path (file) to lock
- :kw tmpdir: Path where to place the temporary .lock file
- :kw lock_timeout:
- Wait n seconds for lock acquisition, fail if timeout is reached.
- 0 = Do not wait, fail if lock cannot be acquired immediately,
- Default is None, wait indefinitely until lock is released.
- :returns: True
- '''
- lock_path = os.path.join(tmpdir, 'ansible-{0}.lock'.format(os.path.basename(path)))
- l_wait = 0.1
- r_exception = IOError
- if sys.version_info[0] == 3:
- r_exception = BlockingIOError
-
- self.lockfd = open(lock_path, 'w')
-
- if lock_timeout <= 0:
- fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
- os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
- return True
-
- if lock_timeout:
- e_secs = 0
- while e_secs < lock_timeout:
- try:
- fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
- os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
- return True
- except r_exception:
- time.sleep(l_wait)
- e_secs += l_wait
- continue
-
- self.lockfd.close()
- raise LockTimeout('{0} sec'.format(lock_timeout))
-
- fcntl.flock(self.lockfd, fcntl.LOCK_EX)
- os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
-
- return True
-
- def unlock(self):
- '''
- Make sure lock file is available for everyone and Unlock the file descriptor
- locked by set_lock
-
- :returns: True
- '''
- if not self.lockfd:
- return True
-
- try:
- fcntl.flock(self.lockfd, fcntl.LOCK_UN)
- self.lockfd.close()
- except ValueError: # file wasn't opened, let context manager fail gracefully
- pass
-
- return True
diff --git a/lib/ansible/module_utils/common/json.py b/lib/ansible/module_utils/common/json.py
index c4333fc1..639e7b90 100644
--- a/lib/ansible/module_utils/common/json.py
+++ b/lib/ansible/module_utils/common/json.py
@@ -10,8 +10,8 @@ import json
import datetime
-from ansible.module_utils._text import to_text
-from ansible.module_utils.common._collections_compat import Mapping
+from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils.six.moves.collections_abc import Mapping
from ansible.module_utils.common.collections import is_sequence
diff --git a/lib/ansible/module_utils/common/locale.py b/lib/ansible/module_utils/common/locale.py
index a6068c86..08216f59 100644
--- a/lib/ansible/module_utils/common/locale.py
+++ b/lib/ansible/module_utils/common/locale.py
@@ -4,7 +4,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
def get_best_parsable_locale(module, preferences=None, raise_on_locale=False):
diff --git a/lib/ansible/module_utils/common/parameters.py b/lib/ansible/module_utils/common/parameters.py
index 059ca0af..386eb875 100644
--- a/lib/ansible/module_utils/common/parameters.py
+++ b/lib/ansible/module_utils/common/parameters.py
@@ -13,7 +13,6 @@ from itertools import chain
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
-from ansible.module_utils.common.text.formatters import lenient_lowercase
from ansible.module_utils.common.warnings import warn
from ansible.module_utils.errors import (
AliasError,
@@ -33,7 +32,7 @@ from ansible.module_utils.errors import (
)
from ansible.module_utils.parsing.convert_bool import BOOLEANS_FALSE, BOOLEANS_TRUE
-from ansible.module_utils.common._collections_compat import (
+from ansible.module_utils.six.moves.collections_abc import (
KeysView,
Set,
Sequence,
@@ -610,7 +609,7 @@ def _validate_argument_types(argument_spec, parameters, prefix='', options_conte
continue
value = parameters[param]
- if value is None:
+ if value is None and not spec.get('required') and spec.get('default') is None:
continue
wanted_type = spec.get('type')
diff --git a/lib/ansible/module_utils/common/respawn.py b/lib/ansible/module_utils/common/respawn.py
index 3bc526af..3e209ca0 100644
--- a/lib/ansible/module_utils/common/respawn.py
+++ b/lib/ansible/module_utils/common/respawn.py
@@ -8,7 +8,7 @@ import os
import subprocess
import sys
-from ansible.module_utils.common.text.converters import to_bytes, to_native
+from ansible.module_utils.common.text.converters import to_bytes
def has_respawned():
@@ -79,10 +79,9 @@ def _create_payload():
import runpy
import sys
-module_fqn = '{module_fqn}'
-modlib_path = '{modlib_path}'
-smuggled_args = b"""{smuggled_args}""".strip()
-
+module_fqn = {module_fqn!r}
+modlib_path = {modlib_path!r}
+smuggled_args = {smuggled_args!r}
if __name__ == '__main__':
sys.path.insert(0, modlib_path)
@@ -93,6 +92,6 @@ if __name__ == '__main__':
runpy.run_module(module_fqn, init_globals=dict(_respawned=True), run_name='__main__', alter_sys=True)
'''
- respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=to_native(smuggled_args))
+ respawn_code = respawn_code_template.format(module_fqn=module_fqn, modlib_path=modlib_path, smuggled_args=smuggled_args.strip())
return respawn_code
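
Formatting with !r embeds each value as a complete, escaped Python literal, so quotes or newlines inside smuggled_args can no longer break out of the generated payload. A tiny illustration (the value is made up):

    # repr() yields a fully escaped literal, safe to embed in the payload source.
    args = b'line one\nline "two" with \'quotes\''
    print('smuggled_args = {args!r}'.format(args=args))
    # -> smuggled_args = b'line one\nline "two" with \'quotes\''
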
diff --git a/lib/ansible/module_utils/common/text/converters.py b/lib/ansible/module_utils/common/text/converters.py
index 5b25df47..5b41315b 100644
--- a/lib/ansible/module_utils/common/text/converters.py
+++ b/lib/ansible/module_utils/common/text/converters.py
@@ -10,7 +10,7 @@ import codecs
import datetime
import json
-from ansible.module_utils.common._collections_compat import Set
+from ansible.module_utils.six.moves.collections_abc import Set
from ansible.module_utils.six import (
PY3,
binary_type,
@@ -168,7 +168,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
handler, otherwise it will use replace.
:surrogate_then_replace: Does the same as surrogate_or_replace but
`was added for symmetry with the error handlers in
- :func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3)
+ :func:`ansible.module_utils.common.text.converters.to_bytes` (Added in Ansible 2.3)
Because surrogateescape was added in Python3 this usually means that
Python3 will use `surrogateescape` and Python2 will use the fallback
@@ -179,7 +179,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
The default until Ansible-2.2 was `surrogate_or_replace`
In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry
- with :func:`ansible.module_utils._text.to_bytes` .
+ with :func:`ansible.module_utils.common.text.converters.to_bytes` .
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
@@ -268,18 +268,13 @@ def _json_encode_fallback(obj):
def jsonify(data, **kwargs):
+ # After 2.18, we should remove this loop, and hardcode to utf-8 in alignment with requiring utf-8 module responses
for encoding in ("utf-8", "latin-1"):
try:
- return json.dumps(data, encoding=encoding, default=_json_encode_fallback, **kwargs)
- # Old systems using old simplejson module does not support encoding keyword.
- except TypeError:
- try:
- new_data = container_to_text(data, encoding=encoding)
- except UnicodeDecodeError:
- continue
- return json.dumps(new_data, default=_json_encode_fallback, **kwargs)
+ new_data = container_to_text(data, encoding=encoding)
except UnicodeDecodeError:
continue
+ return json.dumps(new_data, default=_json_encode_fallback, **kwargs)
raise UnicodeError('Invalid unicode encoding encountered')
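
With the simplejson-era TypeError branch gone, jsonify() now always recodes the container to text (trying utf-8, then latin-1) before a single json.dumps call. Minimal usage sketch:

    # Behavioural sketch: byte values in the container are recoded to text, then dumped.
    from ansible.module_utils.common.text.converters import jsonify

    print(jsonify({'path': b'/tmp/caf\xc3\xa9'}))  # bytes come out as text in the JSON
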
diff --git a/lib/ansible/module_utils/common/text/formatters.py b/lib/ansible/module_utils/common/text/formatters.py
index 94ca5a3d..0c3d4951 100644
--- a/lib/ansible/module_utils/common/text/formatters.py
+++ b/lib/ansible/module_utils/common/text/formatters.py
@@ -67,7 +67,7 @@ def human_to_bytes(number, default_unit=None, isbits=False):
unit = default_unit
if unit is None:
- ''' No unit given, returning raw number '''
+ # No unit given, returning raw number
return int(round(num))
range_key = unit[0].upper()
try:
diff --git a/lib/ansible/module_utils/common/validation.py b/lib/ansible/module_utils/common/validation.py
index 5a4cebbc..cc547899 100644
--- a/lib/ansible/module_utils/common/validation.py
+++ b/lib/ansible/module_utils/common/validation.py
@@ -9,7 +9,7 @@ import os
import re
from ast import literal_eval
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.common._json_compat import json
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.text.converters import jsonify
@@ -381,7 +381,7 @@ def check_type_str(value, allow_conversion=True, param=None, prefix=''):
if isinstance(value, string_types):
return value
- if allow_conversion:
+ if allow_conversion and value is not None:
return to_native(value, errors='surrogate_or_strict')
msg = "'{0!r}' is not a string and conversion is not allowed".format(value)
diff --git a/lib/ansible/module_utils/common/yaml.py b/lib/ansible/module_utils/common/yaml.py
index e79cc096..b4d766bb 100644
--- a/lib/ansible/module_utils/common/yaml.py
+++ b/lib/ansible/module_utils/common/yaml.py
@@ -24,13 +24,13 @@ if HAS_YAML:
try:
from yaml import CSafeLoader as SafeLoader
from yaml import CSafeDumper as SafeDumper
- from yaml.cyaml import CParser as Parser
+ from yaml.cyaml import CParser as Parser # type: ignore[attr-defined] # pylint: disable=unused-import
HAS_LIBYAML = True
except (ImportError, AttributeError):
- from yaml import SafeLoader # type: ignore[misc]
- from yaml import SafeDumper # type: ignore[misc]
- from yaml.parser import Parser # type: ignore[misc]
+ from yaml import SafeLoader # type: ignore[assignment]
+ from yaml import SafeDumper # type: ignore[assignment]
+ from yaml.parser import Parser # type: ignore[assignment] # pylint: disable=unused-import
yaml_load = _partial(_yaml.load, Loader=SafeLoader)
yaml_load_all = _partial(_yaml.load_all, Loader=SafeLoader)
diff --git a/lib/ansible/module_utils/compat/_selectors2.py b/lib/ansible/module_utils/compat/_selectors2.py
index be44b4b3..4a4fcc32 100644
--- a/lib/ansible/module_utils/compat/_selectors2.py
+++ b/lib/ansible/module_utils/compat/_selectors2.py
@@ -25,7 +25,7 @@ import socket
import sys
import time
from collections import namedtuple
-from ansible.module_utils.common._collections_compat import Mapping
+from ansible.module_utils.six.moves.collections_abc import Mapping
try:
monotonic = time.monotonic
@@ -81,7 +81,7 @@ def _fileobj_to_fd(fileobj):
# Python 3.5 uses a more direct route to wrap system calls to increase speed.
if sys.version_info >= (3, 5):
- def _syscall_wrapper(func, _, *args, **kwargs):
+ def _syscall_wrapper(func, dummy, *args, **kwargs):
""" This is the short-circuit version of the below logic
because in Python 3.5+ all selectors restart system calls. """
try:
@@ -342,8 +342,8 @@ if hasattr(select, "select"):
timeout = None if timeout is None else max(timeout, 0.0)
ready = []
- r, w, _ = _syscall_wrapper(self._select, True, self._readers,
- self._writers, timeout=timeout)
+ r, w, dummy = _syscall_wrapper(self._select, True, self._readers,
+ self._writers, timeout=timeout)
r = set(r)
w = set(w)
for fd in r | w:
@@ -649,7 +649,7 @@ elif 'PollSelector' in globals(): # Platform-specific: Linux
elif 'SelectSelector' in globals(): # Platform-specific: Windows
DefaultSelector = SelectSelector
else: # Platform-specific: AppEngine
- def no_selector(_):
+ def no_selector(dummy):
raise ValueError("Platform does not have a selector")
DefaultSelector = no_selector
HAS_SELECT = False
diff --git a/lib/ansible/module_utils/compat/importlib.py b/lib/ansible/module_utils/compat/importlib.py
index 0b7fb2c7..a3dca6b2 100644
--- a/lib/ansible/module_utils/compat/importlib.py
+++ b/lib/ansible/module_utils/compat/importlib.py
@@ -8,7 +8,7 @@ __metaclass__ = type
import sys
try:
- from importlib import import_module
+ from importlib import import_module # pylint: disable=unused-import
except ImportError:
# importlib.import_module returns the tail
# whereas __import__ returns the head
diff --git a/lib/ansible/module_utils/compat/paramiko.py b/lib/ansible/module_utils/compat/paramiko.py
index 85478eae..095dfa50 100644
--- a/lib/ansible/module_utils/compat/paramiko.py
+++ b/lib/ansible/module_utils/compat/paramiko.py
@@ -5,7 +5,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import types
+import types # pylint: disable=unused-import
import warnings
PARAMIKO_IMPORT_ERR = None
@@ -13,7 +13,7 @@ PARAMIKO_IMPORT_ERR = None
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', message='Blowfish has been deprecated', category=UserWarning)
- import paramiko
+ import paramiko # pylint: disable=unused-import
# paramiko and gssapi are incompatible and raise AttributeError not ImportError
# When running in FIPS mode, cryptography raises InternalError
# https://bugzilla.redhat.com/show_bug.cgi?id=1778939
diff --git a/lib/ansible/module_utils/compat/selectors.py b/lib/ansible/module_utils/compat/selectors.py
index 93ffc626..0c4adc9f 100644
--- a/lib/ansible/module_utils/compat/selectors.py
+++ b/lib/ansible/module_utils/compat/selectors.py
@@ -35,9 +35,8 @@ _BUNDLED_METADATA = {"pypi_name": "selectors2", "version": "1.1.1", "version_con
# Fix use of OSError exception for py3 and use the wrapper of kqueue.control so retries of
# interrupted syscalls work with kqueue
-import os.path
import sys
-import types
+import types # pylint: disable=unused-import
try:
# Python 3.4+
diff --git a/lib/ansible/module_utils/compat/selinux.py b/lib/ansible/module_utils/compat/selinux.py
index 7191713c..ca58098a 100644
--- a/lib/ansible/module_utils/compat/selinux.py
+++ b/lib/ansible/module_utils/compat/selinux.py
@@ -62,7 +62,7 @@ def _module_setup():
fn.restype = cfg.get('restype', c_int)
# just patch simple directly callable functions directly onto the module
- if not fn.argtypes or not any(argtype for argtype in fn.argtypes if type(argtype) == base_ptr_type):
+ if not fn.argtypes or not any(argtype for argtype in fn.argtypes if type(argtype) is base_ptr_type):
setattr(_thismod, fname, fn)
continue
diff --git a/lib/ansible/module_utils/compat/typing.py b/lib/ansible/module_utils/compat/typing.py
index 27b25f77..94b1dee7 100644
--- a/lib/ansible/module_utils/compat/typing.py
+++ b/lib/ansible/module_utils/compat/typing.py
@@ -13,13 +13,13 @@ except Exception: # pylint: disable=broad-except
pass
try:
- from typing import * # type: ignore[misc]
+ from typing import * # type: ignore[assignment,no-redef]
except Exception: # pylint: disable=broad-except
pass
try:
- cast
+ cast # type: ignore[used-before-def]
except NameError:
def cast(typ, val): # type: ignore[no-redef]
return val
diff --git a/lib/ansible/module_utils/connection.py b/lib/ansible/module_utils/connection.py
index 1396c1c1..e4e507db 100644
--- a/lib/ansible/module_utils/connection.py
+++ b/lib/ansible/module_utils/connection.py
@@ -38,7 +38,7 @@ import traceback
import uuid
from functools import partial
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
from ansible.module_utils.common.json import AnsibleJSONEncoder
from ansible.module_utils.six import iteritems
from ansible.module_utils.six.moves import cPickle
diff --git a/lib/ansible/module_utils/distro/_distro.py b/lib/ansible/module_utils/distro/_distro.py
index 58e41d4e..19262a41 100644
--- a/lib/ansible/module_utils/distro/_distro.py
+++ b/lib/ansible/module_utils/distro/_distro.py
@@ -31,6 +31,8 @@ access to OS distribution information is needed. See `Python issue 1322
<https://bugs.python.org/issue1322>`_ for more information.
"""
+import argparse
+import json
import logging
import os
import re
@@ -136,56 +138,6 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = (
)
-#
-# Python 2.6 does not have subprocess.check_output so replicate it here
-#
-def _my_check_output(*popenargs, **kwargs):
- r"""Run command with arguments and return its output as a byte string.
-
- If the exit code was non-zero it raises a CalledProcessError. The
- CalledProcessError object will have the return code in the returncode
- attribute and output in the output attribute.
-
- The arguments are the same as for the Popen constructor. Example:
-
- >>> check_output(["ls", "-l", "/dev/null"])
- 'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
-
- The stdout argument is not allowed as it is used internally.
- To capture standard error in the result, use stderr=STDOUT.
-
- >>> check_output(["/bin/sh", "-c",
- ... "ls -l non_existent_file ; exit 0"],
- ... stderr=STDOUT)
- 'ls: non_existent_file: No such file or directory\n'
-
- This is a backport of Python-2.7's check output to Python-2.6
- """
- if 'stdout' in kwargs:
- raise ValueError(
- 'stdout argument not allowed, it will be overridden.'
- )
- process = subprocess.Popen(
- stdout=subprocess.PIPE, *popenargs, **kwargs
- )
- output, unused_err = process.communicate()
- retcode = process.poll()
- if retcode:
- cmd = kwargs.get("args")
- if cmd is None:
- cmd = popenargs[0]
- # Deviation from Python-2.7: Python-2.6's CalledProcessError does not
- # have an argument for the stdout so simply omit it.
- raise subprocess.CalledProcessError(retcode, cmd)
- return output
-
-
-try:
- _check_output = subprocess.check_output
-except AttributeError:
- _check_output = _my_check_output
-
-
def linux_distribution(full_distribution_name=True):
# type: (bool) -> Tuple[str, str, str]
"""
@@ -204,7 +156,8 @@ def linux_distribution(full_distribution_name=True):
* ``version``: The result of :func:`distro.version`.
- * ``codename``: The result of :func:`distro.codename`.
+ * ``codename``: The extra item (usually in parentheses) after the
+ os-release version number, or the result of :func:`distro.codename`.
The interface of this function is compatible with the original
:py:func:`platform.linux_distribution` function, supporting a subset of
@@ -251,8 +204,9 @@ def id():
"fedora" Fedora
"sles" SUSE Linux Enterprise Server
"opensuse" openSUSE
- "amazon" Amazon Linux
+ "amzn" Amazon Linux
"arch" Arch Linux
+ "buildroot" Buildroot
"cloudlinux" CloudLinux OS
"exherbo" Exherbo Linux
"gentoo" GenToo Linux
@@ -272,6 +226,8 @@ def id():
"netbsd" NetBSD
"freebsd" FreeBSD
"midnightbsd" MidnightBSD
+ "rocky" Rocky Linux
+ "guix" Guix System
============== =========================================
If you have a need to get distros for reliable IDs added into this set,
@@ -366,6 +322,10 @@ def version(pretty=False, best=False):
sources in a fixed priority order does not always yield the most precise
version (e.g. for Debian 8.2, or CentOS 7.1).
+ Some other distributions may not provide this kind of information. In these
+ cases, an empty string would be returned. This behavior can be observed
+ with rolling releases distributions (e.g. Arch Linux).
+
The *best* parameter can be used to control the approach for the returned
version:
@@ -681,7 +641,7 @@ except ImportError:
def __get__(self, obj, owner):
# type: (Any, Type[Any]) -> Any
- assert obj is not None, "call {0} on an instance".format(self._fname)
+ assert obj is not None, "call {} on an instance".format(self._fname)
ret = obj.__dict__[self._fname] = self._f(obj)
return ret
@@ -776,10 +736,6 @@ class LinuxDistribution(object):
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
release file.
- * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
- some issue (other than not being available in the program execution
- path).
-
* :py:exc:`UnicodeError`: A data source has unexpected characters or
uses an unexpected encoding.
"""
@@ -837,7 +793,7 @@ class LinuxDistribution(object):
return (
self.name() if full_distribution_name else self.id(),
self.version(),
- self.codename(),
+ self._os_release_info.get("release_codename") or self.codename(),
)
def id(self):
@@ -913,6 +869,9 @@ class LinuxDistribution(object):
).get("version_id", ""),
self.uname_attr("release"),
]
+ if self.id() == "debian" or "debian" in self.like().split():
+ # On Debian-like, add debian_version file content to candidates list.
+ versions.append(self._debian_version)
version = ""
if best:
# This algorithm uses the last version in priority order that has
@@ -1155,12 +1114,17 @@ class LinuxDistribution(object):
# stripped, etc.), so the tokens are now either:
# * variable assignments: var=value
# * commands or their arguments (not allowed in os-release)
+ # Ignore any tokens that are not variable assignments
if "=" in token:
k, v = token.split("=", 1)
props[k.lower()] = v
- else:
- # Ignore any tokens that are not variable assignments
- pass
+
+ if "version" in props:
+ # extract release codename (if any) from version attribute
+ match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
+ if match:
+ release_codename = match.group(1) or match.group(2)
+ props["codename"] = props["release_codename"] = release_codename
if "version_codename" in props:
# os-release added a version_codename field. Use that in
@@ -1171,16 +1135,6 @@ class LinuxDistribution(object):
elif "ubuntu_codename" in props:
# Same as above but a non-standard field name used on older Ubuntus
props["codename"] = props["ubuntu_codename"]
- elif "version" in props:
- # If there is no version_codename, parse it from the version
- match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
- if match:
- codename = match.group()
- codename = codename.strip("()")
- codename = codename.strip(",")
- codename = codename.strip()
- # codename appears within paranthese.
- props["codename"] = codename
return props
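
The codename is now taken from a capture group (parenthesised or comma-separated) and stored under both codename and release_codename. Sample VERSION values below are illustrative, not from this change:

    # What the new capture-group regex extracts from sample VERSION strings.
    import re

    pattern = re.compile(r"\((\D+)\)|,\s*(\D+)")
    for version in ('12 (bookworm)', '7.9.2009 (Core)', '20.3, Una'):
        match = pattern.search(version)
        print(version, '->', match.group(1) or match.group(2))
    # 12 (bookworm)   -> bookworm
    # 7.9.2009 (Core) -> Core
    # 20.3, Una       -> Una
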
@@ -1198,7 +1152,7 @@ class LinuxDistribution(object):
with open(os.devnull, "wb") as devnull:
try:
cmd = ("lsb_release", "-a")
- stdout = _check_output(cmd, stderr=devnull)
+ stdout = subprocess.check_output(cmd, stderr=devnull)
# Command not found or lsb_release returned error
except (OSError, subprocess.CalledProcessError):
return {}
@@ -1233,18 +1187,31 @@ class LinuxDistribution(object):
@cached_property
def _uname_info(self):
# type: () -> Dict[str, str]
+ if not self.include_uname:
+ return {}
with open(os.devnull, "wb") as devnull:
try:
cmd = ("uname", "-rs")
- stdout = _check_output(cmd, stderr=devnull)
+ stdout = subprocess.check_output(cmd, stderr=devnull)
except OSError:
return {}
content = self._to_str(stdout).splitlines()
return self._parse_uname_content(content)
+ @cached_property
+ def _debian_version(self):
+ # type: () -> str
+ try:
+ with open(os.path.join(self.etc_dir, "debian_version")) as fp:
+ return fp.readline().rstrip()
+ except (OSError, IOError):
+ return ""
+
@staticmethod
def _parse_uname_content(lines):
# type: (Sequence[str]) -> Dict[str, str]
+ if not lines:
+ return {}
props = {}
match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
if match:
@@ -1270,7 +1237,7 @@ class LinuxDistribution(object):
if isinstance(text, bytes):
return text.decode(encoding)
else:
- if isinstance(text, unicode): # noqa pylint: disable=undefined-variable
+ if isinstance(text, unicode): # noqa
return text.encode(encoding)
return text
@@ -1325,6 +1292,7 @@ class LinuxDistribution(object):
"manjaro-release",
"oracle-release",
"redhat-release",
+ "rocky-release",
"sl-release",
"slackware-version",
]
@@ -1403,13 +1371,36 @@ def main():
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
- dist = _distro
+ parser = argparse.ArgumentParser(description="OS distro info tool")
+ parser.add_argument(
+ "--json", "-j", help="Output in machine readable format", action="store_true"
+ )
+
+ parser.add_argument(
+ "--root-dir",
+ "-r",
+ type=str,
+ dest="root_dir",
+ help="Path to the root filesystem directory (defaults to /)",
+ )
+
+ args = parser.parse_args()
- logger.info("Name: %s", dist.name(pretty=True))
- distribution_version = dist.version(pretty=True)
- logger.info("Version: %s", distribution_version)
- distribution_codename = dist.codename()
- logger.info("Codename: %s", distribution_codename)
+ if args.root_dir:
+ dist = LinuxDistribution(
+ include_lsb=False, include_uname=False, root_dir=args.root_dir
+ )
+ else:
+ dist = _distro
+
+ if args.json:
+ logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
+ else:
+ logger.info("Name: %s", dist.name(pretty=True))
+ distribution_version = dist.version(pretty=True)
+ logger.info("Version: %s", distribution_version)
+ distribution_codename = dist.codename()
+ logger.info("Codename: %s", distribution_codename)
if __name__ == "__main__":
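
For reference, the new --root-dir option maps onto the root_dir constructor argument, so the same inspection can be done programmatically (the path below is illustrative):

    # Programmatic equivalent of `--root-dir`: read distro info from an
    # alternate root such as a mounted image (path is illustrative).
    from ansible.module_utils.distro._distro import LinuxDistribution

    dist = LinuxDistribution(include_lsb=False, include_uname=False,
                             root_dir='/mnt/image-root')
    print(dist.name(pretty=True), dist.version(pretty=True), dist.codename())
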
diff --git a/lib/ansible/module_utils/facts/hardware/linux.py b/lib/ansible/module_utils/facts/hardware/linux.py
index c0ca33d5..4e6305cb 100644
--- a/lib/ansible/module_utils/facts/hardware/linux.py
+++ b/lib/ansible/module_utils/facts/hardware/linux.py
@@ -28,7 +28,7 @@ import time
from multiprocessing import cpu_count
from multiprocessing.pool import ThreadPool
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.common.locale import get_best_parsable_locale
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.common.text.formatters import bytes_to_human
@@ -170,6 +170,8 @@ class LinuxHardware(Hardware):
coreid = 0
sockets = {}
cores = {}
+ zp = 0
+ zmt = 0
xen = False
xen_paravirt = False
@@ -209,7 +211,6 @@ class LinuxHardware(Hardware):
# model name is for Intel arch, Processor (mind the uppercase P)
# works for some ARM devices, like the Sheevaplug.
- # 'ncpus active' is SPARC attribute
if key in ['model name', 'Processor', 'vendor_id', 'cpu', 'Vendor', 'processor']:
if 'processor' not in cpu_facts:
cpu_facts['processor'] = []
@@ -233,8 +234,12 @@ class LinuxHardware(Hardware):
sockets[physid] = int(val)
elif key == 'siblings':
cores[coreid] = int(val)
+ # S390x classic cpuinfo
elif key == '# processors':
- cpu_facts['processor_cores'] = int(val)
+ zp = int(val)
+ elif key == 'max thread id':
+ zmt = int(val) + 1
+ # SPARC
elif key == 'ncpus active':
i = int(val)
@@ -250,13 +255,20 @@ class LinuxHardware(Hardware):
if collected_facts.get('ansible_architecture', '').startswith(('armv', 'aarch', 'ppc')):
i = processor_occurrence
- # FIXME
- if collected_facts.get('ansible_architecture') != 's390x':
+ if collected_facts.get('ansible_architecture') == 's390x':
+ # getting sockets would require 5.7+ with CONFIG_SCHED_TOPOLOGY
+ cpu_facts['processor_count'] = 1
+ cpu_facts['processor_cores'] = zp // zmt
+ cpu_facts['processor_threads_per_core'] = zmt
+ cpu_facts['processor_vcpus'] = zp
+ cpu_facts['processor_nproc'] = zp
+ else:
if xen_paravirt:
cpu_facts['processor_count'] = i
cpu_facts['processor_cores'] = i
cpu_facts['processor_threads_per_core'] = 1
cpu_facts['processor_vcpus'] = i
+ cpu_facts['processor_nproc'] = i
else:
if sockets:
cpu_facts['processor_count'] = len(sockets)
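
On s390x the classic cpuinfo keys are now combined: "# processors" gives the vCPU count and "max thread id" + 1 the threads per core. Worked example with hypothetical values:

    # Worked example of the s390x derivation above (sample cpuinfo values).
    zp = 8        # "# processors : 8"
    zmt = 1 + 1   # "max thread id : 1" -> 2 threads per core (SMT-2)

    cpu_facts = {
        'processor_count': 1,            # sockets are not derivable from cpuinfo here
        'processor_cores': zp // zmt,    # 4
        'processor_threads_per_core': zmt,
        'processor_vcpus': zp,
        'processor_nproc': zp,
    }
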
@@ -278,25 +290,25 @@ class LinuxHardware(Hardware):
cpu_facts['processor_vcpus'] = (cpu_facts['processor_threads_per_core'] *
cpu_facts['processor_count'] * cpu_facts['processor_cores'])
- # if the number of processors available to the module's
- # thread cannot be determined, the processor count
- # reported by /proc will be the default:
cpu_facts['processor_nproc'] = processor_occurrence
- try:
- cpu_facts['processor_nproc'] = len(
- os.sched_getaffinity(0)
- )
- except AttributeError:
- # In Python < 3.3, os.sched_getaffinity() is not available
- try:
- cmd = get_bin_path('nproc')
- except ValueError:
- pass
- else:
- rc, out, _err = self.module.run_command(cmd)
- if rc == 0:
- cpu_facts['processor_nproc'] = int(out)
+ # if the number of processors available to the module's
+ # thread cannot be determined, the processor count
+ # reported by /proc will be the default (as previously defined)
+ try:
+ cpu_facts['processor_nproc'] = len(
+ os.sched_getaffinity(0)
+ )
+ except AttributeError:
+ # In Python < 3.3, os.sched_getaffinity() is not available
+ try:
+ cmd = get_bin_path('nproc')
+ except ValueError:
+ pass
+ else:
+ rc, out, _err = self.module.run_command(cmd)
+ if rc == 0:
+ cpu_facts['processor_nproc'] = int(out)
return cpu_facts
@@ -538,7 +550,7 @@ class LinuxHardware(Hardware):
# start threads to query each mount
results = {}
pool = ThreadPool(processes=min(len(mtab_entries), cpu_count()))
- maxtime = globals().get('GATHER_TIMEOUT') or timeout.DEFAULT_GATHER_TIMEOUT
+ maxtime = timeout.GATHER_TIMEOUT or timeout.DEFAULT_GATHER_TIMEOUT
for fields in mtab_entries:
# Transform octal escape sequences
fields = [self._replace_octal_escapes(field) for field in fields]
diff --git a/lib/ansible/module_utils/facts/hardware/openbsd.py b/lib/ansible/module_utils/facts/hardware/openbsd.py
index 3bcf8ce4..cd5e21e9 100644
--- a/lib/ansible/module_utils/facts/hardware/openbsd.py
+++ b/lib/ansible/module_utils/facts/hardware/openbsd.py
@@ -19,7 +19,7 @@ __metaclass__ = type
import re
import time
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts import timeout
@@ -94,7 +94,7 @@ class OpenBSDHardware(Hardware):
rc, out, err = self.module.run_command("/usr/bin/vmstat")
if rc == 0:
memory_facts['memfree_mb'] = int(out.splitlines()[-1].split()[4]) // 1024
- memory_facts['memtotal_mb'] = int(self.sysctl['hw.usermem']) // 1024 // 1024
+ memory_facts['memtotal_mb'] = int(self.sysctl['hw.physmem']) // 1024 // 1024
# Get swapctl info. swapctl output looks like:
# total: 69268 1K-blocks allocated, 0 used, 69268 available
diff --git a/lib/ansible/module_utils/facts/hardware/sunos.py b/lib/ansible/module_utils/facts/hardware/sunos.py
index 0a77db07..54850fe3 100644
--- a/lib/ansible/module_utils/facts/hardware/sunos.py
+++ b/lib/ansible/module_utils/facts/hardware/sunos.py
@@ -175,9 +175,7 @@ class SunOSHardware(Hardware):
prtdiag_path = self.module.get_bin_path("prtdiag", opt_dirs=[platform_sbin])
rc, out, err = self.module.run_command(prtdiag_path)
- """
- rc returns 1
- """
+ # rc returns 1
if out:
system_conf = out.split('\n')[0]
diff --git a/lib/ansible/module_utils/facts/network/fc_wwn.py b/lib/ansible/module_utils/facts/network/fc_wwn.py
index 86182f89..dc2e3d6c 100644
--- a/lib/ansible/module_utils/facts/network/fc_wwn.py
+++ b/lib/ansible/module_utils/facts/network/fc_wwn.py
@@ -46,18 +46,14 @@ class FcWwnInitiatorFactCollector(BaseFactCollector):
for line in get_file_lines(fcfile):
fc_facts['fibre_channel_wwn'].append(line.rstrip()[2:])
elif sys.platform.startswith('sunos'):
- """
- on solaris 10 or solaris 11 should use `fcinfo hba-port`
- TBD (not implemented): on solaris 9 use `prtconf -pv`
- """
+ # on solaris 10 or solaris 11 should use `fcinfo hba-port`
+ # TBD (not implemented): on solaris 9 use `prtconf -pv`
cmd = module.get_bin_path('fcinfo')
if cmd:
cmd = cmd + " hba-port"
rc, fcinfo_out, err = module.run_command(cmd)
- """
# fcinfo hba-port | grep "Port WWN"
- HBA Port WWN: 10000090fa1658de
- """
+ # HBA Port WWN: 10000090fa1658de
if rc == 0 and fcinfo_out:
for line in fcinfo_out.splitlines():
if 'Port WWN' in line:
diff --git a/lib/ansible/module_utils/facts/network/iscsi.py b/lib/ansible/module_utils/facts/network/iscsi.py
index 2bb93834..ef5ac398 100644
--- a/lib/ansible/module_utils/facts/network/iscsi.py
+++ b/lib/ansible/module_utils/facts/network/iscsi.py
@@ -19,7 +19,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
-import subprocess
import ansible.module_utils.compat.typing as t
diff --git a/lib/ansible/module_utils/facts/network/linux.py b/lib/ansible/module_utils/facts/network/linux.py
index b7ae9765..a189f387 100644
--- a/lib/ansible/module_utils/facts/network/linux.py
+++ b/lib/ansible/module_utils/facts/network/linux.py
@@ -59,8 +59,46 @@ class LinuxNetwork(Network):
network_facts['default_ipv6'] = default_ipv6
network_facts['all_ipv4_addresses'] = ips['all_ipv4_addresses']
network_facts['all_ipv6_addresses'] = ips['all_ipv6_addresses']
+ network_facts['locally_reachable_ips'] = self.get_locally_reachable_ips(ip_path)
return network_facts
+ # List all `scope host` routes/addresses.
+ # They belong to routes, but it means the whole prefix is reachable
+ # locally, regardless of specific IP addresses.
+ # E.g.: 192.168.0.0/24, any IP address is reachable from this range
+ # if assigned as scope host.
+ def get_locally_reachable_ips(self, ip_path):
+ locally_reachable_ips = dict(
+ ipv4=[],
+ ipv6=[],
+ )
+
+ def parse_locally_reachable_ips(output):
+ for line in output.splitlines():
+ if not line:
+ continue
+ words = line.split()
+ if words[0] != 'local':
+ continue
+ address = words[1]
+ if ":" in address:
+ if address not in locally_reachable_ips['ipv6']:
+ locally_reachable_ips['ipv6'].append(address)
+ else:
+ if address not in locally_reachable_ips['ipv4']:
+ locally_reachable_ips['ipv4'].append(address)
+
+ args = [ip_path, '-4', 'route', 'show', 'table', 'local']
+ rc, routes, dummy = self.module.run_command(args)
+ if rc == 0:
+ parse_locally_reachable_ips(routes)
+ args = [ip_path, '-6', 'route', 'show', 'table', 'local']
+ rc, routes, dummy = self.module.run_command(args)
+ if rc == 0:
+ parse_locally_reachable_ips(routes)
+
+ return locally_reachable_ips
+
def get_default_interfaces(self, ip_path, collected_facts=None):
collected_facts = collected_facts or {}
# Use the commands:
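
The new locally_reachable_ips fact is built from `ip -4/-6 route show table local`, keeping only entries whose first word is "local". Illustrative input and result (sample output lines, not captured from a real host):

    # Hypothetical `ip -4 route show table local` lines and what is kept.
    sample = (
        'local 127.0.0.1 dev lo proto kernel scope host src 127.0.0.1\n'
        'local 192.168.1.10 dev eth0 proto kernel scope host src 192.168.1.10\n'
        'broadcast 192.168.1.255 dev eth0 proto kernel scope link src 192.168.1.10\n'
    )
    kept = [line.split()[1] for line in sample.splitlines() if line.split()[0] == 'local']
    print(kept)  # ['127.0.0.1', '192.168.1.10'] -> locally_reachable_ips['ipv4']
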
@@ -236,7 +274,7 @@ class LinuxNetwork(Network):
elif words[0] == 'inet6':
if 'peer' == words[2]:
address = words[1]
- _, prefix = words[3].split('/')
+ dummy, prefix = words[3].split('/')
scope = words[5]
else:
address, prefix = words[1].split('/')
diff --git a/lib/ansible/module_utils/facts/network/nvme.py b/lib/ansible/module_utils/facts/network/nvme.py
index febd0abb..1d759566 100644
--- a/lib/ansible/module_utils/facts/network/nvme.py
+++ b/lib/ansible/module_utils/facts/network/nvme.py
@@ -19,7 +19,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
-import subprocess
import ansible.module_utils.compat.typing as t
diff --git a/lib/ansible/module_utils/facts/other/facter.py b/lib/ansible/module_utils/facts/other/facter.py
index 3f83999d..06306525 100644
--- a/lib/ansible/module_utils/facts/other/facter.py
+++ b/lib/ansible/module_utils/facts/other/facter.py
@@ -1,17 +1,5 @@
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# Copyright (c) 2023 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -21,7 +9,6 @@ import json
import ansible.module_utils.compat.typing as t
from ansible.module_utils.facts.namespace import PrefixFactNamespace
-
from ansible.module_utils.facts.collector import BaseFactCollector
@@ -49,6 +36,12 @@ class FacterFactCollector(BaseFactCollector):
# if facter is installed, and we can use --json because
# ruby-json is ALSO installed, include facter data in the JSON
rc, out, err = module.run_command(facter_path + " --puppet --json")
+
+ # for some versions of facter, --puppet returns an error if puppet is not present,
+ # try again w/o it, other errors should still appear and be sent back
+ if rc != 0:
+ rc, out, err = module.run_command(facter_path + " --json")
+
return rc, out, err
def get_facter_output(self, module):
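Editor's note on the facter.py hunk above: the change adds a fallback so that if `facter --puppet --json` fails (for example because puppet is not installed), the command is retried without `--puppet`. A rough standalone equivalent using subprocess; the facter path and helper name are illustrative, not the collector's API:

import json
import subprocess

def run_facter(facter_path='/usr/bin/facter'):
    """Try facter with --puppet first, then fall back to plain --json."""
    for args in ([facter_path, '--puppet', '--json'], [facter_path, '--json']):
        proc = subprocess.run(args, capture_output=True, text=True)
        if proc.returncode == 0:
            return json.loads(proc.stdout)
    return {}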
diff --git a/lib/ansible/module_utils/facts/sysctl.py b/lib/ansible/module_utils/facts/sysctl.py
index 2c55d776..d7bcc8a1 100644
--- a/lib/ansible/module_utils/facts/sysctl.py
+++ b/lib/ansible/module_utils/facts/sysctl.py
@@ -18,7 +18,7 @@ __metaclass__ = type
import re
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
def get_sysctl(module, prefixes):
diff --git a/lib/ansible/module_utils/facts/system/caps.py b/lib/ansible/module_utils/facts/system/caps.py
index 6a1e26d5..3692f207 100644
--- a/lib/ansible/module_utils/facts/system/caps.py
+++ b/lib/ansible/module_utils/facts/system/caps.py
@@ -20,7 +20,6 @@ __metaclass__ = type
import ansible.module_utils.compat.typing as t
-from ansible.module_utils._text import to_text
from ansible.module_utils.facts.collector import BaseFactCollector
diff --git a/lib/ansible/module_utils/facts/system/date_time.py b/lib/ansible/module_utils/facts/system/date_time.py
index 481bef42..93af6dcf 100644
--- a/lib/ansible/module_utils/facts/system/date_time.py
+++ b/lib/ansible/module_utils/facts/system/date_time.py
@@ -22,8 +22,8 @@ import datetime
import time
import ansible.module_utils.compat.typing as t
-
from ansible.module_utils.facts.collector import BaseFactCollector
+from ansible.module_utils.compat.datetime import utcfromtimestamp
class DateTimeFactCollector(BaseFactCollector):
@@ -37,7 +37,7 @@ class DateTimeFactCollector(BaseFactCollector):
# Store the timestamp once, then get local and UTC versions from that
epoch_ts = time.time()
now = datetime.datetime.fromtimestamp(epoch_ts)
- utcnow = datetime.datetime.utcfromtimestamp(epoch_ts)
+ utcnow = utcfromtimestamp(epoch_ts).replace(tzinfo=None)
date_time_facts['year'] = now.strftime('%Y')
date_time_facts['month'] = now.strftime('%m')
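Editor's note on the date_time.py hunk above: the deprecated datetime.datetime.utcfromtimestamp() is replaced with the compat helper, and tzinfo is stripped to keep the previous naive-datetime behaviour. The helper presumably wraps a timezone-aware conversion; a sketch of the equivalent stdlib expression (an assumption about the compat module, not its actual code):

import datetime

def utcfromtimestamp_naive(epoch_ts):
    """Aware UTC conversion, then drop tzinfo to mimic the old naive result."""
    aware = datetime.datetime.fromtimestamp(epoch_ts, tz=datetime.timezone.utc)
    return aware.replace(tzinfo=None)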
diff --git a/lib/ansible/module_utils/facts/system/distribution.py b/lib/ansible/module_utils/facts/system/distribution.py
index dcb6e5a4..6feece2a 100644
--- a/lib/ansible/module_utils/facts/system/distribution.py
+++ b/lib/ansible/module_utils/facts/system/distribution.py
@@ -524,7 +524,7 @@ class Distribution(object):
'Solaris': ['Solaris', 'Nexenta', 'OmniOS', 'OpenIndiana', 'SmartOS'],
'Slackware': ['Slackware'],
'Altlinux': ['Altlinux'],
- 'SGML': ['SGML'],
+ 'SMGL': ['SMGL'],
'Gentoo': ['Gentoo', 'Funtoo'],
'Alpine': ['Alpine'],
'AIX': ['AIX'],
diff --git a/lib/ansible/module_utils/facts/system/local.py b/lib/ansible/module_utils/facts/system/local.py
index bacdbe0d..66813509 100644
--- a/lib/ansible/module_utils/facts/system/local.py
+++ b/lib/ansible/module_utils/facts/system/local.py
@@ -23,9 +23,10 @@ import stat
import ansible.module_utils.compat.typing as t
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.facts.utils import get_file_content
from ansible.module_utils.facts.collector import BaseFactCollector
+from ansible.module_utils.six import PY3
from ansible.module_utils.six.moves import configparser, StringIO
@@ -91,7 +92,10 @@ class LocalFactCollector(BaseFactCollector):
# if that fails read it with ConfigParser
cp = configparser.ConfigParser()
try:
- cp.readfp(StringIO(out))
+ if PY3:
+ cp.read_file(StringIO(out))
+ else:
+ cp.readfp(StringIO(out))
except configparser.Error:
fact = "error loading facts as JSON or ini - please check content: %s" % fn
module.warn(fact)
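Editor's note on the local.py hunk above: the code branches on PY3 because ConfigParser.readfp() is deprecated on Python 3 (and removed in 3.12) in favour of read_file(). A minimal illustration of the same branch outside Ansible; the sample ini text is illustrative:

import sys
from io import StringIO

try:
    import configparser  # Python 3
except ImportError:
    import ConfigParser as configparser  # Python 2

out = u"[general]\nkey=value\n"
cp = configparser.ConfigParser()
if sys.version_info[0] >= 3:
    cp.read_file(StringIO(out))
else:
    cp.readfp(StringIO(out))
print(cp.get('general', 'key'))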
diff --git a/lib/ansible/module_utils/facts/system/pkg_mgr.py b/lib/ansible/module_utils/facts/system/pkg_mgr.py
index 704ea201..14ad0a66 100644
--- a/lib/ansible/module_utils/facts/system/pkg_mgr.py
+++ b/lib/ansible/module_utils/facts/system/pkg_mgr.py
@@ -17,7 +17,13 @@ from ansible.module_utils.facts.collector import BaseFactCollector
# ansible module, use that as the value for the 'name' key.
PKG_MGRS = [{'path': '/usr/bin/rpm-ostree', 'name': 'atomic_container'},
{'path': '/usr/bin/yum', 'name': 'yum'},
- {'path': '/usr/bin/dnf', 'name': 'dnf'},
+
+            # NOTE: the `path` key for dnf/dnf5 is effectively discarded when a Red Hat OS family is matched;
+            # special logic in `PkgMgrFactCollector._check_rh_versions()` infers the default `pkg_mgr` instead.
+            # The entries are kept here so a list of package modules can be built by iterating over the `name` keys.
+ {'path': '/usr/bin/dnf-3', 'name': 'dnf'},
+ {'path': '/usr/bin/dnf5', 'name': 'dnf5'},
+
{'path': '/usr/bin/apt-get', 'name': 'apt'},
{'path': '/usr/bin/zypper', 'name': 'zypper'},
{'path': '/usr/sbin/urpmi', 'name': 'urpmi'},
@@ -50,10 +56,7 @@ class OpenBSDPkgMgrFactCollector(BaseFactCollector):
_platform = 'OpenBSD'
def collect(self, module=None, collected_facts=None):
- facts_dict = {}
-
- facts_dict['pkg_mgr'] = 'openbsd_pkg'
- return facts_dict
+ return {'pkg_mgr': 'openbsd_pkg'}
# the fact ends up being 'pkg_mgr' so stick with that naming/spelling
@@ -63,49 +66,42 @@ class PkgMgrFactCollector(BaseFactCollector):
_platform = 'Generic'
required_facts = set(['distribution'])
- def _pkg_mgr_exists(self, pkg_mgr_name):
- for cur_pkg_mgr in [pkg_mgr for pkg_mgr in PKG_MGRS if pkg_mgr['name'] == pkg_mgr_name]:
- if os.path.exists(cur_pkg_mgr['path']):
- return pkg_mgr_name
+ def __init__(self, *args, **kwargs):
+ super(PkgMgrFactCollector, self).__init__(*args, **kwargs)
+ self._default_unknown_pkg_mgr = 'unknown'
def _check_rh_versions(self, pkg_mgr_name, collected_facts):
if os.path.exists('/run/ostree-booted'):
return "atomic_container"
- if collected_facts['ansible_distribution'] == 'Fedora':
- try:
- if int(collected_facts['ansible_distribution_major_version']) < 23:
- if self._pkg_mgr_exists('yum'):
- pkg_mgr_name = 'yum'
-
- else:
- if self._pkg_mgr_exists('dnf'):
- pkg_mgr_name = 'dnf'
- except ValueError:
- # If there's some new magical Fedora version in the future,
- # just default to dnf
- pkg_mgr_name = 'dnf'
- elif collected_facts['ansible_distribution'] == 'Amazon':
- try:
- if int(collected_facts['ansible_distribution_major_version']) < 2022:
- if self._pkg_mgr_exists('yum'):
- pkg_mgr_name = 'yum'
- else:
- if self._pkg_mgr_exists('dnf'):
- pkg_mgr_name = 'dnf'
- except ValueError:
- pkg_mgr_name = 'dnf'
- else:
- # If it's not one of the above and it's Red Hat family of distros, assume
- # RHEL or a clone. For versions of RHEL < 8 that Ansible supports, the
- # vendor supported official package manager is 'yum' and in RHEL 8+
- # (as far as we know at the time of this writing) it is 'dnf'.
- # If anyone wants to force a non-official package manager then they
- # can define a provider to either the package or yum action plugins.
- if int(collected_facts['ansible_distribution_major_version']) < 8:
- pkg_mgr_name = 'yum'
- else:
- pkg_mgr_name = 'dnf'
+ # Reset whatever was matched from PKG_MGRS, infer the default pkg_mgr below
+ pkg_mgr_name = self._default_unknown_pkg_mgr
+        # Since /usr/bin/dnf and /usr/bin/microdnf can point to different versions of dnf in different distributions,
+        # the only way to infer the default package manager is to look at the binary they point to.
+        # /usr/bin/microdnf is likely used only in the Fedora minimal container, so /usr/bin/dnf takes precedence.
+ for bin_path in ('/usr/bin/dnf', '/usr/bin/microdnf'):
+ if os.path.exists(bin_path):
+ pkg_mgr_name = 'dnf5' if os.path.realpath(bin_path) == '/usr/bin/dnf5' else 'dnf'
+ break
+
+ try:
+ major_version = collected_facts['ansible_distribution_major_version']
+ if collected_facts['ansible_distribution'] == 'Kylin Linux Advanced Server':
+ major_version = major_version.lstrip('V')
+ distro_major_ver = int(major_version)
+ except ValueError:
+            # a non-integer magical future version
+ return self._default_unknown_pkg_mgr
+
+ if (
+ (collected_facts['ansible_distribution'] == 'Fedora' and distro_major_ver < 23)
+ or (collected_facts['ansible_distribution'] == 'Kylin Linux Advanced Server' and distro_major_ver < 10)
+ or (collected_facts['ansible_distribution'] == 'Amazon' and distro_major_ver < 2022)
+ or (collected_facts['ansible_distribution'] == 'TencentOS' and distro_major_ver < 3)
+ or distro_major_ver < 8 # assume RHEL or a clone
+ ) and any(pm for pm in PKG_MGRS if pm['name'] == 'yum' and os.path.exists(pm['path'])):
+ pkg_mgr_name = 'yum'
+
return pkg_mgr_name
def _check_apt_flavor(self, pkg_mgr_name):
@@ -136,10 +132,9 @@ class PkgMgrFactCollector(BaseFactCollector):
return PKG_MGRS
def collect(self, module=None, collected_facts=None):
- facts_dict = {}
collected_facts = collected_facts or {}
- pkg_mgr_name = 'unknown'
+ pkg_mgr_name = self._default_unknown_pkg_mgr
for pkg in self.pkg_mgrs(collected_facts):
if os.path.exists(pkg['path']):
pkg_mgr_name = pkg['name']
@@ -161,5 +156,4 @@ class PkgMgrFactCollector(BaseFactCollector):
if pkg_mgr_name == 'apt':
pkg_mgr_name = self._check_apt_flavor(pkg_mgr_name)
- facts_dict['pkg_mgr'] = pkg_mgr_name
- return facts_dict
+ return {'pkg_mgr': pkg_mgr_name}
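Editor's note on the pkg_mgr.py rewrite above: the core of the new _check_rh_versions() is inferring the default dnf flavour from what /usr/bin/dnf (or /usr/bin/microdnf) actually resolves to, since the symlink target differs between distributions. A standalone sketch of that probe; the paths mirror the hunk, while the function name and default value are illustrative:

import os

def guess_dnf_flavour(default='unknown'):
    """Return 'dnf5' if the dnf entry point resolves to dnf5, else 'dnf'."""
    for bin_path in ('/usr/bin/dnf', '/usr/bin/microdnf'):
        if os.path.exists(bin_path):
            if os.path.realpath(bin_path) == '/usr/bin/dnf5':
                return 'dnf5'
            return 'dnf'
    return default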
diff --git a/lib/ansible/module_utils/facts/system/service_mgr.py b/lib/ansible/module_utils/facts/system/service_mgr.py
index d862ac90..701def99 100644
--- a/lib/ansible/module_utils/facts/system/service_mgr.py
+++ b/lib/ansible/module_utils/facts/system/service_mgr.py
@@ -24,7 +24,7 @@ import re
import ansible.module_utils.compat.typing as t
-from ansible.module_utils._text import to_native
+from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.facts.utils import get_file_content
from ansible.module_utils.facts.collector import BaseFactCollector
@@ -47,7 +47,7 @@ class ServiceMgrFactCollector(BaseFactCollector):
# tools must be installed
if module.get_bin_path('systemctl'):
- # this should show if systemd is the boot init system, if checking init faild to mark as systemd
+ # this should show if systemd is the boot init system, if checking init failed to mark as systemd
# these mirror systemd's own sd_boot test http://www.freedesktop.org/software/systemd/man/sd_booted.html
for canary in ["/run/systemd/system/", "/dev/.run/systemd/", "/dev/.systemd/"]:
if os.path.exists(canary):
@@ -131,6 +131,8 @@ class ServiceMgrFactCollector(BaseFactCollector):
service_mgr_name = 'smf'
elif collected_facts.get('ansible_distribution') == 'OpenWrt':
service_mgr_name = 'openwrt_init'
+ elif collected_facts.get('ansible_distribution') == 'SMGL':
+ service_mgr_name = 'simpleinit_msb'
elif collected_facts.get('ansible_system') == 'Linux':
# FIXME: mv is_systemd_managed
if self.is_systemd_managed(module=module):
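Editor's note on the service_mgr.py hunks above: the systemd detection shown in context mirrors systemd's own sd_booted() test, treating the init system as systemd when one of a few canary paths exists. A compact standalone version, with the canary list copied from the hunk and the function name illustrative:

import os

SYSTEMD_CANARIES = ("/run/systemd/system/", "/dev/.run/systemd/", "/dev/.systemd/")

def is_systemd_booted():
    """Rough equivalent of systemd's sd_booted() check."""
    return any(os.path.exists(canary) for canary in SYSTEMD_CANARIES)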
diff --git a/lib/ansible/module_utils/json_utils.py b/lib/ansible/module_utils/json_utils.py
index 0e95aa67..1ec971cc 100644
--- a/lib/ansible/module_utils/json_utils.py
+++ b/lib/ansible/module_utils/json_utils.py
@@ -27,7 +27,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
+import json # pylint: disable=unused-import
# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
diff --git a/lib/ansible/module_utils/parsing/convert_bool.py b/lib/ansible/module_utils/parsing/convert_bool.py
index 7eea875f..fb331d89 100644
--- a/lib/ansible/module_utils/parsing/convert_bool.py
+++ b/lib/ansible/module_utils/parsing/convert_bool.py
@@ -5,7 +5,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.six import binary_type, text_type
-from ansible.module_utils._text import to_text
+from ansible.module_utils.common.text.converters import to_text
BOOLEANS_TRUE = frozenset(('y', 'yes', 'on', '1', 'true', 't', 1, 1.0, True))
diff --git a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1 b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1
index 6dc2917f..f40c3384 100644
--- a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1
+++ b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.AddType.psm1
@@ -65,6 +65,10 @@ Function Add-CSharpType {
* Create automatic type accelerators to simplify long namespace names (Ansible 2.9+)
//TypeAccelerator -Name <AcceleratorName> -TypeName <Name of compiled type>
+
+ * Compile with unsafe support (Ansible 2.15+)
+
+ //AllowUnsafe
#>
param(
[Parameter(Mandatory = $true)][AllowEmptyCollection()][String[]]$References,
@@ -117,6 +121,7 @@ Function Add-CSharpType {
$assembly_pattern = [Regex]"//\s*AssemblyReference\s+-(?<Parameter>(Name)|(Type))\s+(?<Name>[\w.]*)(\s+-CLR\s+(?<CLR>Core|Framework))?"
$no_warn_pattern = [Regex]"//\s*NoWarn\s+-Name\s+(?<Name>[\w\d]*)(\s+-CLR\s+(?<CLR>Core|Framework))?"
$type_pattern = [Regex]"//\s*TypeAccelerator\s+-Name\s+(?<Name>[\w.]*)\s+-TypeName\s+(?<TypeName>[\w.]*)"
+ $allow_unsafe_pattern = [Regex]"//\s*AllowUnsafe?"
# PSCore vs PSDesktop use different methods to compile the code,
# PSCore uses Roslyn and can compile the code purely in memory
@@ -142,11 +147,13 @@ Function Add-CSharpType {
$ignore_warnings = New-Object -TypeName 'System.Collections.Generic.Dictionary`2[[String], [Microsoft.CodeAnalysis.ReportDiagnostic]]'
$parse_options = ([Microsoft.CodeAnalysis.CSharp.CSharpParseOptions]::Default).WithPreprocessorSymbols($defined_symbols)
$syntax_trees = [System.Collections.Generic.List`1[Microsoft.CodeAnalysis.SyntaxTree]]@()
+ $allow_unsafe = $false
foreach ($reference in $References) {
# scan through code and add any assemblies that match
# //AssemblyReference -Name ... [-CLR Core]
# //NoWarn -Name ... [-CLR Core]
# //TypeAccelerator -Name ... -TypeName ...
+ # //AllowUnsafe
$assembly_matches = $assembly_pattern.Matches($reference)
foreach ($match in $assembly_matches) {
$clr = $match.Groups["CLR"].Value
@@ -180,6 +187,10 @@ Function Add-CSharpType {
foreach ($match in $type_matches) {
$type_accelerators.Add(@{Name = $match.Groups["Name"].Value; TypeName = $match.Groups["TypeName"].Value })
}
+
+ if ($allow_unsafe_pattern.Matches($reference).Count) {
+ $allow_unsafe = $true
+ }
}
# Release seems to contain the correct line numbers compared to
@@ -194,6 +205,10 @@ Function Add-CSharpType {
$compiler_options = $compiler_options.WithSpecificDiagnosticOptions($ignore_warnings)
}
+ if ($allow_unsafe) {
+ $compiler_options = $compiler_options.WithAllowUnsafe($true)
+ }
+
# create compilation object
$compilation = [Microsoft.CodeAnalysis.CSharp.CSharpCompilation]::Create(
[System.Guid]::NewGuid().ToString(),
@@ -297,6 +312,7 @@ Function Add-CSharpType {
# //AssemblyReference -Name ... [-CLR Framework]
# //NoWarn -Name ... [-CLR Framework]
# //TypeAccelerator -Name ... -TypeName ...
+ # //AllowUnsafe
$assembly_matches = $assembly_pattern.Matches($reference)
foreach ($match in $assembly_matches) {
$clr = $match.Groups["CLR"].Value
@@ -330,6 +346,10 @@ Function Add-CSharpType {
foreach ($match in $type_matches) {
$type_accelerators.Add(@{Name = $match.Groups["Name"].Value; TypeName = $match.Groups["TypeName"].Value })
}
+
+ if ($allow_unsafe_pattern.Matches($reference).Count) {
+ $compiler_options.Add("/unsafe") > $null
+ }
}
if ($ignore_warnings.Count -gt 0) {
$compiler_options.Add("/nowarn:" + ([String]::Join(",", $ignore_warnings.ToArray()))) > $null
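Editor's note on the AddType.psm1 hunks above: Add-CSharpType learns a new `//AllowUnsafe` source directive, detected with a regex just like the existing //AssemblyReference and //NoWarn markers, and mapped to WithAllowUnsafe($true) on Roslyn or a /unsafe compiler switch on .NET Framework. A rough Python illustration of the directive-scanning idea, not the module's API:

import re

ALLOW_UNSAFE_PATTERN = re.compile(r"//\s*AllowUnsafe")

def wants_unsafe(source):
    """True if any //AllowUnsafe directive appears in the C# source text."""
    return bool(ALLOW_UNSAFE_PATTERN.search(source))

example = "using System;\n//AllowUnsafe\npublic class Foo { }"
assert wants_unsafe(example)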
diff --git a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Backup.psm1 b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Backup.psm1
index ca4f5ba5..c2b80b01 100644
--- a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Backup.psm1
+++ b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Backup.psm1
@@ -18,7 +18,7 @@ Function Backup-File {
Process {
$backup_path = $null
if (Test-Path -LiteralPath $path -PathType Leaf) {
- $backup_path = "$path.$pid." + [DateTime]::Now.ToString("yyyyMMdd-HHmmss") + ".bak";
+ $backup_path = "$path.$pid." + [DateTime]::Now.ToString("yyyyMMdd-HHmmss") + ".bak"
Try {
Copy-Item -LiteralPath $path -Destination $backup_path
}
diff --git a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1 b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1
index f0cb440f..4aea98b2 100644
--- a/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1
+++ b/lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1
@@ -354,16 +354,16 @@ Function Get-FileChecksum($path, $algorithm = 'sha1') {
$hash = $raw_hash.Hash.ToLower()
}
Else {
- $fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite);
- $hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
- $fp.Dispose();
+ $fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite)
+ $hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower()
+ $fp.Dispose()
}
}
ElseIf (Test-Path -LiteralPath $path -PathType Container) {
- $hash = "3";
+ $hash = "3"
}
Else {
- $hash = "1";
+ $hash = "1"
}
return $hash
}
diff --git a/lib/ansible/module_utils/pycompat24.py b/lib/ansible/module_utils/pycompat24.py
index c398427c..d57f968a 100644
--- a/lib/ansible/module_utils/pycompat24.py
+++ b/lib/ansible/module_utils/pycompat24.py
@@ -47,45 +47,7 @@ def get_exception():
return sys.exc_info()[1]
-try:
- # Python 2.6+
- from ast import literal_eval
-except ImportError:
- # a replacement for literal_eval that works with python 2.4. from:
- # https://mail.python.org/pipermail/python-list/2009-September/551880.html
- # which is essentially a cut/paste from an earlier (2.6) version of python's
- # ast.py
- from compiler import ast, parse
- from ansible.module_utils.six import binary_type, integer_types, string_types, text_type
+from ast import literal_eval
- def literal_eval(node_or_string): # type: ignore[misc]
- """
- Safely evaluate an expression node or a string containing a Python
- expression. The string or node provided may only consist of the following
- Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
- and None.
- """
- _safe_names = {'None': None, 'True': True, 'False': False}
- if isinstance(node_or_string, string_types):
- node_or_string = parse(node_or_string, mode='eval')
- if isinstance(node_or_string, ast.Expression):
- node_or_string = node_or_string.node
-
- def _convert(node):
- if isinstance(node, ast.Const) and isinstance(node.value, (text_type, binary_type, float, complex) + integer_types):
- return node.value
- elif isinstance(node, ast.Tuple):
- return tuple(map(_convert, node.nodes))
- elif isinstance(node, ast.List):
- return list(map(_convert, node.nodes))
- elif isinstance(node, ast.Dict):
- return dict((_convert(k), _convert(v)) for k, v in node.items())
- elif isinstance(node, ast.Name):
- if node.name in _safe_names:
- return _safe_names[node.name]
- elif isinstance(node, ast.UnarySub):
- return -_convert(node.expr) # pylint: disable=invalid-unary-operand-type
- raise ValueError('malformed string')
- return _convert(node_or_string)
__all__ = ('get_exception', 'literal_eval')
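Editor's note on the pycompat24.py hunk above: with the Python 2.4 fallback dropped, the module now simply re-exports ast.literal_eval, which safely evaluates literal expressions (strings, numbers, tuples, lists, dicts, booleans, None) without executing arbitrary code. A quick usage example:

from ast import literal_eval

print(literal_eval("{'a': [1, 2.5, True], 'b': ('x', None)}"))  # parsed safely
try:
    literal_eval("__import__('os').system('id')")  # rejected: not a literal
except ValueError as exc:
    print("rejected:", exc)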
diff --git a/lib/ansible/module_utils/service.py b/lib/ansible/module_utils/service.py
index d2cecd49..e79f40ed 100644
--- a/lib/ansible/module_utils/service.py
+++ b/lib/ansible/module_utils/service.py
@@ -39,7 +39,7 @@ import subprocess
import traceback
from ansible.module_utils.six import PY2, b
-from ansible.module_utils._text import to_bytes, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_text
def sysv_is_enabled(name, runlevel=None):
@@ -207,17 +207,20 @@ def daemonize(module, cmd):
p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
fds = [p.stdout, p.stderr]
- # loop reading output till its done
+ # loop reading output till it is done
output = {p.stdout: b(""), p.stderr: b("")}
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
- if (rfd + wfd + efd) or p.poll():
+ if (rfd + wfd + efd) or p.poll() is None:
for out in list(fds):
if out in rfd:
data = os.read(out.fileno(), chunk)
- if not data:
+ if data:
+ output[out] += to_bytes(data, errors=errors)
+ else:
fds.remove(out)
- output[out] += b(data)
+ else:
+ break
# even after fds close, we might want to wait for pid to die
p.wait()
@@ -246,7 +249,7 @@ def daemonize(module, cmd):
data = os.read(pipe[0], chunk)
if not data:
break
- return_data += b(data)
+ return_data += to_bytes(data, errors=errors)
# Note: no need to specify encoding on py3 as this module sends the
# pickle to itself (thus same python interpreter so we aren't mixing
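Editor's note on the service.py hunk above: the daemonize() fix changes the loop condition to `p.poll() is None` (process still running), only accumulates data that was actually read, and removes a stream from the select set once it hits EOF. A standalone sketch of that read-until-EOF pattern; the command and helper name are illustrative:

import os
import select
import subprocess

def read_until_done(cmd, chunk=4096):
    """Drain stdout/stderr of a child process using select until both close."""
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    fds = [p.stdout, p.stderr]
    output = {p.stdout: b'', p.stderr: b''}
    while fds:
        rfd, dummy, dummy2 = select.select(fds, [], fds, 1)
        for out in list(fds):
            if out in rfd:
                data = os.read(out.fileno(), chunk)
                if data:
                    output[out] += data
                else:
                    fds.remove(out)  # EOF on this stream
    p.wait()
    return p.returncode, output[p.stdout], output[p.stderr]

rc, out, err = read_until_done(['echo', 'hello'])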
diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py
index 542f89b0..42ef55b0 100644
--- a/lib/ansible/module_utils/urls.py
+++ b/lib/ansible/module_utils/urls.py
@@ -53,7 +53,7 @@ import socket
import sys
import tempfile
import traceback
-import types
+import types # pylint: disable=unused-import
from contextlib import contextmanager
@@ -88,7 +88,7 @@ from ansible.module_utils.common.collections import Mapping, is_sequence
from ansible.module_utils.six import PY2, PY3, string_types
from ansible.module_utils.six.moves import cStringIO
from ansible.module_utils.basic import get_distribution, missing_required_lib
-from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
try:
# python3
@@ -99,7 +99,7 @@ except ImportError:
import urllib2 as urllib_request # type: ignore[no-redef]
from urllib2 import AbstractHTTPHandler, BaseHandler # type: ignore[no-redef]
-urllib_request.HTTPRedirectHandler.http_error_308 = urllib_request.HTTPRedirectHandler.http_error_307 # type: ignore[attr-defined]
+urllib_request.HTTPRedirectHandler.http_error_308 = urllib_request.HTTPRedirectHandler.http_error_307 # type: ignore[attr-defined,assignment]
try:
from ansible.module_utils.six.moves.urllib.parse import urlparse, urlunparse, unquote
@@ -115,7 +115,7 @@ except Exception:
try:
# SNI Handling needs python2.7.9's SSLContext
- from ssl import create_default_context, SSLContext
+ from ssl import create_default_context, SSLContext # pylint: disable=unused-import
HAS_SSLCONTEXT = True
except ImportError:
HAS_SSLCONTEXT = False
@@ -129,13 +129,13 @@ if not HAS_SSLCONTEXT:
try:
from urllib3.contrib.pyopenssl import PyOpenSSLContext
except Exception:
- from requests.packages.urllib3.contrib.pyopenssl import PyOpenSSLContext
+ from requests.packages.urllib3.contrib.pyopenssl import PyOpenSSLContext # type: ignore[no-redef]
HAS_URLLIB3_PYOPENSSLCONTEXT = True
except Exception:
# urllib3<1.15,>=1.6
try:
try:
- from urllib3.contrib.pyopenssl import ssl_wrap_socket
+ from urllib3.contrib.pyopenssl import ssl_wrap_socket # type: ignore[attr-defined]
except Exception:
from requests.packages.urllib3.contrib.pyopenssl import ssl_wrap_socket
HAS_URLLIB3_SSL_WRAP_SOCKET = True
@@ -160,7 +160,7 @@ if not HAS_SSLCONTEXT and HAS_SSL:
libssl = ctypes.CDLL(libssl_name)
for method in ('TLSv1_1_method', 'TLSv1_2_method'):
try:
- libssl[method]
+ libssl[method] # pylint: disable=pointless-statement
# Found something - we'll let openssl autonegotiate and hope
# the server has disabled sslv2 and 3. best we can do.
PROTOCOL = ssl.PROTOCOL_SSLv23
@@ -181,7 +181,7 @@ try:
from ssl import match_hostname, CertificateError
except ImportError:
try:
- from backports.ssl_match_hostname import match_hostname, CertificateError # type: ignore[misc]
+ from backports.ssl_match_hostname import match_hostname, CertificateError # type: ignore[assignment]
except ImportError:
HAS_MATCH_HOSTNAME = False
@@ -196,7 +196,7 @@ except ImportError:
# Old import for GSSAPI authentication, this is not used in urls.py but kept for backwards compatibility.
try:
- import urllib_gssapi
+ import urllib_gssapi # pylint: disable=unused-import
HAS_GSSAPI = True
except ImportError:
HAS_GSSAPI = False
@@ -288,7 +288,7 @@ if not HAS_MATCH_HOSTNAME:
# The following block of code is under the terms and conditions of the
# Python Software Foundation License
- """The match_hostname() function from Python 3.4, essential when using SSL."""
+ # The match_hostname() function from Python 3.4, essential when using SSL.
try:
# Divergence: Python-3.7+'s _ssl has this exception type but older Pythons do not
@@ -535,15 +535,18 @@ HTTPSClientAuthHandler = None
UnixHTTPSConnection = None
if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler'):
class CustomHTTPSConnection(httplib.HTTPSConnection): # type: ignore[no-redef]
- def __init__(self, *args, **kwargs):
+ def __init__(self, client_cert=None, client_key=None, *args, **kwargs):
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
self.context = None
if HAS_SSLCONTEXT:
self.context = self._context
elif HAS_URLLIB3_PYOPENSSLCONTEXT:
self.context = self._context = PyOpenSSLContext(PROTOCOL)
- if self.context and self.cert_file:
- self.context.load_cert_chain(self.cert_file, self.key_file)
+
+ self._client_cert = client_cert
+ self._client_key = client_key
+ if self.context and self._client_cert:
+ self.context.load_cert_chain(self._client_cert, self._client_key)
def connect(self):
"Connect to a host on a given (SSL) port."
@@ -564,10 +567,10 @@ if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler
if HAS_SSLCONTEXT or HAS_URLLIB3_PYOPENSSLCONTEXT:
self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
- self.sock = ssl_wrap_socket(sock, keyfile=self.key_file, cert_reqs=ssl.CERT_NONE, # pylint: disable=used-before-assignment
- certfile=self.cert_file, ssl_version=PROTOCOL, server_hostname=server_hostname)
+ self.sock = ssl_wrap_socket(sock, keyfile=self._client_key, cert_reqs=ssl.CERT_NONE, # pylint: disable=used-before-assignment
+ certfile=self._client_cert, ssl_version=PROTOCOL, server_hostname=server_hostname)
else:
- self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)
+ self.sock = ssl.wrap_socket(sock, keyfile=self._client_key, certfile=self._client_cert, ssl_version=PROTOCOL)
class CustomHTTPSHandler(urllib_request.HTTPSHandler): # type: ignore[no-redef]
@@ -602,10 +605,6 @@ if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler
return self.do_open(self._build_https_connection, req)
def _build_https_connection(self, host, **kwargs):
- kwargs.update({
- 'cert_file': self.client_cert,
- 'key_file': self.client_key,
- })
try:
kwargs['context'] = self._context
except AttributeError:
@@ -613,7 +612,7 @@ if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler
if self._unix_socket:
return UnixHTTPSConnection(self._unix_socket)(host, **kwargs)
if not HAS_SSLCONTEXT:
- return CustomHTTPSConnection(host, **kwargs)
+ return CustomHTTPSConnection(host, client_cert=self.client_cert, client_key=self.client_key, **kwargs)
return httplib.HTTPSConnection(host, **kwargs)
@contextmanager
@@ -772,6 +771,18 @@ def extract_pem_certs(b_data):
yield match.group(0)
+def _py2_get_param(headers, param, header='content-type'):
+ m = httplib.HTTPMessage(io.StringIO())
+ cd = headers.getheader(header) or ''
+ try:
+ m.plisttext = cd[cd.index(';'):]
+ m.parseplist()
+ except ValueError:
+ return None
+
+ return m.getparam(param)
+
+
def get_response_filename(response):
url = response.geturl()
path = urlparse(url)[2]
@@ -779,7 +790,12 @@ def get_response_filename(response):
if filename:
filename = unquote(filename)
- return response.headers.get_param('filename', header='content-disposition') or filename
+ if PY2:
+ get_param = functools.partial(_py2_get_param, response.headers)
+ else:
+ get_param = response.headers.get_param
+
+ return get_param('filename', header='content-disposition') or filename
def parse_content_type(response):
@@ -866,7 +882,7 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True, ca_path=N
to determine how redirects should be handled in urllib2.
"""
- def redirect_request(self, req, fp, code, msg, hdrs, newurl):
+ def redirect_request(self, req, fp, code, msg, headers, newurl):
if not any((HAS_SSLCONTEXT, HAS_URLLIB3_PYOPENSSLCONTEXT)):
handler = maybe_add_ssl_handler(newurl, validate_certs, ca_path=ca_path, ciphers=ciphers)
if handler:
@@ -874,23 +890,23 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True, ca_path=N
# Preserve urllib2 compatibility
if follow_redirects == 'urllib2':
- return urllib_request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, hdrs, newurl)
+ return urllib_request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
# Handle disabled redirects
elif follow_redirects in ['no', 'none', False]:
- raise urllib_error.HTTPError(newurl, code, msg, hdrs, fp)
+ raise urllib_error.HTTPError(newurl, code, msg, headers, fp)
method = req.get_method()
# Handle non-redirect HTTP status or invalid follow_redirects
if follow_redirects in ['all', 'yes', True]:
if code < 300 or code >= 400:
- raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
+ raise urllib_error.HTTPError(req.get_full_url(), code, msg, headers, fp)
elif follow_redirects == 'safe':
if code < 300 or code >= 400 or method not in ('GET', 'HEAD'):
- raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
+ raise urllib_error.HTTPError(req.get_full_url(), code, msg, headers, fp)
else:
- raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
+ raise urllib_error.HTTPError(req.get_full_url(), code, msg, headers, fp)
try:
# Python 2-3.3
@@ -907,12 +923,12 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True, ca_path=N
# Support redirect with payload and original headers
if code in (307, 308):
# Preserve payload and headers
- headers = req.headers
+ req_headers = req.headers
else:
# Do not preserve payload and filter headers
data = None
- headers = dict((k, v) for k, v in req.headers.items()
- if k.lower() not in ("content-length", "content-type", "transfer-encoding"))
+ req_headers = dict((k, v) for k, v in req.headers.items()
+ if k.lower() not in ("content-length", "content-type", "transfer-encoding"))
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if code == 303 and method != 'HEAD':
@@ -929,7 +945,7 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True, ca_path=N
return RequestWithMethod(newurl,
method=method,
- headers=headers,
+ headers=req_headers,
data=data,
origin_req_host=origin_req_host,
unverifiable=True,
@@ -979,7 +995,7 @@ def atexit_remove_file(filename):
pass
-def make_context(cafile=None, cadata=None, ciphers=None, validate_certs=True):
+def make_context(cafile=None, cadata=None, ciphers=None, validate_certs=True, client_cert=None, client_key=None):
if ciphers is None:
ciphers = []
@@ -1006,6 +1022,9 @@ def make_context(cafile=None, cadata=None, ciphers=None, validate_certs=True):
if ciphers:
context.set_ciphers(':'.join(map(to_native, ciphers)))
+ if client_cert:
+ context.load_cert_chain(client_cert, keyfile=client_key)
+
return context
@@ -1309,7 +1328,7 @@ class Request:
follow_redirects='urllib2', client_cert=None, client_key=None, cookies=None, unix_socket=None,
ca_path=None, unredirected_headers=None, decompress=True, ciphers=None, use_netrc=True):
"""This class works somewhat similarly to the ``Session`` class of from requests
- by defining a cookiejar that an be used across requests as well as cascaded defaults that
+ by defining a cookiejar that can be used across requests as well as cascaded defaults that
can apply to repeated requests
For documentation of params, see ``Request.open``
@@ -1461,7 +1480,7 @@ class Request:
url = urlunparse(parsed_list)
if use_gssapi:
- if HTTPGSSAPIAuthHandler:
+ if HTTPGSSAPIAuthHandler: # type: ignore[truthy-function]
handlers.append(HTTPGSSAPIAuthHandler(username, password))
else:
imp_err_msg = missing_required_lib('gssapi', reason='for use_gssapi=True',
@@ -1495,7 +1514,7 @@ class Request:
login = None
if login:
- username, _, password = login
+ username, dummy, password = login
if username and password:
headers["Authorization"] = basic_auth_header(username, password)
@@ -1514,6 +1533,8 @@ class Request:
cadata=cadata,
ciphers=ciphers,
validate_certs=validate_certs,
+ client_cert=client_cert,
+ client_key=client_key,
)
handlers.append(HTTPSClientAuthHandler(client_cert=client_cert,
client_key=client_key,
@@ -1865,12 +1886,8 @@ def fetch_url(module, url, data=None, headers=None, method=None,
if not HAS_URLPARSE:
module.fail_json(msg='urlparse is not installed')
- if not HAS_GZIP and decompress is True:
- decompress = False
- module.deprecate(
- '%s. "decompress" has been automatically disabled to prevent a failure' % GzipDecodedReader.missing_gzip_error(),
- version='2.16'
- )
+ if not HAS_GZIP:
+ module.fail_json(msg=GzipDecodedReader.missing_gzip_error())
# ensure we use proper tempdir
old_tempdir = tempfile.tempdir
@@ -1884,7 +1901,7 @@ def fetch_url(module, url, data=None, headers=None, method=None,
username = module.params.get('url_username', '')
password = module.params.get('url_password', '')
- http_agent = module.params.get('http_agent', 'ansible-httpget')
+ http_agent = module.params.get('http_agent', get_user_agent())
force_basic_auth = module.params.get('force_basic_auth', '')
follow_redirects = module.params.get('follow_redirects', 'urllib2')
@@ -2068,3 +2085,8 @@ def fetch_file(module, url, data=None, headers=None, method=None,
except Exception as e:
module.fail_json(msg="Failure downloading %s, %s" % (url, to_native(e)))
return fetch_temp_file.name
+
+
+def get_user_agent():
+ """Returns a user agent used by open_url"""
+ return u"ansible-httpget"
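Editor's note on the urls.py changes above: client certificate handling moves into the SSLContext itself, with make_context() accepting client_cert/client_key and calling load_cert_chain(), while CustomHTTPSConnection carries the paths explicitly instead of relying on cert_file/key_file. A minimal sketch of the context-building idea using only the stdlib; the function name and paths are placeholders, not Ansible's API:

import ssl

def make_client_context(cafile=None, client_cert=None, client_key=None, validate_certs=True):
    """Build an SSLContext, optionally loading a client certificate chain."""
    context = ssl.create_default_context(cafile=cafile)
    if not validate_certs:
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    if client_cert:
        context.load_cert_chain(client_cert, keyfile=client_key)
    return context

# e.g. urllib.request.urlopen(url, context=make_client_context(client_cert='/path/client.pem'))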
diff --git a/lib/ansible/module_utils/yumdnf.py b/lib/ansible/module_utils/yumdnf.py
index e265a2d3..7eb9d5fc 100644
--- a/lib/ansible/module_utils/yumdnf.py
+++ b/lib/ansible/module_utils/yumdnf.py
@@ -15,10 +15,8 @@ __metaclass__ = type
import os
import time
import glob
-import tempfile
from abc import ABCMeta, abstractmethod
-from ansible.module_utils._text import to_native
from ansible.module_utils.six import with_metaclass
yumdnf_argument_spec = dict(