summary refs log tree commit diff
path: root/lib/ansible/cli
diff options
context:
space:
mode:
Diffstat (limited to 'lib/ansible/cli')
-rw-r--r--lib/ansible/cli/__init__.py32
-rwxr-xr-xlib/ansible/cli/adhoc.py2
-rw-r--r--lib/ansible/cli/arguments/option_helpers.py20
-rwxr-xr-xlib/ansible/cli/config.py42
-rwxr-xr-xlib/ansible/cli/console.py38
-rwxr-xr-xlib/ansible/cli/doc.py85
-rwxr-xr-xlib/ansible/cli/galaxy.py273
-rwxr-xr-xlib/ansible/cli/inventory.py100
-rwxr-xr-xlib/ansible/cli/playbook.py13
-rwxr-xr-xlib/ansible/cli/pull.py11
-rwxr-xr-xlib/ansible/cli/scripts/ansible_connection_cli_stub.py6
-rwxr-xr-xlib/ansible/cli/vault.py22
12 files changed, 236 insertions, 408 deletions
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 91d6a969..15ab5fe1 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -13,9 +13,9 @@ import sys
# Used for determining if the system is running a new enough python version
# and should only restrict on our documented minimum versions
-if sys.version_info < (3, 10):
+if sys.version_info < (3, 9):
raise SystemExit(
- 'ERROR: Ansible requires Python 3.10 or newer on the controller. '
+ 'ERROR: Ansible requires Python 3.9 or newer on the controller. '
'Current version: %s' % ''.join(sys.version.splitlines())
)
@@ -97,12 +97,11 @@ from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.manager import InventoryManager
from ansible.module_utils.six import string_types
-from ansible.module_utils.common.text.converters import to_bytes, to_text
-from ansible.module_utils.common.collections import is_sequence
+from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.common.file import is_executable
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret
-from ansible.plugins.loader import add_all_plugin_dirs, init_plugin_loader
+from ansible.plugins.loader import add_all_plugin_dirs
from ansible.release import __version__
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
@@ -120,7 +119,7 @@ except ImportError:
class CLI(ABC):
''' code behind bin/ansible* programs '''
- PAGER = C.config.get_config_value('PAGER')
+ PAGER = 'less'
# -F (quit-if-one-screen) -R (allow raw ansi control chars)
# -S (chop long lines) -X (disable termcap init and de-init)
@@ -155,13 +154,6 @@ class CLI(ABC):
"""
self.parse()
- # Initialize plugin loader after parse, so that the init code can utilize parsed arguments
- cli_collections_path = context.CLIARGS.get('collections_path') or []
- if not is_sequence(cli_collections_path):
- # In some contexts ``collections_path`` is singular
- cli_collections_path = [cli_collections_path]
- init_plugin_loader(cli_collections_path)
-
display.vv(to_text(opt_help.version(self.parser.prog)))
if C.CONFIG_FILE:
@@ -502,11 +494,11 @@ class CLI(ABC):
# this is a much simpler form of what is in pydoc.py
if not sys.stdout.isatty():
display.display(text, screen_only=True)
- elif CLI.PAGER:
+ elif 'PAGER' in os.environ:
if sys.platform == 'win32':
display.display(text, screen_only=True)
else:
- CLI.pager_pipe(text)
+ CLI.pager_pipe(text, os.environ['PAGER'])
else:
p = subprocess.Popen('less --version', shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
@@ -516,12 +508,12 @@ class CLI(ABC):
display.display(text, screen_only=True)
@staticmethod
- def pager_pipe(text):
+ def pager_pipe(text, cmd):
''' pipe text through a pager '''
- if 'less' in CLI.PAGER:
+ if 'LESS' not in os.environ:
os.environ['LESS'] = CLI.LESS_OPTS
try:
- cmd = subprocess.Popen(CLI.PAGER, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
+ cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
cmd.communicate(input=to_bytes(text))
except IOError:
pass
@@ -530,10 +522,6 @@ class CLI(ABC):
@staticmethod
def _play_prereqs():
- # TODO: evaluate moving all of the code that touches ``AnsibleCollectionConfig``
- # into ``init_plugin_loader`` so that we can specifically remove
- # ``AnsibleCollectionConfig.playbook_paths`` to make it immutable after instantiation
-
options = context.CLIARGS
# all needs loader
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index a54dacb7..e90b44ce 100755
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -14,7 +14,7 @@ from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.executor.task_queue_manager import TaskQueueManager
-from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv
from ansible.parsing.utils.yaml import from_yaml
from ansible.playbook import Playbook
diff --git a/lib/ansible/cli/arguments/option_helpers.py b/lib/ansible/cli/arguments/option_helpers.py
index 3baaf255..a3efb1e2 100644
--- a/lib/ansible/cli/arguments/option_helpers.py
+++ b/lib/ansible/cli/arguments/option_helpers.py
@@ -16,7 +16,7 @@ from jinja2 import __version__ as j2_version
import ansible
from ansible import constants as C
-from ansible.module_utils.common.text.converters import to_native
+from ansible.module_utils._text import to_native
from ansible.module_utils.common.yaml import HAS_LIBYAML, yaml_load
from ansible.release import __version__
from ansible.utils.path import unfrackpath
@@ -31,16 +31,6 @@ class SortingHelpFormatter(argparse.HelpFormatter):
super(SortingHelpFormatter, self).add_arguments(actions)
-class ArgumentParser(argparse.ArgumentParser):
- def add_argument(self, *args, **kwargs):
- action = kwargs.get('action')
- help = kwargs.get('help')
- if help and action in {'append', 'append_const', 'count', 'extend', PrependListAction}:
- help = f'{help.rstrip(".")}. This argument may be specified multiple times.'
- kwargs['help'] = help
- return super().add_argument(*args, **kwargs)
-
-
class AnsibleVersion(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
ansible_version = to_native(version(getattr(parser, 'prog')))
@@ -202,7 +192,7 @@ def create_base_parser(prog, usage="", desc=None, epilog=None):
Create an options parser for all ansible scripts
"""
# base opts
- parser = ArgumentParser(
+ parser = argparse.ArgumentParser(
prog=prog,
formatter_class=SortingHelpFormatter,
epilog=epilog,
@@ -260,8 +250,8 @@ def add_connect_options(parser):
help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
connect_group.add_argument('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
- connect_group.add_argument('-T', '--timeout', default=None, type=int, dest='timeout',
- help="override the connection timeout in seconds (default depends on connection)")
+ connect_group.add_argument('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type=int, dest='timeout',
+ help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
# ssh only
connect_group.add_argument('--ssh-common-args', default=None, dest='ssh_common_args',
@@ -393,7 +383,7 @@ def add_vault_options(parser):
parser.add_argument('--vault-id', default=[], dest='vault_ids', action='append', type=str,
help='the vault identity to use')
base_group = parser.add_mutually_exclusive_group()
- base_group.add_argument('-J', '--ask-vault-password', '--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
+ base_group.add_argument('--ask-vault-password', '--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
help='ask for vault password')
base_group.add_argument('--vault-password-file', '--vault-pass-file', default=[], dest='vault_password_files',
help="vault password file", type=unfrack_path(follow=False), action='append')
diff --git a/lib/ansible/cli/config.py b/lib/ansible/cli/config.py
index f394ef7c..c8d99ea0 100755
--- a/lib/ansible/cli/config.py
+++ b/lib/ansible/cli/config.py
@@ -23,7 +23,7 @@ from ansible import constants as C
from ansible.cli.arguments import option_helpers as opt_help
from ansible.config.manager import ConfigManager, Setting
from ansible.errors import AnsibleError, AnsibleOptionsError
-from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
+from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.module_utils.common.json import json_dump
from ansible.module_utils.six import string_types
from ansible.parsing.quoting import is_quoted
@@ -67,7 +67,7 @@ class ConfigCLI(CLI):
desc="View ansible configuration.",
)
- common = opt_help.ArgumentParser(add_help=False)
+ common = opt_help.argparse.ArgumentParser(add_help=False)
opt_help.add_verbosity_options(common)
common.add_argument('-c', '--config', dest='config_file',
help="path to configuration file, defaults to first file found in precedence.")
@@ -187,7 +187,7 @@ class ConfigCLI(CLI):
# pylint: disable=unreachable
try:
- editor = shlex.split(C.config.get_config_value('EDITOR'))
+ editor = shlex.split(os.environ.get('EDITOR', 'vi'))
editor.append(self.config_file)
subprocess.call(editor)
except Exception as e:
@@ -314,7 +314,7 @@ class ConfigCLI(CLI):
return data
- def _get_settings_ini(self, settings, seen):
+ def _get_settings_ini(self, settings):
sections = {}
for o in sorted(settings.keys()):
@@ -327,7 +327,7 @@ class ConfigCLI(CLI):
if not opt.get('description'):
# its a plugin
- new_sections = self._get_settings_ini(opt, seen)
+ new_sections = self._get_settings_ini(opt)
for s in new_sections:
if s in sections:
sections[s].extend(new_sections[s])
@@ -343,45 +343,37 @@ class ConfigCLI(CLI):
if 'ini' in opt and opt['ini']:
entry = opt['ini'][-1]
- if entry['section'] not in seen:
- seen[entry['section']] = []
if entry['section'] not in sections:
sections[entry['section']] = []
- # avoid dupes
- if entry['key'] not in seen[entry['section']]:
- seen[entry['section']].append(entry['key'])
-
- default = opt.get('default', '')
- if opt.get('type', '') == 'list' and not isinstance(default, string_types):
- # python lists are not valid ini ones
- default = ', '.join(default)
- elif default is None:
- default = ''
-
- if context.CLIARGS['commented']:
- entry['key'] = ';%s' % entry['key']
+ default = opt.get('default', '')
+ if opt.get('type', '') == 'list' and not isinstance(default, string_types):
+ # python lists are not valid ini ones
+ default = ', '.join(default)
+ elif default is None:
+ default = ''
- key = desc + '\n%s=%s' % (entry['key'], default)
+ if context.CLIARGS['commented']:
+ entry['key'] = ';%s' % entry['key']
- sections[entry['section']].append(key)
+ key = desc + '\n%s=%s' % (entry['key'], default)
+ sections[entry['section']].append(key)
return sections
def execute_init(self):
"""Create initial configuration"""
- seen = {}
data = []
config_entries = self._list_entries_from_args()
plugin_types = config_entries.pop('PLUGINS', None)
if context.CLIARGS['format'] == 'ini':
- sections = self._get_settings_ini(config_entries, seen)
+ sections = self._get_settings_ini(config_entries)
if plugin_types:
for ptype in plugin_types:
- plugin_sections = self._get_settings_ini(plugin_types[ptype], seen)
+ plugin_sections = self._get_settings_ini(plugin_types[ptype])
for s in plugin_sections:
if s in sections:
sections[s].extend(plugin_sections[s])
diff --git a/lib/ansible/cli/console.py b/lib/ansible/cli/console.py
index 2325bf05..3125cc47 100755
--- a/lib/ansible/cli/console.py
+++ b/lib/ansible/cli/console.py
@@ -22,7 +22,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.executor.task_queue_manager import TaskQueueManager
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
@@ -39,30 +39,26 @@ class ConsoleCLI(CLI, cmd.Cmd):
'''
A REPL that allows for running ad-hoc tasks against a chosen inventory
from a nice shell with built-in tab completion (based on dominis'
- ``ansible-shell``).
+ ansible-shell).
It supports several commands, and you can modify its configuration at
runtime:
- - ``cd [pattern]``: change host/group
- (you can use host patterns eg.: ``app*.dc*:!app01*``)
- - ``list``: list available hosts in the current path
- - ``list groups``: list groups included in the current path
- - ``become``: toggle the become flag
- - ``!``: forces shell module instead of the ansible module
- (``!yum update -y``)
- - ``verbosity [num]``: set the verbosity level
- - ``forks [num]``: set the number of forks
- - ``become_user [user]``: set the become_user
- - ``remote_user [user]``: set the remote_user
- - ``become_method [method]``: set the privilege escalation method
- - ``check [bool]``: toggle check mode
- - ``diff [bool]``: toggle diff mode
- - ``timeout [integer]``: set the timeout of tasks in seconds
- (0 to disable)
- - ``help [command/module]``: display documentation for
- the command or module
- - ``exit``: exit ``ansible-console``
+ - `cd [pattern]`: change host/group (you can use host patterns eg.: app*.dc*:!app01*)
+ - `list`: list available hosts in the current path
+ - `list groups`: list groups included in the current path
+ - `become`: toggle the become flag
+ - `!`: forces shell module instead of the ansible module (!yum update -y)
+ - `verbosity [num]`: set the verbosity level
+ - `forks [num]`: set the number of forks
+ - `become_user [user]`: set the become_user
+ - `remote_user [user]`: set the remote_user
+ - `become_method [method]`: set the privilege escalation method
+ - `check [bool]`: toggle check mode
+ - `diff [bool]`: toggle diff mode
+ - `timeout [integer]`: set the timeout of tasks in seconds (0 to disable)
+ - `help [command/module]`: display documentation for the command or module
+ - `exit`: exit ansible-console
'''
name = 'ansible-console'
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index 4a5c8928..9f560bcb 100755
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -26,7 +26,7 @@ from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.collections.list import list_collection_dirs
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError, AnsiblePluginNotFound
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common.collections import is_sequence
from ansible.module_utils.common.json import json_dump
from ansible.module_utils.common.yaml import yaml_dump
@@ -163,8 +163,8 @@ class RoleMixin(object):
might be fully qualified with the collection name (e.g., community.general.roleA)
or not (e.g., roleA).
- :param collection_filter: A list of strings containing the FQCN of a collection which will
- be used to limit results. This filter will take precedence over the name_filters.
+ :param collection_filter: A string containing the FQCN of a collection which will be
+ used to limit results. This filter will take precedence over the name_filters.
:returns: A set of tuples consisting of: role name, collection name, collection path
"""
@@ -362,23 +362,12 @@ class DocCLI(CLI, RoleMixin):
_ITALIC = re.compile(r"\bI\(([^)]+)\)")
_BOLD = re.compile(r"\bB\(([^)]+)\)")
_MODULE = re.compile(r"\bM\(([^)]+)\)")
- _PLUGIN = re.compile(r"\bP\(([^#)]+)#([a-z]+)\)")
_LINK = re.compile(r"\bL\(([^)]+), *([^)]+)\)")
_URL = re.compile(r"\bU\(([^)]+)\)")
_REF = re.compile(r"\bR\(([^)]+), *([^)]+)\)")
_CONST = re.compile(r"\bC\(([^)]+)\)")
- _SEM_PARAMETER_STRING = r"\(((?:[^\\)]+|\\.)+)\)"
- _SEM_OPTION_NAME = re.compile(r"\bO" + _SEM_PARAMETER_STRING)
- _SEM_OPTION_VALUE = re.compile(r"\bV" + _SEM_PARAMETER_STRING)
- _SEM_ENV_VARIABLE = re.compile(r"\bE" + _SEM_PARAMETER_STRING)
- _SEM_RET_VALUE = re.compile(r"\bRV" + _SEM_PARAMETER_STRING)
_RULER = re.compile(r"\bHORIZONTALLINE\b")
- # helper for unescaping
- _UNESCAPE = re.compile(r"\\(.)")
- _FQCN_TYPE_PREFIX_RE = re.compile(r'^([^.]+\.[^.]+\.[^#]+)#([a-z]+):(.*)$')
- _IGNORE_MARKER = 'ignore:'
-
# rst specific
_RST_NOTE = re.compile(r".. note::")
_RST_SEEALSO = re.compile(r".. seealso::")
@@ -390,40 +379,6 @@ class DocCLI(CLI, RoleMixin):
super(DocCLI, self).__init__(args)
self.plugin_list = set()
- @staticmethod
- def _tty_ify_sem_simle(matcher):
- text = DocCLI._UNESCAPE.sub(r'\1', matcher.group(1))
- return f"`{text}'"
-
- @staticmethod
- def _tty_ify_sem_complex(matcher):
- text = DocCLI._UNESCAPE.sub(r'\1', matcher.group(1))
- value = None
- if '=' in text:
- text, value = text.split('=', 1)
- m = DocCLI._FQCN_TYPE_PREFIX_RE.match(text)
- if m:
- plugin_fqcn = m.group(1)
- plugin_type = m.group(2)
- text = m.group(3)
- elif text.startswith(DocCLI._IGNORE_MARKER):
- text = text[len(DocCLI._IGNORE_MARKER):]
- plugin_fqcn = plugin_type = ''
- else:
- plugin_fqcn = plugin_type = ''
- entrypoint = None
- if ':' in text:
- entrypoint, text = text.split(':', 1)
- if value is not None:
- text = f"{text}={value}"
- if plugin_fqcn and plugin_type:
- plugin_suffix = '' if plugin_type in ('role', 'module', 'playbook') else ' plugin'
- plugin = f"{plugin_type}{plugin_suffix} {plugin_fqcn}"
- if plugin_type == 'role' and entrypoint is not None:
- plugin = f"{plugin}, {entrypoint} entrypoint"
- return f"`{text}' (of {plugin})"
- return f"`{text}'"
-
@classmethod
def find_plugins(cls, path, internal, plugin_type, coll_filter=None):
display.deprecated("find_plugins method as it is incomplete/incorrect. use ansible.plugins.list functions instead.", version='2.17')
@@ -438,13 +393,8 @@ class DocCLI(CLI, RoleMixin):
t = cls._MODULE.sub("[" + r"\1" + "]", t) # M(word) => [word]
t = cls._URL.sub(r"\1", t) # U(word) => word
t = cls._LINK.sub(r"\1 <\2>", t) # L(word, url) => word <url>
- t = cls._PLUGIN.sub("[" + r"\1" + "]", t) # P(word#type) => [word]
t = cls._REF.sub(r"\1", t) # R(word, sphinx-ref) => word
t = cls._CONST.sub(r"`\1'", t) # C(word) => `word'
- t = cls._SEM_OPTION_NAME.sub(cls._tty_ify_sem_complex, t) # O(expr)
- t = cls._SEM_OPTION_VALUE.sub(cls._tty_ify_sem_simle, t) # V(expr)
- t = cls._SEM_ENV_VARIABLE.sub(cls._tty_ify_sem_simle, t) # E(expr)
- t = cls._SEM_RET_VALUE.sub(cls._tty_ify_sem_complex, t) # RV(expr)
t = cls._RULER.sub("\n{0}\n".format("-" * 13), t) # HORIZONTALLINE => -------
# remove rst
@@ -545,9 +495,7 @@ class DocCLI(CLI, RoleMixin):
desc = desc[:linelimit] + '...'
pbreak = plugin.split('.')
- # TODO: add mark for deprecated collection plugins
- if pbreak[-1].startswith('_') and plugin.startswith(('ansible.builtin.', 'ansible.legacy.')):
- # Handle deprecated ansible.builtin plugins
+ if pbreak[-1].startswith('_'): # Handle deprecated # TODO: add mark for deprecated collection plugins
pbreak[-1] = pbreak[-1][1:]
plugin = '.'.join(pbreak)
deprecated.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
@@ -678,11 +626,12 @@ class DocCLI(CLI, RoleMixin):
def _get_collection_filter(self):
coll_filter = None
- if len(context.CLIARGS['args']) >= 1:
- coll_filter = context.CLIARGS['args']
- for coll_name in coll_filter:
- if not AnsibleCollectionRef.is_valid_collection_name(coll_name):
- raise AnsibleError('Invalid collection name (must be of the form namespace.collection): {0}'.format(coll_name))
+ if len(context.CLIARGS['args']) == 1:
+ coll_filter = context.CLIARGS['args'][0]
+ if not AnsibleCollectionRef.is_valid_collection_name(coll_filter):
+ raise AnsibleError('Invalid collection name (must be of the form namespace.collection): {0}'.format(coll_filter))
+ elif len(context.CLIARGS['args']) > 1:
+ raise AnsibleOptionsError("Only a single collection filter is supported.")
return coll_filter
@@ -1302,20 +1251,6 @@ class DocCLI(CLI, RoleMixin):
relative_url = 'collections/%s_module.html' % item['module'].replace('.', '/', 2)
text.append(textwrap.fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)),
limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent))
- elif 'plugin' in item and 'plugin_type' in item:
- plugin_suffix = ' plugin' if item['plugin_type'] not in ('module', 'role') else ''
- text.append(textwrap.fill(DocCLI.tty_ify('%s%s %s' % (item['plugin_type'].title(), plugin_suffix, item['plugin'])),
- limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
- description = item.get('description')
- if description is None and item['plugin'].startswith('ansible.builtin.'):
- description = 'The official documentation on the %s %s%s.' % (item['plugin'], item['plugin_type'], plugin_suffix)
- if description is not None:
- text.append(textwrap.fill(DocCLI.tty_ify(description),
- limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' '))
- if item['plugin'].startswith('ansible.builtin.'):
- relative_url = 'collections/%s_%s.html' % (item['plugin'].replace('.', '/', 2), item['plugin_type'])
- text.append(textwrap.fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)),
- limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent))
elif 'name' in item and 'link' in item and 'description' in item:
text.append(textwrap.fill(DocCLI.tty_ify(item['name']),
limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index 334e4bf4..536964e2 100755
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -10,11 +10,9 @@ __metaclass__ = type
# ansible.cli needs to be imported first, to ensure the source bin/* scripts run that code first
from ansible.cli import CLI
-import argparse
import functools
import json
import os.path
-import pathlib
import re
import shutil
import sys
@@ -53,7 +51,7 @@ from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoT
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible.module_utils.common.collections import is_iterable
from ansible.module_utils.common.yaml import yaml_dump, yaml_load
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.yaml.loader import AnsibleLoader
@@ -73,7 +71,7 @@ SERVER_DEF = [
('password', False, 'str'),
('token', False, 'str'),
('auth_url', False, 'str'),
- ('api_version', False, 'int'),
+ ('v3', False, 'bool'),
('validate_certs', False, 'bool'),
('client_id', False, 'str'),
('timeout', False, 'int'),
@@ -81,9 +79,9 @@ SERVER_DEF = [
# config definition fields
SERVER_ADDITIONAL = {
- 'api_version': {'default': None, 'choices': [2, 3]},
+ 'v3': {'default': 'False'},
'validate_certs': {'cli': [{'name': 'validate_certs'}]},
- 'timeout': {'default': C.GALAXY_SERVER_TIMEOUT, 'cli': [{'name': 'timeout'}]},
+ 'timeout': {'default': '60', 'cli': [{'name': 'timeout'}]},
'token': {'default': None},
}
@@ -101,8 +99,7 @@ def with_collection_artifacts_manager(wrapped_method):
return wrapped_method(*args, **kwargs)
# FIXME: use validate_certs context from Galaxy servers when downloading collections
- # .get used here for when this is used in a non-CLI context
- artifacts_manager_kwargs = {'validate_certs': context.CLIARGS.get('resolved_validate_certs', True)}
+ artifacts_manager_kwargs = {'validate_certs': context.CLIARGS['resolved_validate_certs']}
keyring = context.CLIARGS.get('keyring', None)
if keyring is not None:
@@ -159,8 +156,8 @@ def _get_collection_widths(collections):
fqcn_set = {to_text(c.fqcn) for c in collections}
version_set = {to_text(c.ver) for c in collections}
- fqcn_length = len(max(fqcn_set or [''], key=len))
- version_length = len(max(version_set or [''], key=len))
+ fqcn_length = len(max(fqcn_set, key=len))
+ version_length = len(max(version_set, key=len))
return fqcn_length, version_length
@@ -241,49 +238,45 @@ class GalaxyCLI(CLI):
)
# Common arguments that apply to more than 1 action
- common = opt_help.ArgumentParser(add_help=False)
+ common = opt_help.argparse.ArgumentParser(add_help=False)
common.add_argument('-s', '--server', dest='api_server', help='The Galaxy API server URL')
- common.add_argument('--api-version', type=int, choices=[2, 3], help=argparse.SUPPRESS) # Hidden argument that should only be used in our tests
common.add_argument('--token', '--api-key', dest='api_key',
help='The Ansible Galaxy API key which can be found at '
'https://galaxy.ansible.com/me/preferences.')
common.add_argument('-c', '--ignore-certs', action='store_true', dest='ignore_certs', help='Ignore SSL certificate validation errors.', default=None)
-
- # --timeout uses the default None to handle two different scenarios.
- # * --timeout > C.GALAXY_SERVER_TIMEOUT for non-configured servers
- # * --timeout > server-specific timeout > C.GALAXY_SERVER_TIMEOUT for configured servers.
- common.add_argument('--timeout', dest='timeout', type=int,
+ common.add_argument('--timeout', dest='timeout', type=int, default=60,
help="The time to wait for operations against the galaxy server, defaults to 60s.")
opt_help.add_verbosity_options(common)
- force = opt_help.ArgumentParser(add_help=False)
+ force = opt_help.argparse.ArgumentParser(add_help=False)
force.add_argument('-f', '--force', dest='force', action='store_true', default=False,
help='Force overwriting an existing role or collection')
- github = opt_help.ArgumentParser(add_help=False)
+ github = opt_help.argparse.ArgumentParser(add_help=False)
github.add_argument('github_user', help='GitHub username')
github.add_argument('github_repo', help='GitHub repository')
- offline = opt_help.ArgumentParser(add_help=False)
+ offline = opt_help.argparse.ArgumentParser(add_help=False)
offline.add_argument('--offline', dest='offline', default=False, action='store_true',
help="Don't query the galaxy API when creating roles")
default_roles_path = C.config.get_configuration_definition('DEFAULT_ROLES_PATH').get('default', '')
- roles_path = opt_help.ArgumentParser(add_help=False)
+ roles_path = opt_help.argparse.ArgumentParser(add_help=False)
roles_path.add_argument('-p', '--roles-path', dest='roles_path', type=opt_help.unfrack_path(pathsep=True),
default=C.DEFAULT_ROLES_PATH, action=opt_help.PrependListAction,
help='The path to the directory containing your roles. The default is the first '
'writable one configured via DEFAULT_ROLES_PATH: %s ' % default_roles_path)
- collections_path = opt_help.ArgumentParser(add_help=False)
+ collections_path = opt_help.argparse.ArgumentParser(add_help=False)
collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True),
+ default=AnsibleCollectionConfig.collection_paths,
action=opt_help.PrependListAction,
help="One or more directories to search for collections in addition "
"to the default COLLECTIONS_PATHS. Separate multiple paths "
"with '{0}'.".format(os.path.pathsep))
- cache_options = opt_help.ArgumentParser(add_help=False)
+ cache_options = opt_help.argparse.ArgumentParser(add_help=False)
cache_options.add_argument('--clear-response-cache', dest='clear_response_cache', action='store_true',
default=False, help='Clear the existing server response cache.')
cache_options.add_argument('--no-cache', dest='no_cache', action='store_true', default=False,
@@ -467,15 +460,12 @@ class GalaxyCLI(CLI):
valid_signature_count_help = 'The number of signatures that must successfully verify the collection. This should be a positive integer ' \
'or all to signify that all signatures must be used to verify the collection. ' \
'Prepend the value with + to fail if no valid signatures are found for the collection (e.g. +all).'
- ignore_gpg_status_help = 'A space separated list of status codes to ignore during signature verification (for example, NO_PUBKEY FAILURE). ' \
- 'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes).' \
- 'Note: specify these after positional arguments or use -- to separate them.'
+ ignore_gpg_status_help = 'A status code to ignore during signature verification (for example, NO_PUBKEY). ' \
+ 'Provide this option multiple times to ignore a list of status codes. ' \
+ 'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes).'
verify_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
verify_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
- help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
- choices=list(GPG_ERROR_MAP.keys()))
- verify_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
@@ -511,9 +501,9 @@ class GalaxyCLI(CLI):
valid_signature_count_help = 'The number of signatures that must successfully verify the collection. This should be a positive integer ' \
'or -1 to signify that all signatures must be used to verify the collection. ' \
'Prepend the value with + to fail if no valid signatures are found for the collection (e.g. +all).'
- ignore_gpg_status_help = 'A space separated list of status codes to ignore during signature verification (for example, NO_PUBKEY FAILURE). ' \
- 'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes).' \
- 'Note: specify these after positional arguments or use -- to separate them.'
+ ignore_gpg_status_help = 'A status code to ignore during signature verification (for example, NO_PUBKEY). ' \
+ 'Provide this option multiple times to ignore a list of status codes. ' \
+ 'Descriptions for the choices can be seen at L(https://github.com/gpg/gnupg/blob/master/doc/DETAILS#general-status-codes).'
if galaxy_type == 'collection':
install_parser.add_argument('-p', '--collections-path', dest='collections_path',
@@ -537,9 +527,6 @@ class GalaxyCLI(CLI):
install_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
install_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
- help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
- choices=list(GPG_ERROR_MAP.keys()))
- install_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
install_parser.add_argument('--offline', dest='offline', action='store_true', default=False,
@@ -564,9 +551,6 @@ class GalaxyCLI(CLI):
install_parser.add_argument('--required-valid-signature-count', dest='required_valid_signature_count', type=validate_signature_count,
help=valid_signature_count_help, default=C.GALAXY_REQUIRED_VALID_SIGNATURE_COUNT)
install_parser.add_argument('--ignore-signature-status-code', dest='ignore_gpg_errors', type=str, action='append',
- help=opt_help.argparse.SUPPRESS, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
- choices=list(GPG_ERROR_MAP.keys()))
- install_parser.add_argument('--ignore-signature-status-codes', dest='ignore_gpg_errors', type=str, action='extend', nargs='+',
help=ignore_gpg_status_help, default=C.GALAXY_IGNORE_INVALID_SIGNATURE_STATUS_CODES,
choices=list(GPG_ERROR_MAP.keys()))
@@ -638,7 +622,7 @@ class GalaxyCLI(CLI):
return config_def
galaxy_options = {}
- for optional_key in ['clear_response_cache', 'no_cache']:
+ for optional_key in ['clear_response_cache', 'no_cache', 'timeout']:
if optional_key in context.CLIARGS:
galaxy_options[optional_key] = context.CLIARGS[optional_key]
@@ -663,22 +647,17 @@ class GalaxyCLI(CLI):
client_id = server_options.pop('client_id')
token_val = server_options['token'] or NoTokenSentinel
username = server_options['username']
- api_version = server_options.pop('api_version')
+ v3 = server_options.pop('v3')
if server_options['validate_certs'] is None:
server_options['validate_certs'] = context.CLIARGS['resolved_validate_certs']
validate_certs = server_options['validate_certs']
- # This allows a user to explicitly force use of an API version when
- # multiple versions are supported. This was added for testing
- # against pulp_ansible and I'm not sure it has a practical purpose
- # outside of this use case. As such, this option is not documented
- # as of now
- if api_version:
- display.warning(
- f'The specified "api_version" configuration for the galaxy server "{server_key}" is '
- 'not a public configuration, and may be removed at any time without warning.'
- )
- server_options['available_api_versions'] = {'v%s' % api_version: '/v%s' % api_version}
+ if v3:
+ # This allows a user to explicitly indicate the server uses the /v3 API
+ # This was added for testing against pulp_ansible and I'm not sure it has
+ # a practical purpose outside of this use case. As such, this option is not
+ # documented as of now
+ server_options['available_api_versions'] = {'v3': '/v3'}
# default case if no auth info is provided.
server_options['token'] = None
@@ -704,17 +683,9 @@ class GalaxyCLI(CLI):
))
cmd_server = context.CLIARGS['api_server']
- if context.CLIARGS['api_version']:
- api_version = context.CLIARGS['api_version']
- display.warning(
- 'The --api-version is not a public argument, and may be removed at any time without warning.'
- )
- galaxy_options['available_api_versions'] = {'v%s' % api_version: '/v%s' % api_version}
-
cmd_token = GalaxyToken(token=context.CLIARGS['api_key'])
validate_certs = context.CLIARGS['resolved_validate_certs']
- default_server_timeout = context.CLIARGS['timeout'] if context.CLIARGS['timeout'] is not None else C.GALAXY_SERVER_TIMEOUT
if cmd_server:
            # Cmd args take precedence over the config entry but first check if the arg was a name and use that config
# entry, otherwise create a new API entry for the server specified.
@@ -726,7 +697,6 @@ class GalaxyCLI(CLI):
self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
priority=len(config_servers) + 1,
validate_certs=validate_certs,
- timeout=default_server_timeout,
**galaxy_options
))
else:
@@ -738,7 +708,6 @@ class GalaxyCLI(CLI):
self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
priority=0,
validate_certs=validate_certs,
- timeout=default_server_timeout,
**galaxy_options
))
@@ -835,7 +804,7 @@ class GalaxyCLI(CLI):
for role_req in file_requirements:
requirements['roles'] += parse_role_req(role_req)
- elif isinstance(file_requirements, dict):
+ else:
# Newer format with a collections and/or roles key
extra_keys = set(file_requirements.keys()).difference(set(['roles', 'collections']))
if extra_keys:
@@ -854,9 +823,6 @@ class GalaxyCLI(CLI):
for collection_req in file_requirements.get('collections') or []
]
- else:
- raise AnsibleError(f"Expecting requirements yaml to be a list or dictionary but got {type(file_requirements).__name__}")
-
return requirements
def _init_coll_req_dict(self, coll_req):
@@ -1220,16 +1186,11 @@ class GalaxyCLI(CLI):
df.write(b_rendered)
else:
f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
- shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path), follow_symlinks=False)
+ shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path))
for d in dirs:
b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
- if os.path.exists(b_dir_path):
- continue
- b_src_dir = to_bytes(os.path.join(root, d), errors='surrogate_or_strict')
- if os.path.islink(b_src_dir):
- shutil.copyfile(b_src_dir, b_dir_path, follow_symlinks=False)
- else:
+ if not os.path.exists(b_dir_path):
os.makedirs(b_dir_path)
display.display("- %s %s was created successfully" % (galaxy_type.title(), obj_name))
@@ -1293,7 +1254,7 @@ class GalaxyCLI(CLI):
"""Compare checksums with the collection(s) found on the server and the installed copy. This does not verify dependencies."""
collections = context.CLIARGS['args']
- search_paths = AnsibleCollectionConfig.collection_paths
+ search_paths = context.CLIARGS['collections_path']
ignore_errors = context.CLIARGS['ignore_errors']
local_verify_only = context.CLIARGS['offline']
requirements_file = context.CLIARGS['requirements']
@@ -1433,19 +1394,7 @@ class GalaxyCLI(CLI):
upgrade = context.CLIARGS.get('upgrade', False)
collections_path = C.COLLECTIONS_PATHS
-
- managed_paths = set(validate_collection_path(p) for p in C.COLLECTIONS_PATHS)
- read_req_paths = set(validate_collection_path(p) for p in AnsibleCollectionConfig.collection_paths)
-
- unexpected_path = C.GALAXY_COLLECTIONS_PATH_WARNING and not any(p.startswith(path) for p in managed_paths)
- if unexpected_path and any(p.startswith(path) for p in read_req_paths):
- display.warning(
- f"The specified collections path '{path}' appears to be part of the pip Ansible package. "
- "Managing these directly with ansible-galaxy could break the Ansible package. "
- "Install collections to a configured collections path, which will take precedence over "
- "collections found in the PYTHONPATH."
- )
- elif unexpected_path:
+ if len([p for p in collections_path if p.startswith(path)]) == 0:
display.warning("The specified collections path '%s' is not part of the configured Ansible "
"collections paths '%s'. The installed collection will not be picked up in an Ansible "
"run, unless within a playbook-adjacent collections directory." % (to_text(path), to_text(":".join(collections_path))))
@@ -1462,7 +1411,6 @@ class GalaxyCLI(CLI):
artifacts_manager=artifacts_manager,
disable_gpg_verify=disable_gpg_verify,
offline=context.CLIARGS.get('offline', False),
- read_requirement_paths=read_req_paths,
)
return 0
@@ -1631,9 +1579,7 @@ class GalaxyCLI(CLI):
display.warning(w)
if not path_found:
- raise AnsibleOptionsError(
- "- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
- )
+ raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type']))
return 0
@@ -1648,65 +1594,100 @@ class GalaxyCLI(CLI):
artifacts_manager.require_build_metadata = False
output_format = context.CLIARGS['output_format']
+ collections_search_paths = set(context.CLIARGS['collections_path'])
collection_name = context.CLIARGS['collection']
- default_collections_path = set(C.COLLECTIONS_PATHS)
- collections_search_paths = (
- set(context.CLIARGS['collections_path'] or []) | default_collections_path | set(AnsibleCollectionConfig.collection_paths)
- )
+ default_collections_path = AnsibleCollectionConfig.collection_paths
collections_in_paths = {}
warnings = []
path_found = False
collection_found = False
-
- namespace_filter = None
- collection_filter = None
- if collection_name:
- # list a specific collection
-
- validate_collection_name(collection_name)
- namespace_filter, collection_filter = collection_name.split('.')
-
- collections = list(find_existing_collections(
- list(collections_search_paths),
- artifacts_manager,
- namespace_filter=namespace_filter,
- collection_filter=collection_filter,
- dedupe=False
- ))
-
- seen = set()
- fqcn_width, version_width = _get_collection_widths(collections)
- for collection in sorted(collections, key=lambda c: c.src):
- collection_found = True
- collection_path = pathlib.Path(to_text(collection.src)).parent.parent.as_posix()
-
- if output_format in {'yaml', 'json'}:
- collections_in_paths.setdefault(collection_path, {})
- collections_in_paths[collection_path][collection.fqcn] = {'version': collection.ver}
- else:
- if collection_path not in seen:
- _display_header(
- collection_path,
- 'Collection',
- 'Version',
- fqcn_width,
- version_width
- )
- seen.add(collection_path)
- _display_collection(collection, fqcn_width, version_width)
-
- path_found = False
for path in collections_search_paths:
+ collection_path = GalaxyCLI._resolve_path(path)
if not os.path.exists(path):
if path in default_collections_path:
# don't warn for missing default paths
continue
- warnings.append("- the configured path {0} does not exist.".format(path))
- elif os.path.exists(path) and not os.path.isdir(path):
- warnings.append("- the configured path {0}, exists, but it is not a directory.".format(path))
+ warnings.append("- the configured path {0} does not exist.".format(collection_path))
+ continue
+
+ if not os.path.isdir(collection_path):
+ warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path))
+ continue
+
+ path_found = True
+
+ if collection_name:
+ # list a specific collection
+
+ validate_collection_name(collection_name)
+ namespace, collection = collection_name.split('.')
+
+ collection_path = validate_collection_path(collection_path)
+ b_collection_path = to_bytes(os.path.join(collection_path, namespace, collection), errors='surrogate_or_strict')
+
+ if not os.path.exists(b_collection_path):
+ warnings.append("- unable to find {0} in collection paths".format(collection_name))
+ continue
+
+ if not os.path.isdir(collection_path):
+ warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path))
+ continue
+
+ collection_found = True
+
+ try:
+ collection = Requirement.from_dir_path_as_unknown(
+ b_collection_path,
+ artifacts_manager,
+ )
+ except ValueError as val_err:
+ six.raise_from(AnsibleError(val_err), val_err)
+
+ if output_format in {'yaml', 'json'}:
+ collections_in_paths[collection_path] = {
+ collection.fqcn: {'version': collection.ver}
+ }
+
+ continue
+
+ fqcn_width, version_width = _get_collection_widths([collection])
+
+ _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
+ _display_collection(collection, fqcn_width, version_width)
+
else:
- path_found = True
+ # list all collections
+ collection_path = validate_collection_path(path)
+ if os.path.isdir(collection_path):
+ display.vvv("Searching {0} for collections".format(collection_path))
+ collections = list(find_existing_collections(
+ collection_path, artifacts_manager,
+ ))
+ else:
+                # There was no 'ansible_collections/' directory in the path, so there
+                # are no collections here.
+ display.vvv("No 'ansible_collections' directory found at {0}".format(collection_path))
+ continue
+
+ if not collections:
+ display.vvv("No collections found at {0}".format(collection_path))
+ continue
+
+ if output_format in {'yaml', 'json'}:
+ collections_in_paths[collection_path] = {
+ collection.fqcn: {'version': collection.ver} for collection in collections
+ }
+
+ continue
+
+ # Display header
+ fqcn_width, version_width = _get_collection_widths(collections)
+ _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
+
+ # Sort collections by the namespace and name
+ for collection in sorted(collections, key=to_text):
+ _display_collection(collection, fqcn_width, version_width)
# Do not warn if the specific collection was found in any of the search paths
if collection_found and collection_name:
@@ -1715,10 +1696,8 @@ class GalaxyCLI(CLI):
for w in warnings:
display.warning(w)
- if not collections and not path_found:
- raise AnsibleOptionsError(
- "- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])
- )
+ if not path_found:
+ raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type']))
if output_format == 'json':
display.display(json.dumps(collections_in_paths))
@@ -1752,8 +1731,8 @@ class GalaxyCLI(CLI):
tags=context.CLIARGS['galaxy_tags'], author=context.CLIARGS['author'], page_size=page_size)
if response['count'] == 0:
- display.warning("No roles match your search.")
- return 0
+ display.display("No roles match your search.", color=C.COLOR_ERROR)
+ return 1
data = [u'']
@@ -1792,7 +1771,6 @@ class GalaxyCLI(CLI):
github_user = to_text(context.CLIARGS['github_user'], errors='surrogate_or_strict')
github_repo = to_text(context.CLIARGS['github_repo'], errors='surrogate_or_strict')
- rc = 0
if context.CLIARGS['check_status']:
task = self.api.get_import_task(github_user=github_user, github_repo=github_repo)
else:
@@ -1810,7 +1788,7 @@ class GalaxyCLI(CLI):
display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
color=C.COLOR_CHANGED)
- return rc
+ return 0
# found a single role as expected
display.display("Successfully submitted import request %d" % task[0]['id'])
if not context.CLIARGS['wait']:
@@ -1827,13 +1805,12 @@ class GalaxyCLI(CLI):
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
- if (state := task[0]['state']) in ['SUCCESS', 'FAILED']:
- rc = ['SUCCESS', 'FAILED'].index(state)
+ if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
- return rc
+ return 0
def execute_setup(self):
""" Setup an integration from Github or Travis for Ansible Galaxy roles"""
diff --git a/lib/ansible/cli/inventory.py b/lib/ansible/cli/inventory.py
index 3550079b..56c370cc 100755
--- a/lib/ansible/cli/inventory.py
+++ b/lib/ansible/cli/inventory.py
@@ -18,7 +18,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
-from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils.vars import combine_vars
from ansible.utils.display import Display
from ansible.vars.plugins import get_vars_from_inventory_sources, get_vars_from_path
@@ -72,6 +72,7 @@ class InventoryCLI(CLI):
opt_help.add_runtask_options(self.parser)
# remove unused default options
+ self.parser.add_argument('-l', '--limit', help=argparse.SUPPRESS, action=opt_help.UnrecognizedArgument, nargs='?')
self.parser.add_argument('--list-hosts', help=argparse.SUPPRESS, action=opt_help.UnrecognizedArgument)
self.parser.add_argument('args', metavar='host|group', nargs='?')
@@ -79,10 +80,9 @@ class InventoryCLI(CLI):
# Actions
action_group = self.parser.add_argument_group("Actions", "One of following must be used on invocation, ONLY ONE!")
action_group.add_argument("--list", action="store_true", default=False, dest='list', help='Output all hosts info, works as inventory script')
- action_group.add_argument("--host", action="store", default=None, dest='host',
- help='Output specific host info, works as inventory script. It will ignore limit')
+ action_group.add_argument("--host", action="store", default=None, dest='host', help='Output specific host info, works as inventory script')
action_group.add_argument("--graph", action="store_true", default=False, dest='graph',
- help='create inventory graph, if supplying pattern it must be a valid group name. It will ignore limit')
+ help='create inventory graph, if supplying pattern it must be a valid group name')
self.parser.add_argument_group(action_group)
# graph
@@ -144,22 +144,17 @@ class InventoryCLI(CLI):
# FIXME: should we template first?
results = self.dump(myvars)
- else:
- if context.CLIARGS['subset']:
- # not doing single host, set limit in general if given
- self.inventory.subset(context.CLIARGS['subset'])
-
- if context.CLIARGS['graph']:
- results = self.inventory_graph()
- elif context.CLIARGS['list']:
- top = self._get_group('all')
- if context.CLIARGS['yaml']:
- results = self.yaml_inventory(top)
- elif context.CLIARGS['toml']:
- results = self.toml_inventory(top)
- else:
- results = self.json_inventory(top)
- results = self.dump(results)
+ elif context.CLIARGS['graph']:
+ results = self.inventory_graph()
+ elif context.CLIARGS['list']:
+ top = self._get_group('all')
+ if context.CLIARGS['yaml']:
+ results = self.yaml_inventory(top)
+ elif context.CLIARGS['toml']:
+ results = self.toml_inventory(top)
+ else:
+ results = self.json_inventory(top)
+ results = self.dump(results)
if results:
outfile = context.CLIARGS['output_file']
@@ -254,7 +249,7 @@ class InventoryCLI(CLI):
return dump
@staticmethod
- def _remove_empty_keys(dump):
+ def _remove_empty(dump):
# remove empty keys
for x in ('hosts', 'vars', 'children'):
if x in dump and not dump[x]:
@@ -301,34 +296,33 @@ class InventoryCLI(CLI):
def json_inventory(self, top):
- seen_groups = set()
+ seen = set()
- def format_group(group, available_hosts):
+ def format_group(group):
results = {}
results[group.name] = {}
if group.name != 'all':
- results[group.name]['hosts'] = [h.name for h in group.hosts if h.name in available_hosts]
+ results[group.name]['hosts'] = [h.name for h in group.hosts]
results[group.name]['children'] = []
for subgroup in group.child_groups:
results[group.name]['children'].append(subgroup.name)
- if subgroup.name not in seen_groups:
- results.update(format_group(subgroup, available_hosts))
- seen_groups.add(subgroup.name)
+ if subgroup.name not in seen:
+ results.update(format_group(subgroup))
+ seen.add(subgroup.name)
if context.CLIARGS['export']:
results[group.name]['vars'] = self._get_group_variables(group)
- self._remove_empty_keys(results[group.name])
- # remove empty groups
+ self._remove_empty(results[group.name])
if not results[group.name]:
del results[group.name]
return results
- hosts = self.inventory.get_hosts(top.name)
- results = format_group(top, frozenset(h.name for h in hosts))
+ results = format_group(top)
# populate meta
results['_meta'] = {'hostvars': {}}
+ hosts = self.inventory.get_hosts()
for host in hosts:
hvars = self._get_host_variables(host)
if hvars:
@@ -338,10 +332,9 @@ class InventoryCLI(CLI):
def yaml_inventory(self, top):
- seen_hosts = set()
- seen_groups = set()
+ seen = []
- def format_group(group, available_hosts):
+ def format_group(group):
results = {}
# initialize group + vars
@@ -351,21 +344,15 @@ class InventoryCLI(CLI):
results[group.name]['children'] = {}
for subgroup in group.child_groups:
if subgroup.name != 'all':
- if subgroup.name in seen_groups:
- results[group.name]['children'].update({subgroup.name: {}})
- else:
- results[group.name]['children'].update(format_group(subgroup, available_hosts))
- seen_groups.add(subgroup.name)
+ results[group.name]['children'].update(format_group(subgroup))
# hosts for group
results[group.name]['hosts'] = {}
if group.name != 'all':
for h in group.hosts:
- if h.name not in available_hosts:
- continue # observe limit
myvars = {}
- if h.name not in seen_hosts: # avoid defining host vars more than once
- seen_hosts.add(h.name)
+ if h.name not in seen: # avoid defining host vars more than once
+ seen.append(h.name)
myvars = self._get_host_variables(host=h)
results[group.name]['hosts'][h.name] = myvars
@@ -374,22 +361,17 @@ class InventoryCLI(CLI):
if gvars:
results[group.name]['vars'] = gvars
- self._remove_empty_keys(results[group.name])
- # remove empty groups
- if not results[group.name]:
- del results[group.name]
+ self._remove_empty(results[group.name])
return results
- available_hosts = frozenset(h.name for h in self.inventory.get_hosts(top.name))
- return format_group(top, available_hosts)
+ return format_group(top)
def toml_inventory(self, top):
- seen_hosts = set()
- seen_hosts = set()
+ seen = set()
has_ungrouped = bool(next(g.hosts for g in top.child_groups if g.name == 'ungrouped'))
- def format_group(group, available_hosts):
+ def format_group(group):
results = {}
results[group.name] = {}
@@ -399,14 +381,12 @@ class InventoryCLI(CLI):
continue
if group.name != 'all':
results[group.name]['children'].append(subgroup.name)
- results.update(format_group(subgroup, available_hosts))
+ results.update(format_group(subgroup))
if group.name != 'all':
for host in group.hosts:
- if host.name not in available_hosts:
- continue
- if host.name not in seen_hosts:
- seen_hosts.add(host.name)
+ if host.name not in seen:
+ seen.add(host.name)
host_vars = self._get_host_variables(host=host)
else:
host_vars = {}
@@ -418,15 +398,13 @@ class InventoryCLI(CLI):
if context.CLIARGS['export']:
results[group.name]['vars'] = self._get_group_variables(group)
- self._remove_empty_keys(results[group.name])
- # remove empty groups
+ self._remove_empty(results[group.name])
if not results[group.name]:
del results[group.name]
return results
- available_hosts = frozenset(h.name for h in self.inventory.get_hosts(top.name))
- results = format_group(top, available_hosts)
+ results = format_group(top)
return results
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index e63785b0..9c091a67 100755
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -18,7 +18,7 @@ from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError
from ansible.executor.playbook_executor import PlaybookExecutor
-from ansible.module_utils.common.text.converters import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.playbook.block import Block
from ansible.plugins.loader import add_all_plugin_dirs
from ansible.utils.collection_loader import AnsibleCollectionConfig
@@ -67,19 +67,8 @@ class PlaybookCLI(CLI):
self.parser.add_argument('args', help='Playbook(s)', metavar='playbook', nargs='+')
def post_process_args(self, options):
-
- # for listing, we need to know if user had tag input
- # capture here as parent function sets defaults for tags
- havetags = bool(options.tags or options.skip_tags)
-
options = super(PlaybookCLI, self).post_process_args(options)
- if options.listtags:
- # default to all tags (including never), when listing tags
- # unless user specified tags
- if not havetags:
- options.tags = ['never', 'all']
-
display.verbosity = options.verbosity
self.validate_conflicts(options, runas_opts=True, fork_opts=True)
diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index f369c390..47084989 100755
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -24,7 +24,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleOptionsError
-from ansible.module_utils.common.text.converters import to_native, to_text
+from ansible.module_utils._text import to_native, to_text
from ansible.plugins.loader import module_loader
from ansible.utils.cmd_functions import run_cmd
from ansible.utils.display import Display
@@ -81,7 +81,7 @@ class PullCLI(CLI):
super(PullCLI, self).init_parser(
usage='%prog -U <repository> [options] [<playbook.yml>]',
- desc="pulls playbooks from a VCS repo and executes them on target host")
+ desc="pulls playbooks from a VCS repo and executes them for the local host")
# Do not add check_options as there's a conflict with --checkout/-C
opt_help.add_connect_options(self.parser)
@@ -275,15 +275,8 @@ class PullCLI(CLI):
for vault_id in context.CLIARGS['vault_ids']:
cmd += " --vault-id=%s" % vault_id
- if context.CLIARGS['become_password_file']:
- cmd += " --become-password-file=%s" % context.CLIARGS['become_password_file']
-
- if context.CLIARGS['connection_password_file']:
- cmd += " --connection-password-file=%s" % context.CLIARGS['connection_password_file']
-
for ev in context.CLIARGS['extra_vars']:
cmd += ' -e %s' % shlex.quote(ev)
-
if context.CLIARGS['become_ask_pass']:
cmd += ' --ask-become-pass'
if context.CLIARGS['skip_tags']:
diff --git a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
index b1ed18c9..9109137e 100755
--- a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
+++ b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
@@ -6,6 +6,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import argparse
import fcntl
import hashlib
import io
@@ -23,12 +24,12 @@ from contextlib import contextmanager
from ansible import constants as C
from ansible.cli.arguments import option_helpers as opt_help
-from ansible.module_utils.common.text.converters import to_bytes, to_text
+from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.connection import Connection, ConnectionError, send_data, recv_data
from ansible.module_utils.service import fork_process
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.playbook.play_context import PlayContext
-from ansible.plugins.loader import connection_loader, init_plugin_loader
+from ansible.plugins.loader import connection_loader
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.display import Display
from ansible.utils.jsonrpc import JsonRpcServer
@@ -229,7 +230,6 @@ def main(args=None):
parser.add_argument('playbook_pid')
parser.add_argument('task_uuid')
args = parser.parse_args(args[1:] if args is not None else args)
- init_plugin_loader()
# initialize verbosity
display.verbosity = args.verbosity
diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py
index cf2c9dd9..3e60329d 100755
--- a/lib/ansible/cli/vault.py
+++ b/lib/ansible/cli/vault.py
@@ -17,7 +17,7 @@ from ansible import constants as C
from ansible import context
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleOptionsError
-from ansible.module_utils.common.text.converters import to_text, to_bytes
+from ansible.module_utils._text import to_text, to_bytes
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import VaultEditor, VaultLib, match_encrypt_secret
from ansible.utils.display import Display
@@ -61,20 +61,20 @@ class VaultCLI(CLI):
epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
)
- common = opt_help.ArgumentParser(add_help=False)
+ common = opt_help.argparse.ArgumentParser(add_help=False)
opt_help.add_vault_options(common)
opt_help.add_verbosity_options(common)
subparsers = self.parser.add_subparsers(dest='action')
subparsers.required = True
- output = opt_help.ArgumentParser(add_help=False)
+ output = opt_help.argparse.ArgumentParser(add_help=False)
output.add_argument('--output', default=None, dest='output_file',
help='output file name for encrypt or decrypt; use - for stdout',
type=opt_help.unfrack_path())
# For encrypting actions, we can also specify which of multiple vault ids should be used for encrypting
- vault_id = opt_help.ArgumentParser(add_help=False)
+ vault_id = opt_help.argparse.ArgumentParser(add_help=False)
vault_id.add_argument('--encrypt-vault-id', default=[], dest='encrypt_vault_id',
action='store', type=str,
help='the vault id used to encrypt (required if more than one vault-id is provided)')
@@ -82,8 +82,6 @@ class VaultCLI(CLI):
create_parser = subparsers.add_parser('create', help='Create new vault encrypted file', parents=[vault_id, common])
create_parser.set_defaults(func=self.execute_create)
create_parser.add_argument('args', help='Filename', metavar='file_name', nargs='*')
- create_parser.add_argument('--skip-tty-check', default=False, help='allows editor to be opened when no tty attached',
- dest='skip_tty_check', action='store_true')
decrypt_parser = subparsers.add_parser('decrypt', help='Decrypt vault encrypted file', parents=[output, common])
decrypt_parser.set_defaults(func=self.execute_decrypt)
@@ -386,11 +384,6 @@ class VaultCLI(CLI):
sys.stderr.write(err)
b_outs.append(to_bytes(out))
- # The output must end with a newline to play nice with terminal representation.
- # Refs:
- # * https://stackoverflow.com/a/729795/595220
- # * https://github.com/ansible/ansible/issues/78932
- b_outs.append(b'')
self.editor.write_data(b'\n'.join(b_outs), context.CLIARGS['output_file'] or '-')
if sys.stdout.isatty():
@@ -449,11 +442,8 @@ class VaultCLI(CLI):
if len(context.CLIARGS['args']) != 1:
raise AnsibleOptionsError("ansible-vault create can take only one filename argument")
- if sys.stdout.isatty() or context.CLIARGS['skip_tty_check']:
- self.editor.create_file(context.CLIARGS['args'][0], self.encrypt_secret,
- vault_id=self.encrypt_vault_id)
- else:
- raise AnsibleOptionsError("not a tty, editor cannot be opened")
+ self.editor.create_file(context.CLIARGS['args'][0], self.encrypt_secret,
+ vault_id=self.encrypt_vault_id)
def execute_edit(self):
''' open and decrypt an existing vaulted file in an editor, that will be encrypted again when closed'''