author     Lee Garrett <lgarrett@rocketjump.eu>    2021-11-17 20:15:37 +0100
committer  Lee Garrett <lgarrett@rocketjump.eu>    2021-11-17 20:15:37 +0100
commit     b1739f3e93dadd7d8fa794644ceedc24bddc8388 (patch)
tree       193d287510fd44d67857e2d6b6bfcbb2b495c60a /lib/ansible/cli
parent     13e2c2e94d3559b85a7d813d98e9835b891b0a9f (diff)
download   debian-ansible-core-b1739f3e93dadd7d8fa794644ceedc24bddc8388.zip
New upstream version 2.12.0
Diffstat (limited to 'lib/ansible/cli')
-rw-r--r--  lib/ansible/cli/__init__.py                              76
-rw-r--r--  lib/ansible/cli/arguments/option_helpers.py              44
-rw-r--r--  lib/ansible/cli/config.py                               335
-rw-r--r--  lib/ansible/cli/doc.py                                  288
-rw-r--r--  lib/ansible/cli/galaxy.py                                37
-rw-r--r--  lib/ansible/cli/inventory.py                             10
-rw-r--r--  lib/ansible/cli/pull.py                                   5
-rwxr-xr-x  lib/ansible/cli/scripts/ansible_cli_stub.py              47
-rwxr-xr-x  lib/ansible/cli/scripts/ansible_connection_cli_stub.py    3
9 files changed, 671 insertions, 174 deletions
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 112d892d..0ffafc5a 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -19,7 +19,7 @@ from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError
from ansible.inventory.manager import InventoryManager
-from ansible.module_utils.six import with_metaclass, string_types
+from ansible.module_utils.six import with_metaclass, string_types, PY3
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret
@@ -230,6 +230,14 @@ class CLI(with_metaclass(ABCMeta, object)):
return vault_secrets
@staticmethod
+ def _get_secret(prompt):
+
+ secret = getpass.getpass(prompt=prompt)
+ if secret:
+ secret = to_unsafe_text(secret)
+ return secret
+
+ @staticmethod
def ask_passwords():
''' prompt for connection and become passwords if needed '''
@@ -241,26 +249,23 @@ class CLI(with_metaclass(ABCMeta, object)):
become_prompt_method = "BECOME" if C.AGNOSTIC_BECOME_PROMPT else op['become_method'].upper()
try:
+ become_prompt = "%s password: " % become_prompt_method
if op['ask_pass']:
- sshpass = getpass.getpass(prompt="SSH password: ")
+ sshpass = CLI._get_secret("SSH password: ")
become_prompt = "%s password[defaults to SSH password]: " % become_prompt_method
- else:
- become_prompt = "%s password: " % become_prompt_method
+ elif op['connection_password_file']:
+ sshpass = CLI.get_password_from_file(op['connection_password_file'])
if op['become_ask_pass']:
- becomepass = getpass.getpass(prompt=become_prompt)
+ becomepass = CLI._get_secret(become_prompt)
if op['ask_pass'] and becomepass == '':
becomepass = sshpass
+ elif op['become_password_file']:
+ becomepass = CLI.get_password_from_file(op['become_password_file'])
+
except EOFError:
pass
- # we 'wrap' the passwords to prevent templating as
- # they can contain special chars and trigger it incorrectly
- if sshpass:
- sshpass = to_unsafe_text(sshpass)
- if becomepass:
- becomepass = to_unsafe_text(becomepass)
-
return (sshpass, becomepass)
def validate_conflicts(self, op, runas_opts=False, fork_opts=False):
@@ -375,7 +380,7 @@ class CLI(with_metaclass(ABCMeta, object)):
options = self.parser.parse_args(self.args[1:])
except SystemExit as e:
if(e.code != 0):
- self.parser.exit(status=2, message=" \n%s " % self.parser.format_help())
+ self.parser.exit(status=2, message=" \n%s" % self.parser.format_help())
raise
options = self.post_process_args(options)
context._init_global_context(options)
@@ -492,3 +497,48 @@ class CLI(with_metaclass(ABCMeta, object)):
raise AnsibleError("Specified hosts and/or --limit does not match any hosts")
return hosts
+
+ @staticmethod
+ def get_password_from_file(pwd_file):
+
+ b_pwd_file = to_bytes(pwd_file)
+ secret = None
+ if b_pwd_file == b'-':
+ if PY3:
+ # ensure it's read as bytes
+ secret = sys.stdin.buffer.read()
+ else:
+ secret = sys.stdin.read()
+
+ elif not os.path.exists(b_pwd_file):
+ raise AnsibleError("The password file %s was not found" % pwd_file)
+
+ elif os.access(b_pwd_file, os.X_OK):
+ display.vvvv(u'The password file %s is a script.' % to_text(pwd_file))
+ cmd = [b_pwd_file]
+
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except OSError as e:
+ raise AnsibleError("Problem occured when trying to run the password script %s (%s)."
+ " If this is not a script, remove the executable bit from the file." % (pwd_file, e))
+
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ raise AnsibleError("The password script %s returned an error (rc=%s): %s" % (pwd_file, p.returncode, stderr))
+ secret = stdout
+
+ else:
+ try:
+ f = open(b_pwd_file, "rb")
+ secret = f.read().strip()
+ f.close()
+ except (OSError, IOError) as e:
+ raise AnsibleError("Could not read password file %s: %s" % (pwd_file, e))
+
+ secret = secret.strip(b'\r\n')
+
+ if not secret:
+ raise AnsibleError('Empty password was provided from file (%s)' % pwd_file)
+
+ return to_unsafe_text(secret)
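The new CLI.get_password_from_file() above resolves a connection or become password from one of three sources: '-' reads raw bytes from stdin, an executable file is run as a script whose stdout becomes the secret, and any other existing file is read verbatim. A minimal standalone sketch of that lookup order (read_secret is a hypothetical name, not the ansible API; the real method additionally wraps the result with to_unsafe_text to keep it out of templating):

```python
import os
import subprocess
import sys


def read_secret(path):
    """Sketch of the '-' / executable-script / plain-file lookup order."""
    if path == "-":
        # read raw bytes from stdin so trailing newlines can be stripped uniformly
        secret = sys.stdin.buffer.read()
    elif not os.path.exists(path):
        raise FileNotFoundError("the password file %s was not found" % path)
    elif os.access(path, os.X_OK):
        # executable password file: run it and take its stdout as the secret
        proc = subprocess.run([path], capture_output=True, check=True)
        secret = proc.stdout
    else:
        with open(path, "rb") as f:
            secret = f.read()

    secret = secret.strip(b"\r\n")
    if not secret:
        raise ValueError("empty password provided from file %s" % path)
    return secret.decode("utf-8", errors="surrogateescape")
```

In practice this is what backs the new --connection-password-file/--become-password-file options: pointing them at an executable script uses the script's output, while the same file without the executable bit is used as-is.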
diff --git a/lib/ansible/cli/arguments/option_helpers.py b/lib/ansible/cli/arguments/option_helpers.py
index 7492e781..9704b381 100644
--- a/lib/ansible/cli/arguments/option_helpers.py
+++ b/lib/ansible/cli/arguments/option_helpers.py
@@ -11,19 +11,13 @@ import os
import os.path
import sys
import time
-import yaml
-
-try:
- import _yaml
- HAS_LIBYAML = True
-except ImportError:
- HAS_LIBYAML = False
from jinja2 import __version__ as j2_version
import ansible
from ansible import constants as C
from ansible.module_utils._text import to_native
+from ansible.module_utils.common.yaml import HAS_LIBYAML, yaml_load
from ansible.release import __version__
from ansible.utils.path import unfrackpath
@@ -122,7 +116,8 @@ def _git_repo_info(repo_path):
# Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
if os.path.isfile(repo_path):
try:
- gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
+ with open(repo_path) as f:
+ gitdir = yaml_load(f).get('gitdir')
# There is a possibility the .git file to have an absolute path.
if os.path.isabs(gitdir):
repo_path = gitdir
@@ -166,7 +161,7 @@ def _gitinfo():
def version(prog=None):
""" return ansible version """
if prog:
- result = ["{0} [core {1}] ".format(prog, __version__)]
+ result = ["{0} [core {1}]".format(prog, __version__)]
else:
result = [__version__]
@@ -249,8 +244,6 @@ def add_connect_options(parser):
"""Add options for commands which need to connection to other hosts"""
connect_group = parser.add_argument_group("Connection Options", "control as whom and how to connect to hosts")
- connect_group.add_argument('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
- help='ask for connection password')
connect_group.add_argument('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
help='use this file to authenticate the connection', type=unfrack_path())
connect_group.add_argument('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
@@ -272,6 +265,14 @@ def add_connect_options(parser):
parser.add_argument_group(connect_group)
+ connect_password_group = parser.add_mutually_exclusive_group()
+ connect_password_group.add_argument('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
+ help='ask for connection password')
+ connect_password_group.add_argument('--connection-password-file', '--conn-pass-file', default=C.CONNECTION_PASSWORD_FILE, dest='connection_password_file',
+ help="Connection password file", type=unfrack_path(), action='store')
+
+ parser.add_argument_group(connect_password_group)
+
def add_fork_options(parser):
"""Add options for commands that can fork worker processes"""
@@ -331,7 +332,9 @@ def add_runas_options(parser):
runas_group.add_argument('--become-user', default=None, dest='become_user', type=str,
help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
- add_runas_prompt_options(parser, runas_group=runas_group)
+ parser.add_argument_group(runas_group)
+
+ add_runas_prompt_options(parser)
def add_runas_prompt_options(parser, runas_group=None):
@@ -341,15 +344,18 @@ def add_runas_prompt_options(parser, runas_group=None):
Note that add_runas_options() includes these options already. Only one of the two functions
should be used.
"""
- if runas_group is None:
- runas_group = parser.add_argument_group("Privilege Escalation Options",
- "control how and which user you become as on target hosts")
+ if runas_group is not None:
+ parser.add_argument_group(runas_group)
- runas_group.add_argument('-K', '--ask-become-pass', dest='become_ask_pass', action='store_true',
- default=C.DEFAULT_BECOME_ASK_PASS,
- help='ask for privilege escalation password')
+ runas_pass_group = parser.add_mutually_exclusive_group()
- parser.add_argument_group(runas_group)
+ runas_pass_group.add_argument('-K', '--ask-become-pass', dest='become_ask_pass', action='store_true',
+ default=C.DEFAULT_BECOME_ASK_PASS,
+ help='ask for privilege escalation password')
+ runas_pass_group.add_argument('--become-password-file', '--become-pass-file', default=C.BECOME_PASSWORD_FILE, dest='become_password_file',
+ help="Become password file", type=unfrack_path(), action='store')
+
+ parser.add_argument_group(runas_pass_group)
def add_runtask_options(parser):
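Because -k/--ask-pass now lives in a mutually exclusive group with --connection-password-file (and -K/--ask-become-pass with --become-password-file), supplying both is rejected at parse time rather than silently preferring one. A small sketch of the same pattern with plain argparse (option names mirror the diff; defaults and help text are trimmed):

```python
import argparse

parser = argparse.ArgumentParser(prog="ansible")

conn = parser.add_mutually_exclusive_group()
conn.add_argument("-k", "--ask-pass", dest="ask_pass", action="store_true",
                  help="ask for connection password")
conn.add_argument("--connection-password-file", "--conn-pass-file",
                  dest="connection_password_file", help="connection password file")

become = parser.add_mutually_exclusive_group()
become.add_argument("-K", "--ask-become-pass", dest="become_ask_pass", action="store_true",
                    help="ask for privilege escalation password")
become.add_argument("--become-password-file", "--become-pass-file",
                    dest="become_password_file", help="become password file")

args = parser.parse_args(["--connection-password-file", "secret.txt", "-K"])
print(args.connection_password_file, args.become_ask_pass)

# Passing both members of one group, e.g. ["-k", "--connection-password-file", "x"],
# makes argparse exit with "not allowed with argument -k/--ask-pass".
```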
diff --git a/lib/ansible/cli/config.py b/lib/ansible/cli/config.py
index 286aad7e..328b3c11 100644
--- a/lib/ansible/cli/config.py
+++ b/lib/ansible/cli/config.py
@@ -10,11 +10,18 @@ import subprocess
import yaml
from ansible import context
+import ansible.plugins.loader as plugin_loader
+
+from ansible import constants as C
from ansible.cli import CLI
from ansible.cli.arguments import option_helpers as opt_help
-from ansible.config.manager import ConfigManager, Setting, find_ini_config_file
+from ansible.config.manager import ConfigManager, Setting
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils._text import to_native, to_text, to_bytes
+from ansible.module_utils.common._collections_compat import Mapping
+from ansible.module_utils.six import string_types
+from ansible.module_utils.six.moves import shlex_quote
+from ansible.parsing.quoting import is_quoted
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.utils.color import stringc
from ansible.utils.display import Display
@@ -42,6 +49,9 @@ class ConfigCLI(CLI):
opt_help.add_verbosity_options(common)
common.add_argument('-c', '--config', dest='config_file',
help="path to configuration file, defaults to first file found in precedence.")
+ common.add_argument("-t", "--type", action="store", default='base', dest='type', choices=['all', 'base'] + list(C.CONFIGURABLE_PLUGINS),
+ help="Filter down to a specific plugin type.")
+ common.add_argument('args', help='Specific plugin to target, requires type of plugin to be set', nargs='*')
subparsers = self.parser.add_subparsers(dest='action')
subparsers.required = True
@@ -51,19 +61,20 @@ class ConfigCLI(CLI):
dump_parser = subparsers.add_parser('dump', help='Dump configuration', parents=[common])
dump_parser.set_defaults(func=self.execute_dump)
- dump_parser.add_argument('--only-changed', dest='only_changed', action='store_true',
+ dump_parser.add_argument('--only-changed', '--changed-only', dest='only_changed', action='store_true',
help="Only show configurations that have changed from the default")
view_parser = subparsers.add_parser('view', help='View configuration file', parents=[common])
view_parser.set_defaults(func=self.execute_view)
- # update_parser = subparsers.add_parser('update', help='Update configuration option')
- # update_parser.set_defaults(func=self.execute_update)
- # update_parser.add_argument('-s', '--setting', dest='setting',
- # help="config setting, the section defaults to 'defaults'",
- # metavar='[section.]setting=value')
+ init_parser = subparsers.add_parser('init', help='Create initial configuration', parents=[common])
+ init_parser.set_defaults(func=self.execute_init)
+ init_parser.add_argument('--format', '-f', dest='format', action='store', choices=['ini', 'env', 'vars'], default='ini',
+ help='Output format for init')
+ init_parser.add_argument('--disabled', dest='commented', action='store_true', default=False,
+ help='Prefixes all entries with a comment character to disable them')
- # search_parser = subparsers.add_parser('search', help='Search configuration')
+ # search_parser = subparsers.add_parser('find', help='Search configuration')
# search_parser.set_defaults(func=self.execute_search)
# search_parser.add_argument('args', help='Search term', metavar='<search term>')
@@ -85,8 +96,8 @@ class ConfigCLI(CLI):
else:
raise AnsibleOptionsError('The provided configuration file is missing or not accessible: %s' % to_native(self.config_file))
else:
- self.config = ConfigManager()
- self.config_file = find_ini_config_file()
+ self.config = C.config
+ self.config_file = self.config._config_file
if self.config_file:
try:
@@ -105,6 +116,7 @@ class ConfigCLI(CLI):
elif context.CLIARGS['action'] == 'view':
raise AnsibleError('Invalid or no config file was supplied')
+ # run the requested action
context.CLIARGS['func']()
def execute_update(self):
@@ -155,34 +167,305 @@ class ConfigCLI(CLI):
except Exception as e:
raise AnsibleError("Failed to open editor: %s" % to_native(e))
- def execute_list(self):
+ def _list_plugin_settings(self, ptype, plugins=None):
+ entries = {}
+ loader = getattr(plugin_loader, '%s_loader' % ptype)
+
+ # build list
+ if plugins:
+ plugin_cs = []
+ for plugin in plugins:
+ p = loader.get(plugin, class_only=True)
+ if p is None:
+ display.warning("Skipping %s as we could not find matching plugin" % plugin)
+ else:
+ plugin_cs.append(p)
+ else:
+ plugin_cs = loader.all(class_only=True)
+
+ # iterate over class instances
+ for plugin in plugin_cs:
+ finalname = name = plugin._load_name
+ if name.startswith('_'):
+ # alias or deprecated
+ if os.path.islink(plugin._original_path):
+ continue
+ else:
+ finalname = name.replace('_', '', 1) + ' (DEPRECATED)'
+
+ entries[finalname] = self.config.get_configuration_definitions(ptype, name)
+
+ return entries
+
+ def _list_entries_from_args(self):
'''
- list all current configs reading lib/constants.py and shows env and config file setting names
+ build a dict with the requested configs
'''
- self.pager(to_text(yaml.dump(self.config.get_configuration_definitions(ignore_private=True), Dumper=AnsibleDumper), errors='surrogate_or_strict'))
+ config_entries = {}
+ if context.CLIARGS['type'] in ('base', 'all'):
+ # this dumps main/common configs
+ config_entries = self.config.get_configuration_definitions(ignore_private=True)
- def execute_dump(self):
+ if context.CLIARGS['type'] != 'base':
+ config_entries['PLUGINS'] = {}
+
+ if context.CLIARGS['type'] == 'all':
+ # now each plugin type
+ for ptype in C.CONFIGURABLE_PLUGINS:
+ config_entries['PLUGINS'][ptype.upper()] = self._list_plugin_settings(ptype)
+ elif context.CLIARGS['type'] != 'base':
+ config_entries['PLUGINS'][context.CLIARGS['type']] = self._list_plugin_settings(context.CLIARGS['type'], context.CLIARGS['args'])
+
+ return config_entries
+
+ def execute_list(self):
'''
- Shows the current settings, merges ansible.cfg if specified
+ list and output available configs
'''
- # FIXME: deal with plugins, not just base config
- text = []
- defaults = self.config.get_configuration_definitions(ignore_private=True).copy()
- for setting in self.config.data.get_settings():
- if setting.name in defaults:
- defaults[setting.name] = setting
- for setting in sorted(defaults):
- if isinstance(defaults[setting], Setting):
- if defaults[setting].origin == 'default':
+ config_entries = self._list_entries_from_args()
+ self.pager(to_text(yaml.dump(config_entries, Dumper=AnsibleDumper), errors='surrogate_or_strict'))
+
+ def _get_settings_vars(self, settings, subkey):
+
+ data = []
+ if context.CLIARGS['commented']:
+ prefix = '#'
+ else:
+ prefix = ''
+
+ for setting in settings:
+
+ if not settings[setting].get('description'):
+ continue
+
+ default = settings[setting].get('default', '')
+ if subkey == 'env':
+ stype = settings[setting].get('type', '')
+ if stype == 'boolean':
+ if default:
+ default = '1'
+ else:
+ default = '0'
+ elif default:
+ if stype == 'list':
+ if not isinstance(default, string_types):
+ # python lists are not valid env ones
+ try:
+ default = ', '.join(default)
+ except Exception as e:
+ # list of other stuff
+ default = '%s' % to_native(default)
+ if isinstance(default, string_types) and not is_quoted(default):
+ default = shlex_quote(default)
+ elif default is None:
+ default = ''
+
+ if subkey in settings[setting] and settings[setting][subkey]:
+ entry = settings[setting][subkey][-1]['name']
+ if isinstance(settings[setting]['description'], string_types):
+ desc = settings[setting]['description']
+ else:
+ desc = '\n#'.join(settings[setting]['description'])
+ name = settings[setting].get('name', setting)
+ data.append('# %s(%s): %s' % (name, settings[setting].get('type', 'string'), desc))
+
+ # TODO: might need quoting and value coercion depending on type
+ if subkey == 'env':
+ data.append('%s%s=%s' % (prefix, entry, default))
+ elif subkey == 'vars':
+ data.append(prefix + to_text(yaml.dump({entry: default}, Dumper=AnsibleDumper, default_flow_style=False), errors='surrogate_or_strict'))
+ data.append('')
+
+ return data
+
+ def _get_settings_ini(self, settings):
+
+ sections = {}
+ for o in sorted(settings.keys()):
+
+ opt = settings[o]
+
+ if not isinstance(opt, Mapping):
+ # recursed into one of the few settings that is a mapping, now hitting its strings
+ continue
+
+ if not opt.get('description'):
+ # it's a plugin
+ new_sections = self._get_settings_ini(opt)
+ for s in new_sections:
+ if s in sections:
+ sections[s].extend(new_sections[s])
+ else:
+ sections[s] = new_sections[s]
+ continue
+
+ if isinstance(opt['description'], string_types):
+ desc = '# (%s) %s' % (opt.get('type', 'string'), opt['description'])
+ else:
+ desc = "# (%s) " % opt.get('type', 'string')
+ desc += "\n# ".join(opt['description'])
+
+ if 'ini' in opt and opt['ini']:
+ entry = opt['ini'][-1]
+ if entry['section'] not in sections:
+ sections[entry['section']] = []
+
+ default = opt.get('default', '')
+ if opt.get('type', '') == 'list' and not isinstance(default, string_types):
+ # python lists are not valid ini ones
+ default = ', '.join(default)
+ elif default is None:
+ default = ''
+
+ if context.CLIARGS['commented']:
+ entry['key'] = ';%s' % entry['key']
+
+ key = desc + '\n%s=%s' % (entry['key'], default)
+ sections[entry['section']].append(key)
+
+ return sections
+
+ def execute_init(self):
+
+ data = []
+ config_entries = self._list_entries_from_args()
+ plugin_types = config_entries.pop('PLUGINS', None)
+
+ if context.CLIARGS['format'] == 'ini':
+ sections = self._get_settings_ini(config_entries)
+
+ if plugin_types:
+ for ptype in plugin_types:
+ plugin_sections = self._get_settings_ini(plugin_types[ptype])
+ for s in plugin_sections:
+ if s in sections:
+ sections[s].extend(plugin_sections[s])
+ else:
+ sections[s] = plugin_sections[s]
+
+ if sections:
+ for section in sections.keys():
+ data.append('[%s]' % section)
+ for key in sections[section]:
+ data.append(key)
+ data.append('')
+ data.append('')
+
+ elif context.CLIARGS['format'] in ('env', 'vars'): # TODO: add yaml once that config option is added
+ data = self._get_settings_vars(config_entries, context.CLIARGS['format'])
+ if plugin_types:
+ for ptype in plugin_types:
+ for plugin in plugin_types[ptype].keys():
+ data.extend(self._get_settings_vars(plugin_types[ptype][plugin], context.CLIARGS['format']))
+
+ self.pager(to_text('\n'.join(data), errors='surrogate_or_strict'))
+
+ def _render_settings(self, config):
+
+ text = []
+ for setting in sorted(config):
+ if isinstance(config[setting], Setting):
+ if config[setting].origin == 'default':
color = 'green'
+ elif config[setting].origin == 'REQUIRED':
+ color = 'red'
else:
color = 'yellow'
- msg = "%s(%s) = %s" % (setting, defaults[setting].origin, defaults[setting].value)
+ msg = "%s(%s) = %s" % (setting, config[setting].origin, config[setting].value)
else:
color = 'green'
- msg = "%s(%s) = %s" % (setting, 'default', defaults[setting].get('default'))
+ msg = "%s(%s) = %s" % (setting, 'default', config[setting].get('default'))
if not context.CLIARGS['only_changed'] or color == 'yellow':
text.append(stringc(msg, color))
+ return text
+
+ def _get_global_configs(self):
+ config = self.config.get_configuration_definitions(ignore_private=True).copy()
+ for setting in self.config.data.get_settings():
+ if setting.name in config:
+ config[setting.name] = setting
+
+ return self._render_settings(config)
+
+ def _get_plugin_configs(self, ptype, plugins):
+
+ # prep loading
+ loader = getattr(plugin_loader, '%s_loader' % ptype)
+
+ # accumulators
+ text = []
+ config_entries = {}
+
+ # build list
+ if plugins:
+ plugin_cs = []
+ for plugin in plugins:
+ p = loader.get(plugin, class_only=True)
+ if p is None:
+ display.warning("Skipping %s as we could not find matching plugin" % plugin)
+ else:
+ plugin_cs.append(loader.get(plugin, class_only=True))
+ else:
+ plugin_cs = loader.all(class_only=True)
+
+ for plugin in plugin_cs:
+ # in case of deprecation they diverge
+ finalname = name = plugin._load_name
+ if name.startswith('_'):
+ if os.path.islink(plugin._original_path):
+ # skip alias
+ continue
+ # deprecated, but use 'nice name'
+ finalname = name.replace('_', '', 1) + ' (DEPRECATED)'
+
+ # default entries per plugin
+ config_entries[finalname] = self.config.get_configuration_definitions(ptype, name)
+
+ try:
+ # populate config entries by loading plugin
+ dump = loader.get(name, class_only=True)
+ except Exception as e:
+ display.warning('Skipping "%s" %s plugin, as we cannot load plugin to check config due to: %s' % (name, ptype, to_native(e)))
+ continue
+
+ # actually get the values
+ for setting in config_entries[finalname].keys():
+ try:
+ v, o = C.config.get_config_value_and_origin(setting, plugin_type=ptype, plugin_name=name)
+ except AnsibleError as e:
+ if to_text(e).startswith('No setting was provided for required configuration'):
+ v = None
+ o = 'REQUIRED'
+ else:
+ raise e
+ config_entries[finalname][setting] = Setting(setting, v, o, None)
+
+ # pretty please!
+ results = self._render_settings(config_entries[finalname])
+ if results:
+ # avoid header for empty lists (only changed!)
+ text.append('\n%s:\n%s' % (finalname, '_' * len(finalname)))
+ text.extend(results)
+ return text
+
+ def execute_dump(self):
+ '''
+ Shows the current settings, merges ansible.cfg if specified
+ '''
+ if context.CLIARGS['type'] == 'base':
+ # deal with base
+ text = self._get_global_configs()
+ elif context.CLIARGS['type'] == 'all':
+ # deal with base
+ text = self._get_global_configs()
+ # deal with plugins
+ for ptype in C.CONFIGURABLE_PLUGINS:
+ text.append('\n%s:\n%s' % (ptype.upper(), '=' * len(ptype)))
+ text.extend(self._get_plugin_configs(ptype, context.CLIARGS['args']))
+ else:
+ # deal with plugins
+ text = self._get_plugin_configs(context.CLIARGS['type'], context.CLIARGS['args'])
+
self.pager(to_text('\n'.join(text), errors='surrogate_or_strict'))
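The new `ansible-config init` walks the same setting definitions used by list/dump and renders a ready-to-edit skeleton; with --format ini each setting's last ini entry is grouped under its [section], and --disabled prefixes the keys with ';'. A rough sketch of just that grouping step over a hand-written definitions dict (the two sample settings and settings_to_ini are made up for illustration):

```python
sample_defs = {
    "DEFAULT_FORKS": {
        "description": "Maximum number of forks Ansible will use.",
        "type": "integer",
        "default": 5,
        "ini": [{"section": "defaults", "key": "forks"}],
    },
    "DEFAULT_TIMEOUT": {
        "description": "Connection timeout in seconds.",
        "type": "integer",
        "default": 10,
        "ini": [{"section": "defaults", "key": "timeout"}],
    },
}


def settings_to_ini(defs, commented=True):
    """Group settings by their ini [section] and emit commented 'key=default' lines."""
    sections = {}
    prefix = ";" if commented else ""          # commented=True mirrors --disabled
    for name in sorted(defs):
        opt = defs[name]
        if not opt.get("ini"):
            continue
        entry = opt["ini"][-1]                 # the last ini entry wins
        block = "# (%s) %s\n%s%s=%s" % (opt.get("type", "string"), opt["description"],
                                        prefix, entry["key"], opt.get("default", ""))
        sections.setdefault(entry["section"], []).append(block)
    return sections


for section, blocks in settings_to_ini(sample_defs).items():
    print("[%s]" % section)
    print("\n".join(blocks) + "\n")
```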
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index 047f6d65..2ed1cbbf 100644
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -26,6 +26,7 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import Container, Sequence
from ansible.module_utils.common.json import AnsibleJSONEncoder
+from ansible.module_utils.common.yaml import yaml_dump
from ansible.module_utils.compat import importlib
from ansible.module_utils.six import iteritems, string_types
from ansible.parsing.plugin_docs import read_docstub
@@ -48,12 +49,14 @@ display = Display()
TARGET_OPTIONS = C.DOCUMENTABLE_PLUGINS + ('role', 'keyword',)
PB_OBJECTS = ['Play', 'Role', 'Block', 'Task']
PB_LOADED = {}
+SNIPPETS = ['inventory', 'lookup', 'module']
def jdump(text):
try:
display.display(json.dumps(text, cls=AnsibleJSONEncoder, sort_keys=True, indent=4))
except TypeError as e:
+ display.vvv(traceback.format_exc())
raise AnsibleError('We could not convert all the documentation into JSON as there was a conversion issue: %s' % to_native(e))
@@ -334,7 +337,6 @@ class DocCLI(CLI, RoleMixin):
# default ignore list for detailed views
IGNORE = ('module', 'docuri', 'version_added', 'short_description', 'now_date', 'plainexamples', 'returndocs', 'collection')
- JSON_IGNORE = ('attributes',)
# Warning: If you add more elements here, you also need to add it to the docsite build (in the
# ansible-community/antsibull repo)
@@ -348,10 +350,10 @@ class DocCLI(CLI, RoleMixin):
_RULER = re.compile(r"\bHORIZONTALLINE\b")
# rst specific
- _REFTAG = re.compile(r":ref:")
- _TERM = re.compile(r":term:")
- _NOTES = re.compile(r".. note:")
- _SEEALSO = re.compile(r"^\s*.. seealso:.*$", re.MULTILINE)
+ _RST_NOTE = re.compile(r".. note::")
+ _RST_SEEALSO = re.compile(r".. seealso::")
+ _RST_ROLES = re.compile(r":\w+?:`")
+ _RST_DIRECTIVES = re.compile(r".. \w+?::")
def __init__(self, args):
@@ -361,19 +363,21 @@ class DocCLI(CLI, RoleMixin):
@classmethod
def tty_ify(cls, text):
+ # general formatting
t = cls._ITALIC.sub(r"`\1'", text) # I(word) => `word'
t = cls._BOLD.sub(r"*\1*", t) # B(word) => *word*
t = cls._MODULE.sub("[" + r"\1" + "]", t) # M(word) => [word]
t = cls._URL.sub(r"\1", t) # U(word) => word
t = cls._LINK.sub(r"\1 <\2>", t) # L(word, url) => word <url>
- t = cls._REF.sub(r"\1", t) # R(word, sphinx-ref) => word
- t = cls._CONST.sub("`" + r"\1" + "'", t) # C(word) => `word'
+ t = cls._REF.sub(r"\1", t) # R(word, sphinx-ref) => word
+ t = cls._CONST.sub(r"`\1'", t) # C(word) => `word'
t = cls._RULER.sub("\n{0}\n".format("-" * 13), t) # HORIZONTALLINE => -------
- t = cls._REFTAG.sub(r"", t) # remove rst :ref:
- t = cls._TERM.sub(r"", t) # remove rst :term:
- t = cls._NOTES.sub(r" Note:", t) # nicer note
- t = cls._SEEALSO.sub(r"", t) # remove seealso
+ # remove rst
+ t = cls._RST_SEEALSO.sub(r"See website for:", t) # seealso is special and needs to break
+ t = cls._RST_NOTE.sub(r"Note:", t) # .. note:: to note:
+ t = cls._RST_ROLES.sub(r"website for `", t) # remove :ref: and other tags
+ t = cls._RST_DIRECTIVES.sub(r"", t) # remove .. stuff:: in general
return t
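The per-tag regexes (:ref:, :term:, one-off note/seealso handling) are replaced by two generic patterns: _RST_ROLES strips any :role:` marker up to its opening backtick and _RST_DIRECTIVES drops any `.. directive::`. A quick standalone check of the same patterns and substitution order on an invented docstring fragment:

```python
import re

# same patterns and substitution order as tty_ify() above
_RST_NOTE = re.compile(r".. note::")
_RST_SEEALSO = re.compile(r".. seealso::")
_RST_ROLES = re.compile(r":\w+?:`")
_RST_DIRECTIVES = re.compile(r".. \w+?::")

sample = ".. note:: See :ref:`playbooks_intro` for details. .. versionadded:: 2.12"
t = _RST_SEEALSO.sub("See website for:", sample)
t = _RST_NOTE.sub("Note:", t)
t = _RST_ROLES.sub("website for `", t)
t = _RST_DIRECTIVES.sub("", t)
print(t)
# Note: See website for `playbooks_intro` for details.  2.12
```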
@@ -388,32 +392,42 @@ class DocCLI(CLI, RoleMixin):
opt_help.add_module_options(self.parser)
opt_help.add_basedir_options(self.parser)
+ # targets
self.parser.add_argument('args', nargs='*', help='Plugin', metavar='plugin')
self.parser.add_argument("-t", "--type", action="store", default='module', dest='type',
help='Choose which plugin type (defaults to "module"). '
'Available plugin types are : {0}'.format(TARGET_OPTIONS),
choices=TARGET_OPTIONS)
+
+ # formatting
self.parser.add_argument("-j", "--json", action="store_true", default=False, dest='json_format',
help='Change output into json format.')
+ # TODO: warn if not used with -t roles
# role-specific options
self.parser.add_argument("-r", "--roles-path", dest='roles_path', default=C.DEFAULT_ROLES_PATH,
type=opt_help.unfrack_path(pathsep=True),
action=opt_help.PrependListAction,
help='The path to the directory containing your roles.')
+ # modifiers
exclusive = self.parser.add_mutually_exclusive_group()
+ # TODO: warn if not used with -t roles
+ exclusive.add_argument("-e", "--entry-point", dest="entry_point",
+ help="Select the entry point for role(s).")
+
+ # TODO: warn with --json as it is incompatible
+ exclusive.add_argument("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
+ help='Show playbook snippet for these plugin types: %s' % ', '.join(SNIPPETS))
+
+ # TODO: warn when arg/plugin is passed
exclusive.add_argument("-F", "--list_files", action="store_true", default=False, dest="list_files",
help='Show plugin names and their source files without summaries (implies --list). %s' % coll_filter)
exclusive.add_argument("-l", "--list", action="store_true", default=False, dest='list_dir',
help='List available plugins. %s' % coll_filter)
- exclusive.add_argument("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
- help='Show playbook snippet for specified plugin(s)')
exclusive.add_argument("--metadata-dump", action="store_true", default=False, dest='dump',
help='**For internal testing only** Dump json metadata for all plugins.')
- exclusive.add_argument("-e", "--entry-point", dest="entry_point",
- help="Select the entry point for role(s).")
def post_process_args(self, options):
options = super(DocCLI, self).post_process_args(options)
@@ -507,12 +521,20 @@ class DocCLI(CLI, RoleMixin):
data = {}
descs = DocCLI._list_keywords()
- for keyword in keys:
- if keyword.startswith('with_'):
+ for key in keys:
+
+ if key.startswith('with_'):
+ # simplify loops, don't want to handle every with_<lookup> combo
keyword = 'loop'
+ elif key == 'async':
+ # because async became reserved in python we had to rename internally
+ keyword = 'async_val'
+ else:
+ keyword = key
+
try:
# if no desc, typeerror raised ends this block
- kdata = {'description': descs[keyword]}
+ kdata = {'description': descs[key]}
# get playbook objects for keyword and use first to get keyword attributes
kdata['applies_to'] = []
@@ -551,10 +573,12 @@ class DocCLI(CLI, RoleMixin):
if kdata[k] is None:
del kdata[k]
- data[keyword] = kdata
+ data[key] = kdata
- except KeyError as e:
- display.warning("Skipping Invalid keyword '%s' specified: %s" % (keyword, to_native(e)))
+ except (AttributeError, KeyError) as e:
+ display.warning("Skipping Invalid keyword '%s' specified: %s" % (key, to_text(e)))
+ if display.verbosity >= 3:
+ display.verbose(traceback.format_exc())
return data
@@ -623,7 +647,7 @@ class DocCLI(CLI, RoleMixin):
basedir = context.CLIARGS['basedir']
plugin_type = context.CLIARGS['type']
- do_json = context.CLIARGS['json_format']
+ do_json = context.CLIARGS['json_format'] or context.CLIARGS['dump']
roles_path = context.CLIARGS['roles_path']
listing = context.CLIARGS['list_files'] or context.CLIARGS['list_dir'] or context.CLIARGS['dump']
docs = {}
@@ -646,7 +670,10 @@ class DocCLI(CLI, RoleMixin):
raise AnsibleOptionsError("Unknown or undocumentable plugin type: %s" % plugin_type)
elif plugin_type == 'keyword':
- if listing:
+ if context.CLIARGS['dump']:
+ keys = DocCLI._list_keywords()
+ docs = DocCLI._get_keywords_docs(keys.keys())
+ elif listing:
docs = DocCLI._list_keywords()
else:
docs = DocCLI._get_keywords_docs(context.CLIARGS['args'])
@@ -687,21 +714,29 @@ class DocCLI(CLI, RoleMixin):
docs = self._get_plugins_docs(plugin_type, loader)
if do_json:
- for entry in docs.keys():
- for forbid in DocCLI.JSON_IGNORE:
- try:
- del docs[entry]['doc'][forbid]
- except (KeyError, TypeError):
- pass
jdump(docs)
else:
text = []
if plugin_type in C.DOCUMENTABLE_PLUGINS:
if listing and docs:
self.display_plugin_list(docs)
+ elif context.CLIARGS['show_snippet']:
+ if plugin_type not in SNIPPETS:
+ raise AnsibleError('Snippets are only available for the following plugin'
+ ' types: %s' % ', '.join(SNIPPETS))
+
+ for plugin, doc_data in docs.items():
+ try:
+ textret = DocCLI.format_snippet(plugin, plugin_type, doc_data['doc'])
+ except ValueError as e:
+ display.warning("Unable to construct a snippet for"
+ " '{0}': {1}".format(plugin, to_text(e)))
+ else:
+ text.append(textret)
else:
# Some changes to how plain text docs are formatted
for plugin, doc_data in docs.items():
+
textret = DocCLI.format_plugin_doc(plugin, plugin_type,
doc_data['doc'], doc_data['examples'],
doc_data['return'], doc_data['metadata'])
@@ -709,11 +744,13 @@ class DocCLI(CLI, RoleMixin):
text.append(textret)
else:
display.warning("No valid documentation was retrieved from '%s'" % plugin)
+
elif plugin_type == 'role':
if context.CLIARGS['list_dir'] and docs:
self._display_available_roles(docs)
elif docs:
self._display_role_doc(docs)
+
elif docs:
text = DocCLI._dump_yaml(docs, '')
@@ -779,7 +816,6 @@ class DocCLI(CLI, RoleMixin):
result = loader.find_plugin_with_context(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
if not result.resolved:
raise PluginNotFound('%s was not found in %s' % (plugin, search_paths))
- plugin_name = result.plugin_resolved_name
filename = result.plugin_resolved_path
collection_name = result.plugin_resolved_collection
@@ -809,6 +845,26 @@ class DocCLI(CLI, RoleMixin):
return {'doc': doc, 'examples': plainexamples, 'return': returndocs, 'metadata': metadata}
@staticmethod
+ def format_snippet(plugin, plugin_type, doc):
+ ''' return heavily commented plugin use to insert into play '''
+ if plugin_type == 'inventory' and doc.get('options', {}).get('plugin'):
+ # these do not take a yaml config that we can write a snippet for
+ raise ValueError('The {0} inventory plugin does not take YAML type config source'
+ ' that can be used with the "auto" plugin so a snippet cannot be'
+ ' created.'.format(plugin))
+
+ text = []
+
+ if plugin_type == 'lookup':
+ text = _do_lookup_snippet(doc)
+
+ elif 'options' in doc:
+ text = _do_yaml_snippet(doc)
+
+ text.append('')
+ return "\n".join(text)
+
+ @staticmethod
def format_plugin_doc(plugin, plugin_type, doc, plainexamples, returndocs, metadata):
collection_name = doc['collection']
@@ -823,13 +879,11 @@ class DocCLI(CLI, RoleMixin):
doc['returndocs'] = returndocs
doc['metadata'] = metadata
- if context.CLIARGS['show_snippet'] and plugin_type == 'module':
- text = DocCLI.get_snippet_text(doc)
- else:
- try:
- text = DocCLI.get_man_text(doc, collection_name, plugin_type)
- except Exception as e:
- raise AnsibleError("Unable to retrieve documentation from '%s' due to: %s" % (plugin, to_native(e)))
+ try:
+ text = DocCLI.get_man_text(doc, collection_name, plugin_type)
+ except Exception as e:
+ display.vvv(traceback.format_exc())
+ raise AnsibleError("Unable to retrieve documentation from '%s' due to: %s" % (plugin, to_native(e)), orig_exc=e)
return text
@@ -923,6 +977,7 @@ class DocCLI(CLI, RoleMixin):
pfiles[plugin] = filename
except Exception as e:
+ display.vvv(traceback.format_exc())
raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)), orig_exc=e)
return pfiles
@@ -940,39 +995,19 @@ class DocCLI(CLI, RoleMixin):
return os.pathsep.join(ret)
@staticmethod
- def get_snippet_text(doc):
-
- text = []
- desc = DocCLI.tty_ify(doc['short_description'])
- text.append("- name: %s" % (desc))
- text.append(" %s:" % (doc['module']))
- pad = 31
- subdent = " " * pad
- limit = display.columns - pad
-
- for o in sorted(doc['options'].keys()):
- opt = doc['options'][o]
- if isinstance(opt['description'], string_types):
- desc = DocCLI.tty_ify(opt['description'])
- else:
- desc = DocCLI.tty_ify(" ".join(opt['description']))
-
- required = opt.get('required', False)
- if not isinstance(required, bool):
- raise("Incorrect value for 'Required', a boolean is needed.: %s" % required)
- if required:
- desc = "(required) %s" % desc
- o = '%s:' % o
- text.append(" %-20s # %s" % (o, textwrap.fill(desc, limit, subsequent_indent=subdent)))
- text.append('')
-
- return "\n".join(text)
+ def _dump_yaml(struct, indent):
+ return DocCLI.tty_ify('\n'.join([indent + line for line in yaml.dump(struct, default_flow_style=False, Dumper=AnsibleDumper).split('\n')]))
@staticmethod
- def _dump_yaml(struct, indent):
- return DocCLI.tty_ify('\n'.join([indent + line for line in
- yaml.dump(struct, default_flow_style=False,
- Dumper=AnsibleDumper).split('\n')]))
+ def _format_version_added(version_added, version_added_collection=None):
+ if version_added_collection == 'ansible.builtin':
+ version_added_collection = 'ansible-core'
+ # In ansible-core, version_added can be 'historical'
+ if version_added == 'historical':
+ return 'historical'
+ if version_added_collection:
+ version_added = '%s of %s' % (version_added, version_added_collection)
+ return 'version %s' % (version_added, )
@staticmethod
def add_fields(text, fields, limit, opt_indent, return_values=False, base_indent=''):
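_format_version_added() above normalizes a version_added/version_added_collection pair into the "added in:" text used later by add_fields() and get_man_text(); 'ansible.builtin' is rewritten to 'ansible-core' and the literal 'historical' passes through unchanged. Restated standalone with a few expected outputs:

```python
def format_version_added(version_added, version_added_collection=None):
    # standalone restatement of DocCLI._format_version_added() for illustration
    if version_added_collection == 'ansible.builtin':
        version_added_collection = 'ansible-core'
    if version_added == 'historical':
        return 'historical'
    if version_added_collection:
        version_added = '%s of %s' % (version_added, version_added_collection)
    return 'version %s' % (version_added,)


assert format_version_added('2.12', 'ansible.builtin') == 'version 2.12 of ansible-core'
assert format_version_added('1.3.0', 'community.general') == 'version 1.3.0 of community.general'
assert format_version_added('historical') == 'historical'
```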
@@ -1037,12 +1072,20 @@ class DocCLI(CLI, RoleMixin):
if ignore in item:
del item[ignore]
+ if 'cli' in opt and opt['cli']:
+ conf['cli'] = []
+ for cli in opt['cli']:
+ if 'option' not in cli:
+ conf['cli'].append({'name': cli['name'], 'option': '--%s' % cli['name'].replace('_', '-')})
+ else:
+ conf['cli'].append(cli)
+ del opt['cli']
+
if conf:
text.append(DocCLI._dump_yaml({'set_via': conf}, opt_indent))
- # Remove empty version_added_collection
- if opt.get('version_added_collection') == '':
- opt.pop('version_added_collection')
+ version_added = opt.pop('version_added', None)
+ version_added_collection = opt.pop('version_added_collection', None)
for k in sorted(opt):
if k.startswith('_'):
@@ -1057,6 +1100,9 @@ class DocCLI(CLI, RoleMixin):
else:
text.append(DocCLI._dump_yaml({k: opt[k]}, opt_indent))
+ if version_added:
+ text.append("%sadded in: %s\n" % (opt_indent, DocCLI._format_version_added(version_added, version_added_collection)))
+
for subkey, subdata in suboptions:
text.append('')
text.append("%s%s:\n" % (opt_indent, subkey.upper()))
@@ -1104,6 +1150,11 @@ class DocCLI(CLI, RoleMixin):
DocCLI.add_fields(text, doc.pop('options'), limit, opt_indent)
text.append('')
+ if doc.get('attributes'):
+ text.append("ATTRIBUTES:\n")
+ text.append(DocCLI._dump_yaml(doc.pop('attributes'), opt_indent))
+ text.append('')
+
# generic elements we will handle identically
for k in ('author',):
if k not in doc:
@@ -1145,6 +1196,11 @@ class DocCLI(CLI, RoleMixin):
text.append("%s\n" % textwrap.fill(DocCLI.tty_ify(desc), limit, initial_indent=opt_indent,
subsequent_indent=opt_indent))
+ if 'version_added' in doc:
+ version_added = doc.pop('version_added')
+ version_added_collection = doc.pop('version_added_collection', None)
+ text.append("ADDED IN: %s\n" % DocCLI._format_version_added(version_added, version_added_collection))
+
if doc.get('deprecated', False):
text.append("DEPRECATED: \n")
if isinstance(doc['deprecated'], dict):
@@ -1168,6 +1224,11 @@ class DocCLI(CLI, RoleMixin):
DocCLI.add_fields(text, doc.pop('options'), limit, opt_indent)
text.append('')
+ if doc.get('attributes', False):
+ text.append("ATTRIBUTES:\n")
+ text.append(DocCLI._dump_yaml(doc.pop('attributes'), opt_indent))
+ text.append('')
+
if doc.get('notes', False):
text.append("NOTES:")
for note in doc['notes']:
@@ -1230,7 +1291,7 @@ class DocCLI(CLI, RoleMixin):
if isinstance(doc['plainexamples'], string_types):
text.append(doc.pop('plainexamples').strip())
else:
- text.append(yaml.dump(doc.pop('plainexamples'), indent=2, default_flow_style=False))
+ text.append(yaml_dump(doc.pop('plainexamples'), indent=2, default_flow_style=False))
text.append('')
text.append('')
@@ -1239,3 +1300,86 @@ class DocCLI(CLI, RoleMixin):
DocCLI.add_fields(text, doc.pop('returndocs'), limit, opt_indent, return_values=True)
return "\n".join(text)
+
+
+def _do_yaml_snippet(doc):
+ text = []
+
+ mdesc = DocCLI.tty_ify(doc['short_description'])
+ module = doc.get('module')
+
+ if module:
+ # this is actually a usable task!
+ text.append("- name: %s" % (mdesc))
+ text.append(" %s:" % (module))
+ else:
+ # just a comment, hopefully useful yaml file
+ text.append("# %s:" % doc.get('plugin', doc.get('name')))
+
+ pad = 29
+ subdent = '# '.rjust(pad + 2)
+ limit = display.columns - pad
+
+ for o in sorted(doc['options'].keys()):
+ opt = doc['options'][o]
+ if isinstance(opt['description'], string_types):
+ desc = DocCLI.tty_ify(opt['description'])
+ else:
+ desc = DocCLI.tty_ify(" ".join(opt['description']))
+
+ required = opt.get('required', False)
+ if not isinstance(required, bool):
+ raise ValueError("Incorrect value for 'Required', a boolean is needed: %s" % required)
+
+ o = '%s:' % o
+ if module:
+ if required:
+ desc = "(required) %s" % desc
+ text.append(" %-20s # %s" % (o, textwrap.fill(desc, limit, subsequent_indent=subdent)))
+ else:
+ if required:
+ default = '(required)'
+ else:
+ default = opt.get('default', 'None')
+
+ text.append("%s %-9s # %s" % (o, default, textwrap.fill(desc, limit, subsequent_indent=subdent, max_lines=3)))
+
+ return text
+
+
+def _do_lookup_snippet(doc):
+ text = []
+ snippet = "lookup('%s', " % doc.get('plugin', doc.get('name'))
+ comment = []
+
+ for o in sorted(doc['options'].keys()):
+
+ opt = doc['options'][o]
+ comment.append('# %s(%s): %s' % (o, opt.get('type', 'string'), opt.get('description', '')))
+ if o in ('_terms', '_raw', '_list'):
+ # these are 'list of arguments'
+ snippet += '< %s >' % (o)
+ continue
+
+ required = opt.get('required', False)
+ if not isinstance(required, bool):
+ raise ValueError("Incorrect value for 'Required', a boolean is needed: %s" % required)
+
+ if required:
+ default = '<REQUIRED>'
+ else:
+ default = opt.get('default', 'None')
+
+ if opt.get('type') in ('string', 'str'):
+ snippet += ", %s='%s'" % (o, default)
+ else:
+ snippet += ', %s=%s' % (o, default)
+
+ snippet += ")"
+
+ if comment:
+ text.extend(comment)
+ text.append('')
+ text.append(snippet)
+
+ return text
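For lookups, -s/--snippet now produces a one-line lookup(...) call with every documented option pre-filled, preceded by a comment per option. Roughly what _do_lookup_snippet() returns for a small invented doc (the plugin name and options below are made up; assumes ansible-core 2.12 is installed so the module-level helper added above is importable):

```python
from ansible.cli.doc import _do_lookup_snippet

doc = {
    "plugin": "my.collection.ini_lookup",       # hypothetical lookup plugin
    "options": {
        "_terms": {"description": "keys to look up", "type": "list"},
        "file": {"description": "ini file to read", "type": "string", "default": "ansible.ini"},
        "re": {"description": "treat keys as regexps", "type": "boolean", "default": False},
    },
}

print("\n".join(_do_lookup_snippet(doc)))
# # _terms(list): keys to look up
# # file(string): ini file to read
# # re(boolean): treat keys as regexps
#
# lookup('my.collection.ini_lookup', < _terms >, file='ansible.ini', re=False)
```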
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index 22931497..cc9a813e 100644
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -12,7 +12,6 @@ import shutil
import sys
import textwrap
import time
-import yaml
from yaml.error import YAMLError
@@ -42,6 +41,7 @@ from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoTokenSentinel
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible.module_utils.common.collections import is_iterable
+from ansible.module_utils.common.yaml import yaml_dump, yaml_load
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.parsing.dataloader import DataLoader
@@ -55,6 +55,17 @@ from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
urlparse = six.moves.urllib.parse.urlparse
+SERVER_DEF = [
+ ('url', True),
+ ('username', False),
+ ('password', False),
+ ('token', False),
+ ('auth_url', False),
+ ('v3', False),
+ ('validate_certs', False),
+ ('client_id', False),
+]
+
def with_collection_artifacts_manager(wrapped_method):
"""Inject an artifacts manager if not passed explicitly.
@@ -466,11 +477,9 @@ class GalaxyCLI(CLI):
],
'required': required,
}
- server_def = [('url', True), ('username', False), ('password', False), ('token', False),
- ('auth_url', False), ('v3', False)]
- validate_certs = not context.CLIARGS['ignore_certs']
- galaxy_options = {'validate_certs': validate_certs}
+ validate_certs_fallback = not context.CLIARGS['ignore_certs']
+ galaxy_options = {}
for optional_key in ['clear_response_cache', 'no_cache']:
if optional_key in context.CLIARGS:
galaxy_options[optional_key] = context.CLIARGS[optional_key]
@@ -482,18 +491,23 @@ class GalaxyCLI(CLI):
for server_priority, server_key in enumerate(server_list, start=1):
# Config definitions are looked up dynamically based on the C.GALAXY_SERVER_LIST entry. We look up the
# section [galaxy_server.<server>] for the values url, username, password, and token.
- config_dict = dict((k, server_config_def(server_key, k, req)) for k, req in server_def)
- defs = AnsibleLoader(yaml.safe_dump(config_dict)).get_single_data()
+ config_dict = dict((k, server_config_def(server_key, k, req)) for k, req in SERVER_DEF)
+ defs = AnsibleLoader(yaml_dump(config_dict)).get_single_data()
C.config.initialize_plugin_configuration_definitions('galaxy_server', server_key, defs)
server_options = C.config.get_plugin_options('galaxy_server', server_key)
# auth_url is used to create the token, but not directly by GalaxyAPI, so
# it doesn't need to be passed as kwarg to GalaxyApi
auth_url = server_options.pop('auth_url', None)
+ client_id = server_options.pop('client_id', None)
token_val = server_options['token'] or NoTokenSentinel
username = server_options['username']
available_api_versions = None
v3 = server_options.pop('v3', None)
+ validate_certs = server_options['validate_certs']
+ if validate_certs is None:
+ validate_certs = validate_certs_fallback
+ server_options['validate_certs'] = validate_certs
if v3:
# This allows a user to explicitly indicate the server uses the /v3 API
# This was added for testing against pulp_ansible and I'm not sure it has
@@ -512,7 +526,8 @@ class GalaxyCLI(CLI):
if auth_url:
server_options['token'] = KeycloakToken(access_token=token_val,
auth_url=auth_url,
- validate_certs=validate_certs)
+ validate_certs=validate_certs,
+ client_id=client_id)
else:
# The galaxy v1 / github / django / 'Token'
server_options['token'] = GalaxyToken(token=token_val)
@@ -613,7 +628,7 @@ class GalaxyCLI(CLI):
display.vvv("Reading requirement file at '%s'" % requirements_file)
with open(b_requirements_file, 'rb') as req_obj:
try:
- file_requirements = yaml.safe_load(req_obj)
+ file_requirements = yaml_load(req_obj)
except YAMLError as err:
raise AnsibleError(
"Failed to parse the requirements yml at '%s' with the following error:\n%s"
@@ -638,7 +653,7 @@ class GalaxyCLI(CLI):
with open(b_include_path, 'rb') as f_include:
try:
return [GalaxyRole(self.galaxy, self.api, **r) for r in
- (RoleRequirement.role_yaml_parse(i) for i in yaml.safe_load(f_include))]
+ (RoleRequirement.role_yaml_parse(i) for i in yaml_load(f_include))]
except Exception as e:
raise AnsibleError("Unable to load data from include requirements file: %s %s"
% (to_native(requirements_file), to_native(e)))
@@ -1503,7 +1518,7 @@ class GalaxyCLI(CLI):
if output_format == 'json':
display.display(json.dumps(collections_in_paths))
elif output_format == 'yaml':
- display.display(yaml.safe_dump(collections_in_paths))
+ display.display(yaml_dump(collections_in_paths))
return 0
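Each [galaxy_server.<name>] block now also understands validate_certs and client_id; a per-server validate_certs wins, and only when it is left unset does the global --ignore-certs flag decide. A minimal sketch of that fallback with plain dicts (resolve_validate_certs is a hypothetical helper, not the GalaxyCLI API):

```python
def resolve_validate_certs(server_options, ignore_certs_cli=False):
    """Per-server setting wins; otherwise fall back to the global --ignore-certs flag."""
    validate_certs = server_options.get('validate_certs')
    if validate_certs is None:
        validate_certs = not ignore_certs_cli
    return validate_certs


assert resolve_validate_certs({'validate_certs': False}, ignore_certs_cli=False) is False
assert resolve_validate_certs({'validate_certs': None}, ignore_certs_cli=True) is False
assert resolve_validate_certs({}, ignore_certs_cli=False) is True
```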
diff --git a/lib/ansible/cli/inventory.py b/lib/ansible/cli/inventory.py
index dc461577..3fbea734 100644
--- a/lib/ansible/cli/inventory.py
+++ b/lib/ansible/cli/inventory.py
@@ -179,7 +179,15 @@ class InventoryCLI(CLI):
raise AnsibleError(
'The python "toml" library is required when using the TOML output format'
)
- results = toml_dumps(stuff)
+ try:
+ results = toml_dumps(stuff)
+ except KeyError as e:
+ raise AnsibleError(
+ 'The source inventory contains a non-string key (%s) which cannot be represented in TOML. '
+ 'The specified key will need to be converted to a string. Be aware that if your playbooks '
+ 'expect this key to be non-string, your playbooks will need to be modified to support this '
+ 'change.' % e.args[0]
+ )
else:
import json
from ansible.parsing.ajson import AnsibleJSONEncoder
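TOML only allows string keys, so an inventory that ends up with, say, an integer variable key used to surface as a bare traceback from toml_dumps; the change above wraps it in a readable AnsibleError. A quick reproduction of the underlying failure and the workaround the message suggests (assumes the third-party toml package; the exact exception type may vary by toml version):

```python
import toml

data = {"vars": {8080: "http-alt"}}        # non-string key, e.g. a port number

try:
    toml.dumps(data)
except (KeyError, TypeError) as e:         # toml rejects the non-string key
    print("cannot represent key %r in TOML" % (e.args[0],))

# converting the offending key to a string makes the same structure dumpable
print(toml.dumps({"vars": {str(k): v for k, v in data["vars"].items()}}))
```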
diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index 0ebd431b..c92eef76 100644
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -47,7 +47,6 @@ class PullCLI(CLI):
1: 'File does not exist',
2: 'File is not readable',
}
- SUPPORTED_REPO_MODULES = ['git']
ARGUMENTS = {'playbook.yml': 'The name of one the YAML format files to run as an Ansible playbook.'
'This can be a relative path within the checkout. By default, Ansible will'
"look for a playbook based on the host's fully-qualified domain name,"
@@ -141,8 +140,8 @@ class PullCLI(CLI):
if not options.url:
raise AnsibleOptionsError("URL for repository not specified, use -h for help")
- if options.module_name not in self.SUPPORTED_REPO_MODULES:
- raise AnsibleOptionsError("Unsupported repo module %s, choices are %s" % (options.module_name, ','.join(self.SUPPORTED_REPO_MODULES)))
+ if options.module_name not in self.REPO_CHOICES:
+ raise AnsibleOptionsError("Unsupported repo module %s, choices are %s" % (options.module_name, ','.join(self.REPO_CHOICES)))
display.verbosity = options.verbosity
self.validate_conflicts(options)
diff --git a/lib/ansible/cli/scripts/ansible_cli_stub.py b/lib/ansible/cli/scripts/ansible_cli_stub.py
index f82c47b7..622152c4 100755
--- a/lib/ansible/cli/scripts/ansible_cli_stub.py
+++ b/lib/ansible/cli/scripts/ansible_cli_stub.py
@@ -22,8 +22,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-__requires__ = ['ansible_core']
-
import errno
import os
@@ -31,19 +29,23 @@ import shutil
import sys
import traceback
+# Used for determining if the system is running a new enough python version
+# and should only restrict on our documented minimum versions
+_PY38_MIN = sys.version_info[:2] >= (3, 8)
+if not _PY38_MIN:
+ raise SystemExit(
+ 'ERROR: Ansible requires Python 3.8 or newer on the controller. '
+ 'Current version: %s' % ''.join(sys.version.splitlines())
+ )
+
+
+# These lines appear after the PY38 check, to ensure the "friendly" error happens before
+# any invalid syntax appears in other files that may get imported
from ansible import context
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.module_utils._text import to_text
-
-# Used for determining if the system is running a new enough python version
-# and should only restrict on our documented minimum versions
-_PY38_MIN = sys.version_info[:2] >= (3, 8)
-_PY3_MIN = sys.version_info[:2] >= (3, 5)
-_PY2_MIN = (2, 6) <= sys.version_info[:2] < (3,)
-_PY_MIN = _PY3_MIN or _PY2_MIN
-if not _PY_MIN:
- raise SystemExit('ERROR: Ansible requires a minimum of Python2 version 2.6 or Python3 version 3.5. Current version: %s' % ''.join(sys.version.splitlines()))
+from pathlib import Path
class LastResort(object):
@@ -69,19 +71,10 @@ if __name__ == '__main__':
initialize_locale()
cli = None
- me = os.path.basename(sys.argv[0])
+ me = Path(sys.argv[0]).name
try:
display = Display()
- if C.CONTROLLER_PYTHON_WARNING and not _PY38_MIN:
- display.deprecated(
- (
- 'Ansible will require Python 3.8 or newer on the controller starting with Ansible 2.12. '
- 'Current version: %s' % ''.join(sys.version.splitlines())
- ),
- version='2.12',
- collection_name='ansible.builtin',
- )
display.debug("starting run")
sub = None
@@ -113,16 +106,16 @@ if __name__ == '__main__':
else:
raise
- b_ansible_dir = os.path.expanduser(os.path.expandvars(b"~/.ansible"))
+ ansible_dir = Path("~/.ansible").expanduser()
try:
- os.mkdir(b_ansible_dir, 0o700)
+ ansible_dir.mkdir(mode=0o700)
except OSError as exc:
if exc.errno != errno.EEXIST:
- display.warning("Failed to create the directory '%s': %s"
- % (to_text(b_ansible_dir, errors='surrogate_or_replace'),
- to_text(exc, errors='surrogate_or_replace')))
+ display.warning(
+ "Failed to create the directory '%s': %s" % (ansible_dir, to_text(exc, errors='surrogate_or_replace'))
+ )
else:
- display.debug("Created the '%s' directory" % to_text(b_ansible_dir, errors='surrogate_or_replace'))
+ display.debug("Created the '%s' directory" % ansible_dir)
try:
args = [to_text(a, errors='surrogate_or_strict') for a in sys.argv]
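The interpreter check now runs before any ansible import, so a too-old Python prints the friendly SystemExit message instead of dying with a SyntaxError raised while importing modules that use 3.8-only syntax. The same guard pattern in isolation (mirrors the stub above; the final import only works where ansible is installed):

```python
import sys

# fail before importing anything that may contain Python 3.8-only syntax,
# so older interpreters see this message rather than a SyntaxError
if sys.version_info[:2] < (3, 8):
    raise SystemExit(
        'ERROR: Ansible requires Python 3.8 or newer on the controller. '
        'Current version: %s' % ''.join(sys.version.splitlines())
    )

from ansible import context  # noqa: E402  - safe to import only after the check
```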
diff --git a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
index 4cb09d57..31047d96 100755
--- a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
+++ b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py
@@ -4,7 +4,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-__requires__ = ['ansible_core']
import fcntl
@@ -125,6 +124,7 @@ class ConnectionProcess(object):
def run(self):
try:
+ log_messages = self.connection.get_option('persistent_log_messages')
while not self.connection._conn_closed:
signal.signal(signal.SIGALRM, self.connect_timeout)
signal.signal(signal.SIGTERM, self.handler)
@@ -138,7 +138,6 @@ class ConnectionProcess(object):
data = recv_data(s)
if not data:
break
- log_messages = self.connection.get_option('persistent_log_messages')
if log_messages:
display.display("jsonrpc request: %s" % data, log_only=True)