diff options
author | Lee Garrett <lgarrett@rocketjump.eu> | 2022-09-13 16:39:47 +0200 |
---|---|---|
committer | Lee Garrett <lgarrett@rocketjump.eu> | 2022-09-13 16:39:47 +0200 |
commit | cb54577c7e5e7ee437d3b9e6a0d28582abf22c1c (patch) | |
tree | 864d3f1e3215be5f9a424b114d6666784307006e | |
parent | a6b2333631a42d8d7d0b03417abbb729f5977088 (diff) | |
parent | dfc95dfc10415e8ba138e2c042c39632c9251abb (diff) | |
download | debian-ansible-core-cb54577c7e5e7ee437d3b9e6a0d28582abf22c1c.zip |
Update upstream source from tag 'upstream/2.13.4'
Update to upstream version '2.13.4'
with Debian dir b345c17f436d2d4e413507184ed4c441be7ea58d
75 files changed, 1155 insertions, 250 deletions
@@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: ansible-core -Version: 2.13.3 +Version: 2.13.4 Summary: Radically simple IT automation Home-page: https://ansible.com/ Author: Ansible, Inc. diff --git a/bin/ansible-connection b/bin/ansible-connection index f56e17cf..70cfad58 100755 --- a/bin/ansible-connection +++ b/bin/ansible-connection @@ -89,11 +89,11 @@ class ConnectionProcess(object): self.connection = None self._ansible_playbook_pid = ansible_playbook_pid - def start(self, variables): - try: - messages = list() - result = {} + def start(self, options): + messages = list() + result = {} + try: messages.append(('vvvv', 'control socket path is %s' % self.socket_path)) # If this is a relative path (~ gets expanded later) then plug the @@ -104,7 +104,7 @@ class ConnectionProcess(object): self.connection = connection_loader.get(self.play_context.connection, self.play_context, '/dev/null', task_uuid=self._task_uuid, ansible_playbook_pid=self._ansible_playbook_pid) try: - self.connection.set_options(var_options=variables) + self.connection.set_options(direct=options) except ConnectionError as exc: messages.append(('debug', to_text(exc))) raise ConnectionError('Unable to decode JSON from response set_options. 
See the debug log for more information.') @@ -244,11 +244,11 @@ def main(args=None): try: # read the play context data via stdin, which means depickling it - vars_data = read_stream(stdin) + opts_data = read_stream(stdin) init_data = read_stream(stdin) pc_data = pickle.loads(init_data, encoding='bytes') - variables = pickle.loads(vars_data, encoding='bytes') + options = pickle.loads(opts_data, encoding='bytes') play_context = PlayContext() play_context.deserialize(pc_data) @@ -286,7 +286,7 @@ def main(args=None): os.close(r) wfd = os.fdopen(w, 'w') process = ConnectionProcess(wfd, play_context, socket_path, original_path, task_uuid, ansible_playbook_pid) - process.start(variables) + process.start(options) except Exception: messages.append(('error', traceback.format_exc())) rc = 1 @@ -309,7 +309,7 @@ def main(args=None): messages.append(('vvvv', 'found existing local domain socket, using it!')) conn = Connection(socket_path) try: - conn.set_options(var_options=variables) + conn.set_options(direct=options) except ConnectionError as exc: messages.append(('debug', to_text(exc))) raise ConnectionError('Unable to decode JSON from response set_options. 
See the debug log for more information.') diff --git a/bin/ansible-galaxy b/bin/ansible-galaxy index 288318aa..5acaa6e4 100755 --- a/bin/ansible-galaxy +++ b/bin/ansible-galaxy @@ -487,7 +487,12 @@ class GalaxyCLI(CLI): else: install_parser.add_argument('-r', '--role-file', dest='requirements', help='A file containing a list of roles to be installed.') - if self._implicit_role and ('-r' in self._raw_args or '--role-file' in self._raw_args): + + r_re = re.compile(r'^(?<!-)-[a-zA-Z]*r[a-zA-Z]*') # -r, -fr + contains_r = bool([a for a in self._raw_args if r_re.match(a)]) + role_file_re = re.compile(r'--role-file($|=)') # --role-file foo, --role-file=foo + contains_role_file = bool([a for a in self._raw_args if role_file_re.match(a)]) + if self._implicit_role and (contains_r or contains_role_file): # Any collections in the requirements files will also be installed install_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING, help='The keyring used during collection signature verification') @@ -1315,7 +1320,16 @@ class GalaxyCLI(CLI): ignore_errors = context.CLIARGS['ignore_errors'] no_deps = context.CLIARGS['no_deps'] force_with_deps = context.CLIARGS['force_with_deps'] - disable_gpg_verify = context.CLIARGS['disable_gpg_verify'] + try: + disable_gpg_verify = context.CLIARGS['disable_gpg_verify'] + except KeyError: + if self._implicit_role: + raise AnsibleError( + 'Unable to properly parse command line arguments. Please use "ansible-galaxy collection install" ' + 'instead of "ansible-galaxy install".' 
+ ) + raise + # If `ansible-galaxy install` is used, collection-only options aren't available to the user and won't be in context.CLIARGS allow_pre_release = context.CLIARGS.get('allow_pre_release', False) upgrade = context.CLIARGS.get('upgrade', False) @@ -1657,7 +1671,7 @@ class GalaxyCLI(CLI): if response['count'] == 0: display.display("No roles match your search.", color=C.COLOR_ERROR) - return True + return 1 data = [u''] @@ -1680,7 +1694,7 @@ class GalaxyCLI(CLI): data = u'\n'.join(data) self.pager(data) - return True + return 0 def execute_import(self): """ used to import a role into Ansible Galaxy """ @@ -1786,7 +1800,7 @@ class GalaxyCLI(CLI): display.display(resp['status']) - return True + return 0 def main(args=None): diff --git a/changelogs/CHANGELOG-v2.13.rst b/changelogs/CHANGELOG-v2.13.rst index 1f0a790c..26e82197 100644 --- a/changelogs/CHANGELOG-v2.13.rst +++ b/changelogs/CHANGELOG-v2.13.rst @@ -5,6 +5,30 @@ ansible-core 2.13 "Nobody's Fault but Mine" Release Notes .. contents:: Topics +v2.13.4 +======= + +Release Summary +--------------- + +| Release Date: 2022-09-12 +| `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + +Bugfixes +-------- + +- Fix for network_cli not getting all relevant connection options +- ansible-galaxy - Fix detection of ``--role-file`` in arguments for implicit role invocation (https://github.com/ansible/ansible/issues/78204) +- ansible-galaxy - Fix exit codes for role search and delete (https://github.com/ansible/ansible/issues/78516) +- ansible-test - Fix change detection for ansible-test's own integration tests. +- ansible-test - ansible-doc sanity test - Correctly determine the fully-qualified collection name for plugins in subdirectories, resolving https://github.com/ansible/ansible/issues/78490. +- apt - don't actually update the cache in check mode with update_cache=true. 
+- apt - don't mark existing packages as manually installed in check mode (https://github.com/ansible/ansible/issues/66413). +- apt - fix package selection to include /etc/apt/preferences(.d) (https://github.com/ansible/ansible/issues/77969) +- urls - Guard imports of ``urllib3`` by catching ``Exception`` instead of ``ImportError`` to prevent exceptions in the import process of optional dependencies from preventing use of ``urls.py`` (https://github.com/ansible/ansible/issues/78648) +- wait_for - Read file and perform comparisons using bytes to avoid decode errors (https://github.com/ansible/ansible/issues/78214) + v2.13.3 ======= diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 5eb38f48..98e8e90b 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1003,3 +1003,50 @@ releases: - type_shim_exception_swallow.yml - v2.13.3rc1_summary.yaml release_date: '2022-08-08' + 2.13.4: + changes: + release_summary: '| Release Date: 2022-09-12 + + | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + ' + codename: Nobody's Fault but Mine + fragments: + - v2.13.4_summary.yaml + release_date: '2022-09-12' + 2.13.4rc1: + changes: + bugfixes: + - Fix for network_cli not getting all relevant connection options + - ansible-galaxy - Fix detection of ``--role-file`` in arguments for implicit + role invocation (https://github.com/ansible/ansible/issues/78204) + - ansible-galaxy - Fix exit codes for role search and delete (https://github.com/ansible/ansible/issues/78516) + - ansible-test - Fix change detection for ansible-test's own integration tests. + - ansible-test - ansible-doc sanity test - Correctly determine the fully-qualified + collection name for plugins in subdirectories, resolving https://github.com/ansible/ansible/issues/78490. + - apt - don't actually update the cache in check mode with update_cache=true. 
+ - apt - don't mark existing packages as manually installed in check mode (https://github.com/ansible/ansible/issues/66413). + - apt - fix package selection to include /etc/apt/preferences(.d) (https://github.com/ansible/ansible/issues/77969) + - urls - Guard imports of ``urllib3`` by catching ``Exception`` instead of ``ImportError`` + to prevent exceptions in the import process of optional dependencies from + preventing use of ``urls.py`` (https://github.com/ansible/ansible/issues/78648) + - wait_for - Read file and perform comparisons using bytes to avoid decode errors + (https://github.com/ansible/ansible/issues/78214) + release_summary: '| Release Date: 2022-09-06 + + | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__ + + ' + codename: Nobody's Fault but Mine + fragments: + - 74446-network-conn-options.yaml + - 77969-apt-preferences.yml + - 78204-galaxy-role-file-detection.yml + - 78214-wait-for-compare-bytes.yml + - 78496-fix-apt-check-mode.yml + - 78516-galaxy-cli-exit-codes.yml + - 78648-urllib3-import-exceptions.yml + - ansible-test-ansible-doc-sanity-fqcn.yml + - ansible-test-self-change-classification.yml + - v2.13.4rc1_summary.yaml + release_date: '2022-09-06' diff --git a/docs/docsite/.templates/banner.html b/docs/docsite/.templates/banner.html index 3288eb8e..57c104d9 100644 --- a/docs/docsite/.templates/banner.html +++ b/docs/docsite/.templates/banner.html @@ -46,7 +46,7 @@ /* temp extra banner to advertise AnsibeFest2021 */ banner += extra_banner; - msg += 'You are reading the <b>latest</b> (stable) community version of the Ansible documentation. Red Hat subscribers, select <b>2.9</b> in the version selection to the left for the most recent Red Hat release.'; + msg += 'You are reading the <b>latest</b> (stable) community version of the Ansible documentation. 
If you are a Red Hat customer, refer to the <a href="https://access.redhat.com/support/policy/updates/ansible-automation-platform">Ansible Automation Platform Life Cycle</a> page for subscription details.'; } else if (startsWith(current_url_path, "/ansible/2.9/")) { msg += 'You are reading the latest Red Hat released version of the Ansible documentation. Community users can use this version, or select <b>latest</b> from the version selector to the left for the most recent community version.'; } else if (startsWith(current_url_path, "/ansible/devel/")) { diff --git a/docs/docsite/Makefile b/docs/docsite/Makefile index 0474a314..9a8d6d3d 100644 --- a/docs/docsite/Makefile +++ b/docs/docsite/Makefile @@ -56,6 +56,7 @@ ifeq ($(findstring error,$(MAJOR_VERSION)), error) $(error "version_helper failed to determine major version") endif + assertrst: ifndef rst $(error specify document or pattern with rst=somefile.rst) @@ -67,6 +68,7 @@ docs: htmldocs coredocs: core_htmldocs + generate_rst: collections_meta config cli keywords plugins testing core_generate_rst: collections_meta config cli keywords base_plugins testing @@ -74,7 +76,7 @@ core_generate_rst: collections_meta config cli keywords base_plugins testing # localisation effort. It will come at a later time. gettext_generate_rst: collections_meta config cli keywords testing -# The following two symlinks are necessary to produce two different docsets +# The following symlinks are necessary to produce two different docsets # from the same set of rst files (Ansible the package docs, and core docs). 
# Symlink the relevant index into place for building Ansible docs ansible_structure: @@ -96,9 +98,14 @@ core_structure: @echo "Creating symlinks in core_structure" -ln -sf ../rst/core_index.rst rst/index.rst -ln -sf ../dev_guide/core_index.rst rst/dev_guide/index.rst +# set up the correct core conf.py to use for English vs a translated language +ifdef LANGOPTS + -ln -sf ../sphinx_conf/core_lang_conf.py rst/conf.py +else -ln -sf ../sphinx_conf/core_conf.py rst/conf.py +endif -# Symlink the relevant index into place for building core docs +# Symlink the relevant index into place for building core translated docs gettext_structure: @echo "Creating symlinks in gettext_structure" -ln -sf ../rst/core_index.rst rst/index.rst diff --git a/docs/docsite/rst/community/development_process.rst b/docs/docsite/rst/community/development_process.rst index 4fbf7c8b..344e0739 100644 --- a/docs/docsite/rst/community/development_process.rst +++ b/docs/docsite/rst/community/development_process.rst @@ -45,7 +45,7 @@ Here's an overview of the PR lifecycle: * Contributor addresses any feedback from reviewers * Developers, maintainers, community re-review * PR merged or closed -* PR `backported <backport_process>`_ to one or more ``stable-X.Y`` branches (optional, bugfixes only) +* PR :ref:`backported <backport_process>` to one or more ``stable-X.Y`` branches (optional, bugfixes only) Automated PR review: ansibullbot -------------------------------- @@ -369,7 +369,7 @@ We do **not** backport features. .. code-block:: shell git fetch upstream - git checkout -b backport/2.12/[PR_NUMBER_FROM_DEVEL] upstream/stable-2.13 + git checkout -b backport/2.13/[PR_NUMBER_FROM_DEVEL] upstream/stable-2.13 #. 
Cherry pick the relevant commit SHA from the devel branch into your feature branch, handling merge conflicts as necessary: diff --git a/docs/docsite/rst/dev_guide/developing_collections_creating.rst b/docs/docsite/rst/dev_guide/developing_collections_creating.rst index ae20c68f..2e8de1c4 100644 --- a/docs/docsite/rst/dev_guide/developing_collections_creating.rst +++ b/docs/docsite/rst/dev_guide/developing_collections_creating.rst @@ -22,16 +22,18 @@ A user can then install your collection on their systems. Creating a collection skeleton ============================== -To start a new collection: +To start a new collection, run the following command in your collections directory: .. code-block:: bash - collection_dir#> ansible-galaxy collection init my_namespace.my_collection + ansible_collections#> ansible-galaxy collection init my_namespace.my_collection .. note:: Both the namespace and collection names use the same strict set of requirements. See `Galaxy namespaces <https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespaces>`_ on the Galaxy docsite for those requirements. +It will create the structure ``[my_namespace]/[my_collection]/[collection skeleton]``. +.. hint:: If Git is used for version control, the corresponding repository should be initialized in the collection directory. Once the skeleton exists, you can populate the directories with the content you want inside the collection. See `ansible-collections <https://github.com/ansible-collections/>`_ GitHub Org to get a better idea of what you can place inside a collection. 
Reference: the ``ansible-galaxy collection`` command diff --git a/docs/docsite/rst/dev_guide/developing_collections_distributing.rst b/docs/docsite/rst/dev_guide/developing_collections_distributing.rst index d0406680..a13226ea 100644 --- a/docs/docsite/rst/dev_guide/developing_collections_distributing.rst +++ b/docs/docsite/rst/dev_guide/developing_collections_distributing.rst @@ -6,15 +6,16 @@ Distributing collections A collection is a distribution format for Ansible content. A typical collection contains modules and other plugins that address a set of related use cases. For example, a collection might automate administering a particular database. A collection can also contain roles and playbooks. -To distribute your collection and allow others to use it, you can publish your collection on one or more distribution servers. Distribution servers include: +To distribute your collection and allow others to use it, you can publish your collection on one or more :term:`distribution server`. Distribution servers include: -================================= ======================================================== +================================= =================================================================== Distribution server Collections accepted -================================= ======================================================== +================================= =================================================================== Ansible Galaxy All collections -Red Hat Automation Hub Only collections certified by Red Hat +:term:`Pulp 3 Galaxy` All collections, supports signed collections +Red Hat Automation Hub Only collections certified by Red Hat, supports signed collections Privately hosted Automation Hub Collections authorized by the owners -================================= ======================================================== +================================= =================================================================== Distributing 
collections involves four major steps: @@ -179,6 +180,22 @@ For more information on the :file:`galaxy.yml` file, see :ref:`collections_galax .. note:: The ``build_ignore`` feature is only supported with ``ansible-galaxy collection build`` in Ansible 2.10 or newer. + +.. _signing_collections: + +Signing a collection +-------------------------- + +You can include a GnuPG signature with your collection on a :term:`Pulp 3 Galaxy` server. See `Enabling collection signing <https://galaxyng.netlify.app/config/collection_signing/>`_ for details. + +You can manually generate detached signatures for a collection using the ``gpg`` CLI using the following step. This step assume you have generated a GPG private key, but do not cover this process. + +.. code-block:: bash + + ansible-galaxy collection build + tar -Oxzf namespace-name-1.0.0.tar.gz MANIFEST.json | gpg --output namespace-name-1.0.0.asc --detach-sign --armor --local-user email@example.com - + + .. _trying_collection_locally: Preparing to publish your collection diff --git a/docs/docsite/rst/dev_guide/developing_modules_general.rst b/docs/docsite/rst/dev_guide/developing_modules_general.rst index 3f1f27fd..abffcf19 100644 --- a/docs/docsite/rst/dev_guide/developing_modules_general.rst +++ b/docs/docsite/rst/dev_guide/developing_modules_general.rst @@ -80,13 +80,13 @@ The simplest way is to use ``ansible`` adhoc command: .. code:: shell - ansible -m library/my_test.py -a 'name=hello new=true' remotehost + ANSIBLE_LIBRARY=./library ansible -m my_test -a 'name=hello new=true' remotehost If your module does not need to target a remote host, you can quickly and easily exercise your code locally like this: .. 
code:: shell - ansible -m library/my_test.py -a 'name=hello new=true' localhost + ANSIBLE_LIBRARY=./library ansible -m my_test -a 'name=hello new=true' localhost - If for any reason (pdb, using print(), faster iteration, etc) you want to avoid going through Ansible, another way is to create an arguments file, a basic JSON config file that passes parameters to your module so that you can run it. diff --git a/docs/docsite/rst/galaxy/user_guide.rst b/docs/docsite/rst/galaxy/user_guide.rst index c287b052..e4beb7ef 100644 --- a/docs/docsite/rst/galaxy/user_guide.rst +++ b/docs/docsite/rst/galaxy/user_guide.rst @@ -492,3 +492,6 @@ Use ``remove`` to delete a role from *roles_path*: Shareable collections of modules, playbooks and roles :ref:`playbooks_reuse_roles` Reusable tasks, handlers, and other files in a known directory structure + :ref:`command_line_tools` + Perform other related operations + diff --git a/docs/docsite/rst/porting_guides/porting_guide_5.rst b/docs/docsite/rst/porting_guides/porting_guide_5.rst index b02bfc83..d3ba694e 100644 --- a/docs/docsite/rst/porting_guides/porting_guide_5.rst +++ b/docs/docsite/rst/porting_guides/porting_guide_5.rst @@ -46,7 +46,7 @@ Playbook Python Interpreter Discovery ============================ -The default value of ``INTERPRETER_PYTHON_FALLBACK`` changed to ``auto``. The list of Python interpreters in ``INTERPRETER_PYTHON_FALLBACK`` changed to prefer Python 3 over Python 2. The combination of these two changes means the new default behavior is to quietly prefer Python 3 over Python 2 on remote hosts. Previously a deprecation warning was issued in situations where interpreter discovery would have used Python 3 but the interpreter was set to ``/usr/bin/python``. +The default value of ``INTERPRETER_PYTHON`` changed to ``auto``. The list of Python interpreters in ``INTERPRETER_PYTHON_FALLBACK`` changed to prefer Python 3 over Python 2. 
The combination of these two changes means the new default behavior is to quietly prefer Python 3 over Python 2 on remote hosts. Previously a deprecation warning was issued in situations where interpreter discovery would have used Python 3 but the interpreter was set to ``/usr/bin/python``. ``INTERPRETER_PYTHON_FALLBACK`` can be changed from the default list of interpreters by setting the ``ansible_interpreter_python_fallback`` variable. diff --git a/docs/docsite/rst/porting_guides/porting_guide_6.rst b/docs/docsite/rst/porting_guides/porting_guide_6.rst index 9befc4e8..42c972fb 100644 --- a/docs/docsite/rst/porting_guides/porting_guide_6.rst +++ b/docs/docsite/rst/porting_guides/porting_guide_6.rst @@ -99,6 +99,29 @@ Networking No notable changes +Porting Guide for v6.3.0 +======================== + +Major Changes +------------- + +community.mysql +~~~~~~~~~~~~~~~ + +- mysql_db - the ``pipefail`` argument's default value will be changed to ``true`` in community.mysql 4.0.0. If your target machines do not use ``bash`` as a default interpreter, set ``pipefail`` to ``false`` explicitly. However, we strongly recommend setting up ``bash`` as a default and ``pipefail=true`` as it will protect you from getting broken dumps you don't know about (https://github.com/ansible-collections/community.mysql/issues/407). + +fortinet.fortios +~~~~~~~~~~~~~~~~ + +- Support Diff feature in check_mode. +- Support Fortios 7.2.0. + +Deprecated Features +------------------- + +- The google.cloud collection is considered unmaintained and will be removed from Ansible 8 if no one starts maintaining it again before Ansible 8. See `the removal process for details on how this works <https://github.com/ansible-collections/overview/blob/main/removal_from_ansible.rst#cancelling-removal-of-an-unmaintained-collection>`__ (https://github.com/ansible-community/community-topics/issues/105). 
+- The servicenow.servicenow collection has been deprecated by its maintainers (https://github.com/ServiceNowITOM/servicenow-ansible/pull/69) and will be removed from Ansible 7. It can still be installed manually, but it is suggested to swich to `servicenow.itsm <https://galaxy.ansible.com/servicenow/itsm>`__ instead (https://github.com/ansible-community/community-topics/issues/124). + Porting Guide for v6.2.0 ======================== diff --git a/docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst b/docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst index bb6ff0a7..2d60bf26 100644 --- a/docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst +++ b/docs/docsite/rst/porting_guides/porting_guide_core_2.12.rst @@ -44,7 +44,7 @@ Playbook Python Interpreter Discovery ============================ -The default value of ``INTERPRETER_PYTHON_FALLBACK`` changed to ``auto``. The list of Python interpreters in ``INTERPRETER_PYTHON_FALLBACK`` changed to prefer Python 3 over Python 2. The combination of these two changes means the new default behavior is to quietly prefer Python 3 over Python 2 on remote hosts. Previously a deprecation warning was issued in situations where interpreter discovery would have used Python 3 but the interpreter was set to ``/usr/bin/python``. +The default value of ``INTERPRETER_PYTHON`` changed to ``auto``. The list of Python interpreters in ``INTERPRETER_PYTHON_FALLBACK`` changed to prefer Python 3 over Python 2. The combination of these two changes means the new default behavior is to quietly prefer Python 3 over Python 2 on remote hosts. Previously a deprecation warning was issued in situations where interpreter discovery would have used Python 3 but the interpreter was set to ``/usr/bin/python``. ``INTERPRETER_PYTHON_FALLBACK`` can be changed from the default list of interpreters by setting the ``ansible_interpreter_python_fallback`` variable. 
diff --git a/docs/docsite/rst/reference_appendices/faq.rst b/docs/docsite/rst/reference_appendices/faq.rst index a969fd4d..486fb5f0 100644 --- a/docs/docsite/rst/reference_appendices/faq.rst +++ b/docs/docsite/rst/reference_appendices/faq.rst @@ -23,6 +23,15 @@ Where did this specific module go? IF you are searching for a specific module, you can check the `runtime.yml <https://github.com/ansible/ansible/blob/devel/lib/ansible/config/ansible_builtin_runtime.yml>`_ file, which lists the first destination for each module that we extracted from the main ansible/ansible repository. Some modules have moved again since then. You can also search on `Ansible Galaxy <https://galaxy.ansible.com/>`_ or ask on one of our :ref:`chat channels <communication_irc>`. +.. _slow_install: + +How can I speed up Ansible on systems with slow disks? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +Ansible may feel sluggish on systems with slow disks, such as Raspberry PI. See `Ansible might be running slow if libyaml is not available <https://www.jeffgeerling.com/blog/2021/ansible-might-be-running-slow-if-libyaml-not-available>`_ for hints on how to improve this. + + + .. _set_environment: How can I set the PATH or any other environment variable for a task or entire play? @@ -709,15 +718,6 @@ To determine if a keyword requires ``{{ }}`` or even supports templating, use `` this will return documentation on the keyword including a ``template`` field with the values ``explicit`` (requires ``{{ }}``), ``implicit`` (assumes ``{{ }}``, so no needed) or ``static`` (no templating supported, all characters will be interpreted literally) -.. _why_no_wheel: - -Why don't you ship ansible in wheel format (or other packaging format) ? -++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In most cases it has to do with maintainability. There are many ways to ship software and we do not have -the resources to release Ansible on every platform. 
-In some cases there are technical issues. For example, our dependencies are not present on Python Wheels. - .. _ansible_host_delegated: How do I get the original ansible_host when I delegate a task? diff --git a/docs/docsite/rst/reference_appendices/glossary.rst b/docs/docsite/rst/reference_appendices/glossary.rst index 10fdd2dd..f30a933c 100644 --- a/docs/docsite/rst/reference_appendices/glossary.rst +++ b/docs/docsite/rst/reference_appendices/glossary.rst @@ -33,7 +33,7 @@ when a term comes up on the mailing list. Name used starting with 2.11. The installable package (RPM/Python/Deb package) generated from the `ansible/ansible repository <https://github.com/ansible/ansible>`_. Contains the command-line tools and the code for basic features and functions, such as copying module code to managed nodes. The ``ansible-core`` package includes a few modules and plugins and allows you to add others by installing collections. Ansible Galaxy - An `online resource <galaxy.ansible.com>`_ for finding and sharing Ansible community content. Also, the command-line utility that lets users install individual Ansible Collections, for example`` ansible-galaxy install community.crypto``. + An `online distribution server <galaxy.ansible.com>`_ for finding and sharing Ansible community content, sometimes referred to as community Galaxy. Also, the command-line utility that lets users install individual Ansible Collections, for example ``ansible-galaxy collection install community.crypto``. Async Refers to a task that is configured to run in the background rather @@ -108,6 +108,9 @@ when a term comes up on the mailing list. modules that support it. You can combine it with ``--check`` to get a good 'dry run'. File diffs are normally in unified diff format. + Distribution server + A server, such as Ansible Galaxy or Red Hat Automation Hub where you can distribute your collections and allow others to access these collections. 
See :ref:`distributing_collections` for a list of distribution server types. Some Ansible features are only available on certain distribution servers. + Executor A core software component of Ansible that is the power behind :command:`/usr/bin/ansible` directly -- and corresponds to the @@ -386,6 +389,10 @@ when a term comes up on the mailing list. git on a crontab and then managing the machine locally, using the :term:`local connection` plugin. + Pulp 3 Galaxy + A self-hosted distribution server based on the `GalaxyNG codebase <https://galaxyng.netlify.app/>`_, based on Pulp version 3. Use it to find and share your own curated set of content. You can access your content with the ``ansible-galaxy collection`` command. + + Push Mode Push mode is the default mode of Ansible. In fact, it's not really a mode at all -- it's just how Ansible works when you aren't thinking diff --git a/docs/docsite/rst/shared_snippets/installing_collections.txt b/docs/docsite/rst/shared_snippets/installing_collections.txt index ba241088..23f5c3e7 100644 --- a/docs/docsite/rst/shared_snippets/installing_collections.txt +++ b/docs/docsite/rst/shared_snippets/installing_collections.txt @@ -70,34 +70,3 @@ You can also keep a collection adjacent to the current playbook, under a ``colle See :ref:`collection_structure` for details on the collection directory structure. -Collections signed by a Galaxy server can be verified during installation with GnuPG. To opt into signature verification, configure a keyring for ``ansible-galaxy`` with native GnuPG tooling and provide the file path with the ``--keyring`` CLI option or ref:`GALAXY_GPG_KEYRING`. Signatures provided by the Galaxy server will be used to verify the collection's ``MANIFEST.json``. - -Use the ``--signature`` option to verify the collection's ``MANIFEST.json`` with additional signatures to those provided by the Galaxy server. Supplemental signatures should be provided as URIs. - -.. 
code-block:: bash - - ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --keyring ~/.ansible/pubring.kbx - -GnuPG verification only occurs for collections installed from a Galaxy server. User-provided signatures are not used to verify collections installed from git repositories, source directories, or URLs/paths to tar.gz files. - -By default, verification is considered successful if a minimum of 1 signature successfully verifies the collection. The number of required signatures can be configured with ``--required-valid-signature-count`` or :ref:`GALAXY_REQUIRED_VALID_SIGNATURE_COUNT`. All signatures can be required by setting the option to ``all``. To fail signature verification if no valid signatures are found, prepend the value with ``+``, such as ``+all`` or ``+1``. - -.. code-block:: bash - - export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx - export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=2 - ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --signature file:///path/to/local/detached_signature.asc - -Certain GnuPG errors can be ignored with ``--ignore-signature-status-code`` or :ref:`GALAXY_REQUIRED_VALID_SIGNATURE_COUNT`. :ref:`GALAXY_REQUIRED_VALID_SIGNATURE_COUNT` should be a list, and ``--ignore-signature-status-code`` can be provided multiple times to ignore multiple additional error status codes. - -This example requires any signatures provided by the Galaxy server to verify the collection except if they fail due to NO_PUBKEY: - -.. code-block:: bash - - export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx - export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=all - ansible-galaxy collection install my_namespace.my_collection --ignore-signature-status-code NO_PUBKEY - -If verification fails for the example above, only errors other than NO_PUBKEY will be displayed. 
- -If verification is unsuccessful, the collection will not be installed. GnuPG signature verification can be disabled with ``--disable-gpg-verify`` or by configuring :ref:`GALAXY_DISABLE_GPG_VERIFY`. diff --git a/docs/docsite/rst/user_guide/collections_using.rst b/docs/docsite/rst/user_guide/collections_using.rst index 1bebb1de..f259ffcb 100644 --- a/docs/docsite/rst/user_guide/collections_using.rst +++ b/docs/docsite/rst/user_guide/collections_using.rst @@ -7,7 +7,7 @@ Using collections Collections are a distribution format for Ansible content that can include playbooks, roles, modules, and plugins. As modules move from the core Ansible repository into collections, the module documentation will move to the :ref:`collections pages <list_of_collections>`. -You can install and use collections through `Ansible Galaxy <https://galaxy.ansible.com>`_. +You can install and use collections through a distribution server, such as `Ansible Galaxy <https://galaxy.ansible.com>`_ or a `Pulp 3 Galaxy server <https://galaxyng.netlify.app/>`_. * For details on how to *develop* collections see :ref:`developing_collections`. * For the current development status of Collections and FAQ see `Ansible Collections Community Guide <https://github.com/ansible-collections/overview/blob/main/README.rst>`_. @@ -30,6 +30,81 @@ Installing collections with ``ansible-galaxy`` .. include:: ../shared_snippets/installing_collections.txt +.. _installing_signed_collections: + +Installing collections with signature verification +--------------------------------------------------- + +If a collection has been signed by a :term:`distribution server`, the server will provide ASCII armored, detached signatures to verify the authenticity of the ``MANIFEST.json`` before using it to verify the collection's contents. This option is not available on all distribution servers. See :ref:`distributing_collections` for a table listing which servers support collection signing. 
+ +To use signature verification for signed collections: + +1. :ref:`Configure a GnuPG keyring <galaxy_gpg_keyring>` for ``ansible-galaxy``, or provide the path to the keyring with the ``--keyring`` option when you install the signed collection. + + +2. Import the public key from the distribution server into that keyring. + + .. code-block:: bash + + gpg --import --no-default-keyring --keyring ~/.ansible/pubring.kbx my-public-key.asc + + +3. Verify the signature when you install the collection. + + .. code-block:: bash + + ansible-galaxy collection install my_namespace.my_collection --keyring ~/.ansible/pubring.kbx + + The ``--keyring`` option is not necessary if you have :ref:`configured a GnuPG keyring <galaxy_gpg_keyring>`. + +4. Optionally, verify the signature at any point after installation to prove the collection has not been tampered with. See :ref:`verify_signed_collections` for details. + + +You can also include signatures in addition to those provided by the distribution server. Use the ``--signature`` option to verify the collection's ``MANIFEST.json`` with these additional signatures. Supplemental signatures should be provided as URIs. + +.. code-block:: bash + + ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --keyring ~/.ansible/pubring.kbx + +GnuPG verification only occurs for collections installed from a distribution server. User-provided signatures are not used to verify collections installed from git repositories, source directories, or URLs/paths to tar.gz files. + +You can also include additional signatures in the collection ``requirements.yml`` file under the ``signatures`` key. + +.. 
code-block:: yaml + + # requirements.yml + collections: + - name: ns.coll + version: 1.0.0 + signatures: + - https://examplehost.com/detached_signature.asc + - file:///path/to/local/detached_signature.asc + +See :ref:`collection requirements file <collection_requirements_file>` for details on how to install collections with this file. + +By default, verification is considered successful if a minimum of 1 signature successfully verifies the collection. The number of required signatures can be configured with ``--required-valid-signature-count`` or :ref:`GALAXY_REQUIRED_VALID_SIGNATURE_COUNT`. All signatures can be required by setting the option to ``all``. To fail signature verification if no valid signatures are found, prepend the value with ``+``, such as ``+all`` or ``+1``. + +.. code-block:: bash + + export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx + export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=2 + ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --signature file:///path/to/local/detached_signature.asc + +Certain GnuPG errors can be ignored with ``--ignore-signature-status-code`` or :ref:`GALAXY_IGNORE_SIGNATURE_STATUS_CODES`. :ref:`GALAXY_IGNORE_SIGNATURE_STATUS_CODES` should be a list, and ``--ignore-signature-status-code`` can be provided multiple times to ignore multiple additional error status codes. + +This example requires any signatures provided by the distribution server to verify the collection except if they fail due to NO_PUBKEY: + +.. code-block:: bash + + export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx + export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=all + ansible-galaxy collection install my_namespace.my_collection --ignore-signature-status-code NO_PUBKEY + +If verification fails for the example above, only errors other than NO_PUBKEY will be displayed. + +If verification is unsuccessful, the collection will not be installed. 
GnuPG signature verification can be disabled with ``--disable-gpg-verify`` or by configuring :ref:`GALAXY_DISABLE_GPG_VERIFY`. + + .. _collections_older_version: Installing an older version of a collection @@ -270,25 +345,19 @@ In addition to the ``namespace.collection_name:version`` format, you can provide Verifying against ``tar.gz`` files is not supported. If your ``requirements.yml`` contains paths to tar files or URLs for installation, you can use the ``--ignore-errors`` flag to ensure that all collections using the ``namespace.name`` format in the file are processed. -Signature verification ----------------------- - -If a collection has been signed by the Galaxy server, the server will provide ASCII armored, detached signatures to verify the authenticity of the MANIFEST.json before using it to verify the collection's contents. You must opt into signature verification by :ref:`configuring a keyring <galaxy_gpg_keyring>` for ``ansible-galaxy``, or by providing the path with the ``--keyring`` option. +.. _verify_signed_collections: -To import a public key into a keyring for use with ``ansible-galaxy`` use the following step. +Verifying signed collections +----------------------------- -.. code-block:: bash +If a collection has been signed by a :term:`distribution server`, the server will provide ASCII armored, detached signatures to verify the authenticity of the MANIFEST.json before using it to verify the collection's contents. This option is not available on all distribution servers. See :ref:`distributing_collections` for a table listing which servers support collection signing. See :ref:`installing_signed_collections` for how to verify a signed collection when you install it. - gpg --import --no-default-keyring --keyring ~/.ansible/pubring.kbx my-public-key.asc - -In addition to any signatures provided by the Galaxy server, signature sources can also be provided in the requirements file and on the command line. Signature sources should be URIs. 
- -You can manually generate detached signatures for a collection using the ``gpg`` CLI using the following step. This step assume you have generated a GPG private key, but do not cover this process. +To verify a signed installed collection: .. code-block:: bash - ansible-galaxy collection build - tar -Oxzf namespace-name-1.0.0.tar.gz MANIFEST.json | gpg --output namespace-name-1.0.0.asc --detach-sign --armor --local-user email@example.com - + ansible-galaxy collection verify my_namespace.my_collection --keyring ~/.ansible/pubring.kbx + Use the ``--signature`` option to verify collection name(s) provided on the CLI with an additional signature. This option can be used multiple times to provide multiple signatures. @@ -296,28 +365,19 @@ Use the ``--signature`` option to verify collection name(s) provided on the CLI ansible-galaxy collection verify my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --signature file:///path/to/local/detached_signature.asc --keyring ~/.ansible/pubring.kbx -Collections in a requirements file should list any additional signature sources following the collection's "signatures" key. - -.. code-block:: yaml - - # requirements.yml - collections: - - name: ns.coll - version: 1.0.0 - signatures: - - https://examplehost.com/detached_signature.asc - - file:///path/to/local/detached_signature.asc +Optionally, you can verify a collection signature with a ``requirements.yml`` file. .. code-block:: bash ansible-galaxy collection verify -r requirements.yml --keyring ~/.ansible/pubring.kbx -When a collection is installed from a Galaxy server, the signatures provided by the server to verify the collection's authenticity are saved alongside the installed collections. This data is used to verify the internal consistency of the collection without querying the Galaxy server again when the ``--offline`` option is provided. 
+When a collection is installed from a distribution server, the signatures provided by the server to verify the collection's authenticity are saved alongside the installed collections. This data is used to verify the internal consistency of the collection without querying the distribution server again when the ``--offline`` option is provided. .. code-block:: bash ansible-galaxy collection verify my_namespace.my_collection --offline --keyring ~/.ansible/pubring.kbx + .. _collections_using_playbook: Using collections in a Playbook diff --git a/docs/docsite/sphinx_conf/core_lang_conf.py b/docs/docsite/sphinx_conf/core_lang_conf.py new file mode 100644 index 00000000..a164dd67 --- /dev/null +++ b/docs/docsite/sphinx_conf/core_lang_conf.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# +# documentation build configuration file, created by +# sphinx-quickstart on Sat Sep 27 13:23:22 2008-2009. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# The contents of this file are pickled, so don't put values in the namespace +# that aren't pickleable (module imports are okay, they're removed +# automatically). +# +# All configuration values have a default value; values that are commented out +# serve to show the default value. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys +import os + +# If your extensions are in another directory, add it here. If the directory +# is relative to the documentation root, use os.path.abspath to make it +# absolute, like shown here. +# sys.path.append(os.path.abspath('some/directory')) +# +sys.path.insert(0, os.path.join('ansible', 'lib')) + +# We want sphinx to document the ansible modules contained in this repository, +# not those that may happen to be installed in the version +# of Python used to run sphinx. 
When sphinx loads in order to document, +# the repository version needs to be the one that is loaded: +sys.path.insert(0, os.path.abspath(os.path.join('..', '..', '..', 'lib'))) + +VERSION = '2.13_ja' +AUTHOR = 'Ansible, Inc' + + +# General configuration +# --------------------- + +# Add any Sphinx extension module names here, as strings. +# They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# TEST: 'sphinxcontrib.fulltoc' +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'notfound.extension', + 'sphinx_antsibull_ext', # provides CSS for the plugin/module docs generated by antsibull +] + +# Later on, add 'sphinx.ext.viewcode' to the list if you want to have +# colorized code generated too for references. + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['../.templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The master toctree document. +root_doc = master_doc = 'index' # Sphinx 4+ / 3- + +# General substitutions. +project = 'Ansible' +copyright = "Ansible project contributors" + +# The default replacements for |version| and |release|, also used in various +# other places throughout the built documents. +# +# The short X.Y version. +version = VERSION +# The full version, including alpha/beta/rc tags. +release = VERSION + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +today_fmt = '%B %d, %Y' + +# List of documents that shouldn't be included in the build. +# unused_docs = [] + +# List of directories, relative to source directories, that shouldn't be +# searched for source files. +# exclude_dirs = [] + +# A list of glob-style patterns that should be excluded when looking +# for source files. 
+exclude_patterns = [ + '2.10_index.rst', + 'ansible_index.rst', + 'core_index.rst', + 'galaxy', + 'network', + 'scenario_guides', + 'community/collection_contributors/test_index.rst', + 'community/collection_contributors/collection_integration_about.rst', + 'community/collection_contributors/collection_integration_updating.rst', + 'community/collection_contributors/collection_integration_add.rst', + 'community/collection_contributors/collection_test_pr_locally.rst', + 'community/collection_contributors/collection_integration_tests.rst', + 'community/collection_contributors/collection_integration_running.rst', + 'community/collection_contributors/collection_reviewing.rst', + 'community/collection_contributors/collection_unit_tests.rst', + 'community/maintainers.rst', + 'community/contributions_collections.rst', + 'community/create_pr_quick_start.rst', + 'community/reporting_collections.rst', + 'community/contributing_maintained_collections.rst', + 'community/collection_development_process.rst', + 'community/collection_contributors/collection_release_without_branches.rst', + 'community/collection_contributors/collection_release_with_branches.rst', + 'community/collection_contributors/collection_releasing.rst', + 'community/maintainers_guidelines.rst', + 'community/maintainers_workflow.rst', + 'community/steering/community_steering_committee.rst', + 'community/steering/steering_committee_membership.rst', + 'community/steering/steering_committee_past_members.rst', + 'community/steering/steering_index.rst', + 'dev_guide/ansible_index.rst', + 'dev_guide/core_index.rst', + 'dev_guide/platforms/aws_guidelines.rst', + 'dev_guide/platforms/openstack_guidelines.rst', + 'dev_guide/platforms/ovirt_dev_guide.rst', + 'dev_guide/platforms/vmware_guidelines.rst', + 'dev_guide/platforms/vmware_rest_guidelines.rst', + 'porting_guides/porting_guides.rst', + 'porting_guides/porting_guide_[1-9]*', + 'roadmap/index.rst', + 'roadmap/ansible_roadmap_index.rst', + 
'roadmap/old_roadmap_index.rst', + 'roadmap/ROADMAP_2_5.rst', + 'roadmap/ROADMAP_2_6.rst', + 'roadmap/ROADMAP_2_7.rst', + 'roadmap/ROADMAP_2_8.rst', + 'roadmap/ROADMAP_2_9.rst', + 'roadmap/COLLECTIONS*' +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'ansible' + +highlight_language = 'YAML+Jinja' + +# Substitutions, variables, entities, & shortcuts for text which do not need to link to anything. +# For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_: +# |br| is useful for formatting fields inside of tables +# |_| is a nonbreaking space; similarly useful inside of tables +rst_epilog = """ +.. |br| raw:: html + + <br> +.. 
|_| unicode:: 0xA0 + :trim: +""" + + +# Options for HTML output +# ----------------------- + +html_theme_path = [] +html_theme = 'sphinx_ansible_theme' +html_show_sphinx = False + +html_theme_options = { + 'canonical_url': "https://docs.ansible.com/ansible/latest/", + 'hubspot_id': '330046', + 'satellite_tracking': True, + 'show_extranav': True, + 'swift_id': 'yABGvz2N8PwcwBxyfzUc', + 'tag_manager_id': 'GTM-PSB293', + 'vcs_pageview_mode': 'edit' +} + +html_context = { + 'display_github': 'True', + 'show_sphinx': False, + 'is_eol': False, + 'github_user': 'ansible', + 'github_repo': 'ansible', + 'github_version': 'devel/docs/docsite/rst/', + 'github_module_version': 'devel/lib/ansible/modules/', + 'github_root_dir': 'devel/lib/ansible', + 'github_cli_version': 'devel/lib/ansible/cli/', + 'current_version': version, + 'latest_version': '2.13', + # list specifically out of order to make latest work + 'available_versions': ('2.13_ja', '2.12_ja', '2.11_ja',), +} + +# Add extra CSS styles to the resulting HTML pages +html_css_files = [ + 'css/core-color-scheme.css', +] + +# The style sheet to use for HTML and HTML Help pages. A file of that name +# must exist either in Sphinx' static/ path, or in one of the custom paths +# given in html_static_path. +# html_style = 'solar.css' + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +html_title = 'Ansible Core Documentation' + +# A shorter title for the navigation bar. Default is the same as html_title. +html_short_title = 'Documentation' + +# The name of an image file (within the static path) to place at the top of +# the sidebar. +# html_logo = + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = 'favicon.ico' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['../_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_use_modindex = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, the reST sources are included in the HTML build as _sources/<name>. +html_copy_source = False + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = 'https://docs.ansible.com/ansible/latest' + +# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = '' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'Poseidodoc' + +# Configuration for sphinx-notfound-pages +# with no 'notfound_template' and no 'notfound_context' set, +# the extension builds 404.rst into a location-agnostic 404 page +# +# default is `en` - using this for the sub-site: +notfound_default_language = "ansible" +# default is `latest`: +# setting explicitly - docsite serves up /ansible/latest/404.html +# so keep this set to `latest` even on the `devel` branch +# then no maintenance is needed when we branch a new stable_x.x +notfound_default_version = "latest" +# makes default setting explicit: +notfound_no_urls_prefix = False + +# Options for LaTeX output +# ------------------------ + +# The paper size ('letter' or 'a4'). +# latex_paper_size = 'letter' + +# The font size ('10pt', '11pt' or '12pt'). +# latex_font_size = '10pt' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, document class +# [howto/manual]). +latex_documents = [ + ('index', 'ansible.tex', 'Ansible Documentation', AUTHOR, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# Additional stuff for the LaTeX preamble. +# latex_preamble = '' + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_use_modindex = True + +autoclass_content = 'both' + +# Note: Our strategy for intersphinx mappings is to have the upstream build location as the +# canonical source and then cached copies of the mapping stored locally in case someone is building +# when disconnected from the internet. We then have a script to update the cached copies. 
+# +# Because of that, each entry in this mapping should have this format: +# name: ('http://UPSTREAM_URL', (None, 'path/to/local/cache.inv')) +# +# The update script depends on this format so deviating from this (for instance, adding a third +# location for the mapping to live) will confuse it. +intersphinx_mapping = {'python': ('https://docs.python.org/2/', (None, '../python2.inv')), + 'python3': ('https://docs.python.org/3/', (None, '../python3.inv')), + 'jinja2': ('http://jinja.palletsprojects.com/', (None, '../jinja2.inv')), + 'ansible_6': ('https://docs.ansible.com/ansible/6/', (None, '../ansible_6.inv')), + 'ansible_5': ('https://docs.ansible.com/ansible/5/', (None, '../ansible_5.inv')), + 'ansible_4': ('https://docs.ansible.com/ansible/4/', (None, '../ansible_4.inv')), + 'ansible_3': ('https://docs.ansible.com/ansible/3/', (None, '../ansible_3.inv')), + 'ansible_2_10': ('https://docs.ansible.com/ansible/2.10/', (None, '../ansible_2_10.inv')), + 'ansible_2_9': ('https://docs.ansible.com/ansible/2.9/', (None, '../ansible_2_9.inv')), + 'ansible_2_8': ('https://docs.ansible.com/ansible/2.8/', (None, '../ansible_2_8.inv')), + 'ansible_2_7': ('https://docs.ansible.com/ansible/2.7/', (None, '../ansible_2_7.inv')), + 'ansible_2_6': ('https://docs.ansible.com/ansible/2.6/', (None, '../ansible_2_6.inv')), + 'ansible_2_5': ('https://docs.ansible.com/ansible/2.5/', (None, '../ansible_2_5.inv')), + } + +# linkchecker settings +linkcheck_ignore = [ +] +linkcheck_workers = 25 +# linkcheck_anchors = False diff --git a/docs/man/man1/ansible-config.1 b/docs/man/man1/ansible-config.1 index 9c4613b1..ebd09996 100644 --- a/docs/man/man1/ansible-config.1 +++ b/docs/man/man1/ansible-config.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE-CONFIG" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-CONFIG" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-config \- View ansible configuration. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-console.1 b/docs/man/man1/ansible-console.1 index 43d127f1..7bcf5597 100644 --- a/docs/man/man1/ansible-console.1 +++ b/docs/man/man1/ansible-console.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-CONSOLE" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-CONSOLE" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-console \- REPL console for executing Ansible tasks. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-doc.1 b/docs/man/man1/ansible-doc.1 index 510218ec..7f5ec0d6 100644 --- a/docs/man/man1/ansible-doc.1 +++ b/docs/man/man1/ansible-doc.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-DOC" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-DOC" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-doc \- plugin documentation tool .SH SYNOPSIS diff --git a/docs/man/man1/ansible-galaxy.1 b/docs/man/man1/ansible-galaxy.1 index 53de7357..f6686909 100644 --- a/docs/man/man1/ansible-galaxy.1 +++ b/docs/man/man1/ansible-galaxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE-GALAXY" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-GALAXY" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-galaxy \- Perform various Role and Collection related operations. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-inventory.1 b/docs/man/man1/ansible-inventory.1 index 9f7816bd..5dea0867 100644 --- a/docs/man/man1/ansible-inventory.1 +++ b/docs/man/man1/ansible-inventory.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-INVENTORY" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-INVENTORY" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-inventory \- None .SH SYNOPSIS diff --git a/docs/man/man1/ansible-playbook.1 b/docs/man/man1/ansible-playbook.1 index 066db7d5..49285235 100644 --- a/docs/man/man1/ansible-playbook.1 +++ b/docs/man/man1/ansible-playbook.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-PLAYBOOK" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-PLAYBOOK" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-playbook \- Runs Ansible playbooks, executing the defined tasks on the targeted hosts. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-pull.1 b/docs/man/man1/ansible-pull.1 index 43014683..f3856502 100644 --- a/docs/man/man1/ansible-pull.1 +++ b/docs/man/man1/ansible-pull.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE-PULL" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-PULL" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-pull \- pulls playbooks from a VCS repo and executes them for the local host .SH SYNOPSIS diff --git a/docs/man/man1/ansible-vault.1 b/docs/man/man1/ansible-vault.1 index b33c8ca9..c1f85a0d 100644 --- a/docs/man/man1/ansible-vault.1 +++ b/docs/man/man1/ansible-vault.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-VAULT" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE-VAULT" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible-vault \- encryption/decryption utility for Ansible data files .SH SYNOPSIS diff --git a/docs/man/man1/ansible.1 b/docs/man/man1/ansible.1 index 80335d7d..7941dbc1 100644 --- a/docs/man/man1/ansible.1 +++ b/docs/man/man1/ansible.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE" 1 "" "Ansible 2.13.3" "System administration commands" +.TH "ANSIBLE" 1 "" "Ansible 2.13.4" "System administration commands" .SH NAME ansible \- Define and run a single task 'playbook' against a set of hosts .SH SYNOPSIS diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 288318aa..5acaa6e4 100755 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -487,7 +487,12 @@ class GalaxyCLI(CLI): else: install_parser.add_argument('-r', '--role-file', dest='requirements', help='A file containing a list of roles to be installed.') - if self._implicit_role and ('-r' in self._raw_args or '--role-file' in self._raw_args): + + r_re = re.compile(r'^(?<!-)-[a-zA-Z]*r[a-zA-Z]*') # -r, -fr + contains_r = bool([a for a in self._raw_args if r_re.match(a)]) + role_file_re = re.compile(r'--role-file($|=)') # --role-file foo, --role-file=foo + contains_role_file = bool([a for a in self._raw_args if role_file_re.match(a)]) + if self._implicit_role and (contains_r or contains_role_file): # Any collections in the requirements files will also be installed install_parser.add_argument('--keyring', dest='keyring', default=C.GALAXY_GPG_KEYRING, help='The keyring used during collection signature verification') @@ -1315,7 +1320,16 @@ class GalaxyCLI(CLI): ignore_errors = context.CLIARGS['ignore_errors'] no_deps = context.CLIARGS['no_deps'] force_with_deps = context.CLIARGS['force_with_deps'] - disable_gpg_verify = context.CLIARGS['disable_gpg_verify'] + try: + disable_gpg_verify = context.CLIARGS['disable_gpg_verify'] + except KeyError: + if self._implicit_role: + raise AnsibleError( + 'Unable to properly parse command line arguments. Please use "ansible-galaxy collection install" ' + 'instead of "ansible-galaxy install".' 
+ ) + raise + # If `ansible-galaxy install` is used, collection-only options aren't available to the user and won't be in context.CLIARGS allow_pre_release = context.CLIARGS.get('allow_pre_release', False) upgrade = context.CLIARGS.get('upgrade', False) @@ -1657,7 +1671,7 @@ class GalaxyCLI(CLI): if response['count'] == 0: display.display("No roles match your search.", color=C.COLOR_ERROR) - return True + return 1 data = [u''] @@ -1680,7 +1694,7 @@ class GalaxyCLI(CLI): data = u'\n'.join(data) self.pager(data) - return True + return 0 def execute_import(self): """ used to import a role into Ansible Galaxy """ @@ -1786,7 +1800,7 @@ class GalaxyCLI(CLI): display.display(resp['status']) - return True + return 0 def main(args=None): diff --git a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py index f56e17cf..70cfad58 100755 --- a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py +++ b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py @@ -89,11 +89,11 @@ class ConnectionProcess(object): self.connection = None self._ansible_playbook_pid = ansible_playbook_pid - def start(self, variables): - try: - messages = list() - result = {} + def start(self, options): + messages = list() + result = {} + try: messages.append(('vvvv', 'control socket path is %s' % self.socket_path)) # If this is a relative path (~ gets expanded later) then plug the @@ -104,7 +104,7 @@ class ConnectionProcess(object): self.connection = connection_loader.get(self.play_context.connection, self.play_context, '/dev/null', task_uuid=self._task_uuid, ansible_playbook_pid=self._ansible_playbook_pid) try: - self.connection.set_options(var_options=variables) + self.connection.set_options(direct=options) except ConnectionError as exc: messages.append(('debug', to_text(exc))) raise ConnectionError('Unable to decode JSON from response set_options. 
See the debug log for more information.') @@ -244,11 +244,11 @@ def main(args=None): try: # read the play context data via stdin, which means depickling it - vars_data = read_stream(stdin) + opts_data = read_stream(stdin) init_data = read_stream(stdin) pc_data = pickle.loads(init_data, encoding='bytes') - variables = pickle.loads(vars_data, encoding='bytes') + options = pickle.loads(opts_data, encoding='bytes') play_context = PlayContext() play_context.deserialize(pc_data) @@ -286,7 +286,7 @@ def main(args=None): os.close(r) wfd = os.fdopen(w, 'w') process = ConnectionProcess(wfd, play_context, socket_path, original_path, task_uuid, ansible_playbook_pid) - process.start(variables) + process.start(options) except Exception: messages.append(('error', traceback.format_exc())) rc = 1 @@ -309,7 +309,7 @@ def main(args=None): messages.append(('vvvv', 'found existing local domain socket, using it!')) conn = Connection(socket_path) try: - conn.set_options(var_options=variables) + conn.set_options(direct=options) except ConnectionError as exc: messages.append(('debug', to_text(exc))) raise ConnectionError('Unable to decode JSON from response set_options. 
See the debug log for more information.') diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index eceab1b2..baeb1d1b 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -24,6 +24,7 @@ from ansible.module_utils._text import to_text, to_native from ansible.module_utils.connection import write_to_file_descriptor from ansible.playbook.conditional import Conditional from ansible.playbook.task import Task +from ansible.plugins import get_plugin_class from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader from ansible.template import Templar from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef @@ -583,6 +584,17 @@ class TaskExecutor: # feed back into pc to ensure plugins not using get_option can get correct value self._connection._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=vars_copy, templar=templar) + # for persistent connections, initialize socket path and start connection manager + if any(((self._connection.supports_persistence and C.USE_PERSISTENT_CONNECTIONS), self._connection.force_persistence)): + self._play_context.timeout = self._connection.get_option('persistent_command_timeout') + display.vvvv('attempting to start connection', host=self._play_context.remote_addr) + display.vvvv('using connection plugin %s' % self._connection.transport, host=self._play_context.remote_addr) + + options = self._connection.get_options() + socket_path = start_connection(self._play_context, options, self._task._uuid) + display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr) + setattr(self._connection, '_socket_path', socket_path) + # TODO: eventually remove this block as this should be a 'consequence' of 'forced_local' modules # special handling for python interpreter for network_os, default to 
ansible python unless overriden if 'ansible_network_os' in cvars and 'ansible_python_interpreter' not in cvars: @@ -990,32 +1002,8 @@ class TaskExecutor: # Also backwards compat call for those still using play_context self._play_context.set_attributes_from_plugin(connection) - if any(((connection.supports_persistence and C.USE_PERSISTENT_CONNECTIONS), connection.force_persistence)): - self._play_context.timeout = connection.get_option('persistent_command_timeout') - display.vvvv('attempting to start connection', host=self._play_context.remote_addr) - display.vvvv('using connection plugin %s' % connection.transport, host=self._play_context.remote_addr) - - options = self._get_persistent_connection_options(connection, cvars, templar) - socket_path = start_connection(self._play_context, options, self._task._uuid) - display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr) - setattr(connection, '_socket_path', socket_path) - return connection - def _get_persistent_connection_options(self, connection, final_vars, templar): - - option_vars = C.config.get_plugin_vars('connection', connection._load_name) - plugin = connection._sub_plugin - if plugin.get('type'): - option_vars.extend(C.config.get_plugin_vars(plugin['type'], plugin['name'])) - - options = {} - for k in option_vars: - if k in final_vars: - options[k] = templar.template(final_vars[k]) - - return options - def _set_plugin_options(self, plugin_type, variables, templar, task_keys): try: plugin = getattr(self._connection, '_%s' % plugin_type) @@ -1023,6 +1011,10 @@ class TaskExecutor: # Some plugins are assigned to private attrs, ``become`` is not plugin = getattr(self._connection, plugin_type) + # network_cli's "real" connection plugin is not named connection + # to avoid the confusion of having connection.connection + if plugin_type == "ssh_type_conn": + plugin_type = "connection" option_vars = C.config.get_plugin_vars(plugin_type, plugin._load_name) options = {} for k in 
option_vars: @@ -1092,6 +1084,15 @@ class TaskExecutor: pass # some plugins don't support all base flags self._play_context.prompt = self._connection.become.prompt + # deals with networking sub_plugins (network_cli/httpapi/netconf) + sub = getattr(self._connection, '_sub_plugin', None) + if sub is not None and sub.get('type') != 'external': + plugin_type = get_plugin_class(sub.get("obj")) + varnames.extend(self._set_plugin_options(plugin_type, variables, templar, task_keys)) + sub_conn = getattr(self._connection, 'ssh_type_conn', None) + if sub_conn is not None: + varnames.extend(self._set_plugin_options("ssh_type_conn", variables, templar, task_keys)) + return varnames def _get_action_handler(self, connection, templar): @@ -1154,7 +1155,7 @@ class TaskExecutor: return handler, module -def start_connection(play_context, variables, task_uuid): +def start_connection(play_context, options, task_uuid): ''' Starts the persistent connection ''' @@ -1200,7 +1201,7 @@ def start_connection(play_context, variables, task_uuid): try: termios.tcsetattr(master, termios.TCSANOW, new) - write_to_file_descriptor(master, variables) + write_to_file_descriptor(master, options) write_to_file_descriptor(master, play_context.serialize()) (stdout, stderr) = p.communicate() diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index 220285ad..9812bcad 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -180,7 +180,7 @@ class CollectionVerifyResult: def verify_local_collection( local_collection, remote_collection, artifacts_manager, -): # type: (Candidate, Candidate | None, ConcreteArtifactsManager) -> CollectionVerifyResult +): # type: (Candidate, t.Optional[Candidate], ConcreteArtifactsManager) -> CollectionVerifyResult """Verify integrity of the locally installed collection. :param local_collection: Collection being checked. 
diff --git a/lib/ansible/galaxy/collection/concrete_artifact_manager.py b/lib/ansible/galaxy/collection/concrete_artifact_manager.py index 1ef7eed2..4115aeed 100644 --- a/lib/ansible/galaxy/collection/concrete_artifact_manager.py +++ b/lib/ansible/galaxy/collection/concrete_artifact_manager.py @@ -100,7 +100,7 @@ class ConcreteArtifactsManager: self._require_build_metadata = value def get_galaxy_artifact_source_info(self, collection): - # type: (Candidate) -> dict[str, str | list[dict[str, str]]] + # type: (Candidate) -> dict[str, t.Union[str, list[dict[str, str]]]] server = collection.src.api_server try: @@ -124,7 +124,7 @@ class ConcreteArtifactsManager: } def get_galaxy_artifact_path(self, collection): - # type: (Candidate | Requirement) -> bytes + # type: (t.Union[Candidate, Requirement]) -> bytes """Given a Galaxy-stored collection, return a cached path. If it's not yet on disk, this method downloads the artifact first. @@ -184,7 +184,7 @@ class ConcreteArtifactsManager: return b_artifact_path def get_artifact_path(self, collection): - # type: (Candidate | Requirement) -> bytes + # type: (t.Union[Candidate, Requirement]) -> bytes """Given a concrete collection pointer, return a cached path. If it's not yet on disk, this method downloads the artifact first. @@ -249,15 +249,15 @@ class ConcreteArtifactsManager: return b_artifact_path def _get_direct_collection_namespace(self, collection): - # type: (Candidate) -> str | None + # type: (Candidate) -> t.Optional[str] return self.get_direct_collection_meta(collection)['namespace'] # type: ignore[return-value] def _get_direct_collection_name(self, collection): - # type: (Candidate) -> str | None + # type: (Candidate) -> t.Optional[str] return self.get_direct_collection_meta(collection)['name'] # type: ignore[return-value] def get_direct_collection_fqcn(self, collection): - # type: (Candidate) -> str | None + # type: (Candidate) -> t.Optional[str] """Extract FQCN from the given on-disk collection artifact. 
If the collection is virtual, ``None`` is returned instead @@ -273,17 +273,17 @@ class ConcreteArtifactsManager: )) def get_direct_collection_version(self, collection): - # type: (Candidate | Requirement) -> str + # type: (t.Union[Candidate, Requirement]) -> str """Extract version from the given on-disk collection artifact.""" return self.get_direct_collection_meta(collection)['version'] # type: ignore[return-value] def get_direct_collection_dependencies(self, collection): - # type: (Candidate | Requirement) -> dict[str, str] + # type: (t.Union[Candidate, Requirement]) -> dict[str, str] """Extract deps from the given on-disk collection artifact.""" return self.get_direct_collection_meta(collection)['dependencies'] # type: ignore[return-value] def get_direct_collection_meta(self, collection): - # type: (Candidate | Requirement) -> dict[str, str | dict[str, str] | list[str] | None] + # type: (t.Union[Candidate, Requirement]) -> dict[str, t.Union[str, dict[str, str], list[str], None]] """Extract meta from the given on-disk collection artifact.""" try: # FIXME: use unique collection identifier as a cache key? 
return self._artifact_meta_cache[collection.src] @@ -457,7 +457,7 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path): # FIXME: use random subdirs while preserving the file names def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60): - # type: (str, bytes, str | None, bool, GalaxyToken, int) -> bytes + # type: (str, bytes, t.Optional[str], bool, GalaxyToken, int) -> bytes # ^ NOTE: used in download and verify_collections ^ b_tarball_name = to_bytes( url.rsplit('/', 1)[1], errors='surrogate_or_strict', @@ -513,15 +513,15 @@ def _consume_file(read_from, write_to=None): def _normalize_galaxy_yml_manifest( - galaxy_yml, # type: dict[str, str | list[str] | dict[str, str] | None] + galaxy_yml, # type: dict[str, t.Union[str, list[str], dict[str, str], None]] b_galaxy_yml_path, # type: bytes require_build_metadata=True, # type: bool ): - # type: (...) -> dict[str, str | list[str] | dict[str, str] | None] + # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]] galaxy_yml_schema = ( get_collections_galaxy_meta_info() ) # type: list[dict[str, t.Any]] # FIXME: <-- - # FIXME: 👆maybe precise type: list[dict[str, bool | str | list[str]]] + # FIXME: 👆maybe precise type: list[dict[str, t.Union[bool, str, list[str]]]] mandatory_keys = set() string_keys = set() # type: set[str] @@ -588,7 +588,7 @@ def _normalize_galaxy_yml_manifest( def _get_meta_from_dir( b_path, # type: bytes require_build_metadata=True, # type: bool -): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None] +): # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]] try: return _get_meta_from_installed_dir(b_path) except LookupError: @@ -598,7 +598,7 @@ def _get_meta_from_dir( def _get_meta_from_src_dir( b_path, # type: bytes require_build_metadata=True, # type: bool -): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None] +): # type: (...) 
-> dict[str, t.Union[str, list[str], dict[str, str], None]] galaxy_yml = os.path.join(b_path, _GALAXY_YAML) if not os.path.isfile(galaxy_yml): raise LookupError( @@ -667,7 +667,7 @@ def _get_json_from_installed_dir( def _get_meta_from_installed_dir( b_path, # type: bytes -): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None] +): # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]] manifest = _get_json_from_installed_dir(b_path, MANIFEST_FILENAME) collection_info = manifest['collection_info'] @@ -688,7 +688,7 @@ def _get_meta_from_installed_dir( def _get_meta_from_tar( b_path, # type: bytes -): # type: (...) -> dict[str, str | list[str] | dict[str, str] | None] +): # type: (...) -> dict[str, t.Union[str, list[str], dict[str, str], None]] if not tarfile.is_tarfile(b_path): raise AnsibleError( "Collection artifact at '{path!s}' is not a valid tar file.". @@ -736,7 +736,7 @@ def _tarfile_extract( tar, # type: tarfile.TarFile member, # type: tarfile.TarInfo ): - # type: (...) -> t.Iterator[tuple[tarfile.TarInfo, t.IO[bytes] | None]] + # type: (...) 
-> t.Iterator[tuple[tarfile.TarInfo, t.Optional[t.IO[bytes]]]] tar_obj = tar.extractfile(member) try: yield member, tar_obj diff --git a/lib/ansible/galaxy/collection/gpg.py b/lib/ansible/galaxy/collection/gpg.py index f684de16..8641f0d7 100644 --- a/lib/ansible/galaxy/collection/gpg.py +++ b/lib/ansible/galaxy/collection/gpg.py @@ -25,7 +25,7 @@ IS_PY310_PLUS = sys.version_info[:2] >= (3, 10) frozen_dataclass = partial(dataclass, frozen=True, **({'slots': True} if IS_PY310_PLUS else {})) -def get_signature_from_source(source, display=None): # type: (str, Display | None) -> str +def get_signature_from_source(source, display=None): # type: (str, t.Optional[Display]) -> str if display is not None: display.vvvv(f"Using signature at {source}") try: diff --git a/lib/ansible/galaxy/dependency_resolution/providers.py b/lib/ansible/galaxy/dependency_resolution/providers.py index 947fae61..ccb56a9d 100644 --- a/lib/ansible/galaxy/dependency_resolution/providers.py +++ b/lib/ansible/galaxy/dependency_resolution/providers.py @@ -169,7 +169,7 @@ class CollectionDependencyProviderBase(AbstractProvider): return False def identify(self, requirement_or_candidate): - # type: (Candidate | Requirement) -> str + # type: (t.Union[Candidate, Requirement]) -> str """Given requirement or candidate, return an identifier for it. This is used to identify a requirement or candidate, e.g. @@ -181,7 +181,7 @@ class CollectionDependencyProviderBase(AbstractProvider): return requirement_or_candidate.canonical_package_id def get_preference(self, *args, **kwargs): - # type: (t.Any, t.Any) -> float | int + # type: (t.Any, t.Any) -> t.Union[float, int] """Return sort key function return value for given requirement. 
This result should be based on preference that is defined as diff --git a/lib/ansible/module_utils/ansible_release.py b/lib/ansible/module_utils/ansible_release.py index d8f1207f..b425660a 100644 --- a/lib/ansible/module_utils/ansible_release.py +++ b/lib/ansible/module_utils/ansible_release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.13.3' +__version__ = '2.13.4' __author__ = 'Ansible, Inc.' __codename__ = "Nobody's Fault but Mine" diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py index 7e7ba225..52c60dd9 100644 --- a/lib/ansible/module_utils/urls.py +++ b/lib/ansible/module_utils/urls.py @@ -114,19 +114,19 @@ try: HAS_URLLIB3_SSL_WRAP_SOCKET = False try: from urllib3.contrib.pyopenssl import PyOpenSSLContext - except ImportError: + except Exception: from requests.packages.urllib3.contrib.pyopenssl import PyOpenSSLContext HAS_URLLIB3_PYOPENSSLCONTEXT = True -except ImportError: +except Exception: # urllib3<1.15,>=1.6 HAS_URLLIB3_PYOPENSSLCONTEXT = False try: try: from urllib3.contrib.pyopenssl import ssl_wrap_socket - except ImportError: + except Exception: from requests.packages.urllib3.contrib.pyopenssl import ssl_wrap_socket HAS_URLLIB3_SSL_WRAP_SOCKET = True - except ImportError: + except Exception: pass # Select a protocol that includes all secure tls protocols diff --git a/lib/ansible/modules/apt.py b/lib/ansible/modules/apt.py index eb9479d9..b254fb03 100644 --- a/lib/ansible/modules/apt.py +++ b/lib/ansible/modules/apt.py @@ -210,6 +210,8 @@ notes: (If you typo C(foo) as C(fo) apt-get would install packages that have "fo" in their name with a warning and a prompt for the user. 
Since we don't have warnings and prompts before installing we disallow this.Use an explicit fnmatch pattern if you want wildcarding) - When used with a C(loop:) each package will be processed individually, it is much more efficient to pass the list directly to the I(name) option. + - When C(default_release) is used, an implicit priority of 990 is used. This is the same behavior as C(apt-get -t). + - When an exact version is specified, an implicit priority of 1001 is used. ''' EXAMPLES = ''' @@ -485,12 +487,17 @@ def package_version_compare(version, other_version): def package_best_match(pkgname, version_cmp, version, release, cache): policy = apt_pkg.Policy(cache) + + policy.read_pinfile(apt_pkg.config.find_file("Dir::Etc::preferences")) + policy.read_pindir(apt_pkg.config.find_file("Dir::Etc::preferencesparts")) + if release: # 990 is the priority used in `apt-get -t` policy.create_pin('Release', pkgname, release, 990) if version_cmp == "=": - # You can't pin to a minimum version, only equality with a glob - policy.create_pin('Version', pkgname, version, 991) + # Installing a specific version from command line overrides all pinning + # We don't mimmic this exactly, but instead set a priority which is higher than all APT built-in pin priorities. + policy.create_pin('Version', pkgname, version, 1001) pkg = cache[pkgname] pkgver = policy.get_candidate_ver(pkg) if not pkgver: @@ -503,6 +510,14 @@ def package_best_match(pkgname, version_cmp, version, release, cache): def package_status(m, pkgname, version_cmp, version, default_release, cache, state): + """ + :return: A tuple of (installed, installed_version, version_installable, has_files). *installed* indicates whether + the package (regardless of version) is installed. *installed_version* indicates whether the installed package + matches the provided version criteria. *version_installable* provides the latest matching version that can be + installed. 
In the case of virtual packages where we can't determine an applicable match, True is returned. + *has_files* indicates whether the package has files on the filesystem (even if not installed, meaning a purge is + required). + """ try: # get the package from the cache, as well as the # low-level apt_pkg.Package object which contains @@ -527,15 +542,15 @@ def package_status(m, pkgname, version_cmp, version, default_release, cache, sta # Otherwise return nothing so apt will sort out # what package to satisfy this with - return False, False, None, False + return False, False, True, False m.fail_json(msg="No package matching '%s' is available" % pkgname) except AttributeError: # python-apt version too old to detect virtual packages # mark as not installed and let apt-get install deal with it - return False, False, None, False + return False, False, True, False else: - return False, False, False, False + return False, False, None, False try: has_files = len(pkg.installed_files) > 0 except UnicodeDecodeError: @@ -565,13 +580,16 @@ def package_status(m, pkgname, version_cmp, version, default_release, cache, sta if version_cmp == "=": # check if the version is matched as well version_is_installed = fnmatch.fnmatch(installed_version, version) + if version_best and installed_version != version_best and fnmatch.fnmatch(version_best, version): + version_installable = version_best elif version_cmp == ">=": version_is_installed = apt_pkg.version_compare(installed_version, version) >= 0 + if version_best and installed_version != version_best and apt_pkg.version_compare(version_best, version) >= 0: + version_installable = version_best else: version_is_installed = True - - if installed_version != version_best: - version_installable = version_best + if version_best and installed_version != version_best: + version_installable = version_best else: version_installable = version_best @@ -692,23 +710,27 @@ def install(m, pkgspec, cache, upgrade=False, default_release=None, name, 
version_cmp, version = package_split(package) package_names.append(name) installed, installed_version, version_installable, has_files = package_status(m, name, version_cmp, version, default_release, cache, state='install') - if (not installed and not only_upgrade) or (installed and not installed_version) or (upgrade and version_installable): - if version_installable or version: - pkg_list.append("'%s=%s'" % (name, version_installable or version)) + + if (not installed_version and not version_installable) or (not installed and only_upgrade): + status = False + data = dict(msg="no available installation candidate for %s" % package) + return (status, data) + + if version_installable and ((not installed and not only_upgrade) or upgrade or not installed_version): + if version_installable is not True: + pkg_list.append("'%s=%s'" % (name, version_installable)) + elif version: + pkg_list.append("'%s=%s'" % (name, version)) else: pkg_list.append("'%s'" % name) elif installed_version and version_installable and version_cmp == "=": # This happens when the package is installed, a newer version is # available, and the version is a wildcard that matches both # - # We do not apply the upgrade flag because we cannot specify both - # a version and state=latest. (This behaviour mirrors how apt - # treats a version with wildcard in the package) - # # This is legacy behavior, and isn't documented (in fact it does # things documentations says it shouldn't). It should not be relied # upon. 
- pkg_list.append("'%s=%s'" % (name, version_installable)) + pkg_list.append("'%s=%s'" % (name, version)) packages = ' '.join(pkg_list) if packages: @@ -787,7 +809,7 @@ def install(m, pkgspec, cache, upgrade=False, default_release=None, status = True data = dict(changed=False) - if not build_dep: + if not build_dep and not m.check_mode: mark_installed_manually(m, package_names) return (status, data) @@ -1310,7 +1332,8 @@ def main(): for retry in range(update_cache_retries): try: - cache.update() + if not module.check_mode: + cache.update() break except apt.cache.FetchFailedException as e: err = to_native(e) @@ -1325,7 +1348,7 @@ def main(): cache.open(progress=None) mtimestamp, post_cache_update_time = get_updated_cache_time() - if updated_cache_time != post_cache_update_time: + if module.check_mode or updated_cache_time != post_cache_update_time: updated_cache = True updated_cache_time = post_cache_update_time diff --git a/lib/ansible/modules/apt_repository.py b/lib/ansible/modules/apt_repository.py index ec5a5273..1a111647 100644 --- a/lib/ansible/modules/apt_repository.py +++ b/lib/ansible/modules/apt_repository.py @@ -99,6 +99,7 @@ version_added: "0.7" requirements: - python-apt (python 2) - python3-apt (python 3) + - apt-key or gpg ''' EXAMPLES = ''' diff --git a/lib/ansible/modules/dnf.py b/lib/ansible/modules/dnf.py index 06004301..e8d910ca 100644 --- a/lib/ansible/modules/dnf.py +++ b/lib/ansible/modules/dnf.py @@ -36,7 +36,8 @@ options: list: description: - - Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples. + - Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. + Use M(ansible.builtin.package_facts) instead of the C(list) argument as a best practice. 
type: str state: diff --git a/lib/ansible/modules/expect.py b/lib/ansible/modules/expect.py index 89459225..99ffe9f2 100644 --- a/lib/ansible/modules/expect.py +++ b/lib/ansible/modules/expect.py @@ -90,7 +90,7 @@ author: "Matt Martz (@sivel)" EXAMPLES = r''' - name: Case insensitive password string match ansible.builtin.expect: - ansible.builtin.command: passwd username + command: passwd username responses: (?i)password: "MySekretPa$$word" # you don't want to show passwords in your logs @@ -98,7 +98,7 @@ EXAMPLES = r''' - name: Generic question with multiple different responses ansible.builtin.expect: - ansible.builtin.command: /path/to/custom/command + command: /path/to/custom/command responses: Question: - response1 diff --git a/lib/ansible/modules/meta.py b/lib/ansible/modules/meta.py index b0376b62..1b062c98 100644 --- a/lib/ansible/modules/meta.py +++ b/lib/ansible/modules/meta.py @@ -93,7 +93,7 @@ EXAMPLES = r''' - name: Refresh inventory to ensure new instances exist in inventory ansible.builtin.meta: refresh_inventory -# Example showing how to clear all existing facts of targetted hosts +# Example showing how to clear all existing facts of targeted hosts - name: Clear gathered facts from all currently targeted hosts ansible.builtin.meta: clear_facts diff --git a/lib/ansible/modules/pip.py b/lib/ansible/modules/pip.py index 135af83c..faadf65a 100644 --- a/lib/ansible/modules/pip.py +++ b/lib/ansible/modules/pip.py @@ -243,7 +243,7 @@ cmd: type: str sample: pip2 install ansible six name: - description: list of python modules targetted by pip + description: list of python modules targeted by pip returned: success type: list sample: ['ansible', 'six'] diff --git a/lib/ansible/modules/replace.py b/lib/ansible/modules/replace.py index 6dfd6779..4b8f74f5 100644 --- a/lib/ansible/modules/replace.py +++ b/lib/ansible/modules/replace.py @@ -110,7 +110,7 @@ notes: ''' EXAMPLES = r''' -- name: Before Ansible 2.3, option 'dest', 'destfile' or 'name' was used instead of 
'path' +- name: Replace old hostname with new hostname (requires Ansible >= 2.4) ansible.builtin.replace: path: /etc/hosts regexp: '(\s+)old\.host\.name(\s+.*)?$' diff --git a/lib/ansible/modules/stat.py b/lib/ansible/modules/stat.py index ff0f6712..918b588f 100644 --- a/lib/ansible/modules/stat.py +++ b/lib/ansible/modules/stat.py @@ -129,12 +129,12 @@ EXAMPLES = r''' msg: "Path exists and is a directory" when: p.stat.isdir is defined and p.stat.isdir -- name: Don not do checksum +- name: Do not calculate the checksum ansible.builtin.stat: path: /path/to/myhugefile get_checksum: no -- name: Use sha256 to calculate checksum +- name: Use sha256 to calculate the checksum ansible.builtin.stat: path: /path/to/something checksum_algorithm: sha256 diff --git a/lib/ansible/modules/wait_for.py b/lib/ansible/modules/wait_for.py index 2244f357..ada2e80b 100644 --- a/lib/ansible/modules/wait_for.py +++ b/lib/ansible/modules/wait_for.py @@ -224,9 +224,11 @@ match_groupdict: ''' import binascii +import contextlib import datetime import errno import math +import mmap import os import re import select @@ -236,7 +238,7 @@ import traceback from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible.module_utils.common.sys_info import get_platform_subclass -from ansible.module_utils._text import to_native +from ansible.module_utils._text import to_bytes HAS_PSUTIL = False @@ -496,14 +498,22 @@ def main(): delay = module.params['delay'] port = module.params['port'] state = module.params['state'] + path = module.params['path'] + b_path = to_bytes(path, errors='surrogate_or_strict', nonstring='passthru') + search_regex = module.params['search_regex'] + b_search_regex = to_bytes(search_regex, errors='surrogate_or_strict', nonstring='passthru') + msg = module.params['msg'] if search_regex is not None: - compiled_search_re = re.compile(search_regex, re.MULTILINE) + try: + b_compiled_search_re = re.compile(b_search_regex, re.MULTILINE) + except re.error as e: + 
module.fail_json(msg="Invalid regular expression: %s" % e) else: - compiled_search_re = None + b_compiled_search_re = None match_groupdict = {} match_groups = () @@ -536,7 +546,7 @@ def main(): while datetime.datetime.utcnow() < end: if path: try: - if not os.access(path, os.F_OK): + if not os.access(b_path, os.F_OK): break except IOError: break @@ -562,7 +572,7 @@ def main(): while datetime.datetime.utcnow() < end: if path: try: - os.stat(path) + os.stat(b_path) except OSError as e: # If anything except file not present, throw an error if e.errno != 2: @@ -571,22 +581,20 @@ def main(): # file doesn't exist yet, so continue else: # File exists. Are there additional things to check? - if not compiled_search_re: + if not b_compiled_search_re: # nope, succeed! break try: - f = open(path) - try: - search = re.search(compiled_search_re, f.read()) - if search: - if search.groupdict(): - match_groupdict = search.groupdict() - if search.groups(): - match_groups = search.groups() - - break - finally: - f.close() + with open(b_path, 'rb') as f: + with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as mm: + search = b_compiled_search_re.search(mm) + if search: + if search.groupdict(): + match_groupdict = search.groupdict() + if search.groups(): + match_groups = search.groups() + + break except IOError: pass elif port: @@ -598,8 +606,8 @@ def main(): pass else: # Connected -- are there additional conditions? 
- if compiled_search_re: - data = '' + if b_compiled_search_re: + b_data = b'' matched = False while datetime.datetime.utcnow() < end: max_timeout = math.ceil(_timedelta_total_seconds(end - datetime.datetime.utcnow())) @@ -612,8 +620,8 @@ def main(): if not response: # Server shutdown break - data += to_native(response, errors='surrogate_or_strict') - if re.search(compiled_search_re, data): + b_data += response + if b_compiled_search_re.search(b_data): matched = True break diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py index c3e20d40..8e75adfa 100644 --- a/lib/ansible/plugins/connection/paramiko_ssh.py +++ b/lib/ansible/plugins/connection/paramiko_ssh.py @@ -59,14 +59,14 @@ DOCUMENTATION = """ - name: ansible_paramiko_password version_added: '2.5' host_key_auto_add: - description: 'TODO: write it' + description: 'Automatically add host keys' env: [{name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD}] ini: - {key: host_key_auto_add, section: paramiko_connection} type: boolean look_for_keys: default: True - description: 'TODO: write it' + description: 'False to disable searching for private key files in ~/.ssh/' env: [{name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS}] ini: - {key: look_for_keys, section: paramiko_connection} @@ -81,7 +81,7 @@ DOCUMENTATION = """ - {key: proxy_command, section: paramiko_connection} pty: default: True - description: 'TODO: write it' + description: 'SUDO usually requires a PTY, True to give a PTY and False to not give a PTY.' 
env: - name: ANSIBLE_PARAMIKO_PTY ini: @@ -90,7 +90,7 @@ DOCUMENTATION = """ type: boolean record_host_keys: default: True - description: 'TODO: write it' + description: 'Save the host keys to a file' env: [{name: ANSIBLE_PARAMIKO_RECORD_HOST_KEYS}] ini: - section: paramiko_connection diff --git a/lib/ansible/release.py b/lib/ansible/release.py index d8f1207f..b425660a 100644 --- a/lib/ansible/release.py +++ b/lib/ansible/release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.13.3' +__version__ = '2.13.4' __author__ = 'Ansible, Inc.' __codename__ = "Nobody's Fault but Mine" diff --git a/lib/ansible_core.egg-info/PKG-INFO b/lib/ansible_core.egg-info/PKG-INFO index b34bb39c..056a27c6 100644 --- a/lib/ansible_core.egg-info/PKG-INFO +++ b/lib/ansible_core.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: ansible-core -Version: 2.13.3 +Version: 2.13.4 Summary: Radically simple IT automation Home-page: https://ansible.com/ Author: Ansible, Inc. 
diff --git a/lib/ansible_core.egg-info/SOURCES.txt b/lib/ansible_core.egg-info/SOURCES.txt index de04d8a3..a845de8b 100644 --- a/lib/ansible_core.egg-info/SOURCES.txt +++ b/lib/ansible_core.egg-info/SOURCES.txt @@ -554,6 +554,7 @@ docs/docsite/sphinx_conf/2.10_conf.py docs/docsite/sphinx_conf/all_conf.py docs/docsite/sphinx_conf/ansible_conf.py docs/docsite/sphinx_conf/core_conf.py +docs/docsite/sphinx_conf/core_lang_conf.py docs/man/.gitignore docs/man/man1/ansible-config.1 docs/man/man1/ansible-console.1 @@ -1395,6 +1396,12 @@ test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/vendored test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/tests/integration/targets/no-tty/aliases test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/tests/integration/targets/no-tty/assert-no-tty.py test/integration/targets/ansible-test-no-tty/ansible_collections/ns/col/tests/integration/targets/no-tty/runme.sh +test/integration/targets/ansible-test-sanity-ansible-doc/aliases +test/integration/targets/ansible-test-sanity-ansible-doc/runme.sh +test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/lookup1.py +test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/a/b/lookup2.py +test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/module1.py +test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/a/b/module2.py test/integration/targets/ansible-test-sanity-lint/aliases test/integration/targets/ansible-test-sanity-lint/expected.txt test/integration/targets/ansible-test-sanity-lint/runme.sh @@ -3936,6 +3943,7 @@ test/integration/targets/var_templating/group_vars/all.yml test/integration/targets/var_templating/vars/connection.yml test/integration/targets/wait_for/aliases test/integration/targets/wait_for/files/testserver.py 
+test/integration/targets/wait_for/files/write_utf16.py test/integration/targets/wait_for/files/zombie.py test/integration/targets/wait_for/meta/main.yml test/integration/targets/wait_for/tasks/main.yml @@ -4617,6 +4625,7 @@ test/units/galaxy/test_role_install.py test/units/galaxy/test_role_requirements.py test/units/galaxy/test_token.py test/units/galaxy/test_user_agent.py +test/units/inventory/__init__.py test/units/inventory/test_group.py test/units/inventory/test_host.py test/units/inventory_test_data/group_vars/noparse/all.yml~ diff --git a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py index 58ec9b2a..35b18dec 100644 --- a/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py +++ b/test/integration/targets/ansible-galaxy-collection/library/setup_collections.py @@ -87,6 +87,10 @@ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_bytes from functools import partial from multiprocessing import dummy as threading +from multiprocessing import TimeoutError + + +COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT = 120 def publish_collection(module, collection): @@ -241,7 +245,14 @@ def run_module(): pool = threading.Pool(4) publish_func = partial(publish_collection, module) - result['results'] = pool.map(publish_func, module.params['collections']) + try: + result['results'] = pool.map_async( + publish_func, module.params['collections'], + ).get(timeout=COLLECTIONS_BUILD_AND_PUBLISH_TIMEOUT) + except TimeoutError as timeout_err: + module.fail_json( + 'Timed out waiting for collections to be provisioned.', + ) failed = bool(sum( r['build']['rc'] + r['publish']['rc'] for r in result['results'] diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml index 0068e76d..a55b64d8 100644 --- 
a/test/integration/targets/ansible-galaxy-collection/tasks/install.yml +++ b/test/integration/targets/ansible-galaxy-collection/tasks/install.yml @@ -423,6 +423,15 @@ - (install_req_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0' - (install_req_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0' +- name: Test deviations on -r and --role-file without collection or role sub command + command: '{{ cmd }}' + loop: + - ansible-galaxy install -vr '{{ galaxy_dir }}/ansible_collections/requirements.yaml' -s '{{ test_name }}' -vv + - ansible-galaxy install --role-file '{{ galaxy_dir }}/ansible_collections/requirements.yaml' -s '{{ test_name }}' -vvv + - ansible-galaxy install --role-file='{{ galaxy_dir }}/ansible_collections/requirements.yaml' -s '{{ test_name }}' -vvv + loop_control: + loop_var: cmd + - name: uninstall collections for next requirements file test file: path: '{{ galaxy_dir }}/ansible_collections/{{ collection }}/name' diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/aliases b/test/integration/targets/ansible-test-sanity-ansible-doc/aliases new file mode 100644 index 00000000..7741d444 --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/aliases @@ -0,0 +1,4 @@ +shippable/posix/group3 # runs in the distro test containers +shippable/generic/group1 # runs in the default test container +context/controller +needs/target/collection diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/a/b/lookup2.py b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/a/b/lookup2.py new file mode 100644 index 00000000..5cd2cf0f --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/a/b/lookup2.py @@ -0,0 +1,28 @@ +# GNU General Public License v3.0+ (see COPYING or 
https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: lookup2 + author: Ansible Core Team + short_description: hello test lookup + description: + - Hello test lookup. + options: {} +""" + +EXAMPLES = """ +- minimal: +""" + +RETURN = """ +""" + +from ansible.plugins.lookup import LookupBase + + +class LookupModule(LookupBase): + + def run(self, terms, variables=None, **kwargs): + return [] diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/lookup1.py b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/lookup1.py new file mode 100644 index 00000000..e274f19f --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/lookup/lookup1.py @@ -0,0 +1,28 @@ +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +DOCUMENTATION = """ + name: lookup1 + author: Ansible Core Team + short_description: hello test lookup + description: + - Hello test lookup. 
+ options: {} +""" + +EXAMPLES = """ +- minimal: +""" + +RETURN = """ +""" + +from ansible.plugins.lookup import LookupBase + + +class LookupModule(LookupBase): + + def run(self, terms, variables=None, **kwargs): + return [] diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/a/b/module2.py b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/a/b/module2.py new file mode 100644 index 00000000..6fafa193 --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/a/b/module2.py @@ -0,0 +1,34 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = ''' +module: module2 +short_description: Hello test module +description: Hello test module. +options: {} +author: + - Ansible Core Team +''' + +EXAMPLES = ''' +- minimal: +''' + +RETURN = '''''' + +from ansible.module_utils.basic import AnsibleModule + + +def main(): + module = AnsibleModule( + argument_spec={}, + ) + + module.exit_json() + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/module1.py b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/module1.py new file mode 100644 index 00000000..8847f5b8 --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/ansible_collections/ns/col/plugins/modules/module1.py @@ -0,0 +1,34 @@ +#!/usr/bin/python +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = ''' +module: module1 +short_description: Hello test module +description: 
Hello test module. +options: {} +author: + - Ansible Core Team +''' + +EXAMPLES = ''' +- minimal: +''' + +RETURN = '''''' + +from ansible.module_utils.basic import AnsibleModule + + +def main(): + module = AnsibleModule( + argument_spec={}, + ) + + module.exit_json() + + +if __name__ == '__main__': + main() diff --git a/test/integration/targets/ansible-test-sanity-ansible-doc/runme.sh b/test/integration/targets/ansible-test-sanity-ansible-doc/runme.sh new file mode 100755 index 00000000..ee1a8823 --- /dev/null +++ b/test/integration/targets/ansible-test-sanity-ansible-doc/runme.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -eu + +source ../collection/setup.sh + +set -x + +ansible-test sanity --test ansible-doc --color "${@}" diff --git a/test/integration/targets/apt/tasks/apt.yml b/test/integration/targets/apt/tasks/apt.yml index 5b1a24a3..d273eda7 100644 --- a/test/integration/targets/apt/tasks/apt.yml +++ b/test/integration/targets/apt/tasks/apt.yml @@ -83,12 +83,26 @@ - apt_install_fnmatch is changed - apt_uninstall_fnmatch is changed +- name: Test update_cache 1 (check mode) + apt: + update_cache: true + cache_valid_time: 10 + register: apt_update_cache_1_check_mode + check_mode: true + - name: Test update_cache 1 apt: update_cache: true cache_valid_time: 10 register: apt_update_cache_1 +- name: Test update_cache 2 (check mode) + apt: + update_cache: true + cache_valid_time: 10 + register: apt_update_cache_2_check_mode + check_mode: true + - name: Test update_cache 2 apt: update_cache: true @@ -98,7 +112,9 @@ - name: verify update_cache assert: that: + - apt_update_cache_1_check_mode is changed - apt_update_cache_1 is changed + - apt_update_cache_2_check_mode is not changed - apt_update_cache_2 is not changed - name: uninstall apt bindings with apt again diff --git a/test/integration/targets/apt/tasks/repo.yml b/test/integration/targets/apt/tasks/repo.yml index 1705cb3e..e4e39aa3 100644 --- a/test/integration/targets/apt/tasks/repo.yml +++ 
b/test/integration/targets/apt/tasks/repo.yml @@ -40,6 +40,17 @@ state: absent allow_unauthenticated: yes +- name: Try to install non-existent version + apt: + name: foo=99 + state: present + ignore_errors: true + register: apt_result + +- name: Check if install failed + assert: + that: + - apt_result is failed # https://github.com/ansible/ansible/issues/30638 - block: @@ -56,6 +67,7 @@ assert: that: - "apt_result is not changed" + - "apt_result is failed" - apt: name: foo=1.0.0 @@ -122,12 +134,58 @@ - item.item.3 is none or "Inst foo [1.0.0] (" + item.item.3 + " localhost [all])" in item.stdout_lines loop: '{{ apt_result.results }}' + - name: Pin foo=1.0.0 + copy: + content: |- + Package: foo + Pin: version 1.0.0 + Pin-Priority: 1000 + dest: /etc/apt/preferences.d/foo + + - name: Run pinning version test matrix + apt: + name: foo{{ item.0 }} + default_release: '{{ item.1 }}' + state: '{{ item.2 | ternary("latest","present") }}' + check_mode: true + ignore_errors: true + register: apt_result + loop: + # [filter, release, state_latest, expected] # expected=null for no change. expected=False to assert an error + - ["", null, false, null] + - ["", null, true, null] + - ["=1.0.0", null, false, null] + - ["=1.0.0", null, true, null] + - ["=1.0.1", null, false, "1.0.1"] + #- ["=1.0.*", null, false, null] # legacy behavior. 
should not upgrade without state=latest + - ["=1.0.*", null, true, "1.0.1"] + - [">=1.0.0", null, false, null] + - [">=1.0.0", null, true, null] + - [">=1.0.1", null, false, False] + - ["", "testing", false, null] + - ["", "testing", true, null] + - ["=2.0.0", null, false, "2.0.0"] + - [">=2.0.0", "testing", false, False] + + - name: Validate pinning version test matrix + assert: + that: + - (item.item.3 != False) or (item.item.3 == False and item is failed) + - (item.item.3 is string) == (item.stdout is defined) + - item.item.3 is not string or "Inst foo [1.0.0] (" + item.item.3 + " localhost [all])" in item.stdout_lines + loop: '{{ apt_result.results }}' + always: - name: Uninstall foo apt: name: foo state: absent + - name: Unpin foo + file: + path: /etc/apt/preferences.d/foo + state: absent + # https://github.com/ansible/ansible/issues/35900 - block: - name: Disable ubuntu repos so system packages are not upgraded and do not change testing env @@ -138,6 +196,42 @@ name: foobar=1.0.0 allow_unauthenticated: yes + - name: mark foobar as auto for next test + shell: apt-mark auto foobar + + - name: Install foobar (marked as manual) (check mode) + apt: + name: foobar=1.0.1 + allow_unauthenticated: yes + check_mode: yes + register: manual_foobar_install_check_mode + + - name: check foobar was not marked as manually installed by check mode + shell: apt-mark showmanual | grep foobar + ignore_errors: yes + register: showmanual + + - assert: + that: + - manual_foobar_install_check_mode.changed + - "'foobar' not in showmanual.stdout" + + - name: Install foobar (marked as manual) + apt: + name: foobar=1.0.1 + allow_unauthenticated: yes + register: manual_foobar_install + + - name: check foobar was marked as manually installed + shell: apt-mark showmanual | grep foobar + ignore_errors: yes + register: showmanual + + - assert: + that: + - manual_foobar_install.changed + - "'foobar' in showmanual.stdout" + - name: Upgrade foobar to a version which does not depend on foo, 
autoremove should remove foo apt: upgrade: dist diff --git a/test/integration/targets/connection_paramiko_ssh/aliases b/test/integration/targets/connection_paramiko_ssh/aliases index aa7fd949..c5793217 100644 --- a/test/integration/targets/connection_paramiko_ssh/aliases +++ b/test/integration/targets/connection_paramiko_ssh/aliases @@ -2,4 +2,5 @@ needs/ssh shippable/posix/group3 needs/target/setup_paramiko needs/target/connection +skip/freebsd/13.0 # bcrypt 4+ requires a newer version of rust than is available destructive # potentially installs/uninstalls OS packages via setup_paramiko diff --git a/test/integration/targets/file/tasks/main.yml b/test/integration/targets/file/tasks/main.yml index a74cbc28..4ad19e06 100644 --- a/test/integration/targets/file/tasks/main.yml +++ b/test/integration/targets/file/tasks/main.yml @@ -151,8 +151,10 @@ attributes_supported: yes when: - attribute_A_set is success + - attribute_A_set.stdout_lines - "'A' in attribute_A_set.stdout_lines[0].split()[0]" - attribute_A_unset is success + - attribute_A_unset.stdout_lines - "'A' not in attribute_A_unset.stdout_lines[0].split()[0]" - name: explicitly set file attribute "A" diff --git a/test/integration/targets/incidental_inventory_aws_ec2/playbooks/setup.yml b/test/integration/targets/incidental_inventory_aws_ec2/playbooks/setup.yml index 8a9b8893..d12d300f 100644 --- a/test/integration/targets/incidental_inventory_aws_ec2/playbooks/setup.yml +++ b/test/integration/targets/incidental_inventory_aws_ec2/playbooks/setup.yml @@ -14,7 +14,7 @@ owner-id: '125523088429' virtualization-type: hvm root-device-type: ebs - name: 'Fedora-Atomic-27*' + name: 'Fedora-Cloud-Base-*' <<: *aws_connection_info register: fedora_images diff --git a/test/integration/targets/wait_for/files/write_utf16.py b/test/integration/targets/wait_for/files/write_utf16.py new file mode 100644 index 00000000..6079ed33 --- /dev/null +++ b/test/integration/targets/wait_for/files/write_utf16.py @@ -0,0 +1,20 @@ +from 
__future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +# utf16 encoded bytes +# to ensure wait_for doesn't have any encoding errors +data = ( + b'\xff\xfep\x00r\x00e\x00m\x00i\x00\xe8\x00r\x00e\x00 \x00i\x00s\x00 ' + b'\x00f\x00i\x00r\x00s\x00t\x00\n\x00p\x00r\x00e\x00m\x00i\x00e\x00' + b'\x00\x03r\x00e\x00 \x00i\x00s\x00 \x00s\x00l\x00i\x00g\x00h\x00t\x00' + b'l\x00y\x00 \x00d\x00i\x00f\x00f\x00e\x00r\x00e\x00n\x00t\x00\n\x00\x1a' + b'\x048\x04@\x048\x04;\x04;\x048\x04F\x040\x04 \x00i\x00s\x00 \x00C\x00y' + b'\x00r\x00i\x00l\x00l\x00i\x00c\x00\n\x00\x01\xd8\x00\xdc \x00a\x00m' + b'\x00 \x00D\x00e\x00s\x00e\x00r\x00e\x00t\x00\n\x00\n' + b'completed\n' +) + +with open(sys.argv[1], 'wb') as f: + f.write(data) diff --git a/test/integration/targets/wait_for/tasks/main.yml b/test/integration/targets/wait_for/tasks/main.yml index c524f990..f71ddbda 100644 --- a/test/integration/targets/wait_for/tasks/main.yml +++ b/test/integration/targets/wait_for/tasks/main.yml @@ -104,6 +104,16 @@ - waitfor['match_groupdict']['foo'] == 'data' - waitfor['match_groups'] == ['data', '123'] +- name: write non-ascii file + script: write_utf16.py "{{remote_tmp_dir}}/utf16.txt" + args: + executable: '{{ ansible_facts.python.executable }}' + +- name: test non-ascii file + wait_for: + path: "{{remote_tmp_dir}}/utf16.txt" + search_regex: completed + - name: test wait for port timeout wait_for: port: 12121 diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.in b/test/lib/ansible_test/_data/requirements/sanity.mypy.in index b7b82297..890caf30 100644 --- a/test/lib/ansible_test/_data/requirements/sanity.mypy.in +++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.in @@ -2,8 +2,9 @@ mypy[python2] packaging # type stubs not published separately types-backports types-jinja2 -types-paramiko +types-paramiko < 2.8.14 # newer versions drop support for Python 2.7 types-pyyaml < 6 # PyYAML 6+ stubs do not support Python 2.7 
+types-cryptography < 3.3.16 # newer versions drop support for Python 2.7 types-requests types-setuptools types-toml diff --git a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt index d4baf563..e448c907 100644 --- a/test/lib/ansible_test/_data/requirements/sanity.mypy.txt +++ b/test/lib/ansible_test/_data/requirements/sanity.mypy.txt @@ -1,10 +1,10 @@ # edit "sanity.mypy.in" and generate with: hacking/update-sanity-requirements.py --test mypy -mypy==0.931 +mypy==0.950 mypy-extensions==0.4.3 packaging==21.2 pyparsing==2.4.7 tomli==2.0.1 -typed-ast==1.5.2 +typed-ast==1.5.3 types-backports==0.1.3 types-cryptography==3.3.15 types-enum34==1.1.8 @@ -13,8 +13,8 @@ types-Jinja2==2.11.9 types-MarkupSafe==1.1.10 types-paramiko==2.8.13 types-PyYAML==5.4.12 -types-requests==2.27.10 -types-setuptools==57.4.9 -types-toml==0.10.4 -types-urllib3==1.26.9 +types-requests==2.27.25 +types-setuptools==57.4.14 +types-toml==0.10.6 +types-urllib3==1.26.14 typing-extensions==3.10.0.2 diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py index 7a7e918b..c599d36e 100644 --- a/test/lib/ansible_test/_internal/classification/__init__.py +++ b/test/lib/ansible_test/_internal/classification/__init__.py @@ -746,13 +746,13 @@ class PathMapper: if path.startswith('test/lib/ansible_test/_internal/commands/sanity/'): return { 'sanity': 'all', # test infrastructure, run all sanity checks - 'integration': 'ansible-test', # run ansible-test self tests + 'integration': 'ansible-test/', # run ansible-test self tests } if path.startswith('test/lib/ansible_test/_internal/commands/units/'): return { 'units': 'all', # test infrastructure, run all unit tests - 'integration': 'ansible-test', # run ansible-test self tests + 'integration': 'ansible-test/', # run ansible-test self tests } if path.startswith('test/lib/ansible_test/_data/requirements/'): @@ -776,13 +776,13 
@@ class PathMapper: if path.startswith('test/lib/ansible_test/_util/controller/sanity/') or path.startswith('test/lib/ansible_test/_util/target/sanity/'): return { 'sanity': 'all', # test infrastructure, run all sanity checks - 'integration': 'ansible-test', # run ansible-test self tests + 'integration': 'ansible-test/', # run ansible-test self tests } if path.startswith('test/lib/ansible_test/_util/target/pytest/'): return { 'units': 'all', # test infrastructure, run all unit tests - 'integration': 'ansible-test', # run ansible-test self tests + 'integration': 'ansible-test/', # run ansible-test self tests } if path.startswith('test/lib/'): diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py index 0b421ed3..47ed1eb5 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py +++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py @@ -65,7 +65,6 @@ class AnsibleDocTest(SanitySingleVersion): paths = [target.path for target in targets.include] doc_targets = collections.defaultdict(list) # type: t.Dict[str, t.List[str]] - target_paths = collections.defaultdict(dict) # type: t.Dict[str, t.Dict[str, str]] remap_types = dict( modules='module', @@ -75,13 +74,15 @@ class AnsibleDocTest(SanitySingleVersion): plugin_type = remap_types.get(plugin_type, plugin_type) for plugin_file_path in [target.name for target in targets.include if is_subdir(target.path, plugin_path)]: - plugin_name = os.path.splitext(os.path.basename(plugin_file_path))[0] + plugin_parts = os.path.relpath(plugin_file_path, plugin_path).split(os.path.sep) + plugin_name = os.path.splitext(plugin_parts[-1])[0] if plugin_name.startswith('_'): plugin_name = plugin_name[1:] - doc_targets[plugin_type].append(data_context().content.prefix + plugin_name) - target_paths[plugin_type][data_context().content.prefix + plugin_name] = plugin_file_path + plugin_fqcn = data_context().content.prefix + 
'.'.join(plugin_parts[:-1] + [plugin_name]) + + doc_targets[plugin_type].append(plugin_fqcn) env = ansible_environment(args, color=False) error_messages = [] # type: t.List[SanityMessage] diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py index 5b83aa8b..fe664ddc 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py +++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py @@ -90,11 +90,22 @@ class MypyTest(SanityMultipleVersion): display.warning(f'Skipping sanity test "{self.name}" due to missing virtual environment support on Python {args.controller_python.version}.') return SanitySkipped(self.name, python.version) + # Temporary hack to make Python 3.8 a remote-only Python version since we'll be dropping controller support for it soon. + # This avoids having to change annotations or add ignores for issues that are specific to that version. + + change_version = '3.8' + + if change_version not in CONTROLLER_PYTHON_VERSIONS or change_version in REMOTE_ONLY_PYTHON_VERSIONS: + raise Exception(f'Remove this hack now that Python {change_version} is not supported by the controller.') + + controller_python_versions = tuple(version for version in CONTROLLER_PYTHON_VERSIONS if version != change_version) + remote_only_python_versions = REMOTE_ONLY_PYTHON_VERSIONS + (change_version,) + contexts = ( - MyPyContext('ansible-test', ['test/lib/ansible_test/_util/target/sanity/import/'], CONTROLLER_PYTHON_VERSIONS), - MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], CONTROLLER_PYTHON_VERSIONS), - MyPyContext('ansible-core', ['lib/ansible/'], CONTROLLER_PYTHON_VERSIONS), - MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], REMOTE_ONLY_PYTHON_VERSIONS), + MyPyContext('ansible-test', ['test/lib/ansible_test/_util/target/sanity/import/'], controller_python_versions), + MyPyContext('ansible-test', ['test/lib/ansible_test/_internal/'], 
controller_python_versions), + MyPyContext('ansible-core', ['lib/ansible/'], controller_python_versions), + MyPyContext('modules', ['lib/ansible/modules/', 'lib/ansible/module_utils/'], remote_only_python_versions), ) unfiltered_messages = [] # type: t.List[SanityMessage] diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py index 879a7944..3962b95f 100644 --- a/test/lib/ansible_test/_internal/target.py +++ b/test/lib/ansible_test/_internal/target.py @@ -611,6 +611,9 @@ class IntegrationTarget(CompletionTarget): groups += [a for a in static_aliases if a not in modules] groups += ['module/%s' % m for m in self.modules] + if data_context().content.is_ansible and (self.name == 'ansible-test' or self.name.startswith('ansible-test-')): + groups.append('ansible-test') + if not self.modules: groups.append('non_module') diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt index 0d2bb352..9f4b55d8 100644 --- a/test/sanity/ignore.txt +++ b/test/sanity/ignore.txt @@ -252,6 +252,7 @@ lib/ansible/module_utils/six/__init__.py mypy-2.7:assignment # vendored code lib/ansible/module_utils/six/__init__.py mypy-3.5:assignment # vendored code lib/ansible/module_utils/six/__init__.py mypy-3.6:assignment # vendored code lib/ansible/module_utils/six/__init__.py mypy-3.7:assignment # vendored code +lib/ansible/module_utils/six/__init__.py mypy-3.8:assignment # vendored code lib/ansible/module_utils/six/__init__.py mypy-2.7:misc # vendored code lib/ansible/module_utils/six/__init__.py mypy-3.5:misc # vendored code lib/ansible/module_utils/six/__init__.py mypy-3.6:misc # vendored code diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py index 003cedee..f7162978 100644 --- a/test/units/executor/test_task_executor.py +++ b/test/units/executor/test_task_executor.py @@ -334,6 +334,8 @@ class TestTaskExecutor(unittest.TestCase): mock_play_context.update_vars.return_value = None mock_connection = 
MagicMock() + mock_connection.force_persistence = False + mock_connection.supports_persistence = False mock_connection.set_host_overrides.return_value = None mock_connection._connect.return_value = None diff --git a/test/units/inventory/__init__.py b/test/units/inventory/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/test/units/inventory/__init__.py |