author    | Lee Garrett <lgarrett@rocketjump.eu> | 2023-03-01 21:05:21 +0100
committer | Lee Garrett <lgarrett@rocketjump.eu> | 2023-03-01 21:05:21 +0100
commit    | 0f96c948fb28e3219aeab42ed0e032333c2fbf6f (patch)
tree      | bdc97c091dbec31a12db80c61238c17781b19f26
parent    | b62c034bd2c546414aee34d17ceadf431dbf1f9d (diff)
parent    | 3cda7ad4dd15b514ff660905294b5b6330ecfb6f (diff)
download  | debian-ansible-core-0f96c948fb28e3219aeab42ed0e032333c2fbf6f.zip
Update upstream source from tag 'upstream/2.14.3'
Update to upstream version '2.14.3'
with Debian dir e768f30fc6732890a32da70483471016650390fd
153 files changed, 1337 insertions, 1369 deletions
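The commit message above matches the format that git-buildpackage writes when importing a new upstream release. The snippet below is a minimal, hedged sketch of that workflow, not taken from this repository's history; it assumes the usual ``gbp`` packaging layout with a working ``debian/watch`` entry for ansible-core.

```shell
# Sketch only: how a merge commit like this one is typically produced with
# git-buildpackage. Assumes a debian/ packaging branch and a debian/watch
# file that uscan can use to locate the 2.14.3 upstream tarball.
gbp import-orig --uscan    # download the upstream tarball and merge the 'upstream/2.14.3' tag

# Review the resulting merge commit (153 files changed):
git show --stat 0f96c948fb28e3219aeab42ed0e032333c2fbf6f
```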
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ansible-core
-Version: 2.14.2
+Version: 2.14.3
 Summary: Radically simple IT automation
 Home-page: https://ansible.com/
 Author: Ansible, Inc.
diff --git a/changelogs/CHANGELOG-v2.14.rst b/changelogs/CHANGELOG-v2.14.rst
index 16bc0ac0..d54fccbd 100644
--- a/changelogs/CHANGELOG-v2.14.rst
+++ b/changelogs/CHANGELOG-v2.14.rst
@@ -5,6 +5,35 @@ ansible-core 2.14 "C'mon Everybody" Release Notes
 .. contents:: Topics
 
 
+v2.14.3
+=======
+
+Release Summary
+---------------
+
+| Release Date: 2023-02-27
+| `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__
+
+
+Minor Changes
+-------------
+
+- Make using blocks as handlers a parser error (https://github.com/ansible/ansible/issues/79968)
+- ansible-test - Specify the configuration file location required by test plugins when the config file is not found. This resolves issue: https://github.com/ansible/ansible/issues/79411
+- ansible-test - Update error handling code to use Python 3.x constructs, avoiding direct use of ``errno``.
+- ansible-test acme test container - update version to update used Pebble version, underlying Python and Go base containers, and Python requirements (https://github.com/ansible/ansible/pull/79783).
+
+Bugfixes
+--------
+
+- Ansible.Basic.cs - Ignore compiler warning (reported as an error) when running under PowerShell 7.3.x.
+- Fix conditionally notifying ``include_tasks`` handlers when ``force_handlers`` is used (https://github.com/ansible/ansible/issues/79776)
+- TaskExecutor - don't ignore templated _raw_params that k=v parser failed to parse (https://github.com/ansible/ansible/issues/79862)
+- ansible-galaxy - fix installing collections in git repositories/directories which contain a MANIFEST.json file (https://github.com/ansible/ansible/issues/79796).
+- ansible-test - Support Podman 4.4.0+ by adding the ``SYS_CHROOT`` capability when running containers.
+- ansible-test - fix warning message about failing to run an image to include the image name
+- strategy plugins now correctly identify bad registered variables, even on skip.
+
 v2.14.2
 =======
 
diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index e6c58239..8331a9bc 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -1061,3 +1061,59 @@ releases:
     - v2.14.2rc1_summary.yaml
     - validate-module-ps-cmdlet.yml
     release_date: '2023-01-23'
+  2.14.3:
+    changes:
+      release_summary: '| Release Date: 2023-02-27
+
+        | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__
+
+        '
+    codename: C'mon Everybody
+    fragments:
+    - v2.14.3_summary.yaml
+    release_date: '2023-02-27'
+  2.14.3rc1:
+    changes:
+      bugfixes:
+      - Ansible.Basic.cs - Ignore compiler warning (reported as an error) when running
+        under PowerShell 7.3.x.
+      - Fix conditionally notifying ``include_tasks`` handlers when ``force_handlers``
+        is used (https://github.com/ansible/ansible/issues/79776)
+      - TaskExecutor - don't ignore templated _raw_params that k=v parser failed to
+        parse (https://github.com/ansible/ansible/issues/79862)
+      - ansible-galaxy - fix installing collections in git repositories/directories
+        which contain a MANIFEST.json file (https://github.com/ansible/ansible/issues/79796).
+      - ansible-test - Support Podman 4.4.0+ by adding the ``SYS_CHROOT`` capability
+        when running containers.
+      - ansible-test - fix warning message about failing to run an image to include
+        the image name
+      - strategy plugins now correctly identify bad registered variables, even on
+        skip.
+      minor_changes:
+      - Make using blocks as handlers a parser error (https://github.com/ansible/ansible/issues/79968)
+      - 'ansible-test - Specify the configuration file location required by test plugins
+        when the config file is not found. This resolves issue: https://github.com/ansible/ansible/issues/79411'
+      - ansible-test - Update error handling code to use Python 3.x constructs, avoiding
+        direct use of ``errno``.
+      - ansible-test acme test container - update version to update used Pebble version,
+        underlying Python and Go base containers, and Python requirements (https://github.com/ansible/ansible/pull/79783).
+      release_summary: '| Release Date: 2023-02-20
+
+        | `Porting Guide <https://docs.ansible.com/ansible/devel/porting_guides.html>`__
+
+        '
+    codename: C'mon Everybody
+    fragments:
+    - 79776-fix-force_handlers-cond-include.yml
+    - 79783-acme-test-container.yml
+    - 79862-fix-varargs.yml
+    - 79968-blocks-handlers-error.yml
+    - ansible-galaxy-install-git-src-manifest.yml
+    - ansible-test-errno.yml
+    - ansible-test-fix-warning-msg.yml
+    - ansible-test-podman-chroot.yml
+    - ansible-test-test-plugin-error-message.yml
+    - powershell-7.3-fix.yml
+    - strategy_badid_fix.yml
+    - v2.14.3rc1_summary.yaml
+    release_date: '2023-02-20'
diff --git a/docs/docsite/.templates/banner.html b/docs/docsite/.templates/banner.html
index 38815221..698870fe 100644
--- a/docs/docsite/.templates/banner.html
+++ b/docs/docsite/.templates/banner.html
@@ -20,7 +20,10 @@
      '<div id="latest_extra_banner_id" class="admonition important">' +
      '<br>' +
      '<p>' +
-     'Continue building on your automation knowledge, visit the <a href="https://events.experiences.redhat.com/widget/redhat/rhaf22/SessionCatalog2022">AnsibleFest content hub!</a> ' +
+     '<b>Experience AnsibleFest at Red Hat Summit</b>' +
+     '</p>' +
+     '<p>' +
+     'We\'re bringing AnsibleFest to Red Hat Summit in Boston, May 23-25. It\'s the best of Fest combined with Red Hat Summit for even more learning and technical engagement. <a href="https://www.redhat.com/en/summit/ansiblefest?intcmp=7013a0000034lvhAAA">Learn more.</a> ' +
      '</p>' +
      '<br>' +
diff --git a/docs/docsite/rst/community/contributor_path.rst b/docs/docsite/rst/community/contributor_path.rst
index de59be65..12450585 100644
--- a/docs/docsite/rst/community/contributor_path.rst
+++ b/docs/docsite/rst/community/contributor_path.rst
@@ -61,7 +61,15 @@ You can find some ideas on how you can contribute in :ref:`how_can_i_help`.
 
 If you are interested in contributing to collections, take a look at :ref:`collection contributions<collections_contributions>` and the `collection repository <https://github.com/ansible-collections/>`_'s ``README`` and ``CONTRIBUTING`` files. To make your first experience as smooth as possible, read the repository documentation carefully, then ask the repository maintainers for guidance if you have any questions.
 
-You can also look for GitHub issues labeled with the ``easyfix``, ``good_first_issue``, and ``docs`` labels. Add a comment on the GitHub issue to say you are looking at it and to ask for help if you need it.
+Take a look at GitHub issues labeled with the ``easyfix`` and ``good_first_issue`` labels for:
+
+- `Ansible collections repositories <https://github.com/search?q=user%3Aansible-collections+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
+- `All other Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
+
+Issues labeled with the ``docs`` label in `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_ and `other <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_ Ansible projects can be also good to start with.
+
+When you choose an issue to work on, add a comment directly on the GitHub issue to say you are looking at it and let others know to avoid conflicting work.
+You can also ask for help in a comment if you need it.
 
 Continue to contribute
 ======================
diff --git a/docs/docsite/rst/community/documentation_contributions.rst b/docs/docsite/rst/community/documentation_contributions.rst
index 51f30b7c..0f464f11 100644
--- a/docs/docsite/rst/community/documentation_contributions.rst
+++ b/docs/docsite/rst/community/documentation_contributions.rst
@@ -35,14 +35,28 @@ To submit a documentation PR from docs.ansible.com with ``Edit on GitHub``:
 #. Be patient while Ansibot, our automated script, adds labels, pings the docs maintainers, and kicks off a CI testing run.
 #. Keep an eye on your PR - the docs team may ask you for changes.
 
-Reviewing open PRs and issues
-=============================
+Reviewing or solving open issues
+================================
+
+Review or solve open documentation issues for:
+
+- `Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_
+- `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_
+
+Reviewing open PRs
+==================
+
+Review open documentation pull requests for:
+
+- Ansible `projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Apr>`_
+- Ansible `collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Apr>`_
 
-You can also contribute by reviewing open documentation `issues <https://github.com/ansible/ansible/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+label%3Adocs>`_ and `PRs <https://github.com/ansible/ansible/pulls?utf8=%E2%9C%93&q=is%3Apr+is%3Aopen+label%3Adocs>`_. To add a helpful review, please:
+To add a helpful review, please:
 
-- Include a comment - "looks good to me" only helps if we know why.
-- For issues, reproduce the problem.
-- For PRs, test the change.
+- Test the change if applicable.
+- Think if it can be made better (including wording, structure, fixing typos and so on).
+- Suggest improvements.
+- Approve the change with the ``looks good to me`` comment.
Opening a new issue and/or PR ============================= diff --git a/docs/docsite/rst/community/how_can_I_help.rst b/docs/docsite/rst/community/how_can_I_help.rst index 66f55d18..38cb1db8 100644 --- a/docs/docsite/rst/community/how_can_I_help.rst +++ b/docs/docsite/rst/community/how_can_I_help.rst @@ -61,6 +61,14 @@ Review and submit pull requests As you become more familiar with how Ansible works, you may be able to fix issues or develop new features yourself. If you think you have a fix for a bug in Ansible, or if you have a new feature that you would like to share with millions of Ansible users, read all about the :ref:`development process <community_development_process>` to learn how to get your code accepted into Ansible. +You can also get started with solving GitHub issues labeled with the ``easyfix`` and ``good_first_issue`` labels for: + +- `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_ +- `All other Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_ + +When you choose an issue to work on, add a comment directly on the GitHub issue to say you are looking at it and let others know to avoid conflicting work. +You can also ask for help in a comment if you need it. + Another good way to help is to review pull requests that other Ansible users have submitted. Ansible core keeps a full list of `open pull requests by file <https://ansible.sivel.net/pr/byfile.html>`_, so if a particular module or plugin interests you, you can easily keep track of all the relevant new pull requests and provide testing or feedback. Alternatively, you can review the pull requests for any collections that interest you. Click :guilabel:`Issue tracker` on the collection documentation page to find the issues and PRs for that collection. Become a collection maintainer diff --git a/docs/docsite/rst/community/maintainers_guidelines.rst b/docs/docsite/rst/community/maintainers_guidelines.rst index 8e176962..43a0e696 100644 --- a/docs/docsite/rst/community/maintainers_guidelines.rst +++ b/docs/docsite/rst/community/maintainers_guidelines.rst @@ -20,7 +20,7 @@ In general, collection maintainers: - Review and commit changes made by other contributors. - :ref:`Backport <Backporting>` changes to stable branches. - Address or assign issues to appropriate contributors. -- Release collections. +- :ref:`Release collections <Releasing>`. - Ensure that collections adhere to the `Collection Requirements <https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst>`_, - Track changes announced in `News for collection contributors and maintainers <https://github.com/ansible-collections/news-for-maintainers>`_ and update a collection in accordance with these changes. - Subscribe and submit news to the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_. @@ -52,6 +52,22 @@ Collection contributors and maintainers should also communicate through: See :ref:`communication` for more details on these communication channels. +.. _wg_and_real_time_chat: + +Establishing working group communication +---------------------------------------------------------------- + +Working groups depend on efficient, real-time communication. 
+Project maintainers can use the following techniques to establish communication for working groups: + +* Find an existing :ref:`working_group_list` that is similar to your project and join the conversation. +* `Request <https://github.com/ansible/community/blob/main/WORKING-GROUPS.md>`_ a new working group for your project. +* `Create <https://hackmd.io/@ansible-community/community-matrix-faq#How-do-I-create-a-public-community-room>`_ a public chat for your working group or `ask <https://github.com/ansible/community/issues/new>`_ the community team. +* Provide working group details and links to chat rooms in the contributor section of your project ``README.md``. +* Encourage contributors to join the chats and add themselves to the working group. + +See the :ref:`Communication guide <communication_irc>` to learn more about real-time chat. + Community Topics ---------------- @@ -64,13 +80,12 @@ Share your opinion and vote on the topics to help the community make the best de Contributor Summits ------------------- - The quarterly Ansible Contributor Summit is a global event that provides our contributors a great opportunity to meet each other, communicate, share ideas, and see that there are other real people behind the messages on Matrix or Libera Chat IRC, or GitHub. This gives a sense of community. Watch the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ for information when the next contributor summit, invite contributors you know, and take part in the event together. Weekly community Matrix/IRC meetings ------------------------------------ -The Community and the Steering Committee come together at weekly meetings in the ``#ansible-community`` :ref:`Matrix/Libera.Chat <communication_irc>` channel to discuss important project-scale questions. See the `schedule <https://github.com/ansible/community/blob/main/meetings/README.md#schedule>`_ and join. +The Community and the Steering Committee come together at weekly meetings in the ``#ansible-community`` `Libera.Chat IRC <https://docs.ansible.com/ansible/devel/community/communication.html#ansible-community-on-irc>`_ channel or in the bridged `#community:ansible.com <https://matrix.to/#/#community:ansible.com>`_ room on `Matrix <https://docs.ansible.com/ansible/devel/community/communication.html#ansible-community-on-matrix>`_ to discuss important project questions. Join us! Here is our `schedule <https://github.com/ansible/community/blob/main/meetings/README.md#schedule>`_. Expanding the collection community =================================== @@ -82,6 +97,7 @@ Expanding the collection community Here are some ways you can expand the community around your collection: * Give :ref:`newcomers a positive first experience <collection_new_contributors>`. + * Invite contributors to join :ref:`real-time chats <wg_and_real_time_chat>` related to your project. * Have :ref:`good documentation <maintainer_documentation>` with guidelines for new contributors. * Make people feel welcome personally and individually. * Use labels to show easy fixes and leave non-critical easy fixes to newcomers and offer to mentor them. 
diff --git a/docs/docsite/rst/dev_guide/ansible_index.rst b/docs/docsite/rst/dev_guide/ansible_index.rst index 0736df15..a660df06 100644 --- a/docs/docsite/rst/dev_guide/ansible_index.rst +++ b/docs/docsite/rst/dev_guide/ansible_index.rst @@ -36,8 +36,7 @@ Find the task that best describes what you want to do: * a :ref:`network module <developing_modules_network>` * a :ref:`Windows module <developing_modules_general_windows>`. * an :ref:`Amazon module <ansible_collections.amazon.aws.docsite.dev_guide_intro>`. - * an :ref:`OpenStack module <OpenStack_module_development>`. - * an :ref:`oVirt/RHV module <oVirt_module_development>`. + * an :ref:`oVirt/RHV module <https://github.com/oVirt/ovirt-ansible-collection/blob/master/README-developers.md>`. * a :ref:`VMware module <ansible_collections.community.vmware.docsite.vmware_ansible_devguide>`. * I want to :ref:`write a series of related modules <developing_modules_in_groups>` that integrate Ansible with a new product (for example, a database, cloud provider, network platform, and so on). @@ -75,14 +74,8 @@ If you prefer to read the entire guide, here's a list of the pages in order. developing_python_3 debugging developing_modules_documenting - adjacent_yaml_doc + sidecar developing_modules_general_windows - developing_modules_general_aci - platforms/aws_guidelines - platforms/openstack_guidelines - platforms/ovirt_dev_guide - platforms/vmware_guidelines - platforms/vmware_rest_guidelines developing_modules_in_groups testing module_lifecycle diff --git a/docs/docsite/rst/dev_guide/core_index.rst b/docs/docsite/rst/dev_guide/core_index.rst index 00a7db63..3c2fdb73 100644 --- a/docs/docsite/rst/dev_guide/core_index.rst +++ b/docs/docsite/rst/dev_guide/core_index.rst @@ -73,7 +73,6 @@ If you prefer to read the entire guide, here's a list of the pages in order. developing_modules_documenting sidecar developing_modules_general_windows - developing_modules_general_aci developing_modules_in_groups testing module_lifecycle diff --git a/docs/docsite/rst/dev_guide/developing_collections_shared.rst b/docs/docsite/rst/dev_guide/developing_collections_shared.rst index bddb94c9..34db6aea 100644 --- a/docs/docsite/rst/dev_guide/developing_collections_shared.rst +++ b/docs/docsite/rst/dev_guide/developing_collections_shared.rst @@ -78,7 +78,7 @@ You can use git repositories for collection dependencies during local developmen dependencies: {'git@github.com:organization/repo_name.git': 'devel'} -.. warning +.. warning:: Do not use git repositories as dependencies for published collections. Dependencies for published collections must be other published collections. diff --git a/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst b/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst deleted file mode 100644 index 0c9826d0..00000000 --- a/docs/docsite/rst/dev_guide/developing_modules_general_aci.rst +++ /dev/null @@ -1,454 +0,0 @@ -.. _aci_dev_guide: - -**************************** -Developing Cisco ACI modules -**************************** -This is a brief walk-through of how to create new Cisco ACI modules for Ansible. - -For more information about Cisco ACI, look at the :ref:`Cisco ACI user guide <aci_guide>`. - -What's covered in this section: - -.. contents:: - :depth: 3 - :local: - - -.. 
_aci_dev_guide_intro: - -Introduction -============ -The `cisco.aci collection <https://galaxy.ansible.com/cisco/aci>`_ already includes a large number of Cisco ACI modules, however the ACI object model is huge and covering all possible functionality would easily cover more than 1500 individual modules. - -If you need specific functionality, you have 2 options: - -- Learn the ACI object model and use the low-level APIC REST API using the :ref:`aci_rest <aci_rest_module>` module -- Write your own dedicated modules, which is actually quite easy - -.. seealso:: - - `Ansible ACI collection <https://github.com/CiscoDevNet/ansible-aci>`_ - Github repository of the ansible ACI collection - :ref:`hacking_collections` - Information on how to contribute to collections. - `ACI Fundamentals: ACI Policy Model <https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/1-x/aci-fundamentals/b_ACI-Fundamentals/b_ACI-Fundamentals_chapter_010001.html>`_ - A good introduction to the ACI object model. - `APIC Management Information Model reference <https://developer.cisco.com/docs/apic-mim-ref/>`_ - Complete reference of the APIC object model. - `APIC REST API Configuration Guide <https://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/2-x/rest_cfg/2_1_x/b_Cisco_APIC_REST_API_Configuration_Guide.html>`_ - Detailed guide on how the APIC REST API is designed and used, incl. many examples. - - -So let's look at how a typical ACI module is built up. - - -.. _aci_dev_guide_module_structure: - -ACI module structure -==================== - -Importing objects from Python libraries ---------------------------------------- -The following imports are standard across ACI modules: - -.. code-block:: python - - from ansible.module_utils.aci import ACIModule, aci_argument_spec - from ansible.module_utils.basic import AnsibleModule - - -Defining the argument spec --------------------------- -The first line adds the standard connection parameters to the module. After that, the next section will update the ``argument_spec`` dictionary with module-specific parameters. The module-specific parameters should include: - -* the object_id (usually the name) -* the configurable properties of the object -* the parent object IDs (all parents up to the root) -* only child classes that are a 1-to-1 relationship (1-to-many/many-to-many require their own module to properly manage) -* the state - - + ``state: absent`` to ensure object does not exist - + ``state: present`` to ensure the object and configs exist; this is also the default - + ``state: query`` to retrieve information about objects in the class - -.. code-block:: python - - def main(): - argument_spec = aci_argument_spec() - argument_spec.update( - object_id=dict(type='str', aliases=['name']), - object_prop1=dict(type='str'), - object_prop2=dict(type='str', choices=['choice1', 'choice2', 'choice3']), - object_prop3=dict(type='int'), - parent_id=dict(type='str'), - child_object_id=dict(type='str'), - child_object_prop=dict(type='str'), - state=dict(type='str', default='present', choices=['absent', 'present', 'query']), - ) - - -.. hint:: Do not provide default values for configuration arguments. Default values could cause unintended changes to the object. - -Using the AnsibleModule object ------------------------------- -The following section creates an AnsibleModule instance. The module should support check-mode, so we pass the ``argument_spec`` and ``supports_check_mode`` arguments. 
Since these modules support querying the APIC for all objects of the module's class, the object/parent IDs should only be required if ``state: absent`` or ``state: present``. - -.. code-block:: python - - module = AnsibleModule( - argument_spec=argument_spec, - supports_check_mode=True, - required_if=[ - ['state', 'absent', ['object_id', 'parent_id']], - ['state', 'present', ['object_id', 'parent_id']], - ], - ) - - -Mapping variable definition ---------------------------- -Once the AnsibleModule object has been initiated, the necessary parameter values should be extracted from ``params`` and any data validation should be done. Usually the only params that need to be extracted are those related to the ACI object configuration and its child configuration. If you have integer objects that you would like to validate, then the validation should be done here, and the ``ACIModule.payload()`` method will handle the string conversion. - -.. code-block:: python - - object_id = object_id - object_prop1 = module.params['object_prop1'] - object_prop2 = module.params['object_prop2'] - object_prop3 = module.params['object_prop3'] - if object_prop3 is not None and object_prop3 not in range(x, y): - module.fail_json(msg='Valid object_prop3 values are between x and (y-1)') - child_object_id = module.params['child_object_id'] - child_object_prop = module.params['child_object_prop'] - state = module.params['state'] - - -Using the ACIModule object --------------------------- -The ACIModule class handles most of the logic for the ACI modules. The ACIModule extends functionality to the AnsibleModule object, so the module instance must be passed into the class instantiation. - -.. code-block:: python - - aci = ACIModule(module) - -The ACIModule has six main methods that are used by the modules: - -* construct_url -* get_existing -* payload -* get_diff -* post_config -* delete_config - -The first two methods are used regardless of what value is passed to the ``state`` parameter. - -Constructing URLs -^^^^^^^^^^^^^^^^^ -The ``construct_url()`` method is used to dynamically build the appropriate URL to interact with the object, and the appropriate filter string that should be appended to the URL to filter the results. - -* When the ``state`` is not ``query``, the URL is the base URL to access the APIC plus the distinguished name to access the object. The filter string will restrict the returned data to just the configuration data. -* When ``state`` is ``query``, the URL and filter string used depends on what parameters are passed to the object. This method handles the complexity so that it is easier to add new modules and so that all modules are consistent in what type of data is returned. - -.. note:: Our design goal is to take all ID parameters that have values, and return the most specific data possible. If you do not supply any ID parameters to the task, then all objects of the class will be returned. If your task does consist of ID parameters sed, then the data for the specific object is returned. If a partial set of ID parameters are passed, then the module will use the IDs that are passed to build the URL and filter strings appropriately. 
- -The ``construct_url()`` method takes 2 required arguments: - -* **self** - passed automatically with the class instance -* **root_class** - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys - - + **aci_class**: The name of the class used by the APIC, for example ``fvTenant`` - - + **aci_rn**: The relative name of the object, for example ``tn-ACME`` - - + **target_filter**: A dictionary with key-value pairs that make up the query string for selecting a subset of entries, for example ``{'name': 'ACME'}`` - - + **module_object**: The particular object for this class, for example ``ACME`` - -Example: - -.. code-block:: python - - aci.construct_url( - root_class=dict( - aci_class='fvTenant', - aci_rn='tn-{0}'.format(tenant), - target_filter={'name': tenant}, - module_object=tenant, - ), - ) - -Some modules, like ``aci_tenant``, are the root class and so they would not need to pass any additional arguments to the method. - -The ``construct_url()`` method takes 4 optional arguments, the first three imitate the root class as described above, but are for child objects: - -* subclass_1 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys - - + Example: Application Profile Class (AP) - -* subclass_2 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys - - + Example: End Point Group (EPG) - -* subclass_3 - A dictionary consisting of ``aci_class``, ``aci_rn``, ``target_filter``, and ``module_object`` keys - - + Example: Binding a Contract to an EPG - -* child_classes - The list of APIC names for the child classes supported by the modules. - - + This is a list, even if it is a list of one - + These are the unfriendly names used by the APIC - + These are used to limit the returned child_classes when possible - + Example: ``child_classes=['fvRsBDSubnetToProfile', 'fvRsNdPfxPol']`` - -.. note:: Sometimes the APIC will require special characters ([, ], and -) or will use object metadata in the name ("vlanns" for VLAN pools); the module should handle adding special characters or joining of multiple parameters in order to keep expected inputs simple. - -Getting the existing configuration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Once the URL and filter string have been built, the module is ready to retrieve the existing configuration for the object: - -* ``state: present`` retrieves the configuration to use as a comparison against what was entered in the task. All values that are different than the existing values will be updated. -* ``state: absent`` uses the existing configuration to see if the item exists and needs to be deleted. -* ``state: query`` uses this to perform the query for the task and report back the existing data. - -.. code-block:: python - - aci.get_existing() - - -When state is present -^^^^^^^^^^^^^^^^^^^^^ -When ``state: present``, the module needs to perform a diff against the existing configuration and the task entries. If any value needs to be updated, then the module will make a POST request with only the items that need to be updated. Some modules have children that are in a 1-to-1 relationship with another object; for these cases, the module can be used to manage the child objects. - -Building the ACI payload -"""""""""""""""""""""""" -The ``aci.payload()`` method is used to build a dictionary of the proposed object configuration. 
All parameters that were not provided a value in the task will be removed from the dictionary (both for the object and its children). Any parameter that does have a value will be converted to a string and added to the final dictionary object that will be used for comparison against the existing configuration. - -The ``aci.payload()`` method takes two required arguments and 1 optional argument, depending on if the module manages child objects. - -* ``aci_class`` is the APIC name for the object's class, for example ``aci_class='fvBD'`` -* ``class_config`` is the appropriate dictionary to be used as the payload for the POST request - - + The keys should match the names used by the APIC. - + The values should be the corresponding value in ``module.params``; these are the variables defined above - -* ``child_configs`` is optional, and is a list of child config dictionaries. - - + The child configs include the full child object dictionary, not just the attributes configuration portion. - + The configuration portion is built the same way as the object. - -.. code-block:: python - - aci.payload( - aci_class=aci_class, - class_config=dict( - name=bd, - descr=description, - type=bd_type, - ), - child_configs=[ - dict( - fvRsCtx=dict( - attributes=dict( - tnFvCtxName=vrf - ), - ), - ), - ], - ) - - -Performing the request -"""""""""""""""""""""" -The ``get_diff()`` method is used to perform the diff, and takes only one required argument, ``aci_class``. -Example: ``aci.get_diff(aci_class='fvBD')`` - -The ``post_config()`` method is used to make the POST request to the APIC if needed. This method doesn't take any arguments and handles check mode. -Example: ``aci.post_config()`` - - -Example code -"""""""""""" -.. code-block:: text - - if state == 'present': - aci.payload( - aci_class='<object APIC class>', - class_config=dict( - name=object_id, - prop1=object_prop1, - prop2=object_prop2, - prop3=object_prop3, - ), - child_configs=[ - dict( - '<child APIC class>'=dict( - attributes=dict( - child_key=child_object_id, - child_prop=child_object_prop - ), - ), - ), - ], - ) - - aci.get_diff(aci_class='<object APIC class>') - - aci.post_config() - - -When state is absent -^^^^^^^^^^^^^^^^^^^^ -If the task sets the state to absent, then the ``delete_config()`` method is all that is needed. This method does not take any arguments, and handles check mode. - -.. code-block:: text - - elif state == 'absent': - aci.delete_config() - - -Exiting the module -^^^^^^^^^^^^^^^^^^ -To have the module exit, call the ACIModule method ``exit_json()``. This method automatically takes care of returning the common return values for you. - -.. code-block:: text - - aci.exit_json() - - if __name__ == '__main__': - main() - - -.. _aci_dev_guide_testing: - -Testing ACI library functions -============================= -You can test your ``construct_url()`` and ``payload()`` arguments without accessing APIC hardware by using the following python script: - -.. 
code-block:: text - - #!/usr/bin/python - import json - from ansible.module_utils.network.aci.aci import ACIModule - - # Just another class mimicking a bare AnsibleModule class for construct_url() and payload() methods - class AltModule(): - params = dict( - host='dummy', - port=123, - protocol='https', - state='present', - output_level='debug', - ) - - # A sub-class of ACIModule to overload __init__ (we don't need to log into APIC) - class AltACIModule(ACIModule): - def __init__(self): - self.result = dict(changed=False) - self.module = AltModule() - self.params = self.module.params - - # Instantiate our version of the ACI module - aci = AltACIModule() - - # Define the variables you need below - aep = 'AEP' - aep_domain = 'uni/phys-DOMAIN' - - # Below test the construct_url() arguments to see if it produced correct results - aci.construct_url( - root_class=dict( - aci_class='infraAttEntityP', - aci_rn='infra/attentp-{}'.format(aep), - target_filter={'name': aep}, - module_object=aep, - ), - subclass_1=dict( - aci_class='infraRsDomP', - aci_rn='rsdomP-[{}]'.format(aep_domain), - target_filter={'tDn': aep_domain}, - module_object=aep_domain, - ), - ) - - # Below test the payload arguments to see if it produced correct results - aci.payload( - aci_class='infraRsDomP', - class_config=dict(tDn=aep_domain), - ) - - # Print the URL and proposed payload - print 'URL:', json.dumps(aci.url, indent=4) - print 'PAYLOAD:', json.dumps(aci.proposed, indent=4) - - -This will result in: - -.. code-block:: yaml - - URL: "https://dummy/api/mo/uni/infra/attentp-AEP/rsdomP-[phys-DOMAIN].json" - PAYLOAD: { - "infraRsDomP": { - "attributes": { - "tDn": "phys-DOMAIN" - } - } - } - -Testing for sanity checks -------------------------- -For legacy versions of ansible, you can run from your fork something like: - -.. code-block:: bash - - $ ansible-test sanity --python 2.7 lib/ansible/modules/network/aci/aci_tenant.py - -Meanwhile, the ACI modules have moved into a collection. Please refer to the links below, which provide detailed guidance -how to setup your environment and test the collection. - -.. seealso:: - - :ref:`hacking_collections` - Information how to setup your environment to contribute to collections - :ref:`testing_sanity` - Information on how to build sanity tests. - `Ansible ACI collection <https://github.com/CiscoDevNet/ansible-aci>`_ - Github repository of the ansible ACI collection - - -Testing ACI integration tests ------------------------------ -You can run this: - -.. code-block:: bash - - $ ansible-test network-integration --continue-on-error --allow-unsupported --diff -v aci_tenant - -.. note:: You may need to add ``--python 2.7`` or ``--python 3.6`` in order to use the correct python version for performing tests. - -You may want to edit the used inventory at *test/integration/inventory.networking* and add something like: - -.. code-block:: ini - - [aci:vars] - aci_hostname=my-apic-1 - aci_username=admin - aci_password=my-password - aci_use_ssl=yes - aci_use_proxy=no - - [aci] - localhost ansible_ssh_host=127.0.0.1 ansible_connection=local - -.. seealso:: - - :ref:`testing_integration` - Information on how to build integration tests. - - -Testing for test coverage -------------------------- -You can run this: - -.. 
code-block:: bash - - $ ansible-test network-integration --python 2.7 --allow-unsupported --coverage aci_tenant - $ ansible-test coverage report diff --git a/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst b/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst index d896c544..e858103d 100644 --- a/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst +++ b/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst @@ -341,12 +341,6 @@ Files that are included from :file:`module_utils` are themselves scanned for imports of other Python modules from :file:`module_utils` to be included in the zipfile as well. -.. warning:: - At present, the Ansiballz Framework cannot determine whether an import - should be included if it is a relative import. Always use an absolute - import that has :py:mod:`ansible.module_utils` in it to allow Ansiballz to - determine that the file should be included. - .. _flow_passing_module_args: @@ -689,7 +683,7 @@ This section will discuss the behavioral attributes for arguments: option = { 'type': 'str', 'removed_in_version': '2.0.0', - 'collection_name': 'testns.testcol', + 'removed_from_collection': 'testns.testcol', }, :removed_at_date: @@ -703,7 +697,7 @@ This section will discuss the behavioral attributes for arguments: option = { 'type': 'str', 'removed_at_date': '2020-12-31', - 'collection_name': 'testns.testcol', + 'removed_from_collection': 'testns.testcol', }, :removed_from_collection: diff --git a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst deleted file mode 100644 index 208aed53..00000000 --- a/docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _AWS_module_development: - -**************************************************** -Guidelines for Ansible Amazon AWS module development -**************************************************** - -This guide has moved to :ref:`ansible_collections.amazon.aws.docsite.dev_guide_intro`. diff --git a/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst deleted file mode 100644 index b4b91b44..00000000 --- a/docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. _OpenStack_module_development: - -OpenStack Ansible Modules -========================= - -Please see the `OpenStack guidelines <https://opendev.org/openstack/ansible-collections-openstack/src/branch/master/docs/openstack_guidelines.rst>`_, for further information. diff --git a/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst b/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst deleted file mode 100644 index 4316bce5..00000000 --- a/docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _oVirt_module_development: - -oVirt Ansible Modules -===================== - - -See the `ovirt.ovirt collection documentation <https://github.com/oVirt/ovirt-ansible-collection/blob/master/README-developers.md>`_ for details on how to contribute to this collection.
\ No newline at end of file diff --git a/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst deleted file mode 100644 index dc05302d..00000000 --- a/docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _VMware_module_development: - -****************************************************** -Guidelines for VMware module development -****************************************************** - -This guide has moved to :ref:`ansible_collections.community.vmware.docsite.vmware_ansible_devguide`. diff --git a/docs/docsite/rst/dev_guide/platforms/vmware_rest_guidelines.rst b/docs/docsite/rst/dev_guide/platforms/vmware_rest_guidelines.rst deleted file mode 100644 index c176dfdb..00000000 --- a/docs/docsite/rst/dev_guide/platforms/vmware_rest_guidelines.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. _VMware_REST_module_development: - -********************************************* -Guidelines for VMware REST module development -********************************************* - -This guide has moved to :ref:`ansible_collections.vmware.vmware_rest.docsite.vmware_rest_devguide`. diff --git a/docs/docsite/rst/dev_guide/testing/sanity/import.rst b/docs/docsite/rst/dev_guide/testing/sanity/import.rst index 49994092..6a5d3294 100644 --- a/docs/docsite/rst/dev_guide/testing/sanity/import.rst +++ b/docs/docsite/rst/dev_guide/testing/sanity/import.rst @@ -1,59 +1,111 @@ import ====== -Ansible allows unchecked imports of some libraries from specific directories, listed at the bottom of this section. Import all other Python libraries in a try/except ImportError block to support sanity tests such as ``validate-modules`` and to allow Ansible to give better error messages to the user. To import a library in a try/except ImportError block: +Ansible :ref:`allows unchecked imports<allowed_unchecked_imports>` of some libraries from specific directories. +Importing any other Python library requires :ref:`handling import errors<handling_import_errors>`. +This enables support for sanity tests such as :ref:`testing_validate-modules` and provides better error messages to the user. -1. In modules: +.. _handling_import_errors: - .. code-block:: python +Handling import errors +---------------------- - # Instead of 'import another_library', do: +In modules +^^^^^^^^^^ - import traceback +Instead of using ``import another_library``: - try: - import another_library - except ImportError: - HAS_ANOTHER_LIBRARY = False - ANOTHER_LIBRARY_IMPORT_ERROR = traceback.format_exc() - else: - HAS_ANOTHER_LIBRARY = True +.. code-block:: python + import traceback - # Later in module code: + from ansible.module_utils.basic import missing_required_lib - module = AnsibleModule(...) + try: + import another_library + except ImportError: + HAS_ANOTHER_LIBRARY = False + ANOTHER_LIBRARY_IMPORT_ERROR = traceback.format_exc() + else: + HAS_ANOTHER_LIBRARY = True + ANOTHER_LIBRARY_IMPORT_ERROR = None - if not HAS_ANOTHER_LIBRARY: - # Needs: from ansible.module_utils.basic import missing_required_lib - module.fail_json( - msg=missing_required_lib('another_library'), - exception=ANOTHER_LIBRARY_IMPORT_ERROR) +.. note:: -2. In plugins: + The ``missing_required_lib`` import above will be used below. - .. code-block:: python +Then in the module code: - # Instead of 'import another_library', do: +.. code-block:: python - from ansible.module_utils.six import raise_from + module = AnsibleModule(...) 
- try: - import another_library - except ImportError as imp_exc: - ANOTHER_LIBRARY_IMPORT_ERROR = imp_exc - else: - ANOTHER_LIBRARY_IMPORT_ERROR = None + if not HAS_ANOTHER_LIBRARY: + module.fail_json( + msg=missing_required_lib('another_library'), + exception=ANOTHER_LIBRARY_IMPORT_ERROR) +In plugins +^^^^^^^^^^ - # Later in plugin code, for example in __init__ of the plugin: +Instead of using ``import another_library``: - if ANOTHER_LIBRARY_IMPORT_ERROR: - raise_from( - AnsibleError('another_library must be installed to use this plugin'), - ANOTHER_LIBRARY_IMPORT_ERROR) - # If you target only newer Python 3 versions, you can also use the - # 'raise ... from ...' syntax. +.. code-block:: python + + try: + import another_library + except ImportError as imp_exc: + ANOTHER_LIBRARY_IMPORT_ERROR = imp_exc + else: + ANOTHER_LIBRARY_IMPORT_ERROR = None + +Then in the plugin code, for example in ``__init__`` of the plugin: + +.. code-block:: python + + if ANOTHER_LIBRARY_IMPORT_ERROR: + raise AnsibleError('another_library must be installed to use this plugin') from ANOTHER_LIBRARY_IMPORT_ERROR + +When used as base classes +^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. important:: + + This solution builds on the previous two examples. + Make sure to pick the appropriate one before continuing with this solution. + +Sometimes an import is used in a base class, for example: + +.. code-block:: python + + from another_library import UsefulThing + + class CustomThing(UsefulThing): + pass + +One option is make the entire class definition conditional: + +.. code-block:: python + + if not ANOTHER_LIBRARY_IMPORT_ERROR: + class CustomThing(UsefulThing): + pass + +Another option is to define a substitute base class by modifying the exception handler: + +.. code-block:: python + + try: + from another_library import UsefulThing + except ImportError: + class UsefulThing: + pass + ... + +.. _allowed_unchecked_imports: + +Allowed unchecked imports +------------------------- Ansible allows the following unchecked imports from these specific directories: diff --git a/docs/docsite/rst/dev_guide/testing_units_modules.rst b/docs/docsite/rst/dev_guide/testing_units_modules.rst index 5b24d492..d07dcff9 100644 --- a/docs/docsite/rst/dev_guide/testing_units_modules.rst +++ b/docs/docsite/rst/dev_guide/testing_units_modules.rst @@ -388,8 +388,8 @@ Here is a simple mock of :meth:`AnsibleModule.run_command` (taken from :file:`te with patch.object(basic.AnsibleModule, 'run_command') as run_command: run_command.return_value = 0, '', '' # successful execution, no output - with self.assertRaises(AnsibleExitJson) as result: - my_module.main() + with self.assertRaises(AnsibleExitJson) as result: + my_module.main() self.assertFalse(result.exception.args[0]['changed']) # Check that run_command has been called run_command.assert_called_once_with('/usr/bin/command args') diff --git a/docs/docsite/rst/galaxy/dev_guide.rst b/docs/docsite/rst/galaxy/dev_guide.rst index 094ef532..01a92d02 100644 --- a/docs/docsite/rst/galaxy/dev_guide.rst +++ b/docs/docsite/rst/galaxy/dev_guide.rst @@ -95,32 +95,12 @@ Alternatively, the role_skeleton and ignoring of files can be configured via ans Authenticate with Galaxy ------------------------ -Using the ``import``, ``delete`` and ``setup`` commands to manage your roles on the Galaxy website requires authentication, and the ``login`` command -can be used to do just that. Before you can use the ``login`` command, you must create an account on the Galaxy website. 
+Using the ``import``, ``delete`` and ``setup`` commands to manage your roles on the Galaxy website requires authentication in the form of an API key, you must create an account on the Galaxy website. -The ``login`` command requires using your GitHub credentials. You can use your username and password, or you can create a `personal access token <https://help.github.com/articles/creating-an-access-token-for-command-line-use/>`_. If you choose to create a token, grant minimal access to the token, as it is used just to verify identify. +#. Log in to the Galaxy website and open the `Preferences <https://galaxy.ansible.com/me/preferences>`_ view. +#. Select **Show API key** and then copy it. -The following shows authenticating with the Galaxy website using a GitHub username and password: - -.. code-block:: text - - $ ansible-galaxy login - - We need your GitHub login to identify you. - This information will not be sent to Galaxy, only to api.github.com. - The password will not be displayed. - - Use --github-token if you do not want to enter your password. - - GitHub Username: dsmith - Password for dsmith: - Successfully logged into Galaxy as dsmith - -When you choose to use your username and password, your password is not sent to Galaxy. It is used to authenticates with GitHub and create a personal access token. -It then sends the token to Galaxy, which in turn verifies that your identity and returns a Galaxy access token. After authentication completes the GitHub token is -destroyed. - -If you do not want to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the ``--github-token`` option to pass a personal access token that you create. +#. Save your token in the path set in the :ref:`GALAXY_TOKEN_PATH`. Import a role diff --git a/docs/docsite/rst/galaxy/user_guide.rst b/docs/docsite/rst/galaxy/user_guide.rst index e4beb7ef..c5a1e19e 100644 --- a/docs/docsite/rst/galaxy/user_guide.rst +++ b/docs/docsite/rst/galaxy/user_guide.rst @@ -330,12 +330,12 @@ You can install roles and collections from the same requirements files roles: # Install a role from Ansible Galaxy. - name: geerlingguy.java - version: 1.9.6 + version: "1.9.6" # note that ranges are not supported for roles collections: # Install a collection from Ansible Galaxy. - name: geerlingguy.php_roles - version: 0.9.3 + version: ">=0.9.3" source: https://galaxy.ansible.com Installing multiple roles from multiple files diff --git a/docs/docsite/rst/network/getting_started/first_playbook.rst b/docs/docsite/rst/network/getting_started/first_playbook.rst index 2c06ded7..15a4ed16 100644 --- a/docs/docsite/rst/network/getting_started/first_playbook.rst +++ b/docs/docsite/rst/network/getting_started/first_playbook.rst @@ -91,7 +91,7 @@ The playbook sets three of the seven values from the command line above: the gro The playbook contains one play with two tasks, and should generate output like this: -.. code-block:: bash +.. code-block:: shell $ ansible-playbook -i vyos.example.net, -u ansible -k -e ansible_network_os=vyos.vyos.vyos first_playbook.yml @@ -115,7 +115,7 @@ The playbook contains one play with two tasks, and should generate output like t The extended first playbook has five tasks in a single play. Run it with the same command you used above. The output shows you the change Ansible made to the config: -.. code-block:: bash +.. 
code-block:: shell $ ansible-playbook -i vyos.example.net, -u ansible -k -e ansible_network_os=vyos.vyos.vyos first_playbook_ext.yml diff --git a/docs/docsite/rst/network/user_guide/network_debug_troubleshooting.rst b/docs/docsite/rst/network/user_guide/network_debug_troubleshooting.rst index 202814b3..d0fbcd63 100644 --- a/docs/docsite/rst/network/user_guide/network_debug_troubleshooting.rst +++ b/docs/docsite/rst/network/user_guide/network_debug_troubleshooting.rst @@ -509,6 +509,8 @@ Suggestions to resolve: Suggestions to resolve: + Some modules support a ``timeout`` option, which is different to the ``timeout`` keyword for tasks. + .. code-block:: yaml - name: save running-config @@ -519,6 +521,8 @@ Suggestions to resolve: Suggestions to resolve: + + If the module does not support the ``timeout`` option directly, most networking connection plugins can enable similar functionality with the ``ansible_command_timeout`` variable. .. code-block:: yaml diff --git a/docs/docsite/rst/porting_guides/porting_guide_7.rst b/docs/docsite/rst/porting_guides/porting_guide_7.rst index 737cf3e3..bb8b01bf 100644 --- a/docs/docsite/rst/porting_guides/porting_guide_7.rst +++ b/docs/docsite/rst/porting_guides/porting_guide_7.rst @@ -92,6 +92,125 @@ Networking No notable changes +Porting Guide for v7.2.0 +======================== + +Added Collections +----------------- + +- dellemc.powerflex (version 1.5.0) +- dellemc.unity (version 1.5.0) + +Known Issues +------------ + +Ansible-core +~~~~~~~~~~~~ + +- ansible-test - Additional configuration may be required for certain container host and container combinations. Further details are available in the testing documentation. +- ansible-test - Custom containers with ``VOLUME`` instructions may be unable to start, when previously the containers started correctly. Remove the ``VOLUME`` instructions to resolve the issue. Containers with this condition will cause ``ansible-test`` to emit a warning. +- ansible-test - Systems with Podman networking issues may be unable to run containers, when previously the issue went unreported. Correct the networking issues to continue using ``ansible-test`` with Podman. +- ansible-test - Using Docker on systems with SELinux may require setting SELinux to permissive mode. Podman should work with SELinux in enforcing mode. + +cisco.meraki +~~~~~~~~~~~~ + +- meraki_network - Updated documentation for `local_status_page_enabled` and `remote_status_page_enabled` as these no longer work. + +Breaking Changes +---------------- + +community.general +~~~~~~~~~~~~~~~~~ + +- ModuleHelper module utils - when the module sets output variables named ``msg``, ``exception``, ``output``, ``vars``, or ``changed``, the actual output will prefix those names with ``_`` (underscore symbol) only when they clash with output variables generated by ModuleHelper itself, which only occurs when handling exceptions. Please note that this breaking change does not require a new major release since before this release, it was not possible to add such variables to the output `due to a bug <https://github.com/ansible-collections/community.general/pull/5755>`__ (https://github.com/ansible-collections/community.general/pull/5765). + +Major Changes +------------- + +Ansible-core +~~~~~~~~~~~~ + +- ansible-test - Docker Desktop on WSL2 is now supported (additional configuration required). +- ansible-test - Docker and Podman are now supported on hosts with cgroup v2 unified. Previously only cgroup v1 and cgroup v2 hybrid were supported. 
+- ansible-test - Podman now works on container hosts without systemd. Previously only some containers worked, while others required rootfull or rootless Podman, but would not work with both. Some containers did not work at all. +- ansible-test - Podman on WSL2 is now supported. +- ansible-test - When additional cgroup setup is required on the container host, this will be automatically detected. Instructions on how to configure the host will be provided in the error message shown. + +ansible.windows +~~~~~~~~~~~~~~~ + +- Set the minimum Ansible version supported by this collection to Ansible 2.12 + +chocolatey.chocolatey +~~~~~~~~~~~~~~~~~~~~~ + +- win_chocolatey - Allow users to select the TLS versions used for bootstrapping Chocolatey installation. + +Deprecated Features +------------------- + +- The cisco.nso collection is considered unmaintained and will be removed from Ansible 9 if no one starts maintaining it again before Ansible 9. See `the removal process for details on how this works <https://github.com/ansible-collections/overview/blob/main/removal_from_ansible.rst#cancelling-removal-of-an-unmaintained-collection>`__ (https://github.com/ansible-community/community-topics/issues/155). +- The community.fortios collection is considered unmaintained and will be removed from Ansible 9 if no one starts maintaining it again before Ansible 9. See `the removal process for details on how this works <https://github.com/ansible-collections/overview/blob/main/removal_from_ansible.rst#cancelling-removal-of-an-unmaintained-collection>`__ (https://github.com/ansible-community/community-topics/issues/162). +- The community.google collection is considered unmaintained and will be removed from Ansible 9 if no one starts maintaining it again before Ansible 9. See `the removal process for details on how this works <https://github.com/ansible-collections/overview/blob/main/removal_from_ansible.rst#cancelling-removal-of-an-unmaintained-collection>`__ (https://github.com/ansible-community/community-topics/issues/160). +- The community.skydive collection is considered unmaintained and will be removed from Ansible 9 if no one starts maintaining it again before Ansible 9. See `the removal process for details on how this works <https://github.com/ansible-collections/overview/blob/main/removal_from_ansible.rst#cancelling-removal-of-an-unmaintained-collection>`__ (https://github.com/ansible-community/community-topics/issues/171). + +chocolatey.chocolatey +~~~~~~~~~~~~~~~~~~~~~ + +- win_chocolatey - Deprecate side-by-side installs. + +cisco.ios +~~~~~~~~~ + +- ios_bgp_address_family - deprecate neighbors.address/tag/ipv6_adddress with neighbor_address which enables common attributes for facts rendering +- ios_bgp_address_family - deprecate neighbors.password with password_options which allows encryption and password +- ios_bgp_address_family - deprecate slow_peer with slow_peer_options which supports a dict attribute + +community.dns +~~~~~~~~~~~~~ + +- The default of the newly added option ``txt_character_encoding`` will change from ``octal`` to ``decimal`` in community.dns 3.0.0. The new default will be compatible with `RFC 1035 <https://www.ietf.org/rfc/rfc1035.txt>`__ (https://github.com/ansible-collections/community.dns/pull/134). + +community.general +~~~~~~~~~~~~~~~~~ + +- consul - deprecate using parameters unused for ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5772). 
+- gitlab_runner - the default of the new option ``access_level_on_creation`` will change from ``false`` to ``true`` in community.general 7.0.0. This will cause ``access_level`` to be used during runner registration as well, and not only during updates (https://github.com/ansible-collections/community.general/pull/5908). +- manageiq_policies - deprecate ``state=list`` in favour of using ``community.general.manageiq_policies_info`` (https://github.com/ansible-collections/community.general/pull/5721). +- rax - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_cbs - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_cbs_attachments - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_cdb - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_cdb_database - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_cdb_user - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_clb - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_clb_nodes - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_clb_ssl - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_dns - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_dns_record - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_facts - module relies on deprecates library ``pyrax``. 
Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_files - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_files_objects - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_identity - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_keypair - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_meta - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_mon_alarm - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_mon_check - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_mon_entity - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_mon_notification - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_mon_notification_plan - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_network - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_queue - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). 
+- rax_scaling_group - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). +- rax_scaling_policy - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733). + +community.hashi_vault +~~~~~~~~~~~~~~~~~~~~~ + +- ansible-core - support for ``ansible-core`` versions ``2.11`` and ``2.12`` will be dropped in collection version ``5.0.0``, making ``2.13`` the minimum supported version of ``ansible-core`` (https://github.com/ansible-collections/community.hashi_vault/issues/340). +- hvac - the minimum version of ``hvac`` to be supported in collection version ``5.0.0`` will be at least ``1.0.2``; this minimum may be raised before ``5.0.0`` is released, so please subscribe to the linked issue and look out for new notices in the changelog (https://github.com/ansible-collections/community.hashi_vault/issues/324). + Porting Guide for v7.1.0 ======================== diff --git a/docs/docsite/rst/reference_appendices/test_strategies.rst b/docs/docsite/rst/reference_appendices/test_strategies.rst index 8ed20acc..fa28f76b 100644 --- a/docs/docsite/rst/reference_appendices/test_strategies.rst +++ b/docs/docsite/rst/reference_appendices/test_strategies.rst @@ -176,11 +176,20 @@ This is the great culmination of embedded tests: ansible.builtin.command: /usr/bin/take_out_of_pool {{ inventory_hostname }} delegate_to: 127.0.0.1 - roles: + tasks: - - common - - webserver - - apply_testing_checks + - ansible.builtin.include_role: + name: "{{ item }}" + loop: + - common + - webserver + + - name: run any notified handlers + ansible.builtin.meta: flush_handlers + + - name: test the configuration + ansible.builtin.include_role: + name: apply_testing_checks post_tasks: diff --git a/docs/docsite/rst/reference_appendices/tower.rst b/docs/docsite/rst/reference_appendices/tower.rst index 62c6afa3..3537d606 100644 --- a/docs/docsite/rst/reference_appendices/tower.rst +++ b/docs/docsite/rst/reference_appendices/tower.rst @@ -5,7 +5,7 @@ Red Hat Ansible Automation Platform .. important:: - Red Hat Ansible Automation Platform will soon be available on Microsoft Azure. `Sign up to preview the experience <https://www.redhat.com/en/engage/ansible-microsoft-azure-e-202110220735>`_. + Red Hat Ansible Automation Platform is available on multiple cloud platforms. See `Ansible on Clouds <https://access.redhat.com/documentation/en-us/ansible_on_clouds/2.x>`_ for details. `Red Hat Ansible Automation Platform <https://www.ansible.com/products/automation-platform>`_ (RHAAP) is an integrated solution for operationalizing Ansible across your team, organization, and enterprise. The platform includes a controller with a web console and REST API, analytics, execution environments, and much more. diff --git a/docs/docsite/rst/roadmap/COLLECTIONS_7.rst b/docs/docsite/rst/roadmap/COLLECTIONS_7.rst index ca763408..dcd92621 100644 --- a/docs/docsite/rst/roadmap/COLLECTIONS_7.rst +++ b/docs/docsite/rst/roadmap/COLLECTIONS_7.rst @@ -46,7 +46,7 @@ Release schedule Ansible minor releases ======================= -Ansible 7.x minor releases will occur approximately every four weeks if changes to collections have been made or to align to a later ansible-core-2.14.x. 
Ansible 7.x minor releases may contain new features but not backwards incompatibilities. In practice, this means we will include new collection versions where either the patch or the minor version number has changed but not when the major number has changed. For example, if Ansible-7.0.0 ships with community.crypto 2.3.0; Ansible-6.1.0 may ship with community.crypto 2.4.0 but would not ship with community.crypto 3.0.0. +Ansible 7.x minor releases will occur approximately every four weeks if changes to collections have been made or to align to a later ansible-core-2.14.x. Ansible 7.x minor releases may contain new features but not backwards incompatibilities. In practice, this means we will include new collection versions where either the patch or the minor version number has changed but not when the major number has changed. For example, if Ansible-7.0.0 ships with community.crypto 2.3.0; Ansible-7.1.0 may ship with community.crypto 2.4.0 but would not ship with community.crypto 3.0.0. .. note:: diff --git a/docs/docsite/rst/shared_snippets/installing_multiple_collections.txt b/docs/docsite/rst/shared_snippets/installing_multiple_collections.txt index c8cca8b4..a1b18b59 100644 --- a/docs/docsite/rst/shared_snippets/installing_multiple_collections.txt +++ b/docs/docsite/rst/shared_snippets/installing_multiple_collections.txt @@ -10,8 +10,8 @@ You can set up a ``requirements.yml`` file to install multiple collections in on # With the collection name, version, and source options - name: my_namespace.my_other_collection - version: 'version range identifiers (default: ``*``)' - source: 'The Galaxy URL to pull the collection from (default: ``--api-server`` from cmdline)' + version: ">=1.2.0" # Version range identifiers (default: ``*``) + source: ... # The Galaxy URL to pull the collection from (default: ``--api-server`` from cmdline) You can specify the following keys for each collection entry: @@ -56,12 +56,13 @@ You can also add roles to a ``requirements.yml`` file, under the ``roles`` key. roles: # Install a role from Ansible Galaxy. - name: geerlingguy.java - version: 1.9.6 + version: "1.9.6" # note that ranges are not supported for roles + collections: # Install a collection from Ansible Galaxy. - name: geerlingguy.php_roles - version: 0.9.3 + version: ">=0.9.3" source: https://galaxy.ansible.com To install both roles and collections at the same time with one command, run the following: diff --git a/docs/man/man1/ansible-config.1 b/docs/man/man1/ansible-config.1 index 11fa7f1f..28b88cde 100644 --- a/docs/man/man1/ansible-config.1 +++ b/docs/man/man1/ansible-config.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-CONFIG" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-CONFIG" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-config \- View ansible configuration. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-console.1 b/docs/man/man1/ansible-console.1 index 432d7773..039959b2 100644 --- a/docs/man/man1/ansible-console.1 +++ b/docs/man/man1/ansible-console.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE-CONSOLE" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-CONSOLE" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-console \- REPL console for executing Ansible tasks. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-doc.1 b/docs/man/man1/ansible-doc.1 index fe61cd4b..434e92c4 100644 --- a/docs/man/man1/ansible-doc.1 +++ b/docs/man/man1/ansible-doc.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-DOC" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-DOC" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-doc \- plugin documentation tool .SH SYNOPSIS diff --git a/docs/man/man1/ansible-galaxy.1 b/docs/man/man1/ansible-galaxy.1 index 98be5676..646e1b93 100644 --- a/docs/man/man1/ansible-galaxy.1 +++ b/docs/man/man1/ansible-galaxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-GALAXY" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-GALAXY" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-galaxy \- Perform various Role and Collection related operations. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-inventory.1 b/docs/man/man1/ansible-inventory.1 index 8b17cbba..dcf9152d 100644 --- a/docs/man/man1/ansible-inventory.1 +++ b/docs/man/man1/ansible-inventory.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-INVENTORY" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-INVENTORY" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-inventory \- None .SH SYNOPSIS diff --git a/docs/man/man1/ansible-playbook.1 b/docs/man/man1/ansible-playbook.1 index 88ab61fd..42b123f9 100644 --- a/docs/man/man1/ansible-playbook.1 +++ b/docs/man/man1/ansible-playbook.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-PLAYBOOK" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-PLAYBOOK" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-playbook \- Runs Ansible playbooks, executing the defined tasks on the targeted hosts. .SH SYNOPSIS diff --git a/docs/man/man1/ansible-pull.1 b/docs/man/man1/ansible-pull.1 index 46baa468..2fe0c038 100644 --- a/docs/man/man1/ansible-pull.1 +++ b/docs/man/man1/ansible-pull.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. 
-.TH "ANSIBLE-PULL" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-PULL" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-pull \- pulls playbooks from a VCS repo and executes them for the local host .SH SYNOPSIS diff --git a/docs/man/man1/ansible-vault.1 b/docs/man/man1/ansible-vault.1 index 37019579..42e7d7a4 100644 --- a/docs/man/man1/ansible-vault.1 +++ b/docs/man/man1/ansible-vault.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE-VAULT" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE-VAULT" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible-vault \- encryption/decryption utility for Ansible data files .SH SYNOPSIS diff --git a/docs/man/man1/ansible.1 b/docs/man/man1/ansible.1 index 5891ece1..5f29078d 100644 --- a/docs/man/man1/ansible.1 +++ b/docs/man/man1/ansible.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "ANSIBLE" 1 "" "Ansible 2.14.2" "System administration commands" +.TH "ANSIBLE" 1 "" "Ansible 2.14.3" "System administration commands" .SH NAME ansible \- Define and run a single task 'playbook' against a set of hosts .SH SYNOPSIS diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 1f35031f..02ace8f5 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -515,6 +515,10 @@ class TaskExecutor: "(see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe)") variable_params.update(self._task.args) self._task.args = variable_params + else: + # if we didn't get a dict, it means there's garbage remaining after k=v parsing, just give up + # see https://github.com/ansible/ansible/issues/79862 + raise AnsibleError(f"invalid or malformed argument: '{variable_params}'") # update no_log to task value, now that we have it templated no_log = self._task.no_log diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index 7f04052e..7a144c0d 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -1570,6 +1570,7 @@ def install_src(collection, b_collection_path, b_collection_output_path, artifac if 'build_ignore' not in collection_meta: # installed collection, not src # FIXME: optimize this? use a different process? copy instead of build? collection_meta['build_ignore'] = [] + collection_meta['manifest'] = Sentinel collection_manifest = _build_manifest(**collection_meta) file_manifest = _build_files_manifest( b_collection_path, diff --git a/lib/ansible/module_utils/ansible_release.py b/lib/ansible/module_utils/ansible_release.py index 7a2e435b..66a04b98 100644 --- a/lib/ansible/module_utils/ansible_release.py +++ b/lib/ansible/module_utils/ansible_release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.14.2' +__version__ = '2.14.3' __author__ = 'Ansible, Inc.' 
__codename__ = "C'mon Everybody" diff --git a/lib/ansible/module_utils/csharp/Ansible.Basic.cs b/lib/ansible/module_utils/csharp/Ansible.Basic.cs index 2db9728d..c68281ef 100644 --- a/lib/ansible/module_utils/csharp/Ansible.Basic.cs +++ b/lib/ansible/module_utils/csharp/Ansible.Basic.cs @@ -16,6 +16,10 @@ using Newtonsoft.Json; using System.Web.Script.Serialization; #endif +// Newtonsoft.Json may reference a different System.Runtime version (6.x) than loaded by PowerShell 7.3 (7.x). +// Ignore CS1701 so the code can be compiled when warnings are reported as errors. +//NoWarn -Name CS1701 -CLR Core + // System.Diagnostics.EventLog.dll reference different versioned dlls that are // loaded in PSCore, ignore CS1702 so the code will ignore this warning //NoWarn -Name CS1702 -CLR Core diff --git a/lib/ansible/modules/dnf.py b/lib/ansible/modules/dnf.py index a3b09908..8131833e 100644 --- a/lib/ansible/modules/dnf.py +++ b/lib/ansible/modules/dnf.py @@ -313,6 +313,14 @@ EXAMPLES = ''' name: "*" state: latest +- name: Update the webserver, depending on which is installed on the system. Do not install the other one + ansible.builtin.dnf: + name: + - httpd + - nginx + state: latest + update_only: yes + - name: Install the nginx rpm from a remote repo ansible.builtin.dnf: name: 'http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm' diff --git a/lib/ansible/modules/uri.py b/lib/ansible/modules/uri.py index c0021364..f68b86a5 100644 --- a/lib/ansible/modules/uri.py +++ b/lib/ansible/modules/uri.py @@ -142,7 +142,7 @@ options: - This should only set to C(false) used on personally controlled sites using self-signed certificates. - Prior to 1.9.2 the code defaulted to C(false). type: bool - default: yes + default: true version_added: '1.9.2' client_cert: description: @@ -184,7 +184,7 @@ options: description: - If C(false), it will not use a proxy, even if one is defined in an environment variable on the target hosts. type: bool - default: yes + default: true unix_socket: description: - Path to Unix domain socket to use for connection @@ -252,7 +252,7 @@ EXAMPLES = r''' - name: Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents ansible.builtin.uri: url: http://www.example.com - return_content: yes + return_content: true register: this failed_when: "'AWESOME' not in this.content" @@ -263,7 +263,7 @@ EXAMPLES = r''' password: your_pass method: POST body: "{{ lookup('ansible.builtin.file','issue.json') }}" - force_basic_auth: yes + force_basic_auth: true status_code: 201 body_format: json @@ -310,7 +310,7 @@ EXAMPLES = r''' ansible.builtin.uri: url: https://your.form.based.auth.example.com/dashboard.php method: GET - return_content: yes + return_content: true headers: Cookie: "{{ login.cookies_string }}" @@ -320,7 +320,7 @@ EXAMPLES = r''' user: "{{ jenkins.user }}" password: "{{ jenkins.password }}" method: GET - force_basic_auth: yes + force_basic_auth: true status_code: 201 - name: POST from contents of local file @@ -334,7 +334,7 @@ EXAMPLES = r''' url: https://httpbin.org/post method: POST src: /path/to/my/file.json - remote_src: yes + remote_src: true - name: Create workspaces in Log analytics Azure ansible.builtin.uri: diff --git a/lib/ansible/modules/user.py b/lib/ansible/modules/user.py index cb35e950..2fc4e473 100644 --- a/lib/ansible/modules/user.py +++ b/lib/ansible/modules/user.py @@ -86,12 +86,13 @@ options: version_added: "2.0" password: description: - - Optionally set the user's password to this crypted value. 
- - On macOS systems, this value has to be cleartext. Beware of security issues. - - To create a an account with a locked/disabled password on Linux systems, set this to C('!') or C('*'). - - To create a an account with a locked/disabled password on OpenBSD, set this to C('*************'). + - If provided, set the user's password to the provided encrypted hash (Linux) or plain text password (macOS). + - B(Linux/Unix/POSIX:) Enter the hashed password as the value. - See L(FAQ entry,https://docs.ansible.com/ansible/latest/reference_appendices/faq.html#how-do-i-generate-encrypted-passwords-for-the-user-module) - for details on various ways to generate these password values. + for details on various ways to generate the hash of a password. + - To create an account with a locked/disabled password on Linux systems, set this to C('!') or C('*'). + - To create an account with a locked/disabled password on OpenBSD, set this to C('*************'). + - B(OS X/macOS:) Enter the cleartext password as the value. Be sure to take relevant security precautions. type: str state: description: diff --git a/lib/ansible/playbook/helpers.py b/lib/ansible/playbook/helpers.py index 444f571e..38e32ef9 100644 --- a/lib/ansible/playbook/helpers.py +++ b/lib/ansible/playbook/helpers.py @@ -106,6 +106,8 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h raise AnsibleAssertionError('The ds (%s) should be a dict but was a %s' % (ds, type(ds))) if 'block' in task_ds: + if use_handlers: + raise AnsibleParserError("Using a block as a handler is not supported.", obj=task_ds) t = Block.load( task_ds, play=play, diff --git a/lib/ansible/plugins/doc_fragments/files.py b/lib/ansible/plugins/doc_fragments/files.py index 2356bd72..b87fd11d 100644 --- a/lib/ansible/plugins/doc_fragments/files.py +++ b/lib/ansible/plugins/doc_fragments/files.py @@ -36,6 +36,8 @@ options: - Name of the user that should own the filesystem object, as would be fed to I(chown). - When left unspecified, it uses the current user unless you are root, in which case it can preserve the previous ownership. + - Specifying a numeric username will be assumed to be a user ID and not a username. Avoid numeric usernames to avoid this confusion. 
+ type: str group: description: diff --git a/lib/ansible/plugins/filter/combine.yml b/lib/ansible/plugins/filter/combine.yml index f2f43718..86788f31 100644 --- a/lib/ansible/plugins/filter/combine.yml +++ b/lib/ansible/plugins/filter/combine.yml @@ -36,7 +36,6 @@ EXAMPLES: | # ab => {'a':1, 'b':3, 'c': 4} ab: {{ {'a':1, 'b':2} | combine({'b':3, 'c':4}) }} - # ab => {'a':1, 'b':3, 'c': 4} many: "{{ dict1 | combine(dict2, dict3, dict4) }}" RETURN: diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py index e3c4b02d..5cc05ee3 100644 --- a/lib/ansible/plugins/strategy/__init__.py +++ b/lib/ansible/plugins/strategy/__init__.py @@ -54,7 +54,7 @@ from ansible.template import Templar from ansible.utils.display import Display from ansible.utils.fqcn import add_internal_fqcns from ansible.utils.unsafe_proxy import wrap_var -from ansible.utils.vars import combine_vars +from ansible.utils.vars import combine_vars, isidentifier from ansible.vars.clean import strip_internal_keys, module_response_deepcopy display = Display() @@ -750,6 +750,10 @@ class StrategyBase: # register final results if original_task.register: + + if not isidentifier(original_task.register): + raise AnsibleError("Invalid variable name in 'register' specified: '%s'" % original_task.register) + host_list = self.get_task_hosts(iterator, original_host, original_task) clean_copy = strip_internal_keys(module_response_deepcopy(task_result._result)) diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py index dc34e097..a3c91c29 100644 --- a/lib/ansible/plugins/strategy/linear.py +++ b/lib/ansible/plugins/strategy/linear.py @@ -314,11 +314,7 @@ class StrategyModule(StrategyBase): included_tasks.extend(final_block.get_tasks()) for host in hosts_left: - # handlers are included regardless of _hosts so noop - # tasks do not have to be created for lockstep, - # not notified handlers are then simply skipped - # in the PlayIterator - if host in included_file._hosts or is_handler: + if host in included_file._hosts: all_blocks[host].append(final_block) display.debug("done iterating over new_blocks loaded from include file") diff --git a/lib/ansible/release.py b/lib/ansible/release.py index 7a2e435b..66a04b98 100644 --- a/lib/ansible/release.py +++ b/lib/ansible/release.py @@ -19,6 +19,6 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type -__version__ = '2.14.2' +__version__ = '2.14.3' __author__ = 'Ansible, Inc.' __codename__ = "C'mon Everybody" diff --git a/lib/ansible_core.egg-info/PKG-INFO b/lib/ansible_core.egg-info/PKG-INFO index b3beba17..0c7edcf5 100644 --- a/lib/ansible_core.egg-info/PKG-INFO +++ b/lib/ansible_core.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: ansible-core -Version: 2.14.2 +Version: 2.14.3 Summary: Radically simple IT automation Home-page: https://ansible.com/ Author: Ansible, Inc. 
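For context on the ``register`` validation added to lib/ansible/plugins/strategy/__init__.py above: registered variable names must now be valid identifiers, and the check also runs when the task is skipped. A minimal sketch of the behaviour, with an illustrative host and task text not taken from the patch, mirroring the ``test/integration/targets/register/`` playbooks added later in this patch:

.. code-block:: yaml

   - hosts: testhost
     gather_facts: false
     tasks:
       - name: valid identifier, accepted as before
         ansible.builtin.debug:
           msg: registered fine
         register: debug_result

       - name: invalid identifier, now rejected even though the task is skipped
         ansible.builtin.debug:
           msg: never reached
         when: false
         register: 200  # rejected by the new isidentifier() check: "Invalid variable name in 'register' specified: '200'"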
diff --git a/lib/ansible_core.egg-info/SOURCES.txt b/lib/ansible_core.egg-info/SOURCES.txt index 54f63989..9444d2ff 100644 --- a/lib/ansible_core.egg-info/SOURCES.txt +++ b/lib/ansible_core.egg-info/SOURCES.txt @@ -123,7 +123,6 @@ docs/docsite/rst/dev_guide/developing_modules_best_practices.rst docs/docsite/rst/dev_guide/developing_modules_checklist.rst docs/docsite/rst/dev_guide/developing_modules_documenting.rst docs/docsite/rst/dev_guide/developing_modules_general.rst -docs/docsite/rst/dev_guide/developing_modules_general_aci.rst docs/docsite/rst/dev_guide/developing_modules_general_windows.rst docs/docsite/rst/dev_guide/developing_modules_in_groups.rst docs/docsite/rst/dev_guide/developing_plugins.rst @@ -145,11 +144,6 @@ docs/docsite/rst/dev_guide/testing_sanity.rst docs/docsite/rst/dev_guide/testing_units.rst docs/docsite/rst/dev_guide/testing_units_modules.rst docs/docsite/rst/dev_guide/testing_validate-modules.rst -docs/docsite/rst/dev_guide/platforms/aws_guidelines.rst -docs/docsite/rst/dev_guide/platforms/openstack_guidelines.rst -docs/docsite/rst/dev_guide/platforms/ovirt_dev_guide.rst -docs/docsite/rst/dev_guide/platforms/vmware_guidelines.rst -docs/docsite/rst/dev_guide/platforms/vmware_rest_guidelines.rst docs/docsite/rst/dev_guide/shared_snippets/licensing.txt docs/docsite/rst/dev_guide/style_guide/basic_rules.rst docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst @@ -1496,6 +1490,7 @@ test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_collection_ba test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_multi_collection_repo.yml test/integration/targets/ansible-galaxy-collection-scm/tasks/setup_recursive_scm_dependency.yml test/integration/targets/ansible-galaxy-collection-scm/tasks/test_invalid_version.yml +test/integration/targets/ansible-galaxy-collection-scm/tasks/test_manifest_metadata.yml test/integration/targets/ansible-galaxy-collection-scm/tasks/test_supported_resolvelib_versions.yml test/integration/targets/ansible-galaxy-collection-scm/templates/git_prefix_name.yml test/integration/targets/ansible-galaxy-collection-scm/templates/name_and_type.yml @@ -2574,6 +2569,8 @@ test/integration/targets/handlers/46447.yml test/integration/targets/handlers/52561.yml test/integration/targets/handlers/54991.yml test/integration/targets/handlers/58841.yml +test/integration/targets/handlers/79776-handlers.yml +test/integration/targets/handlers/79776.yml test/integration/targets/handlers/aliases test/integration/targets/handlers/from_handlers.yml test/integration/targets/handlers/handlers.yml @@ -2582,6 +2579,10 @@ test/integration/targets/handlers/include_handlers_fail_force.yml test/integration/targets/handlers/inventory.handlers test/integration/targets/handlers/order.yml test/integration/targets/handlers/runme.sh +test/integration/targets/handlers/test_block_as_handler-import.yml +test/integration/targets/handlers/test_block_as_handler-include.yml +test/integration/targets/handlers/test_block_as_handler-include_import-handlers.yml +test/integration/targets/handlers/test_block_as_handler.yml test/integration/targets/handlers/test_flush_handlers_as_handler.yml test/integration/targets/handlers/test_flush_handlers_rescue_always.yml test/integration/targets/handlers/test_flush_in_rescue_always.yml @@ -3660,6 +3661,11 @@ test/integration/targets/reboot/tasks/test_molly_guard.yml test/integration/targets/reboot/tasks/test_reboot_command.yml test/integration/targets/reboot/tasks/test_standard_scenarios.yml 
test/integration/targets/reboot/vars/main.yml +test/integration/targets/register/aliases +test/integration/targets/register/can_register.yml +test/integration/targets/register/invalid.yml +test/integration/targets/register/invalid_skipped.yml +test/integration/targets/register/runme.sh test/integration/targets/rel_plugin_loading/aliases test/integration/targets/rel_plugin_loading/notyaml.yml test/integration/targets/rel_plugin_loading/runme.sh @@ -3956,7 +3962,8 @@ test/integration/targets/task_ordering/meta/main.yml test/integration/targets/task_ordering/tasks/main.yml test/integration/targets/task_ordering/tasks/taskorder-include.yml test/integration/targets/tasks/aliases -test/integration/targets/tasks/tasks/main.yml +test/integration/targets/tasks/playbook.yml +test/integration/targets/tasks/runme.sh test/integration/targets/tempfile/aliases test/integration/targets/tempfile/meta/main.yml test/integration/targets/tempfile/tasks/main.yml diff --git a/test/integration/targets/ansible-galaxy-collection-scm/meta/main.yml b/test/integration/targets/ansible-galaxy-collection-scm/meta/main.yml index e3dd5fb1..655a62f0 100644 --- a/test/integration/targets/ansible-galaxy-collection-scm/meta/main.yml +++ b/test/integration/targets/ansible-galaxy-collection-scm/meta/main.yml @@ -1,3 +1,4 @@ --- dependencies: - setup_remote_tmp_dir +- setup_gnutar diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml index 546c4083..dab599b1 100644 --- a/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml +++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/main.yml @@ -30,6 +30,7 @@ - include_tasks: ./download.yml - include_tasks: ./setup_collection_bad_version.yml - include_tasks: ./test_invalid_version.yml + - include_tasks: ./test_manifest_metadata.yml always: diff --git a/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_manifest_metadata.yml b/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_manifest_metadata.yml new file mode 100644 index 00000000..a01551ca --- /dev/null +++ b/test/integration/targets/ansible-galaxy-collection-scm/tasks/test_manifest_metadata.yml @@ -0,0 +1,55 @@ +- name: Test installing a collection from a git repo containing a MANIFEST.json + block: + - name: Create a temp directory for building the collection + file: + path: '{{ galaxy_dir }}/scratch' + state: directory + + - name: Initialize a collection + command: 'ansible-galaxy collection init namespace_3.collection_1' + args: + chdir: '{{ galaxy_dir }}/scratch' + + - name: Build the collection + command: 'ansible-galaxy collection build namespace_3/collection_1' + args: + chdir: '{{ galaxy_dir }}/scratch' + + - name: Initialize git repository + command: 'git init {{ scm_path }}/namespace_3' + + - name: Create the destination for the collection + file: + path: '{{ scm_path }}/namespace_3/collection_1' + state: directory + + - name: Unarchive the collection in the git repo + unarchive: + dest: '{{ scm_path }}/namespace_3/collection_1' + src: '{{ galaxy_dir }}/scratch/namespace_3-collection_1-1.0.0.tar.gz' + remote_src: yes + + - name: Commit the changes + shell: git add ./; git commit -m 'add collection' + args: + chdir: '{{ scm_path }}/namespace_3' + + - name: Install the collection in the git repository + command: 'ansible-galaxy collection install git+file://{{ scm_path }}/namespace_3/.git' + register: result + + - name: Assert the collection was installed 
successfully + assert: + that: + - '"namespace_3.collection_1:1.0.0 was installed successfully" in result.stdout_lines' + + always: + - name: Clean up directories from test + file: + path: '{{ galaxy_dir }}/scratch' + state: absent + loop: + - '{{ galaxy_dir }}/scratch' + - '{{ scm_path }}/namespace_3' + + - include_tasks: ./empty_installed_collections.yml diff --git a/test/integration/targets/handlers/79776-handlers.yml b/test/integration/targets/handlers/79776-handlers.yml new file mode 100644 index 00000000..639c9cad --- /dev/null +++ b/test/integration/targets/handlers/79776-handlers.yml @@ -0,0 +1,2 @@ +- debug: + msg: "Handler for {{ inventory_hostname }}" diff --git a/test/integration/targets/handlers/79776.yml b/test/integration/targets/handlers/79776.yml new file mode 100644 index 00000000..08d22272 --- /dev/null +++ b/test/integration/targets/handlers/79776.yml @@ -0,0 +1,10 @@ +- hosts: A,B + gather_facts: false + force_handlers: true + tasks: + - command: echo + notify: handler1 + when: inventory_hostname == "A" + handlers: + - name: handler1 + include_tasks: 79776-handlers.yml diff --git a/test/integration/targets/handlers/runme.sh b/test/integration/targets/handlers/runme.sh index e2d52180..76fc99d8 100755 --- a/test/integration/targets/handlers/runme.sh +++ b/test/integration/targets/handlers/runme.sh @@ -170,3 +170,14 @@ ansible-playbook test_flush_handlers_rescue_always.yml -i inventory.handlers "$@ ansible-playbook test_fqcn_meta_flush_handlers.yml -i inventory.handlers "$@" 2>&1 | tee out.txt grep out.txt -e "handler ran" grep out.txt -e "after flush" + +ansible-playbook 79776.yml -i inventory.handlers "$@" + +ansible-playbook test_block_as_handler.yml "$@" 2>&1 | tee out.txt +grep out.txt -e "ERROR! Using a block as a handler is not supported." + +ansible-playbook test_block_as_handler-include.yml "$@" 2>&1 | tee out.txt +grep out.txt -e "ERROR! Using a block as a handler is not supported." + +ansible-playbook test_block_as_handler-import.yml "$@" 2>&1 | tee out.txt +grep out.txt -e "ERROR! Using a block as a handler is not supported." 
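The test_manifest_metadata.yml tasks above exercise the ansible-galaxy fix for installing collections from git repositories that already ship a MANIFEST.json (issue 79796). As a hedged illustration of the user-facing side, such a collection can be installed either directly, as the test does (``ansible-galaxy collection install git+file:///path/to/repo/.git``), or through a ``requirements.yml`` entry along the following lines; the repository URL, subdirectory fragment and branch are placeholders, not values from the patch:

.. code-block:: yaml

   # requirements.yml -- URL, fragment and version are illustrative
   collections:
     - name: https://github.com/my_org/namespace_3.git#/collection_1/
       type: git
       version: main

Installing with ``ansible-galaxy collection install -r requirements.yml`` then succeeds even though the checked-out collection directory already contains a MANIFEST.json.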
diff --git a/test/integration/targets/handlers/test_block_as_handler-import.yml b/test/integration/targets/handlers/test_block_as_handler-import.yml new file mode 100644 index 00000000..ad6bb0d5 --- /dev/null +++ b/test/integration/targets/handlers/test_block_as_handler-import.yml @@ -0,0 +1,7 @@ +- hosts: localhost + gather_facts: false + tasks: + - debug: + handlers: + - name: handler + import_tasks: test_block_as_handler-include_import-handlers.yml diff --git a/test/integration/targets/handlers/test_block_as_handler-include.yml b/test/integration/targets/handlers/test_block_as_handler-include.yml new file mode 100644 index 00000000..5b03b0a8 --- /dev/null +++ b/test/integration/targets/handlers/test_block_as_handler-include.yml @@ -0,0 +1,8 @@ +- hosts: localhost + gather_facts: false + tasks: + - command: echo + notify: handler + handlers: + - name: handler + include_tasks: test_block_as_handler-include_import-handlers.yml diff --git a/test/integration/targets/handlers/test_block_as_handler-include_import-handlers.yml b/test/integration/targets/handlers/test_block_as_handler-include_import-handlers.yml new file mode 100644 index 00000000..61c058ba --- /dev/null +++ b/test/integration/targets/handlers/test_block_as_handler-include_import-handlers.yml @@ -0,0 +1,8 @@ +- name: handler + block: + - name: due to how handlers are implemented, this is correct as it is equivalent to an implicit block + debug: + - name: this is a parser error, blocks as handlers are not supported + block: + - name: handler in a nested block + debug: diff --git a/test/integration/targets/handlers/test_block_as_handler.yml b/test/integration/targets/handlers/test_block_as_handler.yml new file mode 100644 index 00000000..bd4f5b99 --- /dev/null +++ b/test/integration/targets/handlers/test_block_as_handler.yml @@ -0,0 +1,13 @@ +- hosts: localhost + gather_facts: false + tasks: + - debug: + handlers: + - name: handler + block: + - name: due to how handlers are implemented, this is correct as it is equivalent to an implicit block + debug: + - name: this is a parser error, blocks as handlers are not supported + block: + - name: handler in a nested block + debug: diff --git a/test/integration/targets/register/aliases b/test/integration/targets/register/aliases new file mode 100644 index 00000000..b76d5f67 --- /dev/null +++ b/test/integration/targets/register/aliases @@ -0,0 +1,2 @@ +shippable/posix/group3 +context/controller # this "module" is actually an action that runs on the controller diff --git a/test/integration/targets/register/can_register.yml b/test/integration/targets/register/can_register.yml new file mode 100644 index 00000000..da610101 --- /dev/null +++ b/test/integration/targets/register/can_register.yml @@ -0,0 +1,21 @@ +- hosts: testhost + gather_facts: false + tasks: + - name: test registering + debug: msg='does nothing really but register this' + register: debug_msg + + - name: validate registering + assert: + that: + - debug_msg is defined + + - name: test registering skipped + debug: msg='does nothing really but register this' + when: false + register: debug_skipped + + - name: validate registering + assert: + that: + - debug_skipped is defined diff --git a/test/integration/targets/register/invalid.yml b/test/integration/targets/register/invalid.yml new file mode 100644 index 00000000..bdca9d66 --- /dev/null +++ b/test/integration/targets/register/invalid.yml @@ -0,0 +1,11 @@ +- hosts: testhost + gather_facts: false + tasks: + - name: test registering + debug: msg='does nothing really but register 
this' + register: 200 + + - name: never gets here + assert: + that: + - 200 is not defined diff --git a/test/integration/targets/register/invalid_skipped.yml b/test/integration/targets/register/invalid_skipped.yml new file mode 100644 index 00000000..0ad31f51 --- /dev/null +++ b/test/integration/targets/register/invalid_skipped.yml @@ -0,0 +1,12 @@ +- hosts: testhost + gather_facts: false + tasks: + - name: test registering bad var when skipped + debug: msg='does nothing really but register this' + when: false + register: 200 + + - name: never gets here + assert: + that: + - 200 is not defined diff --git a/test/integration/targets/register/runme.sh b/test/integration/targets/register/runme.sh new file mode 100755 index 00000000..8adc5047 --- /dev/null +++ b/test/integration/targets/register/runme.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -eux + +# does it work? +ansible-playbook can_register.yml -i ../../inventory -v "$@" + +# ensure we do error when it its apprpos +set +e +result="$(ansible-playbook invalid.yml -i ../../inventory -v "$@" 2>&1)" +set -e +grep -q "Invalid variable name in " <<< "${result}" diff --git a/test/integration/targets/tasks/playbook.yml b/test/integration/targets/tasks/playbook.yml new file mode 100644 index 00000000..80d9f8b1 --- /dev/null +++ b/test/integration/targets/tasks/playbook.yml @@ -0,0 +1,19 @@ +- hosts: localhost + gather_facts: false + tasks: + # make sure tasks with an undefined variable in the name are gracefully handled + - name: "Task name with undefined variable: {{ not_defined }}" + debug: + msg: Hello + + - name: ensure malformed raw_params on arbitrary actions are not ignored + debug: + garbage {{"with a template"}} + ignore_errors: true + register: bad_templated_raw_param + + - assert: + that: + - bad_templated_raw_param is failed + - | + "invalid or malformed argument: 'garbage with a template'" in bad_templated_raw_param.msg diff --git a/test/integration/targets/tasks/runme.sh b/test/integration/targets/tasks/runme.sh new file mode 100755 index 00000000..594447bd --- /dev/null +++ b/test/integration/targets/tasks/runme.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +ansible-playbook playbook.yml "$@" diff --git a/test/integration/targets/tasks/tasks/main.yml b/test/integration/targets/tasks/tasks/main.yml deleted file mode 100644 index f6ac1114..00000000 --- a/test/integration/targets/tasks/tasks/main.yml +++ /dev/null @@ -1,4 +0,0 @@ -# make sure tasks with an undefined variable in the name are gracefully handled -- name: "Task name with undefined variable: {{ not_defined }}" - debug: - msg: Hello diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py index 5798d352..9efcda26 100644 --- a/test/lib/ansible_test/_internal/ansible_util.py +++ b/test/lib/ansible_test/_internal/ansible_util.py @@ -284,11 +284,11 @@ def get_collection_detail(python: PythonConfig) -> CollectionDetail: def run_playbook( - args: EnvironmentConfig, - inventory_path: str, - playbook: str, - capture: bool, - variables: t.Optional[dict[str, t.Any]] = None, + args: EnvironmentConfig, + inventory_path: str, + playbook: str, + capture: bool, + variables: t.Optional[dict[str, t.Any]] = None, ) -> None: """Run the specified playbook using the given inventory file and playbook variables.""" playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook) diff --git a/test/lib/ansible_test/_internal/become.py b/test/lib/ansible_test/_internal/become.py index cabf97e4..e653959a 100644 --- 
a/test/lib/ansible_test/_internal/become.py +++ b/test/lib/ansible_test/_internal/become.py @@ -12,7 +12,7 @@ from .util import ( class Become(metaclass=abc.ABCMeta): """Base class for become implementations.""" @classmethod - def name(cls): + def name(cls) -> str: """The name of this plugin.""" return cls.__name__.lower() @@ -48,7 +48,7 @@ class Doas(Become): class DoasSudo(Doas): """Become using 'doas' in ansible-test and then after bootstrapping use 'sudo' for other ansible commands.""" @classmethod - def name(cls): + def name(cls) -> str: """The name of this plugin.""" return 'doas_sudo' @@ -78,7 +78,7 @@ class Su(Become): class SuSudo(Su): """Become using 'su' in ansible-test and then after bootstrapping use 'sudo' for other ansible commands.""" @classmethod - def name(cls): + def name(cls) -> str: """The name of this plugin.""" return 'su_sudo' diff --git a/test/lib/ansible_test/_internal/cgroup.py b/test/lib/ansible_test/_internal/cgroup.py index b55d878d..977e359d 100644 --- a/test/lib/ansible_test/_internal/cgroup.py +++ b/test/lib/ansible_test/_internal/cgroup.py @@ -29,7 +29,7 @@ class CGroupEntry: path: pathlib.PurePosixPath @property - def root_path(self): + def root_path(self) -> pathlib.PurePosixPath: """The root path for this cgroup subsystem.""" return pathlib.PurePosixPath(CGroupPath.ROOT, self.subsystem) diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py index 677fafce..97e41dae 100644 --- a/test/lib/ansible_test/_internal/ci/__init__.py +++ b/test/lib/ansible_test/_internal/ci/__init__.py @@ -152,6 +152,8 @@ class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta): private_key_pem = self.initialize_private_key() private_key = load_pem_private_key(to_bytes(private_key_pem), None, default_backend()) + assert isinstance(private_key, ec.EllipticCurvePrivateKey) + signature_raw_bytes = private_key.sign(payload_bytes, ec.ECDSA(hashes.SHA256())) return signature_raw_bytes diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py index 557fbacb..9170dfec 100644 --- a/test/lib/ansible_test/_internal/ci/azp.py +++ b/test/lib/ansible_test/_internal/ci/azp.py @@ -40,7 +40,7 @@ CODE = 'azp' class AzurePipelines(CIProvider): """CI provider implementation for Azure Pipelines.""" - def __init__(self): + def __init__(self) -> None: self.auth = AzurePipelinesAuthHelper() @staticmethod diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py index 863de23c..aacc2ca9 100644 --- a/test/lib/ansible_test/_internal/classification/__init__.py +++ b/test/lib/ansible_test/_internal/classification/__init__.py @@ -379,9 +379,9 @@ class PathMapper: if is_subdir(path, data_context().content.integration_path): if dirname == data_context().content.integration_path: for command in ( - 'integration', - 'windows-integration', - 'network-integration', + 'integration', + 'windows-integration', + 'network-integration', ): if name == command and ext == '.cfg': return { @@ -641,19 +641,19 @@ class PathMapper: if '/' not in path: if path in ( - '.gitignore', - 'COPYING', - 'LICENSE', - 'Makefile', + '.gitignore', + 'COPYING', + 'LICENSE', + 'Makefile', ): return minimal if ext in ( - '.in', - '.md', - '.rst', - '.toml', - '.txt', + '.in', + '.md', + '.rst', + '.toml', + '.txt', ): return minimal @@ -757,17 +757,17 @@ class PathMapper: if path.startswith('test/lib/ansible_test/_data/requirements/'): if name in ( - 'integration', - 
'network-integration', - 'windows-integration', + 'integration', + 'network-integration', + 'windows-integration', ): return { name: self.integration_all_target, } if name in ( - 'sanity', - 'units', + 'sanity', + 'units', ): return { name: 'all', @@ -826,11 +826,11 @@ class PathMapper: if '/' not in path: if path in ( - '.gitattributes', - '.gitignore', - '.mailmap', - 'COPYING', - 'Makefile', + '.gitattributes', + '.gitignore', + '.mailmap', + 'COPYING', + 'Makefile', ): return minimal @@ -840,11 +840,11 @@ class PathMapper: return all_tests(self.args) # broad impact, run all tests if ext in ( - '.in', - '.md', - '.rst', - '.toml', - '.txt', + '.in', + '.md', + '.rst', + '.toml', + '.txt', ): return minimal diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py index df888738..77ffeacf 100644 --- a/test/lib/ansible_test/_internal/classification/python.py +++ b/test/lib/ansible_test/_internal/classification/python.py @@ -146,10 +146,8 @@ def get_python_module_utils_name(path: str) -> str: return name -def enumerate_module_utils(): - """Return a list of available module_utils imports. - :rtype: set[str] - """ +def enumerate_module_utils() -> set[str]: + """Return a list of available module_utils imports.""" module_utils = [] for path in data_context().content.walk_files(data_context().content.module_utils_path): diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py index 9356442d..540cf552 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py @@ -34,17 +34,17 @@ class RegisteredCompletionFinder(OptionCompletionFinder): These registered completions, if provided, are used to filter the final completion results. This works around a known bug: https://github.com/kislyuk/argcomplete/issues/221 """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self.registered_completions: t.Optional[list[str]] = None def completer( - self, - prefix: str, - action: argparse.Action, - parsed_args: argparse.Namespace, - **kwargs, + self, + prefix: str, + action: argparse.Action, + parsed_args: argparse.Namespace, + **kwargs, ) -> list[str]: """ Return a list of completions for the specified prefix and action. @@ -63,10 +63,10 @@ class RegisteredCompletionFinder(OptionCompletionFinder): @abc.abstractmethod def get_completions( - self, - prefix: str, - action: argparse.Action, - parsed_args: argparse.Namespace, + self, + prefix: str, + action: argparse.Action, + parsed_args: argparse.Namespace, ) -> list[str]: """ Return a list of completions for the specified prefix and action. 
@@ -89,9 +89,9 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta): documentation_state: dict[t.Type[CompositeAction], DocumentationState] = {} def __init__( - self, - *args, - **kwargs, + self, + *args, + **kwargs, ): self.definition = self.create_parser() self.documentation_state[type(self)] = documentation_state = DocumentationState() @@ -108,11 +108,11 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta): """Return a namespace parser to parse the argument associated with this action.""" def __call__( - self, - parser, - namespace, - values, - option_string=None, + self, + parser, + namespace, + values, + option_string=None, ): state = ParserState(mode=ParserMode.PARSE, namespaces=[namespace], remainder=values) @@ -135,10 +135,10 @@ class CompositeAction(argparse.Action, metaclass=abc.ABCMeta): class CompositeActionCompletionFinder(RegisteredCompletionFinder): """Completion finder with support for composite argument parsing.""" def get_completions( - self, - prefix: str, - action: argparse.Action, - parsed_args: argparse.Namespace, + self, + prefix: str, + action: argparse.Action, + parsed_args: argparse.Namespace, ) -> list[str]: """Return a list of completions appropriate for the given prefix and action, taking into account the arguments that have already been parsed.""" assert isinstance(action, CompositeAction) @@ -232,8 +232,8 @@ def detect_false_file_completion(value: str, mode: ParserMode) -> bool: def complete( - completer: Parser, - state: ParserState, + completer: Parser, + state: ParserState, ) -> Completion: """Perform argument completion using the given completer and return the completion result.""" value = state.remainder diff --git a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py index df19b338..cf5776da 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py @@ -9,7 +9,7 @@ import typing as t class Substitute: """Substitute for missing class which accepts all arguments.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: pass @@ -87,7 +87,7 @@ class OptionCompletionFinder(CompletionFinder): """ enabled = bool(argcomplete) - def __init__(self, *args, validator=None, **kwargs): + def __init__(self, *args, validator=None, **kwargs) -> None: if validator: raise ValueError() diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py index 429b9c0c..d07e03cb 100644 --- a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py +++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py @@ -341,7 +341,7 @@ class IntegerParser(DynamicChoicesParser): class BooleanParser(ChoicesParser): """Composite argument parser for boolean (yes/no) values.""" - def __init__(self): + def __init__(self) -> None: super().__init__(['yes', 'no']) def parse(self, state: ParserState) -> bool: diff --git a/test/lib/ansible_test/_internal/cli/commands/__init__.py b/test/lib/ansible_test/_internal/cli/commands/__init__.py index 2ecd3a5e..2eb14abc 100644 --- a/test/lib/ansible_test/_internal/cli/commands/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/__init__.py @@ -44,8 +44,8 @@ from .units import ( def do_commands( - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ) -> 
None: """Command line parsing for all commands.""" common = argparse.ArgumentParser(add_help=False) diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py index 96beafab..28e67709 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py @@ -37,9 +37,9 @@ from .xml import ( def do_coverage( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for all `coverage` commands.""" coverage_common = argparse.ArgumentParser(add_help=False, parents=[parent]) @@ -61,7 +61,7 @@ def do_coverage( def add_coverage_common( - parser: argparse.ArgumentParser, + parser: argparse.ArgumentParser, ): """Add common coverage arguments.""" parser.add_argument( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py index 9dbf8c0c..05fbd233 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py @@ -13,9 +13,9 @@ from ....environments import ( def do_analyze( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for all `coverage analyze` commands.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py index 429111e2..7b6ea3eb 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py @@ -29,9 +29,9 @@ from .missing import ( def do_targets( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for all `coverage analyze targets` commands.""" targets = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py index d25240b4..7fa49bf9 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py @@ -17,9 +17,9 @@ from .....environments import ( def do_combine( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `coverage analyze targets combine` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py index d34a12bc..f5f020fe 100644 --- 
a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py @@ -17,9 +17,9 @@ from .....environments import ( def do_expand( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `coverage analyze targets expand` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py index 395d42a9..afcb828b 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py @@ -17,9 +17,9 @@ from .....environments import ( def do_filter( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `coverage analyze targets filter` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py index 0090d277..0d13933d 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py @@ -17,9 +17,9 @@ from .....environments import ( def do_generate( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `coverage analyze targets generate` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py index 7a6b847c..8af236f3 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py @@ -17,9 +17,9 @@ from .....environments import ( def do_missing( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `coverage analyze targets missing` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py index 03d3ae89..9b6d34a3 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py @@ -19,10 +19,10 @@ from ...environments import ( def do_combine( - subparsers, - parent: argparse.ArgumentParser, - add_coverage_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_coverage_common: c.Callable[[argparse.ArgumentParser], 
None], + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for the `coverage combine` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py index 2491fa0d..ef356f02 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py @@ -17,9 +17,9 @@ from ...environments import ( def do_erase( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for the `coverage erase` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/html.py b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py index 5823b692..5f719de7 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/html.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py @@ -19,10 +19,10 @@ from ...environments import ( def do_html( - subparsers, - parent: argparse.ArgumentParser, - add_coverage_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_coverage_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for the `coverage html` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/report.py b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py index 93f868fd..e6a6e805 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/report.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py @@ -19,10 +19,10 @@ from ...environments import ( def do_report( - subparsers, - parent: argparse.ArgumentParser, - add_coverage_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_coverage_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for the `coverage report` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py index 07e66252..e7b03ca8 100644 --- a/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py +++ b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py @@ -19,10 +19,10 @@ from ...environments import ( def do_xml( - subparsers, - parent: argparse.ArgumentParser, - add_coverage_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_coverage_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ) -> None: """Command line parsing for the `coverage xml` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/env.py b/test/lib/ansible_test/_internal/cli/commands/env.py index 87fa41ad..0cd21145 100644 --- 
a/test/lib/ansible_test/_internal/cli/commands/env.py +++ b/test/lib/ansible_test/_internal/cli/commands/env.py @@ -17,9 +17,9 @@ from ..environments import ( def do_env( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `env` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py index 916249fa..dfdefb11 100644 --- a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py @@ -26,9 +26,9 @@ from .windows import ( def do_integration( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for all integration commands.""" parser = argparse.ArgumentParser( @@ -42,7 +42,7 @@ def do_integration( def add_integration_common( - parser: argparse.ArgumentParser, + parser: argparse.ArgumentParser, ): """Add common integration arguments.""" register_completer(parser.add_argument( diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/network.py b/test/lib/ansible_test/_internal/cli/commands/integration/network.py index 4d0eb918..a05985b5 100644 --- a/test/lib/ansible_test/_internal/cli/commands/integration/network.py +++ b/test/lib/ansible_test/_internal/cli/commands/integration/network.py @@ -35,10 +35,10 @@ from ...completers import ( def do_network_integration( - subparsers, - parent: argparse.ArgumentParser, - add_integration_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_integration_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `network-integration` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/posix.py b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py index e6a9527c..78d61658 100644 --- a/test/lib/ansible_test/_internal/cli/commands/integration/posix.py +++ b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py @@ -26,10 +26,10 @@ from ...environments import ( def do_posix_integration( - subparsers, - parent: argparse.ArgumentParser, - add_integration_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_integration_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `integration` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/windows.py b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py index cfbdb44f..ab022e3b 100644 --- a/test/lib/ansible_test/_internal/cli/commands/integration/windows.py +++ b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py @@ -26,10 +26,10 @@ from ...environments import ( def do_windows_integration( - subparsers, - parent: argparse.ArgumentParser, - 
add_integration_common: c.Callable[[argparse.ArgumentParser], None], - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + add_integration_common: c.Callable[[argparse.ArgumentParser], None], + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `windows-integration` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/sanity.py b/test/lib/ansible_test/_internal/cli/commands/sanity.py index 36f0ec58..8b4a9ae5 100644 --- a/test/lib/ansible_test/_internal/cli/commands/sanity.py +++ b/test/lib/ansible_test/_internal/cli/commands/sanity.py @@ -29,9 +29,9 @@ from ..environments import ( def do_sanity( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `sanity` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/shell.py b/test/lib/ansible_test/_internal/cli/commands/shell.py index 16fb8b44..1baffc6e 100644 --- a/test/lib/ansible_test/_internal/cli/commands/shell.py +++ b/test/lib/ansible_test/_internal/cli/commands/shell.py @@ -20,9 +20,9 @@ from ..environments import ( def do_shell( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `shell` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/commands/units.py b/test/lib/ansible_test/_internal/cli/commands/units.py index ab7f055c..c541a872 100644 --- a/test/lib/ansible_test/_internal/cli/commands/units.py +++ b/test/lib/ansible_test/_internal/cli/commands/units.py @@ -24,9 +24,9 @@ from ..environments import ( def do_units( - subparsers, - parent: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, + subparsers, + parent: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, ): """Command line parsing for the `units` command.""" parser: argparse.ArgumentParser = subparsers.add_parser( diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py index c69b54d7..93006d5c 100644 --- a/test/lib/ansible_test/_internal/cli/compat.py +++ b/test/lib/ansible_test/_internal/cli/compat.py @@ -84,25 +84,25 @@ def get_option_name(name: str) -> str: class PythonVersionUnsupportedError(ApplicationError): """A Python version was requested for a context which does not support that version.""" - def __init__(self, context, version, versions): + def __init__(self, context: str, version: str, versions: c.Iterable[str]) -> None: super().__init__(f'Python {version} is not supported by environment `{context}`. Supported Python version(s) are: {", ".join(versions)}') class PythonVersionUnspecifiedError(ApplicationError): """A Python version was not specified for a context which is unknown, thus the Python version is unknown.""" - def __init__(self, context): + def __init__(self, context: str) -> None: super().__init__(f'A Python version was not specified for environment `{context}`. 
Use the `--python` option to specify a Python version.') class ControllerNotSupportedError(ApplicationError): """Option(s) were specified which do not provide support for the controller and would be ignored because they are irrelevant for the target.""" - def __init__(self, context): + def __init__(self, context: str) -> None: super().__init__(f'Environment `{context}` does not provide a Python version supported by the controller.') class OptionsConflictError(ApplicationError): """Option(s) were specified which conflict with other options.""" - def __init__(self, first, second): + def __init__(self, first: c.Iterable[str], second: c.Iterable[str]) -> None: super().__init__(f'Options `{" ".join(first)}` cannot be combined with options `{" ".join(second)}`.') @@ -170,30 +170,30 @@ class TargetMode(enum.Enum): NO_TARGETS = enum.auto() # coverage @property - def one_host(self): + def one_host(self) -> bool: """Return True if only one host (the controller) should be used, otherwise return False.""" return self in (TargetMode.SANITY, TargetMode.UNITS, TargetMode.NO_TARGETS) @property - def no_fallback(self): + def no_fallback(self) -> bool: """Return True if no fallback is acceptable for the controller (due to options not applying to the target), otherwise return False.""" return self in (TargetMode.WINDOWS_INTEGRATION, TargetMode.NETWORK_INTEGRATION, TargetMode.NO_TARGETS) @property - def multiple_pythons(self): + def multiple_pythons(self) -> bool: """Return True if multiple Python versions are allowed, otherwise False.""" return self in (TargetMode.SANITY, TargetMode.UNITS) @property - def has_python(self): + def has_python(self) -> bool: """Return True if this mode uses Python, otherwise False.""" return self in (TargetMode.POSIX_INTEGRATION, TargetMode.SANITY, TargetMode.UNITS, TargetMode.SHELL) def convert_legacy_args( - argv: list[str], - args: t.Union[argparse.Namespace, types.SimpleNamespace], - mode: TargetMode, + argv: list[str], + args: t.Union[argparse.Namespace, types.SimpleNamespace], + mode: TargetMode, ) -> HostSettings: """Convert pre-split host arguments in the given namespace to their split counterparts.""" old_options = LegacyHostOptions.create(args) @@ -262,9 +262,9 @@ def convert_legacy_args( def controller_targets( - mode: TargetMode, - options: LegacyHostOptions, - controller: ControllerHostConfig, + mode: TargetMode, + options: LegacyHostOptions, + controller: ControllerHostConfig, ) -> list[HostConfig]: """Return the configuration for controller targets.""" python = native_python(options) @@ -288,8 +288,8 @@ def native_python(options: LegacyHostOptions) -> t.Optional[NativePythonConfig]: def get_legacy_host_config( - mode: TargetMode, - options: LegacyHostOptions, + mode: TargetMode, + options: LegacyHostOptions, ) -> tuple[ControllerHostConfig, list[HostConfig], t.Optional[FallbackDetail]]: """ Returns controller and target host configs derived from the provided legacy host options. 
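Nearly every hunk in compat.py above is the same mechanical change: exception `__init__` methods and `TargetMode` properties that previously relied on docstrings alone gain explicit parameter and return annotations (`context: str`, `-> None`, `-> bool`), while behaviour stays identical. A minimal sketch of the pattern, using hypothetical class names rather than the real ansible-test ones:

```python
import collections.abc as c


class ExampleVersionError(Exception):
    """Hypothetical error type illustrating the annotation pattern applied above."""

    # Before this kind of change the signature was: def __init__(self, context, version, versions):
    def __init__(self, context: str, version: str, versions: c.Iterable[str]) -> None:
        super().__init__(f'Python {version} is not supported by `{context}`. Supported: {", ".join(versions)}')


class ExampleMode:
    """Hypothetical class showing a property gaining an explicit -> bool return type."""

    @property
    def one_host(self) -> bool:
        """Return True if only one host (the controller) should be used, otherwise return False."""
        return True
```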
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py index 1dde9e63..5063715a 100644 --- a/test/lib/ansible_test/_internal/cli/environments.py +++ b/test/lib/ansible_test/_internal/cli/environments.py @@ -81,10 +81,10 @@ class ControllerMode(enum.Enum): def add_environments( - parser: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, - controller_mode: ControllerMode, - target_mode: TargetMode, + parser: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, + controller_mode: ControllerMode, + target_mode: TargetMode, ) -> None: """Add arguments for the environments used to run ansible-test and commands it invokes.""" no_environment = controller_mode == ControllerMode.NO_DELEGATION and target_mode == TargetMode.NO_TARGETS @@ -114,8 +114,8 @@ def add_environments( def add_global_options( - parser: argparse.ArgumentParser, - controller_mode: ControllerMode, + parser: argparse.ArgumentParser, + controller_mode: ControllerMode, ): """Add global options for controlling the test environment that work with both the legacy and composite options.""" global_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='global environment arguments')) @@ -156,10 +156,10 @@ def add_global_options( def add_composite_environment_options( - parser: argparse.ArgumentParser, - completer: CompositeActionCompletionFinder, - controller_mode: ControllerMode, - target_mode: TargetMode, + parser: argparse.ArgumentParser, + completer: CompositeActionCompletionFinder, + controller_mode: ControllerMode, + target_mode: TargetMode, ) -> list[t.Type[CompositeAction]]: """Add composite options for controlling the test environment.""" composite_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group( @@ -246,9 +246,9 @@ def add_composite_environment_options( def add_legacy_environment_options( - parser: argparse.ArgumentParser, - controller_mode: ControllerMode, - target_mode: TargetMode, + parser: argparse.ArgumentParser, + controller_mode: ControllerMode, + target_mode: TargetMode, ): """Add legacy options for controlling the test environment.""" environment: argparse.ArgumentParser = parser.add_argument_group( # type: ignore[assignment] # real type private @@ -259,8 +259,8 @@ def add_legacy_environment_options( def add_environments_python( - environments_parser: argparse.ArgumentParser, - target_mode: TargetMode, + environments_parser: argparse.ArgumentParser, + target_mode: TargetMode, ) -> None: """Add environment arguments to control the Python version(s) used.""" python_versions: tuple[str, ...] 
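The environments.py hunks keep two pre-existing idioms, visible as context lines above, for typing the result of `add_argument_group()`: because argparse's group type (`argparse._ArgumentGroup`) is private, the code either casts the group to `argparse.ArgumentParser` or annotates it with the public type and suppresses the mypy assignment error. A small standalone sketch of both idioms (the option names are invented for illustration):

```python
import argparse
import typing as t

parser = argparse.ArgumentParser()

# Idiom 1: cast, since the real return type (argparse._ArgumentGroup) is private.
global_group = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='global arguments'))
global_group.add_argument('--example-flag', action='store_true', help='hypothetical flag')

# Idiom 2: annotate with the public type and silence the checker instead.
legacy_group: argparse.ArgumentParser = parser.add_argument_group(title='legacy arguments')  # type: ignore[assignment]
legacy_group.add_argument('--example-value', help='hypothetical option')

print(parser.parse_args(['--example-flag']))
```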
@@ -285,9 +285,9 @@ def add_environments_python( def add_environments_host( - environments_parser: argparse.ArgumentParser, - controller_mode: ControllerMode, - target_mode: TargetMode, + environments_parser: argparse.ArgumentParser, + controller_mode: ControllerMode, + target_mode: TargetMode, ) -> None: """Add environment arguments for the given host and argument modes.""" environments_exclusive_group: argparse.ArgumentParser = environments_parser.add_mutually_exclusive_group() # type: ignore[assignment] # real type private @@ -341,7 +341,7 @@ def add_environment_network( def add_environment_windows( - environments_parser: argparse.ArgumentParser, + environments_parser: argparse.ArgumentParser, ) -> None: """Add environment arguments for running on a windows host.""" register_completer(environments_parser.add_argument( @@ -359,7 +359,7 @@ def add_environment_windows( def add_environment_local( - exclusive_parser: argparse.ArgumentParser, + exclusive_parser: argparse.ArgumentParser, ) -> None: """Add environment arguments for running on the local (origin) host.""" exclusive_parser.add_argument( @@ -370,8 +370,8 @@ def add_environment_local( def add_environment_venv( - exclusive_parser: argparse.ArgumentParser, - environments_parser: argparse.ArgumentParser, + exclusive_parser: argparse.ArgumentParser, + environments_parser: argparse.ArgumentParser, ) -> None: """Add environment arguments for running in ansible-test managed virtual environments.""" exclusive_parser.add_argument( @@ -387,8 +387,8 @@ def add_environment_venv( def add_global_docker( - parser: argparse.ArgumentParser, - controller_mode: ControllerMode, + parser: argparse.ArgumentParser, + controller_mode: ControllerMode, ) -> None: """Add global options for Docker.""" if controller_mode != ControllerMode.DELEGATED: @@ -450,9 +450,9 @@ def add_global_docker( def add_environment_docker( - exclusive_parser: argparse.ArgumentParser, - environments_parser: argparse.ArgumentParser, - target_mode: TargetMode, + exclusive_parser: argparse.ArgumentParser, + environments_parser: argparse.ArgumentParser, + target_mode: TargetMode, ) -> None: """Add environment arguments for running in docker containers.""" if target_mode in (TargetMode.POSIX_INTEGRATION, TargetMode.SHELL): @@ -490,8 +490,8 @@ def add_environment_docker( def add_global_remote( - parser: argparse.ArgumentParser, - controller_mode: ControllerMode, + parser: argparse.ArgumentParser, + controller_mode: ControllerMode, ) -> None: """Add global options for remote instances.""" if controller_mode != ControllerMode.DELEGATED: @@ -529,9 +529,9 @@ def add_global_remote( def add_environment_remote( - exclusive_parser: argparse.ArgumentParser, - environments_parser: argparse.ArgumentParser, - target_mode: TargetMode, + exclusive_parser: argparse.ArgumentParser, + environments_parser: argparse.ArgumentParser, + target_mode: TargetMode, ) -> None: """Add environment arguments for running in ansible-core-ci provisioned remote virtual machines.""" if target_mode == TargetMode.POSIX_INTEGRATION: diff --git a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py index ed933bd5..aac7a694 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py @@ -69,5 +69,5 @@ class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta): class ControllerRequiredFirstError(CompletionError): """Exception raised when 
controller and target options are specified out-of-order.""" - def __init__(self): + def __init__(self) -> None: super().__init__('The `--controller` option must be specified before `--target` option(s).') diff --git a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py index a6af7f80..049b71ee 100644 --- a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py +++ b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py @@ -99,7 +99,7 @@ class ControllerKeyValueParser(KeyValueParser): class DockerKeyValueParser(KeyValueParser): """Composite argument parser for docker key/value pairs.""" - def __init__(self, image, controller): + def __init__(self, image: str, controller: bool) -> None: self.controller = controller self.versions = get_docker_pythons(image, controller, False) self.allow_default = bool(get_docker_pythons(image, controller, True)) @@ -135,7 +135,7 @@ class DockerKeyValueParser(KeyValueParser): class PosixRemoteKeyValueParser(KeyValueParser): """Composite argument parser for POSIX remote key/value pairs.""" - def __init__(self, name, controller): + def __init__(self, name: str, controller: bool) -> None: self.controller = controller self.versions = get_remote_pythons(name, controller, False) self.allow_default = bool(get_remote_pythons(name, controller, True)) diff --git a/test/lib/ansible_test/_internal/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py index cdf2d544..139cf3c6 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations import collections.abc as c -import errno import json import os import re @@ -37,7 +36,7 @@ from ...python_requirements import ( install_requirements, ) -from ... target import ( +from ...target import ( walk_module_targets, ) @@ -135,11 +134,8 @@ def get_coverage_files(language: str, path: t.Optional[str] = None) -> list[str] try: coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir) if '=coverage.' 
in f and '=%s' % language in f] - except IOError as ex: - if ex.errno == errno.ENOENT: - return [] - - raise + except FileNotFoundError: + return [] return coverage_files @@ -162,11 +158,11 @@ def get_python_modules() -> dict[str, str]: def enumerate_python_arcs( - path: str, - coverage: coverage_module, - modules: dict[str, str], - collection_search_re: t.Optional[t.Pattern], - collection_sub_re: t.Optional[t.Pattern], + path: str, + coverage: coverage_module, + modules: dict[str, str], + collection_search_re: t.Optional[t.Pattern], + collection_sub_re: t.Optional[t.Pattern], ) -> c.Generator[tuple[str, set[tuple[int, int]]], None, None]: """Enumerate Python code coverage arcs in the given file.""" if os.path.getsize(path) == 0: @@ -231,7 +227,7 @@ def read_python_coverage_legacy(path: str) -> PythonArcs: contents = read_text_file(path) contents = re.sub(r'''^!coverage.py: This is a private format, don't read it directly!''', '', contents) data = json.loads(contents) - arcs: PythonArcs = {filename: [tuple(arc) for arc in arcs] for filename, arcs in data['arcs'].items()} + arcs: PythonArcs = {filename: [t.cast(tuple[int, int], tuple(arc)) for arc in arc_list] for filename, arc_list in data['arcs'].items()} except Exception as ex: raise CoverageError(path, f'Error reading JSON coverage file: {ex}') from ex @@ -239,9 +235,9 @@ def read_python_coverage_legacy(path: str) -> PythonArcs: def enumerate_powershell_lines( - path: str, - collection_search_re: t.Optional[t.Pattern], - collection_sub_re: t.Optional[t.Pattern], + path: str, + collection_search_re: t.Optional[t.Pattern], + collection_sub_re: t.Optional[t.Pattern], ) -> c.Generator[tuple[str, dict[int, int]], None, None]: """Enumerate PowerShell code coverage lines in the given file.""" if os.path.getsize(path) == 0: @@ -278,10 +274,10 @@ def enumerate_powershell_lines( def sanitize_filename( - filename: str, - modules: t.Optional[dict[str, str]] = None, - collection_search_re: t.Optional[t.Pattern] = None, - collection_sub_re: t.Optional[t.Pattern] = None, + filename: str, + modules: t.Optional[dict[str, str]] = None, + collection_search_re: t.Optional[t.Pattern] = None, + collection_sub_re: t.Optional[t.Pattern] = None, ) -> t.Optional[str]: """Convert the given code coverage path to a local absolute path and return its, or None if the path is not valid.""" ansible_path = os.path.abspath('lib/ansible/') + '/' diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py index f16f7b4f..ad6cf86f 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py @@ -20,6 +20,7 @@ from .. 
import ( ) TargetKey = t.TypeVar('TargetKey', int, tuple[int, int]) +TFlexKey = t.TypeVar('TFlexKey', int, tuple[int, int], str) NamedPoints = dict[str, dict[TargetKey, set[str]]] IndexedPoints = dict[str, dict[TargetKey, set[int]]] Arcs = dict[str, dict[tuple[int, int], set[int]]] @@ -118,12 +119,12 @@ def get_target_index(name: str, target_indexes: TargetIndexes) -> int: def expand_indexes( - source_data: IndexedPoints, - source_index: list[str], - format_func: c.Callable[[TargetKey], str], -) -> NamedPoints: + source_data: IndexedPoints, + source_index: list[str], + format_func: c.Callable[[TargetKey], TFlexKey], +) -> dict[str, dict[TFlexKey, set[str]]]: """Expand indexes from the source into target names for easier processing of the data (arcs or lines).""" - combined_data: dict[str, dict[t.Any, set[str]]] = {} + combined_data: dict[str, dict[TFlexKey, set[str]]] = {} for covered_path, covered_points in source_data.items(): combined_points = combined_data.setdefault(covered_path, {}) diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py index e7698974..e3782cee 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py @@ -58,10 +58,10 @@ def command_coverage_analyze_targets_combine(args: CoverageAnalyzeTargetsCombine def merge_indexes( - source_data: IndexedPoints, - source_index: list[str], - combined_data: IndexedPoints, - combined_index: TargetIndexes, + source_data: IndexedPoints, + source_index: list[str], + combined_data: IndexedPoints, + combined_index: TargetIndexes, ) -> None: """Merge indexes from the source into the combined data set (arcs or lines).""" for covered_path, covered_points in source_data.items(): diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py index f1d8551a..29a8ee5b 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py @@ -24,6 +24,7 @@ from . import ( from . 
import ( NamedPoints, + TargetKey, TargetIndexes, ) @@ -50,8 +51,12 @@ def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterCo covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file) - filtered_path_arcs = expand_indexes(covered_path_arcs, covered_targets, lambda v: v) - filtered_path_lines = expand_indexes(covered_path_lines, covered_targets, lambda v: v) + def pass_target_key(value: TargetKey) -> TargetKey: + """Return the given target key unmodified.""" + return value + + filtered_path_arcs = expand_indexes(covered_path_arcs, covered_targets, pass_target_key) + filtered_path_lines = expand_indexes(covered_path_lines, covered_targets, pass_target_key) include_targets = set(args.include_targets) if args.include_targets else None exclude_targets = set(args.exclude_targets) if args.exclude_targets else None @@ -59,7 +64,7 @@ def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterCo include_path = re.compile(args.include_path) if args.include_path else None exclude_path = re.compile(args.exclude_path) if args.exclude_path else None - def path_filter_func(path): + def path_filter_func(path: str) -> bool: """Return True if the given path should be included, otherwise return False.""" if include_path and not re.search(include_path, path): return False @@ -69,7 +74,7 @@ def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterCo return True - def target_filter_func(targets): + def target_filter_func(targets: set[str]) -> set[str]: """Filter the given targets and return the result based on the defined includes and excludes.""" if include_targets: targets &= include_targets @@ -92,9 +97,9 @@ def command_coverage_analyze_targets_filter(args: CoverageAnalyzeTargetsFilterCo def filter_data( - data: NamedPoints, - path_filter_func: c.Callable[[str], bool], - target_filter_func: c.Callable[[set[str]], set[str]], + data: NamedPoints, + path_filter_func: c.Callable[[str], bool], + target_filter_func: c.Callable[[set[str]], set[str]], ) -> NamedPoints: """Filter the data set using the specified filter function.""" result: NamedPoints = {} diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py index 2c61190a..127b5b7f 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py @@ -75,10 +75,10 @@ def command_coverage_analyze_targets_generate(args: CoverageAnalyzeTargetsGenera def analyze_python_coverage( - args: CoverageAnalyzeTargetsGenerateConfig, - host_state: HostState, - path: str, - target_indexes: TargetIndexes, + args: CoverageAnalyzeTargetsGenerateConfig, + host_state: HostState, + path: str, + target_indexes: TargetIndexes, ) -> Arcs: """Analyze Python code coverage.""" results: Arcs = {} @@ -107,9 +107,9 @@ def analyze_python_coverage( def analyze_powershell_coverage( - args: CoverageAnalyzeTargetsGenerateConfig, - path: str, - target_indexes: TargetIndexes, + args: CoverageAnalyzeTargetsGenerateConfig, + path: str, + target_indexes: TargetIndexes, ) -> Lines: """Analyze PowerShell code coverage""" results: Lines = {} @@ -136,9 +136,9 @@ def analyze_powershell_coverage( def prune_invalid_filenames( - args: CoverageAnalyzeTargetsGenerateConfig, - results: dict[str, t.Any], - collection_search_re: t.Optional[t.Pattern] = None, + args: CoverageAnalyzeTargetsGenerateConfig, + 
results: dict[str, t.Any], + collection_search_re: t.Optional[t.Pattern] = None, ) -> None: """Remove invalid filenames from the given result set.""" path_checker = PathChecker(args, collection_search_re) diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py index 4d6b469e..c1c77e75 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py +++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py @@ -66,11 +66,11 @@ def command_coverage_analyze_targets_missing(args: CoverageAnalyzeTargetsMissing def find_gaps( - from_data: IndexedPoints, - from_index: list[str], - to_data: IndexedPoints, - target_indexes: TargetIndexes, - only_exists: bool, + from_data: IndexedPoints, + from_index: list[str], + to_data: IndexedPoints, + target_indexes: TargetIndexes, + only_exists: bool, ) -> IndexedPoints: """Find gaps in coverage between the from and to data sets.""" target_data: IndexedPoints = {} @@ -91,12 +91,12 @@ def find_gaps( def find_missing( - from_data: IndexedPoints, - from_index: list[str], - to_data: IndexedPoints, - to_index: list[str], - target_indexes: TargetIndexes, - only_exists: bool, + from_data: IndexedPoints, + from_index: list[str], + to_data: IndexedPoints, + to_index: list[str], + target_indexes: TargetIndexes, + only_exists: bool, ) -> IndexedPoints: """Find coverage in from_data not present in to_data (arcs or lines).""" target_data: IndexedPoints = {} diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py index cb086fd5..66210c73 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py @@ -101,7 +101,7 @@ def combine_coverage_files(args: CoverageCombineConfig, host_state: HostState) - class ExportedCoverageDataNotFound(ApplicationError): """Exception when no combined coverage data is present yet is required.""" - def __init__(self): + def __init__(self) -> None: super().__init__( 'Coverage data must be exported before processing with the `--docker` or `--remote` option.\n' 'Export coverage with `ansible-test coverage combine` using the `--export` option.\n' @@ -283,9 +283,9 @@ def _get_coverage_targets(args: CoverageCombineConfig, walk_func: c.Callable) -> def _build_stub_groups( - args: CoverageCombineConfig, - sources: list[tuple[str, int]], - default_stub_value: c.Callable[[list[str]], dict[str, TValue]], + args: CoverageCombineConfig, + sources: list[tuple[str, int]], + default_stub_value: c.Callable[[list[str]], dict[str, TValue]], ) -> dict[str, dict[str, TValue]]: """ Split the given list of sources with line counts into groups, maintaining a maximum line count for each group. 
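One behavioural cleanup in the coverage hunks above (commands/coverage/__init__.py) drops the `errno` import and replaces errno-based `IOError` filtering with Python 3's `FileNotFoundError`, matching the changelog entry for this release. A minimal standalone sketch of the before/after pattern, using an illustrative directory name rather than the real ansible-test coverage path:

```python
import errno
import os

COVERAGE_DIR = 'example-coverage-dir'  # illustrative path, not the actual ansible-test location


def list_files_legacy() -> list[str]:
    """Python 2 era pattern: catch IOError and inspect errno to detect a missing directory."""
    try:
        return sorted(os.listdir(COVERAGE_DIR))
    except IOError as ex:
        if ex.errno == errno.ENOENT:
            return []
        raise


def list_files_modern() -> list[str]:
    """Python 3 pattern used above: catch the specific FileNotFoundError subclass directly."""
    try:
        return sorted(os.listdir(COVERAGE_DIR))
    except FileNotFoundError:
        return []


if __name__ == '__main__':
    # Both return [] when the directory does not exist; the modern form needs no errno check.
    print(list_files_legacy() == list_files_modern())
```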
diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py index 33bd45f6..8864d2ee 100644 --- a/test/lib/ansible_test/_internal/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py @@ -240,9 +240,9 @@ def delegate_inventory(args: IntegrationConfig, inventory_path_src: str) -> None @contextlib.contextmanager def integration_test_environment( - args: IntegrationConfig, - target: IntegrationTarget, - inventory_path_src: str, + args: IntegrationConfig, + target: IntegrationTarget, + inventory_path_src: str, ) -> c.Iterator[IntegrationEnvironment]: """Context manager that prepares the integration test environment and cleans it up.""" ansible_config_src = args.get_ansible_config() @@ -343,9 +343,9 @@ def integration_test_environment( @contextlib.contextmanager def integration_test_config_file( - args: IntegrationConfig, - env_config: CloudEnvironmentConfig, - integration_dir: str, + args: IntegrationConfig, + env_config: CloudEnvironmentConfig, + integration_dir: str, ) -> c.Iterator[t.Optional[str]]: """Context manager that provides a config file for integration tests, if needed.""" if not env_config: @@ -372,10 +372,10 @@ def integration_test_config_file( def create_inventory( - args: IntegrationConfig, - host_state: HostState, - inventory_path: str, - target: IntegrationTarget, + args: IntegrationConfig, + host_state: HostState, + inventory_path: str, + target: IntegrationTarget, ) -> None: """Create inventory.""" if isinstance(args, PosixIntegrationConfig): @@ -398,13 +398,13 @@ def create_inventory( def command_integration_filtered( - args: IntegrationConfig, - host_state: HostState, - targets: tuple[IntegrationTarget, ...], - all_targets: tuple[IntegrationTarget, ...], - inventory_path: str, - pre_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None, - post_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None, + args: IntegrationConfig, + host_state: HostState, + targets: tuple[IntegrationTarget, ...], + all_targets: tuple[IntegrationTarget, ...], + inventory_path: str, + pre_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None, + post_target: t.Optional[c.Callable[[IntegrationTarget], None]] = None, ): """Run integration tests for the specified targets.""" found = False @@ -577,12 +577,12 @@ def command_integration_filtered( def command_integration_script( - args: IntegrationConfig, - host_state: HostState, - target: IntegrationTarget, - test_dir: str, - inventory_path: str, - coverage_manager: CoverageManager, + args: IntegrationConfig, + host_state: HostState, + target: IntegrationTarget, + test_dir: str, + inventory_path: str, + coverage_manager: CoverageManager, ): """Run an integration test script.""" display.info('Running %s integration test script' % target.name) @@ -629,13 +629,13 @@ def command_integration_script( def command_integration_role( - args: IntegrationConfig, - host_state: HostState, - target: IntegrationTarget, - start_at_task: t.Optional[str], - test_dir: str, - inventory_path: str, - coverage_manager: CoverageManager, + args: IntegrationConfig, + host_state: HostState, + target: IntegrationTarget, + start_at_task: t.Optional[str], + test_dir: str, + inventory_path: str, + coverage_manager: CoverageManager, ): """Run an integration test role.""" display.info('Running %s integration test role' % target.name) @@ -748,15 +748,15 @@ def command_integration_role( def run_setup_targets( - args: 
IntegrationConfig, - host_state: HostState, - test_dir: str, - target_names: c.Sequence[str], - targets_dict: dict[str, IntegrationTarget], - targets_executed: set[str], - inventory_path: str, - coverage_manager: CoverageManager, - always: bool, + args: IntegrationConfig, + host_state: HostState, + test_dir: str, + target_names: c.Sequence[str], + targets_dict: dict[str, IntegrationTarget], + targets_executed: set[str], + inventory_path: str, + coverage_manager: CoverageManager, + always: bool, ): """Run setup targets.""" for target_name in target_names: @@ -779,13 +779,13 @@ def run_setup_targets( def integration_environment( - args: IntegrationConfig, - target: IntegrationTarget, - test_dir: str, - inventory_path: str, - ansible_config: t.Optional[str], - env_config: t.Optional[CloudEnvironmentConfig], - test_env: IntegrationEnvironment, + args: IntegrationConfig, + target: IntegrationTarget, + test_dir: str, + inventory_path: str, + ansible_config: t.Optional[str], + env_config: t.Optional[CloudEnvironmentConfig], + test_env: IntegrationEnvironment, ) -> dict[str, str]: """Return a dictionary of environment variables to use when running the given integration test target.""" env = ansible_environment(args, ansible_config=ansible_config) @@ -819,7 +819,7 @@ def integration_environment( class IntegrationEnvironment: """Details about the integration environment.""" - def __init__(self, test_dir, integration_dir, targets_dir, inventory_path, ansible_config, vars_file): + def __init__(self, test_dir: str, integration_dir: str, targets_dir: str, inventory_path: str, ansible_config: str, vars_file: str) -> None: self.test_dir = test_dir self.integration_dir = integration_dir self.targets_dir = targets_dir @@ -831,17 +831,13 @@ class IntegrationEnvironment: class IntegrationCache(CommonCache): """Integration cache.""" @property - def integration_targets(self): - """ - :rtype: list[IntegrationTarget] - """ + def integration_targets(self) -> list[IntegrationTarget]: + """The list of integration test targets.""" return self.get('integration_targets', lambda: list(walk_integration_targets())) @property - def dependency_map(self): - """ - :rtype: dict[str, set[IntegrationTarget]] - """ + def dependency_map(self) -> dict[str, set[IntegrationTarget]]: + """The dependency map of integration test targets.""" return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets)) diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py index 32d90d6f..0c078b98 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py @@ -288,14 +288,14 @@ class CloudProvider(CloudBase): exclude.append(skip) if not self.uses_docker and self.uses_config: - display.warning('Excluding tests marked "%s" which require config (see "%s"): %s' - % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped))) + display.warning('Excluding tests marked "%s" which require a "%s" config file (see "%s"): %s' + % (skip.rstrip('/'), self.config_static_path, self.config_template_path, ', '.join(skipped))) elif self.uses_docker and not self.uses_config: display.warning('Excluding tests marked "%s" which requires container support: %s' % (skip.rstrip('/'), ', '.join(skipped))) elif self.uses_docker and self.uses_config: - display.warning('Excluding tests marked "%s" which requires container support or config (see 
"%s"): %s' - % (skip.rstrip('/'), self.config_template_path, ', '.join(skipped))) + display.warning('Excluding tests marked "%s" which requires container support or a "%s" config file (see "%s"): %s' + % (skip.rstrip('/'), self.config_static_path, self.config_template_path, ', '.join(skipped))) def setup(self) -> None: """Setup the cloud resource before delegation and register a cleanup callback.""" diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py index 8a83ed2b..007d383c 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py @@ -30,7 +30,7 @@ class ACMEProvider(CloudProvider): if os.environ.get('ANSIBLE_ACME_CONTAINER'): self.image = os.environ.get('ANSIBLE_ACME_CONTAINER') else: - self.image = 'quay.io/ansible/acme-test-container:2.0.0' + self.image = 'quay.io/ansible/acme-test-container:2.1.0' self.uses_docker = True diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py index 25a02ff5..0037b423 100644 --- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py +++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py @@ -131,12 +131,12 @@ class CsCloudProvider(CloudProvider): def _get_credentials(self, container_name: str) -> dict[str, t.Any]: """Wait for the CloudStack simulator to return credentials.""" - def check(value): + def check(value) -> bool: """Return True if the given configuration is valid JSON, otherwise return False.""" # noinspection PyBroadException try: json.loads(value) - except Exception: # pylint: disable=broad-except + except Exception: # pylint: disable=broad-except return False # sometimes the file exists but is not yet valid JSON return True diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py index e9917692..5a486e93 100644 --- a/test/lib/ansible_test/_internal/commands/integration/coverage.py +++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py @@ -158,7 +158,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): self.teardown_controller() self.teardown_target() - def setup_controller(self): + def setup_controller(self) -> None: """Perform setup for code coverage on the controller.""" coverage_config_path = os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME) coverage_output_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name) @@ -171,7 +171,7 @@ class PosixCoverageHandler(CoverageHandler[PosixConfig]): os.mkdir(coverage_output_path) verified_chmod(coverage_output_path, MODE_DIRECTORY_WRITE) - def setup_target(self): + def setup_target(self) -> None: """Perform setup for code coverage on the target.""" if not self.target_profile: return diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py index 1f242bd7..be03d7f4 100644 --- a/test/lib/ansible_test/_internal/commands/integration/filters.py +++ b/test/lib/ansible_test/_internal/commands/integration/filters.py @@ -67,12 +67,12 @@ class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta): return self.configs[0] def skip( - self, - skip: str, - reason: str, - targets: list[IntegrationTarget], - exclude: set[str], - override: t.Optional[list[str]] = None, + self, + skip: str, 
+ reason: str, + targets: list[IntegrationTarget], + exclude: set[str], + override: t.Optional[list[str]] = None, ) -> None: """Apply the specified skip rule to the given targets by updating the provided exclude list.""" if skip.startswith('skip/'): diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py index 7aea3988..00b30310 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py +++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py @@ -631,11 +631,11 @@ class SanitySkipped(TestSkipped): class SanityFailure(TestFailure): """Sanity test failure.""" def __init__( - self, - test: str, - python_version: t.Optional[str] = None, - messages: t.Optional[c.Sequence[SanityMessage]] = None, - summary: t.Optional[str] = None, + self, + test: str, + python_version: t.Optional[str] = None, + messages: t.Optional[c.Sequence[SanityMessage]] = None, + summary: t.Optional[str] = None, ) -> None: super().__init__(COMMAND, test, python_version, messages, summary) @@ -827,7 +827,7 @@ class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta): class SanityCodeSmellTest(SanitySingleVersion): """Sanity test script.""" - def __init__(self, path): + def __init__(self, path) -> None: name = os.path.splitext(os.path.basename(path))[0] config_path = os.path.splitext(path)[0] + '.json' @@ -862,10 +862,10 @@ class SanityCodeSmellTest(SanitySingleVersion): self.extensions = [] self.prefixes = [] self.files = [] - self.text: t.Optional[bool] = None + self.text = None self.ignore_self = False - self.minimum_python_version: t.Optional[str] = None - self.maximum_python_version: t.Optional[str] = None + self.minimum_python_version = None + self.maximum_python_version = None self.__all_targets = False self.__no_targets = True @@ -1097,11 +1097,11 @@ def sanity_get_tests() -> tuple[SanityTest, ...]: def create_sanity_virtualenv( - args: SanityConfig, - python: PythonConfig, - name: str, - coverage: bool = False, - minimize: bool = False, + args: SanityConfig, + python: PythonConfig, + name: str, + coverage: bool = False, + minimize: bool = False, ) -> t.Optional[VirtualPythonConfig]: """Return an existing sanity virtual environment matching the requested parameters or create a new one.""" commands = collect_requirements( # create_sanity_virtualenv() diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py index d1b2641b..8511d7ac 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/import.py +++ b/test/lib/ansible_test/_internal/commands/sanity/import.py @@ -122,8 +122,8 @@ class ImportTest(SanityMultipleVersion): messages = [] for import_type, test in ( - ('module', _get_module_test(True)), - ('plugin', _get_module_test(False)), + ('module', _get_module_test(True)), + ('plugin', _get_module_test(False)), ): if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS: continue diff --git a/test/lib/ansible_test/_internal/commands/sanity/mypy.py b/test/lib/ansible_test/_internal/commands/sanity/mypy.py index c8497139..cb8ed12c 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/mypy.py +++ b/test/lib/ansible_test/_internal/commands/sanity/mypy.py @@ -165,11 +165,11 @@ class MypyTest(SanityMultipleVersion): @staticmethod def test_context( - args: SanityConfig, - virtualenv_python: VirtualPythonConfig, - python: PythonConfig, - context: MyPyContext, - paths: list[str], + args: SanityConfig, + virtualenv_python: 
VirtualPythonConfig, + python: PythonConfig, + context: MyPyContext, + paths: list[str], ) -> list[SanityMessage]: """Run mypy tests for the specified context.""" context_paths = [path for path in paths if any(is_subdir(path, match_path) for match_path in context.paths)] diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py index cd5a8350..86f287ab 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py @@ -58,7 +58,7 @@ from ...host_configs import ( class PylintTest(SanitySingleVersion): """Sanity test using pylint.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.optional_error_codes.update([ 'ansible-deprecated-date', @@ -189,13 +189,13 @@ class PylintTest(SanitySingleVersion): @staticmethod def pylint( - args: SanityConfig, - context: str, - paths: list[str], - plugin_dir: str, - plugin_names: list[str], - python: PythonConfig, - collection_detail: CollectionDetail, + args: SanityConfig, + context: str, + paths: list[str], + plugin_dir: str, + plugin_names: list[str], + python: PythonConfig, + collection_detail: CollectionDetail, ) -> list[dict[str, str]]: """Run pylint using the config specified by the context on the specified paths.""" rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', context.split('/')[0] + '.cfg') diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py index 9ab8970b..e1dacb7c 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py +++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py @@ -59,7 +59,7 @@ from ...host_configs import ( class ValidateModulesTest(SanitySingleVersion): """Sanity test using validate-modules.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.optional_error_codes.update([ diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py index ee096772..f443181c 100644 --- a/test/lib/ansible_test/_internal/completion.py +++ b/test/lib/ansible_test/_internal/completion.py @@ -54,7 +54,7 @@ class CompletionConfig(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def is_default(self): + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" @@ -107,17 +107,17 @@ class RemoteCompletionConfig(CompletionConfig): arch: t.Optional[str] = None @property - def platform(self): + def platform(self) -> str: """The name of the platform.""" return self.name.partition('/')[0] @property - def version(self): + def version(self) -> str: """The version of the platform.""" return self.name.partition('/')[2] @property - def is_default(self): + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" return not self.version @@ -166,7 +166,7 @@ class DockerCompletionConfig(PythonCompletionConfig): placeholder: bool = False @property - def is_default(self): + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" return False @@ -270,13 +270,15 @@ def parse_completion_entry(value: str) -> tuple[str, dict[str, str]]: def filter_completion( - completion: dict[str, TCompletionConfig], - controller_only: bool = False, - include_defaults: bool = False, + completion: dict[str, TCompletionConfig], + controller_only: 
bool = False, + include_defaults: bool = False, ) -> dict[str, TCompletionConfig]: """Return the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified.""" if controller_only: - completion = {name: config for name, config in completion.items() if isinstance(config, PosixCompletionConfig) and config.controller_supported} + # The cast is needed because mypy gets confused here and forgets that completion values are TCompletionConfig. + completion = {name: t.cast(TCompletionConfig, config) for name, config in completion.items() if + isinstance(config, PosixCompletionConfig) and config.controller_supported} if not include_defaults: completion = {name: config for name, config in completion.items() if not config.is_default} diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py index 95b1718b..a581ecf2 100644 --- a/test/lib/ansible_test/_internal/containers.py +++ b/test/lib/ansible_test/_internal/containers.py @@ -108,19 +108,19 @@ class CleanupMode(enum.Enum): def run_support_container( - args: EnvironmentConfig, - context: str, - image: str, - name: str, - ports: list[int], - aliases: t.Optional[list[str]] = None, - start: bool = True, - allow_existing: bool = False, - cleanup: t.Optional[CleanupMode] = None, - cmd: t.Optional[list[str]] = None, - env: t.Optional[dict[str, str]] = None, - options: t.Optional[list[str]] = None, - publish_ports: bool = True, + args: EnvironmentConfig, + context: str, + image: str, + name: str, + ports: list[int], + aliases: t.Optional[list[str]] = None, + start: bool = True, + allow_existing: bool = False, + cleanup: t.Optional[CleanupMode] = None, + cmd: t.Optional[list[str]] = None, + env: t.Optional[dict[str, str]] = None, + options: t.Optional[list[str]] = None, + publish_ports: bool = True, ) -> t.Optional[ContainerDescriptor]: """ Start a container used to support tests, but not run them. @@ -236,12 +236,12 @@ def run_support_container( def run_container( - args: EnvironmentConfig, - image: str, - name: str, - options: t.Optional[list[str]], - cmd: t.Optional[list[str]] = None, - create_only: bool = False, + args: EnvironmentConfig, + image: str, + name: str, + options: t.Optional[list[str]], + cmd: t.Optional[list[str]] = None, + create_only: bool = False, ) -> str: """Run a container using the given docker image.""" options = list(options or []) @@ -263,7 +263,7 @@ def run_container( stdout = docker_run(args, image, options, cmd)[0] except SubprocessError as ex: display.error(ex.message) - display.warning('Failed to run docker image "{image}". Waiting a few seconds before trying again.') + display.warning(f'Failed to run docker image "{image}". 
Waiting a few seconds before trying again.') docker_rm(args, name) # podman doesn't remove containers after create if run fails time.sleep(3) else: @@ -594,8 +594,8 @@ class SupportContainerContext: @contextlib.contextmanager def support_container_context( - args: EnvironmentConfig, - ssh: t.Optional[SshConnectionDetail], + args: EnvironmentConfig, + ssh: t.Optional[SshConnectionDetail], ) -> c.Iterator[t.Optional[ContainerDatabase]]: """Create a context manager for integration tests that use support containers.""" if not isinstance(args, (IntegrationConfig, UnitsConfig, SanityConfig, ShellConfig)): @@ -617,9 +617,9 @@ def support_container_context( def create_support_container_context( - args: EnvironmentConfig, - ssh: t.Optional[SshConnectionDetail], - containers: ContainerDatabase, + args: EnvironmentConfig, + ssh: t.Optional[SshConnectionDetail], + containers: ContainerDatabase, ) -> SupportContainerContext: """Context manager that provides SSH port forwards. Returns updated container metadata.""" host_type = HostType.control @@ -819,9 +819,9 @@ def create_hosts_entries(context: dict[str, ContainerAccess]) -> list[str]: def create_container_hooks( - args: IntegrationConfig, - control_connections: list[SshConnectionDetail], - managed_connections: t.Optional[list[SshConnectionDetail]], + args: IntegrationConfig, + control_connections: list[SshConnectionDetail], + managed_connections: t.Optional[list[SshConnectionDetail]], ) -> tuple[t.Optional[c.Callable[[IntegrationTarget], None]], t.Optional[c.Callable[[IntegrationTarget], None]]]: """Return pre and post target callbacks for enabling and disabling container access for each test target.""" containers = get_container_database(args) @@ -844,12 +844,12 @@ def create_container_hooks( control_state: dict[str, tuple[list[str], list[SshProcess]]] = {} managed_state: dict[str, tuple[list[str], list[SshProcess]]] = {} - def pre_target(target): + def pre_target(target: IntegrationTarget) -> None: """Configure hosts for SSH port forwarding required by the specified target.""" forward_ssh_ports(args, control_connections, '%s_hosts_prepare.yml' % control_type, control_state, target, HostType.control, control_contexts) forward_ssh_ports(args, managed_connections, '%s_hosts_prepare.yml' % managed_type, managed_state, target, HostType.managed, managed_contexts) - def post_target(target): + def post_target(target: IntegrationTarget) -> None: """Clean up previously configured SSH port forwarding which was required by the specified target.""" cleanup_ssh_ports(args, control_connections, '%s_hosts_restore.yml' % control_type, control_state, target, HostType.control) cleanup_ssh_ports(args, managed_connections, '%s_hosts_restore.yml' % managed_type, managed_state, target, HostType.managed) @@ -873,13 +873,13 @@ def create_managed_contexts(control_contexts: dict[str, dict[str, ContainerAcces def forward_ssh_ports( - args: IntegrationConfig, - ssh_connections: t.Optional[list[SshConnectionDetail]], - playbook: str, - target_state: dict[str, tuple[list[str], list[SshProcess]]], - target: IntegrationTarget, - host_type: str, - contexts: dict[str, dict[str, ContainerAccess]], + args: IntegrationConfig, + ssh_connections: t.Optional[list[SshConnectionDetail]], + playbook: str, + target_state: dict[str, tuple[list[str], list[SshProcess]]], + target: IntegrationTarget, + host_type: str, + contexts: dict[str, dict[str, ContainerAccess]], ) -> None: """Configure port forwarding using SSH and write hosts file entries.""" if ssh_connections is None: @@ -944,12 +944,12 
@@ def forward_ssh_ports( def cleanup_ssh_ports( - args: IntegrationConfig, - ssh_connections: list[SshConnectionDetail], - playbook: str, - target_state: dict[str, tuple[list[str], list[SshProcess]]], - target: IntegrationTarget, - host_type: str, + args: IntegrationConfig, + ssh_connections: list[SshConnectionDetail], + playbook: str, + target_state: dict[str, tuple[list[str], list[SshProcess]]], + target: IntegrationTarget, + host_type: str, ) -> None: """Stop previously configured SSH port forwarding and remove previously written hosts file entries.""" state = target_state.pop(target.name, None) diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py index cc2a868f..d62b9039 100644 --- a/test/lib/ansible_test/_internal/core_ci.py +++ b/test/lib/ansible_test/_internal/core_ci.py @@ -8,7 +8,6 @@ import os import re import traceback import uuid -import errno import time import typing as t @@ -114,10 +113,10 @@ class AnsibleCoreCI: DEFAULT_ENDPOINT = 'https://ansible-core-ci.testing.ansible.com' def __init__( - self, - args: EnvironmentConfig, - resource: Resource, - load: bool = True, + self, + args: EnvironmentConfig, + resource: Resource, + load: bool = True, ) -> None: self.args = args self.resource = resource @@ -174,11 +173,11 @@ class AnsibleCoreCI: self.endpoint = self.default_endpoint @property - def available(self): + def available(self) -> bool: """Return True if Ansible Core CI is supported.""" return self.ci_provider.supports_core_ci_auth() - def start(self): + def start(self) -> t.Optional[dict[str, t.Any]]: """Start instance.""" if self.started: display.info(f'Skipping started {self.label} instance.', verbosity=1) @@ -186,7 +185,7 @@ class AnsibleCoreCI: return self._start(self.ci_provider.prepare_core_ci_auth()) - def stop(self): + def stop(self) -> None: """Stop instance.""" if not self.started: display.info(f'Skipping invalid {self.label} instance.', verbosity=1) @@ -280,10 +279,10 @@ class AnsibleCoreCI: raise ApplicationError(f'Timeout waiting for {self.label} instance.') @property - def _uri(self): + def _uri(self) -> str: return f'{self.endpoint}/{self.stage}/{self.provider}/{self.instance_id}' - def _start(self, auth): + def _start(self, auth) -> dict[str, t.Any]: """Start instance.""" display.info(f'Initializing new {self.label} instance using: {self._uri}', verbosity=1) @@ -342,23 +341,19 @@ class AnsibleCoreCI: display.warning(f'{error}. 
Trying again after {sleep} seconds.') time.sleep(sleep) - def _clear(self): + def _clear(self) -> None: """Clear instance information.""" try: self.connection = None os.remove(self.path) - except OSError as ex: - if ex.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass - def _load(self): + def _load(self) -> bool: """Load instance information.""" try: data = read_text_file(self.path) - except IOError as ex: - if ex.errno != errno.ENOENT: - raise - + except FileNotFoundError: return False if not data.startswith('{'): diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py index 43d10718..0f445059 100644 --- a/test/lib/ansible_test/_internal/coverage_util.py +++ b/test/lib/ansible_test/_internal/coverage_util.py @@ -143,14 +143,14 @@ def get_sqlite_schema_version(path: str) -> int: def cover_python( - args: TestConfig, - python: PythonConfig, - cmd: list[str], - target_name: str, - env: dict[str, str], - capture: bool, - data: t.Optional[str] = None, - cwd: t.Optional[str] = None, + args: TestConfig, + python: PythonConfig, + cmd: list[str], + target_name: str, + env: dict[str, str], + capture: bool, + data: t.Optional[str] = None, + cwd: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run a command while collecting Python code coverage.""" if args.coverage: @@ -176,9 +176,9 @@ def get_coverage_platform(config: HostConfig) -> str: def get_coverage_environment( - args: TestConfig, - target_name: str, - version: str, + args: TestConfig, + target_name: str, + version: str, ) -> dict[str, str]: """Return environment variables needed to collect code coverage.""" # unit tests, sanity tests and other special cases (localhost only) diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py index 66e21543..635b0c32 100644 --- a/test/lib/ansible_test/_internal/data.py +++ b/test/lib/ansible_test/_internal/data.py @@ -52,7 +52,7 @@ from .provider.layout.unsupported import ( class DataContext: """Data context providing details about the current execution environment for ansible-test.""" - def __init__(self): + def __init__(self) -> None: content_path = os.environ.get('ANSIBLE_TEST_CONTENT_ROOT') current_path = os.getcwd() @@ -245,7 +245,7 @@ class PluginInfo: @cache -def content_plugins(): +def content_plugins() -> dict[str, dict[str, PluginInfo]]: """ Analyze content. The primary purpose of this analysis is to facilitate mapping of integration tests to the plugin(s) they are intended to test. 
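The core_ci.py hunks above swap errno-based OSError checks for Python 3's FileNotFoundError. A minimal standalone sketch of the before/after idiom, using a hypothetical remove_if_present() helper rather than anything from ansible-test:

import errno
import os

def remove_if_present_old(path):
    # Python 2 era idiom: catch OSError and inspect errno so only "file not found" is ignored.
    try:
        os.remove(path)
    except OSError as ex:
        if ex.errno != errno.ENOENT:
            raise

def remove_if_present_new(path):
    # Python 3 idiom: FileNotFoundError is the OSError subclass raised for ENOENT.
    try:
        os.remove(path)
    except FileNotFoundError:
        pass

# Both helpers behave the same; only the second reads as idiomatic Python 3.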
@@ -256,7 +256,7 @@ def content_plugins(): plugin_paths = sorted(data_context().content.walk_files(plugin_directory)) plugin_directory_offset = len(plugin_directory.split(os.path.sep)) - plugin_files = {} + plugin_files: dict[str, list[str]] = {} for plugin_path in plugin_paths: plugin_filename = os.path.basename(plugin_path) diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index 8c6879d2..0f181a23 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -226,7 +226,7 @@ def delegate_command(args: EnvironmentConfig, host_state: HostState, exclude: li target.on_target_failure() # when the controller is delegated, report failures after delegation fails -def insert_options(command, options): +def insert_options(command: list[str], options: list[str]) -> list[str]: """Insert addition command line options into the given command and return the result.""" result = [] @@ -267,12 +267,12 @@ def download_results(args: EnvironmentConfig, con: Connection, content_root: str def generate_command( - args: EnvironmentConfig, - python: PythonConfig, - ansible_bin_path: str, - content_root: str, - exclude: list[str], - require: list[str], + args: EnvironmentConfig, + python: PythonConfig, + ansible_bin_path: str, + content_root: str, + exclude: list[str], + require: list[str], ) -> list[str]: """Generate the command necessary to delegate ansible-test.""" cmd = [os.path.join(ansible_bin_path, 'ansible-test')] @@ -319,10 +319,10 @@ def generate_command( def filter_options( - args: EnvironmentConfig, - argv: list[str], - exclude: list[str], - require: list[str], + args: EnvironmentConfig, + argv: list[str], + exclude: list[str], + require: list[str], ) -> c.Iterable[str]: """Return an iterable that filters out unwanted CLI options and injects new ones as requested.""" replace: list[tuple[str, int, t.Optional[t.Union[bool, str, list[str]]]]] = [ diff --git a/test/lib/ansible_test/_internal/dev/container_probe.py b/test/lib/ansible_test/_internal/dev/container_probe.py index 84b88f4b..be22e01c 100644 --- a/test/lib/ansible_test/_internal/dev/container_probe.py +++ b/test/lib/ansible_test/_internal/dev/container_probe.py @@ -184,7 +184,7 @@ def check_container_cgroup_status(args: EnvironmentConfig, config: DockerConfig, write_text_file(os.path.join(args.dev_probe_cgroups, f'{identity}.log'), message) -def get_identity(args: EnvironmentConfig, config: DockerConfig, container_name: str): +def get_identity(args: EnvironmentConfig, config: DockerConfig, container_name: str) -> str: """Generate and return an identity string to use when logging test results.""" engine = require_docker().command diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py index 77cdd4ee..6c38ddbd 100644 --- a/test/lib/ansible_test/_internal/docker_util.py +++ b/test/lib/ansible_test/_internal/docker_util.py @@ -401,11 +401,11 @@ def detect_host_properties(args: CommonConfig) -> ContainerHostProperties: def run_utility_container( - args: CommonConfig, - name: str, - cmd: list[str], - options: list[str], - data: t.Optional[str] = None, + args: CommonConfig, + name: str, + cmd: list[str], + options: list[str], + data: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run the specified command using the ansible-test utility container, returning stdout and stderr.""" options = options + [ @@ -670,30 +670,30 @@ def docker_cp_to(args: CommonConfig, 
container_id: str, src: str, dst: str) -> N def docker_create( - args: CommonConfig, - image: str, - options: list[str], - cmd: list[str] = None, + args: CommonConfig, + image: str, + options: list[str], + cmd: list[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Create a container using the given docker image.""" return docker_command(args, ['create'] + options + [image] + cmd, capture=True) def docker_run( - args: CommonConfig, - image: str, - options: list[str], - cmd: list[str] = None, - data: t.Optional[str] = None, + args: CommonConfig, + image: str, + options: list[str], + cmd: list[str] = None, + data: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run a container using the given docker image.""" return docker_command(args, ['run'] + options + [image] + cmd, data=data, capture=True) def docker_start( - args: CommonConfig, - container_id: str, - options: list[str], + args: CommonConfig, + container_id: str, + options: list[str], ) -> tuple[t.Optional[str], t.Optional[str]]: """Start a container by name or ID.""" return docker_command(args, ['start'] + options + [container_id], capture=True) @@ -720,7 +720,7 @@ class DockerError(Exception): class ContainerNotFoundError(DockerError): """The container identified by `identifier` was not found.""" - def __init__(self, identifier): + def __init__(self, identifier: str) -> None: super().__init__('The container "%s" was not found.' % identifier) self.identifier = identifier @@ -943,16 +943,16 @@ def docker_logs(args: CommonConfig, container_id: str) -> None: def docker_exec( - args: CommonConfig, - container_id: str, - cmd: list[str], - capture: bool, - options: t.Optional[list[str]] = None, - stdin: t.Optional[t.IO[bytes]] = None, - stdout: t.Optional[t.IO[bytes]] = None, - interactive: bool = False, - output_stream: t.Optional[OutputStream] = None, - data: t.Optional[str] = None, + args: CommonConfig, + container_id: str, + cmd: list[str], + capture: bool, + options: t.Optional[list[str]] = None, + stdin: t.Optional[t.IO[bytes]] = None, + stdout: t.Optional[t.IO[bytes]] = None, + interactive: bool = False, + output_stream: t.Optional[OutputStream] = None, + data: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Execute the given command in the specified container.""" if not options: @@ -966,15 +966,15 @@ def docker_exec( def docker_command( - args: CommonConfig, - cmd: list[str], - capture: bool, - stdin: t.Optional[t.IO[bytes]] = None, - stdout: t.Optional[t.IO[bytes]] = None, - interactive: bool = False, - output_stream: t.Optional[OutputStream] = None, - always: bool = False, - data: t.Optional[str] = None, + args: CommonConfig, + cmd: list[str], + capture: bool, + stdin: t.Optional[t.IO[bytes]] = None, + stdout: t.Optional[t.IO[bytes]] = None, + interactive: bool = False, + output_stream: t.Optional[OutputStream] = None, + always: bool = False, + data: t.Optional[str] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run the specified docker command.""" env = docker_environment() diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py index b079df9f..0c94cf3b 100644 --- a/test/lib/ansible_test/_internal/executor.py +++ b/test/lib/ansible_test/_internal/executor.py @@ -81,13 +81,13 @@ def detect_changes(args: TestConfig) -> t.Optional[list[str]]: class NoChangesDetected(ApplicationWarning): """Exception when change detection was performed, but no changes were found.""" - def __init__(self): + def __init__(self) -> None: 
super().__init__('No changes detected.') class NoTestsForChanges(ApplicationWarning): """Exception when changes detected, but no tests trigger as a result.""" - def __init__(self): + def __init__(self) -> None: super().__init__('No tests found for detected changes.') @@ -111,5 +111,5 @@ class ListTargets(Exception): class AllTargetsSkipped(ApplicationWarning): """All targets skipped.""" - def __init__(self): + def __init__(self) -> None: super().__init__('All targets skipped.') diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py index d7671c7f..48d5fd31 100644 --- a/test/lib/ansible_test/_internal/host_configs.py +++ b/test/lib/ansible_test/_internal/host_configs.py @@ -48,7 +48,7 @@ from .util import ( @dataclasses.dataclass(frozen=True) class OriginCompletionConfig(PosixCompletionConfig): """Pseudo completion config for the origin.""" - def __init__(self): + def __init__(self) -> None: super().__init__(name='origin') @property @@ -65,7 +65,7 @@ class OriginCompletionConfig(PosixCompletionConfig): return version @property - def is_default(self): + def is_default(self) -> bool: """True if the completion entry is only used for defaults, otherwise False.""" return False @@ -513,7 +513,7 @@ class HostSettings: with open_binary_file(path) as settings_file: return pickle.load(settings_file) - def apply_defaults(self): + def apply_defaults(self) -> None: """Apply defaults to the host settings.""" context = HostContext(controller_config=None) self.controller.apply_defaults(context, self.controller.get_defaults(context)) diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py index 6575e7c1..0abc9961 100644 --- a/test/lib/ansible_test/_internal/host_profiles.py +++ b/test/lib/ansible_test/_internal/host_profiles.py @@ -351,7 +351,7 @@ class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta): return self.core_ci - def delete_instance(self): + def delete_instance(self) -> None: """Delete the AnsibleCoreCI VM instance.""" core_ci = self.get_instance() @@ -506,6 +506,13 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do cgroup_version = get_docker_info(self.args).cgroup_version + # Podman 4.4.0 updated containers/common to 0.51.0, which removed the SYS_CHROOT capability from the default list. + # This capability is needed by services such as sshd, so is unconditionally added here. 
+ # See: https://github.com/containers/podman/releases/tag/v4.4.0 + # See: https://github.com/containers/common/releases/tag/v0.51.0 + # See: https://github.com/containers/common/pull/1240 + options.extend(('--cap-add', 'SYS_CHROOT')) + # Without AUDIT_WRITE the following errors may appear in the system logs of a container after attempting to log in using SSH: # # fatal: linux_audit_write_entry failed: Operation not permitted @@ -892,7 +899,7 @@ class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[Do return message - def check_cgroup_requirements(self): + def check_cgroup_requirements(self) -> None: """Check cgroup requirements for the container.""" cgroup_version = get_docker_info(self.args).cgroup_version @@ -1411,9 +1418,9 @@ def get_config_profile_type_map() -> dict[t.Type[HostConfig], t.Type[HostProfile def create_host_profile( - args: EnvironmentConfig, - config: HostConfig, - controller: bool, + args: EnvironmentConfig, + config: HostConfig, + controller: bool, ) -> HostProfile: """Create and return a host profile from the given host configuration.""" profile_type = get_config_profile_type_map()[type(config)] diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py index a7e9f1b2..80d47699 100644 --- a/test/lib/ansible_test/_internal/io.py +++ b/test/lib/ansible_test/_internal/io.py @@ -1,7 +1,6 @@ """Functions for disk IO.""" from __future__ import annotations -import errno import io import json import os @@ -32,11 +31,7 @@ def read_binary_file(path: str) -> bytes: def make_dirs(path: str) -> None: """Create a directory at path, including any necessary parent directories.""" - try: - os.makedirs(to_bytes(path)) - except OSError as ex: - if ex.errno != errno.EEXIST: - raise + os.makedirs(to_bytes(path), exist_ok=True) def write_json_file(path: str, @@ -85,7 +80,7 @@ def open_binary_file(path: str, mode: str = 'rb') -> t.IO[bytes]: class SortedSetEncoder(json.JSONEncoder): """Encode sets as sorted lists.""" - def default(self, o): + def default(self, o: t.Any) -> t.Any: """Return a serialized version of the `o` object.""" if isinstance(o, set): return sorted(o) diff --git a/test/lib/ansible_test/_internal/metadata.py b/test/lib/ansible_test/_internal/metadata.py index e969f029..94bbc34a 100644 --- a/test/lib/ansible_test/_internal/metadata.py +++ b/test/lib/ansible_test/_internal/metadata.py @@ -19,7 +19,7 @@ from .diff import ( class Metadata: """Metadata object for passing data to delegated tests.""" - def __init__(self): + def __init__(self) -> None: """Initialize metadata.""" self.changes: dict[str, tuple[tuple[int, int], ...]] = {} self.cloud_config: t.Optional[dict[str, dict[str, t.Union[int, str, bool]]]] = None @@ -82,7 +82,7 @@ class Metadata: class ChangeDescription: """Description of changes.""" - def __init__(self): + def __init__(self) -> None: self.command: str = '' self.changed_paths: list[str] = [] self.deleted_paths: list[str] = [] diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py index 2e8026bf..aa6693f0 100644 --- a/test/lib/ansible_test/_internal/provider/layout/__init__.py +++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py @@ -150,7 +150,7 @@ class ContentLayout(Layout): class LayoutMessages: """Messages generated during layout creation that should be deferred for later display.""" - def __init__(self): + def __init__(self) -> None: self.info: list[str] = [] self.warning: list[str] = [] self.error: 
list[str] = [] diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py index 8f914c2a..7547a302 100644 --- a/test/lib/ansible_test/_internal/provisioning.py +++ b/test/lib/ansible_test/_internal/provisioning.py @@ -97,10 +97,10 @@ class HostState: def prepare_profiles( - args: TEnvironmentConfig, - targets_use_pypi: bool = False, - skip_setup: bool = False, - requirements: t.Optional[c.Callable[[HostProfile], None]] = None, + args: TEnvironmentConfig, + targets_use_pypi: bool = False, + skip_setup: bool = False, + requirements: t.Optional[c.Callable[[HostProfile], None]] = None, ) -> HostState: """ Create new profiles, or load existing ones, and return them. diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py index fa26b5fd..97663ead 100644 --- a/test/lib/ansible_test/_internal/pypi_proxy.py +++ b/test/lib/ansible_test/_internal/pypi_proxy.py @@ -119,7 +119,7 @@ def configure_target_pypi_proxy(args: EnvironmentConfig, profile: HostProfile, p create_posix_inventory(args, inventory_path, [profile]) - def cleanup_pypi_proxy(): + def cleanup_pypi_proxy() -> None: """Undo changes made to configure the PyPI proxy.""" run_playbook(args, inventory_path, 'pypi_proxy_restore.yml', capture=True) diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py index 44cf53ae..e3733a5c 100644 --- a/test/lib/ansible_test/_internal/python_requirements.py +++ b/test/lib/ansible_test/_internal/python_requirements.py @@ -122,14 +122,14 @@ class PipBootstrap(PipCommand): def install_requirements( - args: EnvironmentConfig, - python: PythonConfig, - ansible: bool = False, - command: bool = False, - coverage: bool = False, - virtualenv: bool = False, - controller: bool = True, - connection: t.Optional[Connection] = None, + args: EnvironmentConfig, + python: PythonConfig, + ansible: bool = False, + command: bool = False, + coverage: bool = False, + virtualenv: bool = False, + controller: bool = True, + connection: t.Optional[Connection] = None, ) -> None: """Install requirements for the given Python using the specified arguments.""" create_result_directories(args) @@ -197,15 +197,15 @@ def collect_bootstrap(python: PythonConfig) -> list[PipCommand]: def collect_requirements( - python: PythonConfig, - controller: bool, - ansible: bool, - cryptography: bool, - coverage: bool, - virtualenv: bool, - minimize: bool, - command: t.Optional[str], - sanity: t.Optional[str], + python: PythonConfig, + controller: bool, + ansible: bool, + cryptography: bool, + coverage: bool, + virtualenv: bool, + minimize: bool, + command: t.Optional[str], + sanity: t.Optional[str], ) -> list[PipCommand]: """Collect requirements for the given Python using the specified arguments.""" commands: list[PipCommand] = [] @@ -252,10 +252,10 @@ def collect_requirements( def run_pip( - args: EnvironmentConfig, - python: PythonConfig, - commands: list[PipCommand], - connection: t.Optional[Connection], + args: EnvironmentConfig, + python: PythonConfig, + commands: list[PipCommand], + connection: t.Optional[Connection], ) -> None: """Run the specified pip commands for the given Python, and optionally the specified host.""" connection = connection or LocalConnection(args) @@ -367,10 +367,10 @@ def collect_integration_install(command: str, controller: bool) -> list[PipInsta def collect_install( - requirements_paths: list[tuple[str, str]], - constraints_paths: list[tuple[str, str]], - 
packages: t.Optional[list[str]] = None, - constraints: bool = True, + requirements_paths: list[tuple[str, str]], + constraints_paths: list[tuple[str, str]], + packages: t.Optional[list[str]] = None, + constraints: bool = True, ) -> list[PipInstall]: """Build a pip install list from the given requirements, constraints and packages.""" # listing content constraints first gives them priority over constraints provided by ansible-test diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py index fd01ff25..840edf62 100644 --- a/test/lib/ansible_test/_internal/ssh.py +++ b/test/lib/ansible_test/_internal/ssh.py @@ -151,10 +151,10 @@ class SshProcess: def create_ssh_command( - ssh: SshConnectionDetail, - options: t.Optional[dict[str, t.Union[str, int]]] = None, - cli_args: list[str] = None, - command: t.Optional[str] = None, + ssh: SshConnectionDetail, + options: t.Optional[dict[str, t.Union[str, int]]] = None, + cli_args: list[str] = None, + command: t.Optional[str] = None, ) -> list[str]: """Create an SSH command using the specified options.""" cmd = [ @@ -207,11 +207,11 @@ def ssh_options_to_str(options: t.Union[dict[str, t.Union[int, str]], dict[str, def run_ssh_command( - args: EnvironmentConfig, - ssh: SshConnectionDetail, - options: t.Optional[dict[str, t.Union[str, int]]] = None, - cli_args: list[str] = None, - command: t.Optional[str] = None, + args: EnvironmentConfig, + ssh: SshConnectionDetail, + options: t.Optional[dict[str, t.Union[str, int]]] = None, + cli_args: list[str] = None, + command: t.Optional[str] = None, ) -> SshProcess: """Run the specified SSH command, returning the created SshProcess instance created.""" cmd = create_ssh_command(ssh, options, cli_args, command) @@ -233,9 +233,9 @@ def run_ssh_command( def create_ssh_port_forwards( - args: EnvironmentConfig, - ssh: SshConnectionDetail, - forwards: list[tuple[str, int]], + args: EnvironmentConfig, + ssh: SshConnectionDetail, + forwards: list[tuple[str, int]], ) -> SshProcess: """ Create SSH port forwards using the provided list of tuples (target_host, target_port). 
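The ssh.py functions above assemble SSH command lines from a dict of OpenSSH options. As a rough illustration of that mapping (a generic sketch under assumed conventions, not ansible-test's create_ssh_command), each key/value pair becomes a -o Key=Value argument:

import shlex
import typing as t

def build_ssh_cmd(host: str, options: t.Optional[dict[str, t.Union[str, int]]] = None) -> list[str]:
    """Hypothetical helper: compose an ssh command list from OpenSSH -o style options."""
    cmd = ['ssh', '-q']  # -q keeps banner noise out of captured output
    for key, value in sorted((options or {}).items()):
        cmd.extend(['-o', f'{key}={value}'])
    cmd.append(host)
    return cmd

print(shlex.join(build_ssh_cmd('example.com', {'BatchMode': 'yes', 'ServerAliveInterval': 15})))
# ssh -q -o BatchMode=yes -o ServerAliveInterval=15 example.com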
@@ -257,9 +257,9 @@ def create_ssh_port_forwards( def create_ssh_port_redirects( - args: EnvironmentConfig, - ssh: SshConnectionDetail, - redirects: list[tuple[int, str, int]], + args: EnvironmentConfig, + ssh: SshConnectionDetail, + redirects: list[tuple[int, str, int]], ) -> SshProcess: """Create SSH port redirections using the provided list of tuples (bind_port, target_host, target_port).""" options: dict[str, t.Union[str, int]] = {} diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py index 4e04b10a..80411483 100644 --- a/test/lib/ansible_test/_internal/target.py +++ b/test/lib/ansible_test/_internal/target.py @@ -65,31 +65,30 @@ def walk_completion_targets(targets: c.Iterable[CompletionTarget], prefix: str, def walk_internal_targets( - targets: c.Iterable[TCompletionTarget], - includes: t.Optional[list[str]] = None, - excludes: t.Optional[list[str]] = None, - requires: t.Optional[list[str]] = None, + targets: c.Iterable[TCompletionTarget], + includes: t.Optional[list[str]] = None, + excludes: t.Optional[list[str]] = None, + requires: t.Optional[list[str]] = None, ) -> tuple[TCompletionTarget, ...]: """Return a tuple of matching completion targets.""" targets = tuple(targets) - include_targets = sorted(filter_targets(targets, includes, directories=False), key=lambda include_target: include_target.name) + include_targets = sorted(filter_targets(targets, includes), key=lambda include_target: include_target.name) if requires: - require_targets = set(filter_targets(targets, requires, directories=False)) + require_targets = set(filter_targets(targets, requires)) include_targets = [require_target for require_target in include_targets if require_target in require_targets] if excludes: - list(filter_targets(targets, excludes, include=False, directories=False)) + list(filter_targets(targets, excludes, include=False)) - internal_targets = set(filter_targets(include_targets, excludes, errors=False, include=False, directories=False)) + internal_targets = set(filter_targets(include_targets, excludes, errors=False, include=False)) return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name)) def filter_targets(targets: c.Iterable[TCompletionTarget], patterns: list[str], include: bool = True, - directories: bool = True, errors: bool = True, ) -> c.Iterable[TCompletionTarget]: """Iterate over the given targets and filter them based on the supplied arguments.""" @@ -130,20 +129,15 @@ def filter_targets(targets: c.Iterable[TCompletionTarget], if match != include: continue - if directories and matched_directories: - yield DirectoryTarget(to_text(sorted(matched_directories, key=len)[0]), target.modules) - else: - yield target + yield target if errors: if unmatched: raise TargetPatternsNotMatched(unmatched) -def walk_module_targets(): - """ - :rtype: collections.Iterable[TestTarget] - """ +def walk_module_targets() -> c.Iterable[TestTarget]: + """Iterate through the module test targets.""" for target in walk_test_targets(path=data_context().content.module_path, module_path=data_context().content.module_path, extensions=MODULE_EXTENSIONS): if not target.module: continue @@ -248,10 +242,8 @@ def walk_integration_targets() -> c.Iterable[IntegrationTarget]: yield IntegrationTarget(to_text(path), modules, prefixes) -def load_integration_prefixes(): - """ - :rtype: dict[str, str] - """ +def load_integration_prefixes() -> dict[str, str]: + """Load and return the integration test prefixes.""" path = data_context().content.integration_path file_paths 
= sorted(f for f in data_context().content.get_files(path) if os.path.splitext(os.path.basename(f))[0] == 'target-prefixes') prefixes = {} @@ -264,13 +256,13 @@ def load_integration_prefixes(): def walk_test_targets( - path: t.Optional[str] = None, - module_path: t.Optional[str] = None, - extensions: t.Optional[tuple[str, ...]] = None, - prefix: t.Optional[str] = None, - extra_dirs: t.Optional[tuple[str, ...]] = None, - include_symlinks: bool = False, - include_symlinked_directories: bool = False, + path: t.Optional[str] = None, + module_path: t.Optional[str] = None, + extensions: t.Optional[tuple[str, ...]] = None, + prefix: t.Optional[str] = None, + extra_dirs: t.Optional[tuple[str, ...]] = None, + include_symlinks: bool = False, + include_symlinked_directories: bool = False, ) -> c.Iterable[TestTarget]: """Iterate over available test targets.""" if path: @@ -317,7 +309,7 @@ def analyze_integration_target_dependencies(integration_targets: list[Integratio role_targets = [target for target in integration_targets if target.type == 'role'] hidden_role_target_names = set(target.name for target in role_targets if 'hidden/' in target.aliases) - dependencies = collections.defaultdict(set) + dependencies: collections.defaultdict[str, set[str]] = collections.defaultdict(set) # handle setup dependencies for target in integration_targets: @@ -409,12 +401,12 @@ def analyze_integration_target_dependencies(integration_targets: list[Integratio class CompletionTarget(metaclass=abc.ABCMeta): """Command-line argument completion target base class.""" - def __init__(self): - self.name = None - self.path = None - self.base_path = None - self.modules = tuple() - self.aliases = tuple() + def __init__(self) -> None: + self.name = '' + self.path = '' + self.base_path: t.Optional[str] = None + self.modules: tuple[str, ...] = tuple() + self.aliases: tuple[str, ...] 
= tuple() def __eq__(self, other): if isinstance(other, CompletionTarget): @@ -441,26 +433,16 @@ class CompletionTarget(metaclass=abc.ABCMeta): return self.name -class DirectoryTarget(CompletionTarget): - """Directory target.""" - def __init__(self, path: str, modules: tuple[str, ...]) -> None: - super().__init__() - - self.name = path - self.path = path - self.modules = modules - - class TestTarget(CompletionTarget): """Generic test target.""" def __init__( - self, - path: str, - module_path: t.Optional[str], - module_prefix: t.Optional[str], - base_path: str, - symlink: t.Optional[bool] = None, - ): + self, + path: str, + module_path: t.Optional[str], + module_prefix: t.Optional[str], + base_path: str, + symlink: t.Optional[bool] = None, + ) -> None: super().__init__() if symlink is None: @@ -679,8 +661,6 @@ class IntegrationTarget(CompletionTarget): target_type, actual_type = categorize_integration_test(self.name, list(static_aliases), force_target) - self._remove_group(groups, 'context') - groups.extend(['context/', f'context/{target_type.name.lower()}']) if target_type != actual_type: @@ -709,10 +689,6 @@ class IntegrationTarget(CompletionTarget): self.setup_always = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('setup/always/')))) self.needs_target = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('needs/target/')))) - @staticmethod - def _remove_group(groups, group): - return [g for g in groups if g != group and not g.startswith('%s/' % group)] - class TargetPatternsNotMatched(ApplicationError): """One or more targets were not matched when a match was required.""" diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py index da6af355..211635c5 100644 --- a/test/lib/ansible_test/_internal/test.py +++ b/test/lib/ansible_test/_internal/test.py @@ -215,12 +215,12 @@ class TestSkipped(TestResult): class TestFailure(TestResult): """Test failure.""" def __init__( - self, - command: str, - test: str, - python_version: t.Optional[str] = None, - messages: t.Optional[c.Sequence[TestMessage]] = None, - summary: t.Optional[str] = None, + self, + command: str, + test: str, + python_version: t.Optional[str] = None, + messages: t.Optional[c.Sequence[TestMessage]] = None, + summary: t.Optional[str] = None, ): super().__init__(command, test, python_version) @@ -333,10 +333,8 @@ class TestFailure(TestResult): return command - def find_docs(self): - """ - :rtype: str - """ + def find_docs(self) -> t.Optional[str]: + """Return the docs URL for this test or None if there is no docs URL.""" if self.command != 'sanity': return None # only sanity tests have docs links @@ -381,14 +379,14 @@ class TestFailure(TestResult): class TestMessage: """Single test message for one file.""" def __init__( - self, - message: str, - path: str, - line: int = 0, - column: int = 0, - level: str = 'error', - code: t.Optional[str] = None, - confidence: t.Optional[int] = None, + self, + message: str, + path: str, + line: int = 0, + column: int = 0, + level: str = 'error', + code: t.Optional[str] = None, + confidence: t.Optional[int] = None, ): self.__path = path self.__line = line diff --git a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py index d0ed1bab..edaf1b5c 100644 --- a/test/lib/ansible_test/_internal/thread.py +++ b/test/lib/ansible_test/_internal/thread.py @@ -21,7 +21,7 @@ class WrappedThread(threading.Thread): self.action = action self.result = None - def run(self): + def run(self) -> None: """ Run action 
and capture results or exception. Do not override. Do not call directly. Executed by the start() method. @@ -35,11 +35,8 @@ class WrappedThread(threading.Thread): except: # noqa self._result.put((None, sys.exc_info())) - def wait_for_result(self): - """ - Wait for thread to exit and return the result or raise an exception. - :rtype: any - """ + def wait_for_result(self) -> t.Any: + """Wait for thread to exit and return the result or raise an exception.""" result, exception = self._result.get() if exception: diff --git a/test/lib/ansible_test/_internal/timeout.py b/test/lib/ansible_test/_internal/timeout.py index da5cfceb..90ba5835 100644 --- a/test/lib/ansible_test/_internal/timeout.py +++ b/test/lib/ansible_test/_internal/timeout.py @@ -75,7 +75,7 @@ def configure_test_timeout(args: TestConfig) -> None: display.info('The %d minute test timeout expires in %s at %s.' % ( timeout_duration, timeout_remaining, timeout_deadline), verbosity=1) - def timeout_handler(_dummy1, _dummy2): + def timeout_handler(_dummy1: t.Any, _dummy2: t.Any) -> None: """Runs when SIGUSR1 is received.""" test_timeout.write(args) diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index 12316239..ec485a2b 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -3,7 +3,6 @@ from __future__ import annotations import abc import collections.abc as c -import errno import enum import fcntl import importlib.util @@ -346,19 +345,19 @@ def get_available_python_versions() -> dict[str, str]: def raw_command( - cmd: c.Iterable[str], - capture: bool, - env: t.Optional[dict[str, str]] = None, - data: t.Optional[str] = None, - cwd: t.Optional[str] = None, - explain: bool = False, - stdin: t.Optional[t.Union[t.IO[bytes], int]] = None, - stdout: t.Optional[t.Union[t.IO[bytes], int]] = None, - interactive: bool = False, - output_stream: t.Optional[OutputStream] = None, - cmd_verbosity: int = 1, - str_errors: str = 'strict', - error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, + cmd: c.Iterable[str], + capture: bool, + env: t.Optional[dict[str, str]] = None, + data: t.Optional[str] = None, + cwd: t.Optional[str] = None, + explain: bool = False, + stdin: t.Optional[t.Union[t.IO[bytes], int]] = None, + stdout: t.Optional[t.Union[t.IO[bytes], int]] = None, + interactive: bool = False, + output_stream: t.Optional[OutputStream] = None, + cmd_verbosity: int = 1, + str_errors: str = 'strict', + error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run the specified command and return stdout and stderr as a tuple.""" output_stream = output_stream or OutputStream.AUTO @@ -467,10 +466,8 @@ def raw_command( cmd_bytes = [to_bytes(arg) for arg in cmd] env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items()) process = subprocess.Popen(cmd_bytes, env=env_bytes, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd) # pylint: disable=consider-using-with - except OSError as ex: - if ex.errno == errno.ENOENT: - raise ApplicationError('Required program "%s" not found.' % cmd[0]) - raise + except FileNotFoundError as ex: + raise ApplicationError('Required program "%s" not found.' 
% cmd[0]) from ex if communicate: data_bytes = to_optional_bytes(data) @@ -499,12 +496,12 @@ def raw_command( def communicate_with_process( - process: subprocess.Popen, - stdin: t.Optional[bytes], - stdout: bool, - stderr: bool, - capture: bool, - output_stream: OutputStream, + process: subprocess.Popen, + stdin: t.Optional[bytes], + stdout: bool, + stderr: bool, + capture: bool, + output_stream: OutputStream, ) -> tuple[bytes, bytes]: """Communicate with the specified process, handling stdin/stdout/stderr as requested.""" threads: list[WrappedThread] = [] @@ -614,7 +611,7 @@ class OutputThread(ReaderThread): src.close() -def common_environment(): +def common_environment() -> dict[str, str]: """Common environment used for executing all programs.""" env = dict( LC_ALL=CONFIGURED_LOCALE, @@ -694,12 +691,11 @@ def verified_chmod(path: str, mode: int) -> None: def remove_tree(path: str) -> None: - """Remove the specified directory, siliently continuing if the directory does not exist.""" + """Remove the specified directory, silently continuing if the directory does not exist.""" try: shutil.rmtree(to_bytes(path)) - except OSError as ex: - if ex.errno != errno.ENOENT: - raise + except FileNotFoundError: + pass def is_binary_file(path: str) -> bool: @@ -797,17 +793,17 @@ class Display: 3: cyan, } - def __init__(self): + def __init__(self) -> None: self.verbosity = 0 self.color = sys.stdout.isatty() - self.warnings = [] - self.warnings_unique = set() + self.warnings: list[str] = [] + self.warnings_unique: set[str] = set() self.fd = sys.stderr # default to stderr until config is initialized to avoid early messages going to stdout self.rows = 0 self.columns = 0 self.truncate = 0 self.redact = True - self.sensitive = set() + self.sensitive: set[str] = set() if os.isatty(0): self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2] @@ -859,11 +855,11 @@ class Display: self.print_message(message, color=color, truncate=truncate) def print_message( # pylint: disable=locally-disabled, invalid-name - self, - message: str, - color: t.Optional[str] = None, - stderr: bool = False, - truncate: bool = False, + self, + message: str, + color: t.Optional[str] = None, + stderr: bool = False, + truncate: bool = False, ) -> None: """Display a message.""" if self.redact and self.sensitive: @@ -905,13 +901,13 @@ class ApplicationWarning(Exception): class SubprocessError(ApplicationError): """Error resulting from failed subprocess execution.""" def __init__( - self, - cmd: list[str], - status: int = 0, - stdout: t.Optional[str] = None, - stderr: t.Optional[str] = None, - runtime: t.Optional[float] = None, - error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, + self, + cmd: list[str], + status: int = 0, + stdout: t.Optional[str] = None, + stderr: t.Optional[str] = None, + runtime: t.Optional[float] = None, + error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, ) -> None: message = 'Command "%s" returned exit status %s.\n' % (shlex.join(cmd), status) @@ -963,7 +959,7 @@ class HostConnectionError(ApplicationError): self._callback() -def retry(func, ex_type=SubprocessError, sleep=10, attempts=10, warn=True): +def retry(func: t.Callable[..., TValue], ex_type: t.Type[BaseException] = SubprocessError, sleep: int = 10, attempts: int = 10, warn: bool = True) -> TValue: """Retry the specified function on failure.""" for dummy in range(1, attempts): try: @@ -1094,7 +1090,7 @@ def load_module(path: str, name: str) -> None: 
spec.loader.exec_module(module) -def sanitize_host_name(name): +def sanitize_host_name(name: str) -> str: """Return a sanitized version of the given name, suitable for use as a hostname.""" return re.sub('[^A-Za-z0-9]+', '-', name)[:63].strip('-') diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py index fbd9e71d..1dfc7f38 100644 --- a/test/lib/ansible_test/_internal/util_common.py +++ b/test/lib/ansible_test/_internal/util_common.py @@ -96,7 +96,7 @@ class ResultType: TMP: ResultType = None @staticmethod - def _populate(): + def _populate() -> None: ResultType.BOT = ResultType('bot') ResultType.COVERAGE = ResultType('coverage') ResultType.DATA = ResultType('data') @@ -288,7 +288,7 @@ def get_injector_path() -> str: verified_chmod(injector_path, MODE_DIRECTORY) - def cleanup_injector(): + def cleanup_injector() -> None: """Remove the temporary injector directory.""" remove_tree(injector_path) @@ -388,7 +388,7 @@ def create_interpreter_wrapper(interpreter: str, injected_interpreter: str) -> N verified_chmod(injected_interpreter, MODE_FILE_EXECUTE) -def cleanup_python_paths(): +def cleanup_python_paths() -> None: """Clean up all temporary python directories.""" for path in sorted(PYTHON_PATHS.values()): display.info('Cleaning up temporary python directory: %s' % path, verbosity=2) @@ -396,14 +396,14 @@ def cleanup_python_paths(): def intercept_python( - args: CommonConfig, - python: PythonConfig, - cmd: list[str], - env: dict[str, str], - capture: bool, - data: t.Optional[str] = None, - cwd: t.Optional[str] = None, - always: bool = False, + args: CommonConfig, + python: PythonConfig, + cmd: list[str], + env: dict[str, str], + capture: bool, + data: t.Optional[str] = None, + cwd: t.Optional[str] = None, + always: bool = False, ) -> tuple[t.Optional[str], t.Optional[str]]: """ Run a command while intercepting invocations of Python to control the version used. 
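The retry() signature annotated in util.py above uses a TValue type variable so callers keep the return type of the wrapped callable. A self-contained sketch of that typing pattern, assuming TValue mirrors the TypeVar ansible-test defines elsewhere in util.py:

import time
import typing as t

TValue = t.TypeVar('TValue')

def retry_sketch(
    func: t.Callable[..., TValue],
    ex_type: t.Type[BaseException] = Exception,
    sleep: int = 1,
    attempts: int = 3,
) -> TValue:
    """Call func until it succeeds; the final attempt lets any exception propagate."""
    for _ in range(1, attempts):
        try:
            return func()
        except ex_type:
            time.sleep(sleep)
    return func()

# A type checker infers the result type from the callable that is passed in:
value: str = retry_sketch(lambda: 'ok')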
@@ -428,20 +428,20 @@ def intercept_python( def run_command( - args: CommonConfig, - cmd: c.Iterable[str], - capture: bool, - env: t.Optional[dict[str, str]] = None, - data: t.Optional[str] = None, - cwd: t.Optional[str] = None, - always: bool = False, - stdin: t.Optional[t.IO[bytes]] = None, - stdout: t.Optional[t.IO[bytes]] = None, - interactive: bool = False, - output_stream: t.Optional[OutputStream] = None, - cmd_verbosity: int = 1, - str_errors: str = 'strict', - error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, + args: CommonConfig, + cmd: c.Iterable[str], + capture: bool, + env: t.Optional[dict[str, str]] = None, + data: t.Optional[str] = None, + cwd: t.Optional[str] = None, + always: bool = False, + stdin: t.Optional[t.IO[bytes]] = None, + stdout: t.Optional[t.IO[bytes]] = None, + interactive: bool = False, + output_stream: t.Optional[OutputStream] = None, + cmd_verbosity: int = 1, + str_errors: str = 'strict', + error_callback: t.Optional[c.Callable[[SubprocessError], None]] = None, ) -> tuple[t.Optional[str], t.Optional[str]]: """Run the specified command and return stdout and stderr as a tuple.""" explain = args.explain and not always @@ -449,7 +449,7 @@ def run_command( output_stream=output_stream, cmd_verbosity=cmd_verbosity, str_errors=str_errors, error_callback=error_callback) -def yamlcheck(python): +def yamlcheck(python: PythonConfig) -> t.Optional[bool]: """Return True if PyYAML has libyaml support, False if it does not and None if it was not found.""" result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TARGET_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0]) diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py index 9e999c16..ec498ed9 100644 --- a/test/lib/ansible_test/_internal/venv.py +++ b/test/lib/ansible_test/_internal/venv.py @@ -41,8 +41,8 @@ from .python_requirements import ( def get_virtual_python( - args: EnvironmentConfig, - python: VirtualPythonConfig, + args: EnvironmentConfig, + python: VirtualPythonConfig, ) -> VirtualPythonConfig: """Create a virtual environment for the given Python and return the path to its root.""" if python.system_site_packages: diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py index 9513ed30..270c9f44 100644 --- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py @@ -22,7 +22,6 @@ import argparse import ast import datetime import json -import errno import os import re import subprocess @@ -2467,12 +2466,9 @@ class GitCache: self.head_tree = self._get_module_files() else: raise - except OSError as ex: - if ex.errno == errno.ENOENT: - # fallback when git is not installed - self.head_tree = self._get_module_files() - else: - raise + except FileNotFoundError: + # fallback when git is not installed + self.head_tree = self._get_module_files() allowed_exts = ('.py', '.ps1') if plugin_type != 'module': diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py index 3180530c..44a5ddc9 100644 --- a/test/lib/ansible_test/_util/target/sanity/import/importer.py +++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py @@ -44,7 +44,8 @@ def main(): # noinspection PyCompatibility from importlib import 
import_module except ImportError: - def import_module(name): + def import_module(name, package=None): # type: (str, str | None) -> types.ModuleType + assert package is None __import__(name) return sys.modules[name] diff --git a/test/units/playbook/test_helpers.py b/test/units/playbook/test_helpers.py index e784312f..a89730ca 100644 --- a/test/units/playbook/test_helpers.py +++ b/test/units/playbook/test_helpers.py @@ -160,20 +160,10 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks): self.assertIsInstance(block.always, list) self.assertEqual(len(block.always), 0) - def test_block_unknown_action_use_handlers(self): - ds = [{ - 'block': [{'action': 'foo_test_block_unknown_action'}] - }] - res = helpers.load_list_of_tasks(ds, play=self.mock_play, use_handlers=True, - variable_manager=self.mock_variable_manager, loader=self.fake_loader) - self._assert_is_task_list_or_blocks(res) - self.assertIsInstance(res[0], Block) - self._assert_default_block(res[0]) - - def test_one_bogus_block_use_handlers(self): + def test_block_use_handlers(self): ds = [{'block': True}] self.assertRaisesRegex(errors.AnsibleParserError, - "A malformed block was encountered", + "Using a block as a handler is not supported.", helpers.load_list_of_tasks, ds, play=self.mock_play, use_handlers=True, variable_manager=self.mock_variable_manager, loader=self.fake_loader)
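The importer.py hunk above extends a fallback shim used when importlib.import_module is unavailable: __import__() loads the module and sys.modules supplies the object itself. A small standalone sketch of why that lookup is needed (the function name here is illustrative, not ansible's):

import importlib
import sys

def import_module_fallback(name):
    """Illustrative shim: __import__ returns the top-level package, so fetch the leaf from sys.modules."""
    __import__(name)
    return sys.modules[name]

assert import_module_fallback('json') is importlib.import_module('json')
assert import_module_fallback('xml.sax') is importlib.import_module('xml.sax')  # dotted names resolve to the submodule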