Diffstat (limited to 'lib/ansible/galaxy')
-rw-r--r--  lib/ansible/galaxy/__init__.py  72
-rw-r--r--  lib/ansible/galaxy/api.py  581
-rw-r--r--  lib/ansible/galaxy/collection.py  1551
-rw-r--r--  lib/ansible/galaxy/data/apb/.travis.yml  25
-rw-r--r--  lib/ansible/galaxy/data/apb/Dockerfile.j2  9
-rw-r--r--  lib/ansible/galaxy/data/apb/Makefile.j2  21
-rw-r--r--  lib/ansible/galaxy/data/apb/README.md  38
-rw-r--r--  lib/ansible/galaxy/data/apb/apb.yml.j2  13
-rw-r--r--  lib/ansible/galaxy/data/apb/defaults/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/apb/files/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/apb/handlers/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/apb/meta/main.yml.j2  44
-rw-r--r--  lib/ansible/galaxy/data/apb/playbooks/deprovision.yml.j2  8
-rw-r--r--  lib/ansible/galaxy/data/apb/playbooks/provision.yml.j2  8
-rw-r--r--  lib/ansible/galaxy/data/apb/tasks/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/apb/templates/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/apb/tests/ansible.cfg  2
-rw-r--r--  lib/ansible/galaxy/data/apb/tests/inventory  3
-rw-r--r--  lib/ansible/galaxy/data/apb/tests/test.yml.j2  7
-rw-r--r--  lib/ansible/galaxy/data/apb/vars/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/collections_galaxy_meta.yml  110
-rw-r--r--  lib/ansible/galaxy/data/container/.travis.yml  45
-rw-r--r--  lib/ansible/galaxy/data/container/README.md  49
-rw-r--r--  lib/ansible/galaxy/data/container/defaults/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/container/files/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/container/handlers/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/container/meta/container.yml.j2  11
-rw-r--r--  lib/ansible/galaxy/data/container/meta/main.yml.j2  52
-rw-r--r--  lib/ansible/galaxy/data/container/tasks/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/container/templates/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/container/tests/ansible.cfg  2
-rw-r--r--  lib/ansible/galaxy/data/container/tests/inventory  3
-rw-r--r--  lib/ansible/galaxy/data/container/tests/test.yml.j2  7
-rw-r--r--  lib/ansible/galaxy/data/container/vars/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/default/collection/README.md.j2  3
-rw-r--r--  lib/ansible/galaxy/data/default/collection/docs/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/default/collection/galaxy.yml.j2  11
-rw-r--r--  lib/ansible/galaxy/data/default/collection/plugins/README.md.j2  31
-rw-r--r--  lib/ansible/galaxy/data/default/collection/roles/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/default/role/.travis.yml  29
-rw-r--r--  lib/ansible/galaxy/data/default/role/README.md  38
-rw-r--r--  lib/ansible/galaxy/data/default/role/defaults/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/default/role/files/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/default/role/handlers/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/default/role/meta/main.yml.j2  55
-rw-r--r--  lib/ansible/galaxy/data/default/role/tasks/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/default/role/templates/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/default/role/tests/inventory  2
-rw-r--r--  lib/ansible/galaxy/data/default/role/tests/test.yml.j2  5
-rw-r--r--  lib/ansible/galaxy/data/default/role/vars/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/network/.travis.yml  29
-rw-r--r--  lib/ansible/galaxy/data/network/README.md  38
-rw-r--r--  lib/ansible/galaxy/data/network/cliconf_plugins/example.py.j2  40
-rw-r--r--  lib/ansible/galaxy/data/network/defaults/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/network/files/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/network/library/example_command.py.j2  66
-rw-r--r--  lib/ansible/galaxy/data/network/library/example_config.py.j2  66
-rw-r--r--  lib/ansible/galaxy/data/network/library/example_facts.py.j2  66
-rw-r--r--  lib/ansible/galaxy/data/network/meta/main.yml.j2  52
-rw-r--r--  lib/ansible/galaxy/data/network/module_utils/example.py.j2  40
-rw-r--r--  lib/ansible/galaxy/data/network/netconf_plugins/example.py.j2  40
-rw-r--r--  lib/ansible/galaxy/data/network/tasks/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/data/network/templates/.git_keep  0
-rw-r--r--  lib/ansible/galaxy/data/network/terminal_plugins/example.py.j2  40
-rw-r--r--  lib/ansible/galaxy/data/network/tests/inventory  2
-rw-r--r--  lib/ansible/galaxy/data/network/tests/test.yml.j2  14
-rw-r--r--  lib/ansible/galaxy/data/network/vars/main.yml.j2  2
-rw-r--r--  lib/ansible/galaxy/role.py  399
-rw-r--r--  lib/ansible/galaxy/token.py  180
-rw-r--r--  lib/ansible/galaxy/user_agent.py  23
70 files changed, 3960 insertions, 0 deletions
diff --git a/lib/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py
new file mode 100644
index 00000000..a73baac8
--- /dev/null
+++ b/lib/ansible/galaxy/__init__.py
@@ -0,0 +1,72 @@
+########################################################################
+#
+# (C) 2015, Brian Coca <bcoca@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+########################################################################
+''' This manages remote shared Ansible objects, mainly roles'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import yaml
+
+import ansible.constants as C
+from ansible import context
+from ansible.module_utils._text import to_bytes
+
+# default_readme_template
+# default_meta_template
+
+
+def get_collections_galaxy_meta_info():
+ meta_path = os.path.join(os.path.dirname(__file__), 'data', 'collections_galaxy_meta.yml')
+ with open(to_bytes(meta_path, errors='surrogate_or_strict'), 'rb') as galaxy_obj:
+ return yaml.safe_load(galaxy_obj)
+
+
+class Galaxy(object):
+ ''' Keeps global galaxy info '''
+
+ def __init__(self):
+ # TODO: eventually remove this as it contains a mishmash of properties that aren't really global
+
+ # roles_path needs to be a list and will be by default
+ roles_path = context.CLIARGS.get('roles_path', C.DEFAULT_ROLES_PATH)
+ # cli option handling is responsible for splitting roles_path
+ self.roles_paths = roles_path
+
+ self.roles = {}
+
+ # load data path for resource usage
+ this_dir, this_filename = os.path.split(__file__)
+ type_path = context.CLIARGS.get('role_type', 'default')
+ if type_path == 'default':
+ type_path = os.path.join(type_path, context.CLIARGS.get('type'))
+
+ self.DATA_PATH = os.path.join(this_dir, 'data', type_path)
+
+ @property
+ def default_role_skeleton_path(self):
+ return self.DATA_PATH
+
+ def add_role(self, role):
+ self.roles[role.name] = role
+
+ def remove_role(self, role_name):
+ del self.roles[role_name]
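
The module above mainly exposes get_collections_galaxy_meta_info(), which loads data/collections_galaxy_meta.yml (the field definitions behind the galaxy.yml skeleton). A minimal consumption sketch, not part of the patch; the 'key' and 'required' field names are assumptions taken from how the skeleton templates are usually driven, not something this diff confirms:

    from ansible.galaxy import get_collections_galaxy_meta_info

    for field in get_collections_galaxy_meta_info():
        # 'key' and 'required' are assumed entry names in collections_galaxy_meta.yml
        name = field.get('key')
        required = 'required' if field.get('required') else 'optional'
        print('%s: %s' % (name, required))
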
diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
new file mode 100644
index 00000000..4dd3cded
--- /dev/null
+++ b/lib/ansible/galaxy/api.py
@@ -0,0 +1,581 @@
+# (C) 2013, James Cammarata <jcammarata@ansible.com>
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import hashlib
+import json
+import os
+import tarfile
+import uuid
+import time
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.galaxy.user_agent import user_agent
+from ansible.module_utils.six import string_types
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible.module_utils.six.moves.urllib.parse import quote as urlquote, urlencode, urlparse
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.urls import open_url, prepare_multipart
+from ansible.utils.display import Display
+from ansible.utils.hashing import secure_hash_s
+
+try:
+ from urllib.parse import urlparse
+except ImportError:
+ # Python 2
+ from urlparse import urlparse
+
+display = Display()
+
+
+def g_connect(versions):
+ """
+ Wrapper to lazily initialize connection info to Galaxy and verify the API versions required are available on the
+ endpoint.
+
+ :param versions: A list of API versions that the function supports.
+ """
+ def decorator(method):
+ def wrapped(self, *args, **kwargs):
+ if not self._available_api_versions:
+ display.vvvv("Initial connection to galaxy_server: %s" % self.api_server)
+
+ # Determine the type of Galaxy server we are talking to. First try it unauthenticated then with Bearer
+ # auth for Automation Hub.
+ n_url = self.api_server
+ error_context_msg = 'Error when finding available api versions from %s (%s)' % (self.name, n_url)
+
+ if self.api_server == 'https://galaxy.ansible.com' or self.api_server == 'https://galaxy.ansible.com/':
+ n_url = 'https://galaxy.ansible.com/api/'
+
+ try:
+ data = self._call_galaxy(n_url, method='GET', error_context_msg=error_context_msg)
+ except (AnsibleError, GalaxyError, ValueError, KeyError) as err:
+ # Either the URL doesn't exist or another error occurred. Or the URL exists, but isn't a galaxy API
+ # root (not JSON, no 'available_versions'), so try appending '/api/'
+ if n_url.endswith('/api') or n_url.endswith('/api/'):
+ raise
+
+ # Let exceptions here bubble up but raise the original if this returns a 404 (/api/ wasn't found).
+ n_url = _urljoin(n_url, '/api/')
+ try:
+ data = self._call_galaxy(n_url, method='GET', error_context_msg=error_context_msg)
+ except GalaxyError as new_err:
+ if new_err.http_code == 404:
+ raise err
+ raise
+
+ if 'available_versions' not in data:
+ raise AnsibleError("Tried to find galaxy API root at %s but no 'available_versions' are available "
+ "on %s" % (n_url, self.api_server))
+
+ # Update api_server to point to the "real" API root, which in this case could have been the configured
+ # url + '/api/' appended.
+ self.api_server = n_url
+
+ # Default to only supporting v1. If only v1 is returned, we also assume that v2 is available even
+ # though it isn't returned in the available_versions dict.
+ available_versions = data.get('available_versions', {u'v1': u'v1/'})
+ if list(available_versions.keys()) == [u'v1']:
+ available_versions[u'v2'] = u'v2/'
+
+ self._available_api_versions = available_versions
+ display.vvvv("Found API version '%s' with Galaxy server %s (%s)"
+ % (', '.join(available_versions.keys()), self.name, self.api_server))
+
+ # Verify that the API versions the function works with are available on the server specified.
+ available_versions = set(self._available_api_versions.keys())
+ common_versions = set(versions).intersection(available_versions)
+ if not common_versions:
+ raise AnsibleError("Galaxy action %s requires API versions '%s' but only '%s' are available on %s %s"
+ % (method.__name__, ", ".join(versions), ", ".join(available_versions),
+ self.name, self.api_server))
+
+ return method(self, *args, **kwargs)
+ return wrapped
+ return decorator
+
+
+def _urljoin(*args):
+ return '/'.join(to_native(a, errors='surrogate_or_strict').strip('/') for a in args + ('',) if a)
+
+
+class GalaxyError(AnsibleError):
+ """ Error for bad Galaxy server responses. """
+
+ def __init__(self, http_error, message):
+ super(GalaxyError, self).__init__(message)
+ self.http_code = http_error.code
+ self.url = http_error.geturl()
+
+ try:
+ http_msg = to_text(http_error.read())
+ err_info = json.loads(http_msg)
+ except (AttributeError, ValueError):
+ err_info = {}
+
+ url_split = self.url.split('/')
+ if 'v2' in url_split:
+ galaxy_msg = err_info.get('message', http_error.reason)
+ code = err_info.get('code', 'Unknown')
+ full_error_msg = u"%s (HTTP Code: %d, Message: %s Code: %s)" % (message, self.http_code, galaxy_msg, code)
+ elif 'v3' in url_split:
+ errors = err_info.get('errors', [])
+ if not errors:
+ errors = [{}] # Defaults are set below, we just need to make sure 1 error is present.
+
+ message_lines = []
+ for error in errors:
+ error_msg = error.get('detail') or error.get('title') or http_error.reason
+ error_code = error.get('code') or 'Unknown'
+ message_line = u"(HTTP Code: %d, Message: %s Code: %s)" % (self.http_code, error_msg, error_code)
+ message_lines.append(message_line)
+
+ full_error_msg = "%s %s" % (message, ', '.join(message_lines))
+ else:
+ # v1 and unknown API endpoints
+ galaxy_msg = err_info.get('default', http_error.reason)
+ full_error_msg = u"%s (HTTP Code: %d, Message: %s)" % (message, self.http_code, galaxy_msg)
+
+ self.message = to_native(full_error_msg)
+
+
+class CollectionVersionMetadata:
+
+ def __init__(self, namespace, name, version, download_url, artifact_sha256, dependencies):
+ """
+ Contains common information about a collection on a Galaxy server, smoothing over API differences
+ between servers and defining a standard set of metadata for a collection.
+
+ :param namespace: The namespace name.
+ :param name: The collection name.
+ :param version: The version that the metadata refers to.
+ :param download_url: The URL to download the collection.
+ :param artifact_sha256: The SHA256 of the collection artifact for later verification.
+ :param dependencies: A dict of dependencies of the collection.
+ """
+ self.namespace = namespace
+ self.name = name
+ self.version = version
+ self.download_url = download_url
+ self.artifact_sha256 = artifact_sha256
+ self.dependencies = dependencies
+
+
+class GalaxyAPI:
+ """ This class is meant to be used as a API client for an Ansible Galaxy server """
+
+ def __init__(self, galaxy, name, url, username=None, password=None, token=None, validate_certs=True):
+ self.galaxy = galaxy
+ self.name = name
+ self.username = username
+ self.password = password
+ self.token = token
+ self.api_server = url
+ self.validate_certs = validate_certs
+ self._available_api_versions = {}
+
+ display.debug('Validate TLS certificates for %s: %s' % (self.api_server, self.validate_certs))
+
+ @property
+ @g_connect(['v1', 'v2', 'v3'])
+ def available_api_versions(self):
+ # Calling g_connect will populate self._available_api_versions
+ return self._available_api_versions
+
+ def _call_galaxy(self, url, args=None, headers=None, method=None, auth_required=False, error_context_msg=None):
+ headers = headers or {}
+ self._add_auth_token(headers, url, required=auth_required)
+
+ try:
+ display.vvvv("Calling Galaxy at %s" % url)
+ resp = open_url(to_native(url), data=args, validate_certs=self.validate_certs, headers=headers,
+ method=method, timeout=20, http_agent=user_agent(), follow_redirects='safe')
+ except HTTPError as e:
+ raise GalaxyError(e, error_context_msg)
+ except Exception as e:
+ raise AnsibleError("Unknown error when attempting to call Galaxy at '%s': %s" % (url, to_native(e)))
+
+ resp_data = to_text(resp.read(), errors='surrogate_or_strict')
+ try:
+ data = json.loads(resp_data)
+ except ValueError:
+ raise AnsibleError("Failed to parse Galaxy response from '%s' as JSON:\n%s"
+ % (resp.url, to_native(resp_data)))
+
+ return data
+
+ def _add_auth_token(self, headers, url, token_type=None, required=False):
+ # Don't add the auth token if one is already present
+ if 'Authorization' in headers:
+ return
+
+ if not self.token and required:
+ raise AnsibleError("No access token or username set. A token can be set with --api-key "
+ "or at {0}.".format(to_native(C.GALAXY_TOKEN_PATH)))
+
+ if self.token:
+ headers.update(self.token.headers())
+
+ @g_connect(['v1'])
+ def authenticate(self, github_token):
+ """
+ Retrieve an authentication token
+ """
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "tokens") + '/'
+ args = urlencode({"github_token": github_token})
+ resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST", http_agent=user_agent())
+ data = json.loads(to_text(resp.read(), errors='surrogate_or_strict'))
+ return data
+
+ @g_connect(['v1'])
+ def create_import_task(self, github_user, github_repo, reference=None, role_name=None):
+ """
+ Post an import request
+ """
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "imports") + '/'
+ args = {
+ "github_user": github_user,
+ "github_repo": github_repo,
+ "github_reference": reference if reference else ""
+ }
+ if role_name:
+ args['alternate_role_name'] = role_name
+ elif github_repo.startswith('ansible-role'):
+ args['alternate_role_name'] = github_repo[len('ansible-role') + 1:]
+ data = self._call_galaxy(url, args=urlencode(args), method="POST")
+ if data.get('results', None):
+ return data['results']
+ return data
+
+ @g_connect(['v1'])
+ def get_import_task(self, task_id=None, github_user=None, github_repo=None):
+ """
+ Check the status of an import task.
+ """
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "imports")
+ if task_id is not None:
+ url = "%s?id=%d" % (url, task_id)
+ elif github_user is not None and github_repo is not None:
+ url = "%s?github_user=%s&github_repo=%s" % (url, github_user, github_repo)
+ else:
+ raise AnsibleError("Expected task_id or github_user and github_repo")
+
+ data = self._call_galaxy(url)
+ return data['results']
+
+ @g_connect(['v1'])
+ def lookup_role_by_name(self, role_name, notify=True):
+ """
+ Find a role by name.
+ """
+ role_name = to_text(urlquote(to_bytes(role_name)))
+
+ try:
+ parts = role_name.split(".")
+ user_name = ".".join(parts[0:-1])
+ role_name = parts[-1]
+ if notify:
+ display.display("- downloading role '%s', owned by %s" % (role_name, user_name))
+ except Exception:
+ raise AnsibleError("Invalid role name (%s). Specify role as format: username.rolename" % role_name)
+
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "roles",
+ "?owner__username=%s&name=%s" % (user_name, role_name))
+ data = self._call_galaxy(url)
+ if len(data["results"]) != 0:
+ return data["results"][0]
+ return None
+
+ @g_connect(['v1'])
+ def fetch_role_related(self, related, role_id):
+ """
+ Fetch the list of related items for the given role.
+ The url comes from the 'related' field of the role.
+ """
+
+ results = []
+ try:
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "roles", role_id, related,
+ "?page_size=50")
+ data = self._call_galaxy(url)
+ results = data['results']
+ done = (data.get('next_link', None) is None)
+
+ # https://github.com/ansible/ansible/issues/64355
+ # api_server contains part of the API path but next_link includes the /api part so strip it out.
+ url_info = urlparse(self.api_server)
+ base_url = "%s://%s/" % (url_info.scheme, url_info.netloc)
+
+ while not done:
+ url = _urljoin(base_url, data['next_link'])
+ data = self._call_galaxy(url)
+ results += data['results']
+ done = (data.get('next_link', None) is None)
+ except Exception as e:
+ display.warning("Unable to retrieve role (id=%s) data (%s), but this is not fatal so we continue: %s"
+ % (role_id, related, to_text(e)))
+ return results
+
+ @g_connect(['v1'])
+ def get_list(self, what):
+ """
+ Fetch the list of items specified.
+ """
+ try:
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], what, "?page_size")
+ data = self._call_galaxy(url)
+ if "results" in data:
+ results = data['results']
+ else:
+ results = data
+ done = True
+ if "next" in data:
+ done = (data.get('next_link', None) is None)
+ while not done:
+ url = _urljoin(self.api_server, data['next_link'])
+ data = self._call_galaxy(url)
+ results += data['results']
+ done = (data.get('next_link', None) is None)
+ return results
+ except Exception as error:
+ raise AnsibleError("Failed to download the %s list: %s" % (what, to_native(error)))
+
+ @g_connect(['v1'])
+ def search_roles(self, search, **kwargs):
+
+ search_url = _urljoin(self.api_server, self.available_api_versions['v1'], "search", "roles", "?")
+
+ if search:
+ search_url += '&autocomplete=' + to_text(urlquote(to_bytes(search)))
+
+ tags = kwargs.get('tags', None)
+ platforms = kwargs.get('platforms', None)
+ page_size = kwargs.get('page_size', None)
+ author = kwargs.get('author', None)
+
+ if tags and isinstance(tags, string_types):
+ tags = tags.split(',')
+ search_url += '&tags_autocomplete=' + '+'.join(tags)
+
+ if platforms and isinstance(platforms, string_types):
+ platforms = platforms.split(',')
+ search_url += '&platforms_autocomplete=' + '+'.join(platforms)
+
+ if page_size:
+ search_url += '&page_size=%s' % page_size
+
+ if author:
+ search_url += '&username_autocomplete=%s' % author
+
+ data = self._call_galaxy(search_url)
+ return data
+
+ @g_connect(['v1'])
+ def add_secret(self, source, github_user, github_repo, secret):
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "notification_secrets") + '/'
+ args = urlencode({
+ "source": source,
+ "github_user": github_user,
+ "github_repo": github_repo,
+ "secret": secret
+ })
+ data = self._call_galaxy(url, args=args, method="POST")
+ return data
+
+ @g_connect(['v1'])
+ def list_secrets(self):
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "notification_secrets")
+ data = self._call_galaxy(url, auth_required=True)
+ return data
+
+ @g_connect(['v1'])
+ def remove_secret(self, secret_id):
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "notification_secrets", secret_id) + '/'
+ data = self._call_galaxy(url, auth_required=True, method='DELETE')
+ return data
+
+ @g_connect(['v1'])
+ def delete_role(self, github_user, github_repo):
+ url = _urljoin(self.api_server, self.available_api_versions['v1'], "removerole",
+ "?github_user=%s&github_repo=%s" % (github_user, github_repo))
+ data = self._call_galaxy(url, auth_required=True, method='DELETE')
+ return data
+
+ # Collection APIs #
+
+ @g_connect(['v2', 'v3'])
+ def publish_collection(self, collection_path):
+ """
+ Publishes a collection to a Galaxy server and returns the import task URI.
+
+ :param collection_path: The path to the collection tarball to publish.
+ :return: The import task URI that contains the import results.
+ """
+ display.display("Publishing collection artifact '%s' to %s %s" % (collection_path, self.name, self.api_server))
+
+ b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
+ if not os.path.exists(b_collection_path):
+ raise AnsibleError("The collection path specified '%s' does not exist." % to_native(collection_path))
+ elif not tarfile.is_tarfile(b_collection_path):
+ raise AnsibleError("The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
+ "build' to create a proper release artifact." % to_native(collection_path))
+
+ with open(b_collection_path, 'rb') as collection_tar:
+ sha256 = secure_hash_s(collection_tar.read(), hash_func=hashlib.sha256)
+
+ content_type, b_form_data = prepare_multipart(
+ {
+ 'sha256': sha256,
+ 'file': {
+ 'filename': b_collection_path,
+ 'mime_type': 'application/octet-stream',
+ },
+ }
+ )
+
+ headers = {
+ 'Content-type': content_type,
+ 'Content-length': len(b_form_data),
+ }
+
+ if 'v3' in self.available_api_versions:
+ n_url = _urljoin(self.api_server, self.available_api_versions['v3'], 'artifacts', 'collections') + '/'
+ else:
+ n_url = _urljoin(self.api_server, self.available_api_versions['v2'], 'collections') + '/'
+
+ resp = self._call_galaxy(n_url, args=b_form_data, headers=headers, method='POST', auth_required=True,
+ error_context_msg='Error when publishing collection to %s (%s)'
+ % (self.name, self.api_server))
+
+ return resp['task']
+
+ @g_connect(['v2', 'v3'])
+ def wait_import_task(self, task_id, timeout=0):
+ """
+ Waits until the import process on the Galaxy server has completed or the timeout is reached.
+
+ :param task_id: The id of the import task to wait for. This can be parsed out of the return
+ value for GalaxyAPI.publish_collection.
+ :param timeout: The timeout in seconds, 0 is no timeout.
+ """
+ state = 'waiting'
+ data = None
+
+ # Construct the appropriate URL per version
+ if 'v3' in self.available_api_versions:
+ full_url = _urljoin(self.api_server, self.available_api_versions['v3'],
+ 'imports/collections', task_id, '/')
+ else:
+ full_url = _urljoin(self.api_server, self.available_api_versions['v2'],
+ 'collection-imports', task_id, '/')
+
+ display.display("Waiting until Galaxy import task %s has completed" % full_url)
+ start = time.time()
+ wait = 2
+
+ while timeout == 0 or (time.time() - start) < timeout:
+ data = self._call_galaxy(full_url, method='GET', auth_required=True,
+ error_context_msg='Error when getting import task results at %s' % full_url)
+
+ state = data.get('state', 'waiting')
+
+ if data.get('finished_at', None):
+ break
+
+ display.vvv('Galaxy import process has a status of %s, wait %d seconds before trying again'
+ % (state, wait))
+ time.sleep(wait)
+
+ # poor man's exponential backoff algo so we don't flood the Galaxy API, cap at 30 seconds.
+ wait = min(30, wait * 1.5)
+ if state == 'waiting':
+ raise AnsibleError("Timeout while waiting for the Galaxy import process to finish, check progress at '%s'"
+ % to_native(full_url))
+
+ for message in data.get('messages', []):
+ level = message['level']
+ if level == 'error':
+ display.error("Galaxy import error message: %s" % message['message'])
+ elif level == 'warning':
+ display.warning("Galaxy import warning message: %s" % message['message'])
+ else:
+ display.vvv("Galaxy import message: %s - %s" % (level, message['message']))
+
+ if state == 'failed':
+ code = to_native(data['error'].get('code', 'UNKNOWN'))
+ description = to_native(
+ data['error'].get('description', "Unknown error, see %s for more details" % full_url))
+ raise AnsibleError("Galaxy import process failed: %s (Code: %s)" % (description, code))
+
+ @g_connect(['v2', 'v3'])
+ def get_collection_version_metadata(self, namespace, name, version):
+ """
+ Gets the collection information from the Galaxy server about a specific Collection version.
+
+ :param namespace: The collection namespace.
+ :param name: The collection name.
+ :param version: Version of the collection to get the information for.
+ :return: CollectionVersionMetadata about the collection at the version requested.
+ """
+ api_path = self.available_api_versions.get('v3', self.available_api_versions.get('v2'))
+ url_paths = [self.api_server, api_path, 'collections', namespace, name, 'versions', version, '/']
+
+ n_collection_url = _urljoin(*url_paths)
+ error_context_msg = 'Error when getting collection version metadata for %s.%s:%s from %s (%s)' \
+ % (namespace, name, version, self.name, self.api_server)
+ data = self._call_galaxy(n_collection_url, error_context_msg=error_context_msg)
+
+ return CollectionVersionMetadata(data['namespace']['name'], data['collection']['name'], data['version'],
+ data['download_url'], data['artifact']['sha256'],
+ data['metadata']['dependencies'])
+
+ @g_connect(['v2', 'v3'])
+ def get_collection_versions(self, namespace, name):
+ """
+ Gets a list of available versions for a collection on a Galaxy server.
+
+ :param namespace: The collection namespace.
+ :param name: The collection name.
+ :return: A list of versions that are available.
+ """
+ relative_link = False
+ if 'v3' in self.available_api_versions:
+ api_path = self.available_api_versions['v3']
+ results_key = 'data'
+ pagination_path = ['links', 'next']
+ relative_link = True # AH pagination results are relative and not absolute URIs.
+ else:
+ api_path = self.available_api_versions['v2']
+ results_key = 'results'
+ pagination_path = ['next']
+
+ n_url = _urljoin(self.api_server, api_path, 'collections', namespace, name, 'versions', '/')
+
+ error_context_msg = 'Error when getting available collection versions for %s.%s from %s (%s)' \
+ % (namespace, name, self.name, self.api_server)
+ data = self._call_galaxy(n_url, error_context_msg=error_context_msg)
+
+ versions = []
+ while True:
+ versions += [v['version'] for v in data[results_key]]
+
+ next_link = data
+ for path in pagination_path:
+ next_link = next_link.get(path, {})
+
+ if not next_link:
+ break
+ elif relative_link:
+ # TODO: This assumes the pagination result is relative to the root server. Will need to be verified
+ # with someone who knows the AH API.
+ next_link = n_url.replace(urlparse(n_url).path, next_link)
+
+ data = self._call_galaxy(to_native(next_link, errors='surrogate_or_strict'),
+ error_context_msg=error_context_msg)
+
+ return versions
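
Taken together, the client above is driven in three steps: construct a GalaxyAPI, let the g_connect decorator discover the available API versions on first use, then call the v2/v3 collection endpoints. A rough usage sketch, not part of the patch; the None galaxy context, the server name, and the naive version sort are illustrative assumptions (the real CLI wires in a Galaxy() object and picks versions with SemanticVersion):

    from ansible.galaxy.api import GalaxyAPI

    # hits the network; requires a server exposing the v2 or v3 API
    api = GalaxyAPI(None, 'release_galaxy', 'https://galaxy.ansible.com/api/')
    versions = api.get_collection_versions('community', 'general')
    latest = sorted(versions)[-1]  # naive lexical pick, for illustration only
    meta = api.get_collection_version_metadata('community', 'general', latest)
    print(meta.download_url, meta.artifact_sha256)
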
diff --git a/lib/ansible/galaxy/collection.py b/lib/ansible/galaxy/collection.py
new file mode 100644
index 00000000..054a8a57
--- /dev/null
+++ b/lib/ansible/galaxy/collection.py
@@ -0,0 +1,1551 @@
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import errno
+import fnmatch
+import json
+import operator
+import os
+import shutil
+import stat
+import sys
+import tarfile
+import tempfile
+import threading
+import time
+import yaml
+
+from collections import namedtuple
+from contextlib import contextmanager
+from distutils.version import LooseVersion
+from hashlib import sha256
+from io import BytesIO
+from yaml.error import YAMLError
+
+try:
+ import queue
+except ImportError:
+ import Queue as queue # Python 2
+
+import ansible.constants as C
+from ansible.errors import AnsibleError
+from ansible.galaxy import get_collections_galaxy_meta_info
+from ansible.galaxy.api import CollectionVersionMetadata, GalaxyError
+from ansible.galaxy.user_agent import user_agent
+from ansible.module_utils import six
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.utils.collection_loader import AnsibleCollectionRef
+from ansible.utils.display import Display
+from ansible.utils.galaxy import scm_archive_collection
+from ansible.utils.hashing import secure_hash, secure_hash_s
+from ansible.utils.version import SemanticVersion
+from ansible.module_utils.urls import open_url
+
+urlparse = six.moves.urllib.parse.urlparse
+urldefrag = six.moves.urllib.parse.urldefrag
+urllib_error = six.moves.urllib.error
+
+
+display = Display()
+
+MANIFEST_FORMAT = 1
+
+ModifiedContent = namedtuple('ModifiedContent', ['filename', 'expected', 'installed'])
+
+
+class CollectionRequirement:
+
+ _FILE_MAPPING = [(b'MANIFEST.json', 'manifest_file'), (b'FILES.json', 'files_file')]
+
+ def __init__(self, namespace, name, b_path, api, versions, requirement, force, parent=None, metadata=None,
+ files=None, skip=False, allow_pre_releases=False):
+ """Represents a collection requirement, the versions that are available to be installed as well as any
+ dependencies the collection has.
+
+ :param namespace: The collection namespace.
+ :param name: The collection name.
+ :param b_path: Byte str of the path to the collection tarball if it has already been downloaded.
+ :param api: The GalaxyAPI to use if the collection is from Galaxy.
+ :param versions: A list of versions of the collection that are available.
+ :param requirement: The version requirement string used to verify the list of versions fit the requirements.
+ :param force: Whether the force flag is applied to the collection.
+ :param parent: The name of the parent the collection is a dependency of.
+ :param metadata: The galaxy.api.CollectionVersionMetadata that has already been retrieved from the Galaxy
+ server.
+ :param files: The files that exist inside the collection. This is based on the FILES.json file inside the
+ collection artifact.
+ :param skip: Whether to skip installing the collection. Should be set if the collection is already installed
+ and force is not set.
+ :param allow_pre_releases: Whether to allow pre-release versions of collections.
+ """
+ self.namespace = namespace
+ self.name = name
+ self.b_path = b_path
+ self.api = api
+ self._versions = set(versions)
+ self.force = force
+ self.skip = skip
+ self.required_by = []
+ self.allow_pre_releases = allow_pre_releases
+
+ self._metadata = metadata
+ self._files = files
+
+ self.add_requirement(parent, requirement)
+
+ def __str__(self):
+ return to_native("%s.%s" % (self.namespace, self.name))
+
+ def __unicode__(self):
+ return u"%s.%s" % (self.namespace, self.name)
+
+ @property
+ def metadata(self):
+ self._get_metadata()
+ return self._metadata
+
+ @property
+ def versions(self):
+ if self.allow_pre_releases:
+ return self._versions
+ return set(v for v in self._versions if v == '*' or not SemanticVersion(v).is_prerelease)
+
+ @versions.setter
+ def versions(self, value):
+ self._versions = set(value)
+
+ @property
+ def pre_releases(self):
+ return set(v for v in self._versions if SemanticVersion(v).is_prerelease)
+
+ @property
+ def latest_version(self):
+ try:
+ return max([v for v in self.versions if v != '*'], key=SemanticVersion)
+ except ValueError: # ValueError: max() arg is an empty sequence
+ return '*'
+
+ @property
+ def dependencies(self):
+ if not self._metadata:
+ if len(self.versions) > 1:
+ return {}
+ self._get_metadata()
+
+ dependencies = self._metadata.dependencies
+
+ if dependencies is None:
+ return {}
+
+ return dependencies
+
+ @staticmethod
+ def artifact_info(b_path):
+ """Load the manifest data from the MANIFEST.json and FILES.json. If the files exist, return a dict containing the keys 'files_file' and 'manifest_file'.
+ :param b_path: The directory of a collection.
+ """
+ info = {}
+ for b_file_name, property_name in CollectionRequirement._FILE_MAPPING:
+ b_file_path = os.path.join(b_path, b_file_name)
+ if not os.path.exists(b_file_path):
+ continue
+ with open(b_file_path, 'rb') as file_obj:
+ try:
+ info[property_name] = json.loads(to_text(file_obj.read(), errors='surrogate_or_strict'))
+ except ValueError:
+ raise AnsibleError("Collection file at '%s' does not contain a valid json string." % to_native(b_file_path))
+ return info
+
+ @staticmethod
+ def galaxy_metadata(b_path):
+ """Generate the manifest data from the galaxy.yml file.
+ If the galaxy.yml exists, return a dictionary containing the keys 'files_file' and 'manifest_file'.
+
+ :param b_path: The directory of a collection.
+ """
+ b_galaxy_path = get_galaxy_metadata_path(b_path)
+ info = {}
+ if os.path.exists(b_galaxy_path):
+ collection_meta = _get_galaxy_yml(b_galaxy_path)
+ info['files_file'] = _build_files_manifest(b_path, collection_meta['namespace'], collection_meta['name'], collection_meta['build_ignore'])
+ info['manifest_file'] = _build_manifest(**collection_meta)
+ return info
+
+ @staticmethod
+ def collection_info(b_path, fallback_metadata=False):
+ info = CollectionRequirement.artifact_info(b_path)
+ if info or not fallback_metadata:
+ return info
+ return CollectionRequirement.galaxy_metadata(b_path)
+
+ def add_requirement(self, parent, requirement):
+ self.required_by.append((parent, requirement))
+ new_versions = set(v for v in self.versions if self._meets_requirements(v, requirement, parent))
+ if len(new_versions) == 0:
+ if self.skip:
+ force_flag = '--force-with-deps' if parent else '--force'
+ version = self.latest_version if self.latest_version != '*' else 'unknown'
+ msg = "Cannot meet requirement %s:%s as it is already installed at version '%s'. Use %s to overwrite" \
+ % (to_text(self), requirement, version, force_flag)
+ raise AnsibleError(msg)
+ elif parent is None:
+ msg = "Cannot meet requirement %s for dependency %s" % (requirement, to_text(self))
+ else:
+ msg = "Cannot meet dependency requirement '%s:%s' for collection %s" \
+ % (to_text(self), requirement, parent)
+
+ collection_source = to_text(self.b_path, nonstring='passthru') or self.api.api_server
+ req_by = "\n".join(
+ "\t%s - '%s:%s'" % (to_text(p) if p else 'base', to_text(self), r)
+ for p, r in self.required_by
+ )
+
+ versions = ", ".join(sorted(self.versions, key=SemanticVersion))
+ if not self.versions and self.pre_releases:
+ pre_release_msg = (
+ '\nThis collection only contains pre-releases. Utilize `--pre` to install pre-releases, or '
+ 'explicitly provide the pre-release version.'
+ )
+ else:
+ pre_release_msg = ''
+
+ raise AnsibleError(
+ "%s from source '%s'. Available versions before last requirement added: %s\nRequirements from:\n%s%s"
+ % (msg, collection_source, versions, req_by, pre_release_msg)
+ )
+
+ self.versions = new_versions
+
+ def download(self, b_path):
+ download_url = self._metadata.download_url
+ artifact_hash = self._metadata.artifact_sha256
+ headers = {}
+ self.api._add_auth_token(headers, download_url, required=False)
+
+ b_collection_path = _download_file(download_url, b_path, artifact_hash, self.api.validate_certs,
+ headers=headers)
+
+ return to_text(b_collection_path, errors='surrogate_or_strict')
+
+ def install(self, path, b_temp_path):
+ if self.skip:
+ display.display("Skipping '%s' as it is already installed" % to_text(self))
+ return
+
+ # Install if it is not
+ collection_path = os.path.join(path, self.namespace, self.name)
+ b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
+ display.display("Installing '%s:%s' to '%s'" % (to_text(self), self.latest_version, collection_path))
+
+ if self.b_path is None:
+ self.b_path = self.download(b_temp_path)
+
+ if os.path.exists(b_collection_path):
+ shutil.rmtree(b_collection_path)
+
+ if os.path.isfile(self.b_path):
+ self.install_artifact(b_collection_path, b_temp_path)
+ else:
+ self.install_scm(b_collection_path)
+
+ display.display("%s (%s) was installed successfully" % (to_text(self), self.latest_version))
+
+ def install_artifact(self, b_collection_path, b_temp_path):
+
+ try:
+ with tarfile.open(self.b_path, mode='r') as collection_tar:
+ files_member_obj = collection_tar.getmember('FILES.json')
+ with _tarfile_extract(collection_tar, files_member_obj) as (dummy, files_obj):
+ files = json.loads(to_text(files_obj.read(), errors='surrogate_or_strict'))
+
+ _extract_tar_file(collection_tar, 'MANIFEST.json', b_collection_path, b_temp_path)
+ _extract_tar_file(collection_tar, 'FILES.json', b_collection_path, b_temp_path)
+
+ for file_info in files['files']:
+ file_name = file_info['name']
+ if file_name == '.':
+ continue
+
+ if file_info['ftype'] == 'file':
+ _extract_tar_file(collection_tar, file_name, b_collection_path, b_temp_path,
+ expected_hash=file_info['chksum_sha256'])
+
+ else:
+ _extract_tar_dir(collection_tar, file_name, b_collection_path)
+
+ except Exception:
+ # Ensure we don't leave the dir behind in case of a failure.
+ shutil.rmtree(b_collection_path)
+
+ b_namespace_path = os.path.dirname(b_collection_path)
+ if not os.listdir(b_namespace_path):
+ os.rmdir(b_namespace_path)
+
+ raise
+
+ def install_scm(self, b_collection_output_path):
+ """Install the collection from source control into given dir.
+
+ Generates the Ansible collection artifact data from a galaxy.yml and installs the artifact to a directory.
+ This should follow the same pattern as build_collection, but instead of creating an artifact, install it.
+ :param b_collection_output_path: The installation directory for the collection artifact.
+ :raises AnsibleError: If no collection metadata found.
+ """
+ b_collection_path = self.b_path
+
+ b_galaxy_path = get_galaxy_metadata_path(b_collection_path)
+ if not os.path.exists(b_galaxy_path):
+ raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))
+
+ info = CollectionRequirement.galaxy_metadata(b_collection_path)
+
+ collection_manifest = info['manifest_file']
+ collection_meta = collection_manifest['collection_info']
+ file_manifest = info['files_file']
+
+ _build_collection_dir(b_collection_path, b_collection_output_path, collection_manifest, file_manifest)
+
+ collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
+ collection_manifest['collection_info']['name'])
+ display.display('Created collection for %s at %s' % (collection_name, to_text(b_collection_output_path)))
+
+ def set_latest_version(self):
+ self.versions = set([self.latest_version])
+ self._get_metadata()
+
+ def verify(self, remote_collection, path, b_temp_tar_path):
+ if not self.skip:
+ display.display("'%s' has not been installed, nothing to verify" % (to_text(self)))
+ return
+
+ collection_path = os.path.join(path, self.namespace, self.name)
+ b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
+
+ display.vvv("Verifying '%s:%s'." % (to_text(self), self.latest_version))
+ display.vvv("Installed collection found at '%s'" % collection_path)
+ display.vvv("Remote collection found at '%s'" % remote_collection.metadata.download_url)
+
+ # Compare installed version versus requirement version
+ if self.latest_version != remote_collection.latest_version:
+ err = "%s has the version '%s' but is being compared to '%s'" % (to_text(self), self.latest_version, remote_collection.latest_version)
+ display.display(err)
+ return
+
+ modified_content = []
+
+ # Verify the manifest hash matches before verifying the file manifest
+ expected_hash = _get_tar_file_hash(b_temp_tar_path, 'MANIFEST.json')
+ self._verify_file_hash(b_collection_path, 'MANIFEST.json', expected_hash, modified_content)
+ manifest = _get_json_from_tar_file(b_temp_tar_path, 'MANIFEST.json')
+
+ # Use the manifest to verify the file manifest checksum
+ file_manifest_data = manifest['file_manifest_file']
+ file_manifest_filename = file_manifest_data['name']
+ expected_hash = file_manifest_data['chksum_%s' % file_manifest_data['chksum_type']]
+
+ # Verify the file manifest before using it to verify individual files
+ self._verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content)
+ file_manifest = _get_json_from_tar_file(b_temp_tar_path, file_manifest_filename)
+
+ # Use the file manifest to verify individual file checksums
+ for manifest_data in file_manifest['files']:
+ if manifest_data['ftype'] == 'file':
+ expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']]
+ self._verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content)
+
+ if modified_content:
+ display.display("Collection %s contains modified content in the following files:" % to_text(self))
+ display.display(to_text(self))
+ display.vvv(to_text(self.b_path))
+ for content_change in modified_content:
+ display.display(' %s' % content_change.filename)
+ display.vvv(" Expected: %s\n Found: %s" % (content_change.expected, content_change.installed))
+ else:
+ display.vvv("Successfully verified that checksums for '%s:%s' match the remote collection" % (to_text(self), self.latest_version))
+
+ def _verify_file_hash(self, b_path, filename, expected_hash, error_queue):
+ b_file_path = to_bytes(os.path.join(to_text(b_path), filename), errors='surrogate_or_strict')
+
+ if not os.path.isfile(b_file_path):
+ actual_hash = None
+ else:
+ with open(b_file_path, mode='rb') as file_object:
+ actual_hash = _consume_file(file_object)
+
+ if expected_hash != actual_hash:
+ error_queue.append(ModifiedContent(filename=filename, expected=expected_hash, installed=actual_hash))
+
+ def _get_metadata(self):
+ if self._metadata:
+ return
+ self._metadata = self.api.get_collection_version_metadata(self.namespace, self.name, self.latest_version)
+
+ def _meets_requirements(self, version, requirements, parent):
+ """
+ Supported version identifiers are '==', '!=', '>', '>=', '<', '<=', and '*'. Multiple requirements are delimited by ','.
+ """
+ op_map = {
+ '!=': operator.ne,
+ '==': operator.eq,
+ '=': operator.eq,
+ '>=': operator.ge,
+ '>': operator.gt,
+ '<=': operator.le,
+ '<': operator.lt,
+ }
+
+ for req in list(requirements.split(',')):
+ op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
+ op = op_map.get(req[:op_pos])
+
+ requirement = req[op_pos:]
+ if not op:
+ requirement = req
+ op = operator.eq
+
+ # In the case we are checking a new requirement on a base requirement (parent != None) we can't accept
+ # version as '*' (unknown version) unless the requirement is also '*'.
+ if parent and version == '*' and requirement != '*':
+ display.warning("Failed to validate the collection requirement '%s:%s' for %s when the existing "
+ "install does not have a version set, the collection may not work."
+ % (to_text(self), req, parent))
+ continue
+ elif requirement == '*' or version == '*':
+ continue
+
+ if not op(SemanticVersion(version), SemanticVersion.from_loose_version(LooseVersion(requirement))):
+ break
+ else:
+ return True
+
+ # The loop was broken early, it does not meet all the requirements
+ return False
+
+ @staticmethod
+ def from_tar(b_path, force, parent=None):
+ if not tarfile.is_tarfile(b_path):
+ raise AnsibleError("Collection artifact at '%s' is not a valid tar file." % to_native(b_path))
+
+ info = {}
+ with tarfile.open(b_path, mode='r') as collection_tar:
+ for b_member_name, property_name in CollectionRequirement._FILE_MAPPING:
+ n_member_name = to_native(b_member_name)
+ try:
+ member = collection_tar.getmember(n_member_name)
+ except KeyError:
+ raise AnsibleError("Collection at '%s' does not contain the required file %s."
+ % (to_native(b_path), n_member_name))
+
+ with _tarfile_extract(collection_tar, member) as (dummy, member_obj):
+ try:
+ info[property_name] = json.loads(to_text(member_obj.read(), errors='surrogate_or_strict'))
+ except ValueError:
+ raise AnsibleError("Collection tar file member %s does not contain a valid json string."
+ % n_member_name)
+
+ meta = info['manifest_file']['collection_info']
+ files = info['files_file']['files']
+
+ namespace = meta['namespace']
+ name = meta['name']
+ version = meta['version']
+ meta = CollectionVersionMetadata(namespace, name, version, None, None, meta['dependencies'])
+
+ if SemanticVersion(version).is_prerelease:
+ allow_pre_release = True
+ else:
+ allow_pre_release = False
+
+ return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
+ metadata=meta, files=files, allow_pre_releases=allow_pre_release)
+
+ @staticmethod
+ def from_path(b_path, force, parent=None, fallback_metadata=False, skip=True):
+ info = CollectionRequirement.collection_info(b_path, fallback_metadata)
+
+ allow_pre_release = False
+ if 'manifest_file' in info:
+ manifest = info['manifest_file']['collection_info']
+ namespace = manifest['namespace']
+ name = manifest['name']
+ version = to_text(manifest['version'], errors='surrogate_or_strict')
+
+ try:
+ _v = SemanticVersion()
+ _v.parse(version)
+ if _v.is_prerelease:
+ allow_pre_release = True
+ except ValueError:
+ display.warning("Collection at '%s' does not have a valid version set, falling back to '*'. Found "
+ "version: '%s'" % (to_text(b_path), version))
+ version = '*'
+
+ dependencies = manifest['dependencies']
+ else:
+ if fallback_metadata:
+ warning = "Collection at '%s' does not have a galaxy.yml or a MANIFEST.json file, cannot detect version."
+ else:
+ warning = "Collection at '%s' does not have a MANIFEST.json file, cannot detect version."
+ display.warning(warning % to_text(b_path))
+ parent_dir, name = os.path.split(to_text(b_path, errors='surrogate_or_strict'))
+ namespace = os.path.split(parent_dir)[1]
+
+ version = '*'
+ dependencies = {}
+
+ meta = CollectionVersionMetadata(namespace, name, version, None, None, dependencies)
+
+ files = info.get('files_file', {}).get('files', {})
+
+ return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
+ metadata=meta, files=files, skip=skip, allow_pre_releases=allow_pre_release)
+
+ @staticmethod
+ def from_name(collection, apis, requirement, force, parent=None, allow_pre_release=False):
+ namespace, name = collection.split('.', 1)
+ galaxy_meta = None
+
+ for api in apis:
+ try:
+ if not (requirement == '*' or requirement.startswith('<') or requirement.startswith('>') or
+ requirement.startswith('!=')):
+ # Exact requirement
+ allow_pre_release = True
+
+ if requirement.startswith('='):
+ requirement = requirement.lstrip('=')
+
+ resp = api.get_collection_version_metadata(namespace, name, requirement)
+
+ galaxy_meta = resp
+ versions = [resp.version]
+ else:
+ versions = api.get_collection_versions(namespace, name)
+ except GalaxyError as err:
+ if err.http_code != 404:
+ raise
+
+ versions = []
+
+ # Automation Hub doesn't return a 404 but an empty version list so we check that to align both AH and
+ # Galaxy when the collection is not available on that server.
+ if not versions:
+ display.vvv("Collection '%s' is not available from server %s %s" % (collection, api.name,
+ api.api_server))
+ continue
+
+ display.vvv("Collection '%s' obtained from server %s %s" % (collection, api.name, api.api_server))
+ break
+ else:
+ raise AnsibleError("Failed to find collection %s:%s" % (collection, requirement))
+
+ req = CollectionRequirement(namespace, name, None, api, versions, requirement, force, parent=parent,
+ metadata=galaxy_meta, allow_pre_releases=allow_pre_release)
+ return req
+
+
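
The core of the dependency handling in the class above is _meets_requirements: every comma-separated clause must hold, '=' is accepted as an alias for '==', and '*' on either side short-circuits the clause. A standalone sketch of that rule, not part of the patch, using plain integer tuples in place of ansible's SemanticVersion for brevity:

    import operator

    OPS = {'!=': operator.ne, '==': operator.eq, '=': operator.eq,
           '>=': operator.ge, '>': operator.gt, '<=': operator.le, '<': operator.lt}

    def meets(version, requirements):
        key = lambda v: tuple(int(p) for p in v.split('.'))  # no pre-release handling in this sketch
        for req in requirements.split(','):
            op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
            op = OPS.get(req[:op_pos])
            target = req[op_pos:] if op else req  # bare version means exact match
            op = op or operator.eq
            if version == '*' or target == '*':
                continue  # unknown version or open requirement: accept the clause
            if not op(key(version), key(target)):
                return False
        return True

    print(meets('1.2.3', '>=1.0.0,<2.0.0'))  # True
    print(meets('2.1.0', '>=1.0.0,<2.0.0'))  # False
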
+def build_collection(collection_path, output_path, force):
+ """Creates the Ansible collection artifact in a .tar.gz file.
+
+ :param collection_path: The path to the collection to build. This should be the directory that contains the
+ galaxy.yml file.
+ :param output_path: The path to create the collection build artifact. This should be a directory.
+ :param force: Whether to overwrite an existing collection build artifact or fail.
+ :return: The path to the collection build artifact.
+ """
+ b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
+ b_galaxy_path = get_galaxy_metadata_path(b_collection_path)
+ if not os.path.exists(b_galaxy_path):
+ raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))
+
+ info = CollectionRequirement.galaxy_metadata(b_collection_path)
+
+ collection_manifest = info['manifest_file']
+ collection_meta = collection_manifest['collection_info']
+ file_manifest = info['files_file']
+
+ collection_output = os.path.join(output_path, "%s-%s-%s.tar.gz" % (collection_meta['namespace'],
+ collection_meta['name'],
+ collection_meta['version']))
+
+ b_collection_output = to_bytes(collection_output, errors='surrogate_or_strict')
+ if os.path.exists(b_collection_output):
+ if os.path.isdir(b_collection_output):
+ raise AnsibleError("The output collection artifact '%s' already exists, "
+ "but is a directory - aborting" % to_native(collection_output))
+ elif not force:
+ raise AnsibleError("The file '%s' already exists. You can use --force to re-create "
+ "the collection artifact." % to_native(collection_output))
+
+ _build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)
+ return collection_output
+
+
+def download_collections(collections, output_path, apis, validate_certs, no_deps, allow_pre_release):
+ """Download Ansible collections as their tarball from a Galaxy server to the path specified and creates a requirements
+ file of the downloaded requirements to be used for an install.
+
+ :param collections: The collections to download, should be a list of tuples with (name, requirement, Galaxy Server).
+ :param output_path: The path to download the collections to.
+ :param apis: A list of GalaxyAPIs to query when searching for a collection.
+ :param validate_certs: Whether to validate the certificate if downloading a tarball from a non-Galaxy host.
+ :param no_deps: Ignore any collection dependencies and only download the base requirements.
+ :param allow_pre_release: Do not ignore pre-release versions when selecting the latest.
+ """
+ with _tempdir() as b_temp_path:
+ display.display("Process install dependency map")
+ with _display_progress():
+ dep_map = _build_dependency_map(collections, [], b_temp_path, apis, validate_certs, True, True, no_deps,
+ allow_pre_release=allow_pre_release)
+
+ requirements = []
+ display.display("Starting collection download process to '%s'" % output_path)
+ with _display_progress():
+ for name, requirement in dep_map.items():
+ collection_filename = "%s-%s-%s.tar.gz" % (requirement.namespace, requirement.name,
+ requirement.latest_version)
+ dest_path = os.path.join(output_path, collection_filename)
+ requirements.append({'name': collection_filename, 'version': requirement.latest_version})
+
+ display.display("Downloading collection '%s' to '%s'" % (name, dest_path))
+
+ if requirement.api is None and requirement.b_path and os.path.isfile(requirement.b_path):
+ shutil.copy(requirement.b_path, to_bytes(dest_path, errors='surrogate_or_strict'))
+ elif requirement.api is None and requirement.b_path:
+ temp_path = to_text(b_temp_path, errors='surrogate_or_strict')
+ temp_download_path = build_collection(requirement.b_path, temp_path, True)
+ shutil.move(to_bytes(temp_download_path, errors='surrogate_or_strict'),
+ to_bytes(dest_path, errors='surrogate_or_strict'))
+ else:
+ b_temp_download_path = requirement.download(b_temp_path)
+ shutil.move(b_temp_download_path, to_bytes(dest_path, errors='surrogate_or_strict'))
+
+ display.display("%s (%s) was downloaded successfully" % (name, requirement.latest_version))
+
+ requirements_path = os.path.join(output_path, 'requirements.yml')
+ display.display("Writing requirements.yml file of downloaded collections to '%s'" % requirements_path)
+ with open(to_bytes(requirements_path, errors='surrogate_or_strict'), mode='wb') as req_fd:
+ req_fd.write(to_bytes(yaml.safe_dump({'collections': requirements}), errors='surrogate_or_strict'))
+
+
+def publish_collection(collection_path, api, wait, timeout):
+ """Publish an Ansible collection tarball into an Ansible Galaxy server.
+
+ :param collection_path: The path to the collection tarball to publish.
+ :param api: A GalaxyAPI to publish the collection to.
+ :param wait: Whether to wait until the import process is complete.
+ :param timeout: The time in seconds to wait for the import process to finish, 0 is indefinite.
+ """
+ import_uri = api.publish_collection(collection_path)
+
+ if wait:
+ # Galaxy returns a url fragment which differs between v2 and v3. The second to last entry is
+ # always the task_id, though.
+ # v2: {"task": "https://galaxy-dev.ansible.com/api/v2/collection-imports/35573/"}
+ # v3: {"task": "/api/automation-hub/v3/imports/collections/838d1308-a8f4-402c-95cb-7823f3806cd8/"}
+ task_id = None
+ for path_segment in reversed(import_uri.split('/')):
+ if path_segment:
+ task_id = path_segment
+ break
+
+ if not task_id:
+ raise AnsibleError("Publishing the collection did not return valid task info. Cannot wait for task status. Returned task info: '%s'" % import_uri)
+
+ display.display("Collection has been published to the Galaxy server %s %s" % (api.name, api.api_server))
+ with _display_progress():
+ api.wait_import_task(task_id, timeout)
+ display.display("Collection has been successfully published and imported to the Galaxy server %s %s"
+ % (api.name, api.api_server))
+ else:
+ display.display("Collection has been pushed to the Galaxy server %s %s, not waiting until import has "
+ "completed due to --no-wait being set. Import task results can be found at %s"
+ % (api.name, api.api_server, import_uri))
+
+
+def install_collections(collections, output_path, apis, validate_certs, ignore_errors, no_deps, force, force_deps,
+ allow_pre_release=False):
+ """Install Ansible collections to the path specified.
+
+ :param collections: The collections to install, should be a list of tuples with (name, requirement, Galaxy server).
+ :param output_path: The path to install the collections to.
+ :param apis: A list of GalaxyAPIs to query when searching for a collection.
+ :param validate_certs: Whether to validate the certificates if downloading a tarball.
+ :param ignore_errors: Whether to ignore any errors when installing the collection.
+ :param no_deps: Ignore any collection dependencies and only install the base requirements.
+ :param force: Re-install a collection if it has already been installed.
+ :param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
+ """
+ existing_collections = find_existing_collections(output_path, fallback_metadata=True)
+
+ with _tempdir() as b_temp_path:
+ display.display("Process install dependency map")
+ with _display_progress():
+ dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, apis,
+ validate_certs, force, force_deps, no_deps,
+ allow_pre_release=allow_pre_release)
+
+ display.display("Starting collection install process")
+ with _display_progress():
+ for collection in dependency_map.values():
+ try:
+ collection.install(output_path, b_temp_path)
+ except AnsibleError as err:
+ if ignore_errors:
+ display.warning("Failed to install collection %s but skipping due to --ignore-errors being set. "
+ "Error: %s" % (to_text(collection), to_text(err)))
+ else:
+ raise
+
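+# Illustrative usage sketch (hypothetical values): install a collection and its dependencies, assuming
+# `apis` is a list of configured GalaxyAPI instances and the output path already exists.
+#
+#     install_collections([('my_ns.my_col', '*', None, None)], '/path/to/ansible_collections', apis,
+#                         validate_certs=True, ignore_errors=False, no_deps=False, force=False, force_deps=False)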
+
+def validate_collection_name(name):
+ """Validate that a collection name, supplied by the user or a requirements file, fits the required format.
+
+ :param name: The input name with optional range specifier split by ':'.
+ :return: The input value, required for argparse validation.
+ """
+ collection, dummy, dummy = name.partition(':')
+ if AnsibleCollectionRef.is_valid_collection_name(collection):
+ return name
+
+ raise AnsibleError("Invalid collection name '%s', "
+ "name must be in the format <namespace>.<collection>. \n"
+ "Please make sure the namespace and collection name contain "
+ "characters from [a-zA-Z0-9_] only." % name)
+
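+# Illustrative examples: 'my_ns.my_col' and 'my_ns.my_col:>=1.0.0' pass validation, while a bare
+# 'my_col' (no namespace) raises AnsibleError.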
+
+def validate_collection_path(collection_path):
+ """Ensure a given path ends with 'ansible_collections'.
+
+ :param collection_path: The path that should end in 'ansible_collections'.
+ :return: The collection_path with 'ansible_collections' appended if it does not already end with it.
+ """
+
+ if os.path.split(collection_path)[1] != 'ansible_collections':
+ return os.path.join(collection_path, 'ansible_collections')
+
+ return collection_path
+
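+# For example, '/usr/share/ansible/collections' becomes '/usr/share/ansible/collections/ansible_collections',
+# while a path already ending in 'ansible_collections' is returned unchanged.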
+
+def verify_collections(collections, search_paths, apis, validate_certs, ignore_errors, allow_pre_release=False):
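+ """Verify locally installed collections against the corresponding collection on a Galaxy server.
+
+ For each requested collection, the locally installed copy is located in search_paths, the matching
+ artifact is downloaded from a Galaxy server, and CollectionRequirement.verify compares the two.
+ """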
+
+ with _display_progress():
+ with _tempdir() as b_temp_path:
+ for collection in collections:
+ try:
+
+ local_collection = None
+ b_collection = to_bytes(collection[0], errors='surrogate_or_strict')
+
+ if os.path.isfile(b_collection) or urlparse(collection[0]).scheme.lower() in ['http', 'https'] or len(collection[0].split('.')) != 2:
+ raise AnsibleError(message="'%s' is not a valid collection name. The format namespace.name is expected." % collection[0])
+
+ collection_name = collection[0]
+ namespace, name = collection_name.split('.')
+ collection_version = collection[1]
+
+ # Verify local collection exists before downloading it from a galaxy server
+ for search_path in search_paths:
+ b_search_path = to_bytes(os.path.join(search_path, namespace, name), errors='surrogate_or_strict')
+ if os.path.isdir(b_search_path):
+ if not os.path.isfile(os.path.join(to_text(b_search_path, errors='surrogate_or_strict'), 'MANIFEST.json')):
+ raise AnsibleError(
+ message="Collection %s does not appear to have a MANIFEST.json. " % collection_name +
+ "A MANIFEST.json is expected if the collection has been built and installed via ansible-galaxy."
+ )
+ local_collection = CollectionRequirement.from_path(b_search_path, False)
+ break
+ if local_collection is None:
+ raise AnsibleError(message='Collection %s is not installed in any of the collection paths.' % collection_name)
+
+ # Download collection on a galaxy server for comparison
+ try:
+ remote_collection = CollectionRequirement.from_name(collection_name, apis, collection_version, False, parent=None,
+ allow_pre_release=allow_pre_release)
+ except AnsibleError as e:
+ if e.message == 'Failed to find collection %s:%s' % (collection[0], collection[1]):
+ raise AnsibleError('Failed to find remote collection %s:%s on any of the galaxy servers' % (collection[0], collection[1]))
+ raise
+
+ download_url = remote_collection.metadata.download_url
+ headers = {}
+ remote_collection.api._add_auth_token(headers, download_url, required=False)
+ b_temp_tar_path = _download_file(download_url, b_temp_path, None, validate_certs, headers=headers)
+
+ local_collection.verify(remote_collection, search_path, b_temp_tar_path)
+
+ except AnsibleError as err:
+ if ignore_errors:
+ display.warning("Failed to verify collection %s but skipping due to --ignore-errors being set. "
+ "Error: %s" % (collection[0], to_text(err)))
+ else:
+ raise
+
+
+@contextmanager
+def _tempdir():
+ b_temp_path = tempfile.mkdtemp(dir=to_bytes(C.DEFAULT_LOCAL_TMP, errors='surrogate_or_strict'))
+ yield b_temp_path
+ shutil.rmtree(b_temp_path)
+
+
+@contextmanager
+def _tarfile_extract(tar, member):
+ tar_obj = tar.extractfile(member)
+ yield member, tar_obj
+ tar_obj.close()
+
+
+@contextmanager
+def _display_progress():
+ config_display = C.GALAXY_DISPLAY_PROGRESS
+ display_wheel = sys.stdout.isatty() if config_display is None else config_display
+
+ if not display_wheel:
+ yield
+ return
+
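+ # The worker below animates a spinner on stdout while draining display calls that the main thread
+ # queues up through the temporary DisplayThread shim installed further down.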
+ def progress(display_queue, actual_display):
+ actual_display.debug("Starting display_progress display thread")
+ t = threading.current_thread()
+
+ while True:
+ for c in "|/-\\":
+ actual_display.display(c + "\b", newline=False)
+ time.sleep(0.1)
+
+ # Drain and display any messages queued by the main thread
+ while True:
+ try:
+ method, args, kwargs = display_queue.get(block=False, timeout=0.1)
+ except queue.Empty:
+ break
+ else:
+ func = getattr(actual_display, method)
+ func(*args, **kwargs)
+
+ if getattr(t, "finish", False):
+ actual_display.debug("Received end signal for display_progress display thread")
+ return
+
+ class DisplayThread(object):
+
+ def __init__(self, display_queue):
+ self.display_queue = display_queue
+
+ def __getattr__(self, attr):
+ def call_display(*args, **kwargs):
+ self.display_queue.put((attr, args, kwargs))
+
+ return call_display
+
+ # Temporarily override the global display object with our own, which adds the display calls to a queue for the thread to process.
+ global display
+ old_display = display
+ try:
+ display_queue = queue.Queue()
+ display = DisplayThread(display_queue)
+ t = threading.Thread(target=progress, args=(display_queue, old_display))
+ t.daemon = True
+ t.start()
+
+ try:
+ yield
+ finally:
+ t.finish = True
+ t.join()
+ except Exception:
+ # The exception is re-raised so we can be sure the thread has finished and is no longer using the display
+ raise
+ finally:
+ display = old_display
+
+
+def _get_galaxy_yml(b_galaxy_yml_path):
+ meta_info = get_collections_galaxy_meta_info()
+
+ mandatory_keys = set()
+ string_keys = set()
+ list_keys = set()
+ dict_keys = set()
+
+ for info in meta_info:
+ if info.get('required', False):
+ mandatory_keys.add(info['key'])
+
+ key_list_type = {
+ 'str': string_keys,
+ 'list': list_keys,
+ 'dict': dict_keys,
+ }[info.get('type', 'str')]
+ key_list_type.add(info['key'])
+
+ all_keys = frozenset(list(mandatory_keys) + list(string_keys) + list(list_keys) + list(dict_keys))
+
+ try:
+ with open(b_galaxy_yml_path, 'rb') as g_yaml:
+ galaxy_yml = yaml.safe_load(g_yaml)
+ except YAMLError as err:
+ raise AnsibleError("Failed to parse the galaxy.yml at '%s' with the following error:\n%s"
+ % (to_native(b_galaxy_yml_path), to_native(err)))
+
+ set_keys = set(galaxy_yml.keys())
+ missing_keys = mandatory_keys.difference(set_keys)
+ if missing_keys:
+ raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
+ % (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))
+
+ extra_keys = set_keys.difference(all_keys)
+ if len(extra_keys) > 0:
+ display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
+ % (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))
+
+ # Add the defaults if they have not been set
+ for optional_string in string_keys:
+ if optional_string not in galaxy_yml:
+ galaxy_yml[optional_string] = None
+
+ for optional_list in list_keys:
+ list_val = galaxy_yml.get(optional_list, None)
+
+ if list_val is None:
+ galaxy_yml[optional_list] = []
+ elif not isinstance(list_val, list):
+ galaxy_yml[optional_list] = [list_val]
+
+ for optional_dict in dict_keys:
+ if optional_dict not in galaxy_yml:
+ galaxy_yml[optional_dict] = {}
+
+ # 'license' is a builtin name in Python; to avoid confusion we rename the key to license_ids
+ galaxy_yml['license_ids'] = galaxy_yml['license']
+ del galaxy_yml['license']
+
+ return galaxy_yml
+
+
+def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns):
+ # We always ignore .pyc and .retry files as well as some well-known version control directories. The ignore
+ # patterns can be extended by the build_ignore key in galaxy.yml.
+ b_ignore_patterns = [
+ b'galaxy.yml',
+ b'galaxy.yaml',
+ b'.git',
+ b'*.pyc',
+ b'*.retry',
+ b'tests/output', # Ignore ansible-test result output directory.
+ to_bytes('{0}-{1}-*.tar.gz'.format(namespace, name)), # Ignores previously built artifacts in the root dir.
+ ]
+ b_ignore_patterns += [to_bytes(p) for p in ignore_patterns]
+ b_ignore_dirs = frozenset([b'CVS', b'.bzr', b'.hg', b'.git', b'.svn', b'__pycache__', b'.tox'])
+
+ entry_template = {
+ 'name': None,
+ 'ftype': None,
+ 'chksum_type': None,
+ 'chksum_sha256': None,
+ 'format': MANIFEST_FORMAT
+ }
+ manifest = {
+ 'files': [
+ {
+ 'name': '.',
+ 'ftype': 'dir',
+ 'chksum_type': None,
+ 'chksum_sha256': None,
+ 'format': MANIFEST_FORMAT,
+ },
+ ],
+ 'format': MANIFEST_FORMAT,
+ }
+
+ def _walk(b_path, b_top_level_dir):
+ for b_item in os.listdir(b_path):
+ b_abs_path = os.path.join(b_path, b_item)
+ b_rel_base_dir = b'' if b_path == b_top_level_dir else b_path[len(b_top_level_dir) + 1:]
+ b_rel_path = os.path.join(b_rel_base_dir, b_item)
+ rel_path = to_text(b_rel_path, errors='surrogate_or_strict')
+
+ if os.path.isdir(b_abs_path):
+ if any(b_item == b_path for b_path in b_ignore_dirs) or \
+ any(fnmatch.fnmatch(b_rel_path, b_pattern) for b_pattern in b_ignore_patterns):
+ display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
+ continue
+
+ if os.path.islink(b_abs_path):
+ b_link_target = os.path.realpath(b_abs_path)
+
+ if not _is_child_path(b_link_target, b_top_level_dir):
+ display.warning("Skipping '%s' as it is a symbolic link to a directory outside the collection"
+ % to_text(b_abs_path))
+ continue
+
+ manifest_entry = entry_template.copy()
+ manifest_entry['name'] = rel_path
+ manifest_entry['ftype'] = 'dir'
+
+ manifest['files'].append(manifest_entry)
+
+ if not os.path.islink(b_abs_path):
+ _walk(b_abs_path, b_top_level_dir)
+ else:
+ if any(fnmatch.fnmatch(b_rel_path, b_pattern) for b_pattern in b_ignore_patterns):
+ display.vvv("Skipping '%s' for collection build" % to_text(b_abs_path))
+ continue
+
+ # File symlinks are handled in _build_collection_tar; the manifest entry for a symlink is the same as for
+ # a normal file.
+ manifest_entry = entry_template.copy()
+ manifest_entry['name'] = rel_path
+ manifest_entry['ftype'] = 'file'
+ manifest_entry['chksum_type'] = 'sha256'
+ manifest_entry['chksum_sha256'] = secure_hash(b_abs_path, hash_func=sha256)
+
+ manifest['files'].append(manifest_entry)
+
+ _walk(b_collection_path, b_collection_path)
+
+ return manifest
+
+
+def _build_manifest(namespace, name, version, authors, readme, tags, description, license_ids, license_file,
+ dependencies, repository, documentation, homepage, issues, **kwargs):
+
+ manifest = {
+ 'collection_info': {
+ 'namespace': namespace,
+ 'name': name,
+ 'version': version,
+ 'authors': authors,
+ 'readme': readme,
+ 'tags': tags,
+ 'description': description,
+ 'license': license_ids,
+ 'license_file': license_file if license_file else None, # Handle galaxy.yml having an empty string (None)
+ 'dependencies': dependencies,
+ 'repository': repository,
+ 'documentation': documentation,
+ 'homepage': homepage,
+ 'issues': issues,
+ },
+ 'file_manifest_file': {
+ 'name': 'FILES.json',
+ 'ftype': 'file',
+ 'chksum_type': 'sha256',
+ 'chksum_sha256': None, # Filled out in _build_collection_tar
+ 'format': MANIFEST_FORMAT
+ },
+ 'format': MANIFEST_FORMAT,
+ }
+
+ return manifest
+
+
+def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, file_manifest):
+ """Build a tar.gz collection artifact from the manifest data."""
+ files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
+ collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
+ collection_manifest_json = to_bytes(json.dumps(collection_manifest, indent=True), errors='surrogate_or_strict')
+
+ with _tempdir() as b_temp_path:
+ b_tar_filepath = os.path.join(b_temp_path, os.path.basename(b_tar_path))
+
+ with tarfile.open(b_tar_filepath, mode='w:gz') as tar_file:
+ # Add the MANIFEST.json and FILES.json file to the archive
+ for name, b in [('MANIFEST.json', collection_manifest_json), ('FILES.json', files_manifest_json)]:
+ b_io = BytesIO(b)
+ tar_info = tarfile.TarInfo(name)
+ tar_info.size = len(b)
+ tar_info.mtime = time.time()
+ tar_info.mode = 0o0644
+ tar_file.addfile(tarinfo=tar_info, fileobj=b_io)
+
+ for file_info in file_manifest['files']:
+ if file_info['name'] == '.':
+ continue
+
+ # arcname expects a native string, cannot be bytes
+ filename = to_native(file_info['name'], errors='surrogate_or_strict')
+ b_src_path = os.path.join(b_collection_path, to_bytes(filename, errors='surrogate_or_strict'))
+
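+ # Normalise ownership and permissions for entries added to the archive: files become 0o644
+ # (0o755 if executable or a directory) and uid/gid/uname/gname are cleared.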
+ def reset_stat(tarinfo):
+ if tarinfo.type != tarfile.SYMTYPE:
+ existing_is_exec = tarinfo.mode & stat.S_IXUSR
+ tarinfo.mode = 0o0755 if existing_is_exec or tarinfo.isdir() else 0o0644
+ tarinfo.uid = tarinfo.gid = 0
+ tarinfo.uname = tarinfo.gname = ''
+
+ return tarinfo
+
+ if os.path.islink(b_src_path):
+ b_link_target = os.path.realpath(b_src_path)
+ if _is_child_path(b_link_target, b_collection_path):
+ b_rel_path = os.path.relpath(b_link_target, start=os.path.dirname(b_src_path))
+
+ tar_info = tarfile.TarInfo(filename)
+ tar_info.type = tarfile.SYMTYPE
+ tar_info.linkname = to_native(b_rel_path, errors='surrogate_or_strict')
+ tar_info = reset_stat(tar_info)
+ tar_file.addfile(tarinfo=tar_info)
+
+ continue
+
+ # Dealing with a normal file, just add it by name.
+ tar_file.add(os.path.realpath(b_src_path), arcname=filename, recursive=False, filter=reset_stat)
+
+ shutil.copy(b_tar_filepath, b_tar_path)
+ collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
+ collection_manifest['collection_info']['name'])
+ display.display('Created collection for %s at %s' % (collection_name, to_text(b_tar_path)))
+
+
+def _build_collection_dir(b_collection_path, b_collection_output, collection_manifest, file_manifest):
+ """Build a collection directory from the manifest data.
+
+ This should follow the same pattern as _build_collection_tar.
+ """
+ os.makedirs(b_collection_output, mode=0o0755)
+
+ files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
+ collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
+ collection_manifest_json = to_bytes(json.dumps(collection_manifest, indent=True), errors='surrogate_or_strict')
+
+ # Write contents to the files
+ for name, b in [('MANIFEST.json', collection_manifest_json), ('FILES.json', files_manifest_json)]:
+ b_path = os.path.join(b_collection_output, to_bytes(name, errors='surrogate_or_strict'))
+ with open(b_path, 'wb') as file_obj, BytesIO(b) as b_io:
+ shutil.copyfileobj(b_io, file_obj)
+
+ os.chmod(b_path, 0o0644)
+
+ base_directories = []
+ for file_info in file_manifest['files']:
+ if file_info['name'] == '.':
+ continue
+
+ src_file = os.path.join(b_collection_path, to_bytes(file_info['name'], errors='surrogate_or_strict'))
+ dest_file = os.path.join(b_collection_output, to_bytes(file_info['name'], errors='surrogate_or_strict'))
+
+ if any(src_file.startswith(directory) for directory in base_directories):
+ continue
+
+ existing_is_exec = os.stat(src_file).st_mode & stat.S_IXUSR
+ mode = 0o0755 if existing_is_exec else 0o0644
+
+ if os.path.isdir(src_file):
+ mode = 0o0755
+ base_directories.append(src_file)
+ shutil.copytree(src_file, dest_file)
+ else:
+ shutil.copyfile(src_file, dest_file)
+
+ os.chmod(dest_file, mode)
+
+
+def find_existing_collections(path, fallback_metadata=False):
+ collections = []
+
+ b_path = to_bytes(path, errors='surrogate_or_strict')
+ for b_namespace in os.listdir(b_path):
+ b_namespace_path = os.path.join(b_path, b_namespace)
+ if os.path.isfile(b_namespace_path):
+ continue
+
+ for b_collection in os.listdir(b_namespace_path):
+ b_collection_path = os.path.join(b_namespace_path, b_collection)
+ if os.path.isdir(b_collection_path):
+ req = CollectionRequirement.from_path(b_collection_path, False, fallback_metadata=fallback_metadata)
+ display.vvv("Found installed collection %s:%s at '%s'" % (to_text(req), req.latest_version,
+ to_text(b_collection_path)))
+ collections.append(req)
+
+ return collections
+
+
+def _build_dependency_map(collections, existing_collections, b_temp_path, apis, validate_certs, force, force_deps,
+ no_deps, allow_pre_release=False):
+ dependency_map = {}
+
+ # First build the dependency map on the actual requirements
+ for name, version, source, req_type in collections:
+ _get_collection_info(dependency_map, existing_collections, name, version, source, b_temp_path, apis,
+ validate_certs, (force or force_deps), allow_pre_release=allow_pre_release, req_type=req_type)
+
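+ # Iterate until every entry in the dependency map has had its dependencies examined. Each pass adds
+ # any newly discovered dependencies to the map, then selects the latest matching version for entries
+ # whose dependencies are exhausted.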
+ checked_parents = set([to_text(c) for c in dependency_map.values() if c.skip])
+ while len(dependency_map) != len(checked_parents):
+ while not no_deps: # Only parse dependencies if no_deps was not set
+ parents_to_check = set(dependency_map.keys()).difference(checked_parents)
+
+ deps_exhausted = True
+ for parent in parents_to_check:
+ parent_info = dependency_map[parent]
+
+ if parent_info.dependencies:
+ deps_exhausted = False
+ for dep_name, dep_requirement in parent_info.dependencies.items():
+ _get_collection_info(dependency_map, existing_collections, dep_name, dep_requirement,
+ None, b_temp_path, apis, validate_certs, force_deps,
+ parent=parent, allow_pre_release=allow_pre_release)
+
+ checked_parents.add(parent)
+
+ # No extra dependencies were resolved, exit loop
+ if deps_exhausted:
+ break
+
+ # Now we have resolved the deps to our best extent, now select the latest version for collections with
+ # multiple versions found and go from there
+ deps_not_checked = set(dependency_map.keys()).difference(checked_parents)
+ for collection in deps_not_checked:
+ dependency_map[collection].set_latest_version()
+ if no_deps or len(dependency_map[collection].dependencies) == 0:
+ checked_parents.add(collection)
+
+ return dependency_map
+
+
+def _collections_from_scm(collection, requirement, b_temp_path, force, parent=None):
+ """Returns a list of collections found in the repo. If there is a galaxy.yml in the collection then just return
+ the specific collection. Otherwise, check each top-level directory for a galaxy.yml.
+
+ :param collection: URI to a git repo
+ :param requirement: The version of the artifact
+ :param b_temp_path: The temporary path to the archive of a collection
+ :param force: Whether to overwrite an existing collection or fail
+ :param parent: The name of the parent collection
+ :raises AnsibleError: if nothing found
+ :return: List of CollectionRequirement objects
+ :rtype: list
+ """
+
+ reqs = []
+ name, version, path, fragment = parse_scm(collection, requirement)
+ b_repo_root = to_bytes(name, errors='surrogate_or_strict')
+
+ b_collection_path = os.path.join(b_temp_path, b_repo_root)
+ if fragment:
+ b_fragment = to_bytes(fragment, errors='surrogate_or_strict')
+ b_collection_path = os.path.join(b_collection_path, b_fragment)
+
+ b_galaxy_path = get_galaxy_metadata_path(b_collection_path)
+
+ err = ("%s appears to be an SCM collection source, but the required galaxy.yml was not found. "
+ "Append #path/to/collection/ to your URI (before the comma-separated version, if one is specified) "
+ "to point to a directory containing the galaxy.yml or directories of collections" % collection)
+
+ display.vvvvv("Considering %s as a possible path to a collection's galaxy.yml" % b_galaxy_path)
+ if os.path.exists(b_galaxy_path):
+ return [CollectionRequirement.from_path(b_collection_path, force, parent, fallback_metadata=True, skip=False)]
+
+ if not os.path.isdir(b_collection_path) or not os.listdir(b_collection_path):
+ raise AnsibleError(err)
+
+ for b_possible_collection in os.listdir(b_collection_path):
+ b_collection = os.path.join(b_collection_path, b_possible_collection)
+ if not os.path.isdir(b_collection):
+ continue
+ b_galaxy = get_galaxy_metadata_path(b_collection)
+ display.vvvvv("Considering %s as a possible path to a collection's galaxy.yml" % b_galaxy)
+ if os.path.exists(b_galaxy):
+ reqs.append(CollectionRequirement.from_path(b_collection, force, parent, fallback_metadata=True, skip=False))
+ if not reqs:
+ raise AnsibleError(err)
+
+ return reqs
+
+
+def _get_collection_info(dep_map, existing_collections, collection, requirement, source, b_temp_path, apis,
+ validate_certs, force, parent=None, allow_pre_release=False, req_type=None):
+ dep_msg = ""
+ if parent:
+ dep_msg = " - as dependency of %s" % parent
+ display.vvv("Processing requirement collection '%s'%s" % (to_text(collection), dep_msg))
+
+ b_tar_path = None
+
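+ # Work out how the requirement was specified: a local tarball, a URL to a tarball, a git repository,
+ # or a plain Galaxy collection name (handled in the else branch below).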
+ is_file = (
+ req_type == 'file' or
+ (not req_type and os.path.isfile(to_bytes(collection, errors='surrogate_or_strict')))
+ )
+
+ is_url = (
+ req_type == 'url' or
+ (not req_type and urlparse(collection).scheme.lower() in ['http', 'https'])
+ )
+
+ is_scm = (
+ req_type == 'git' or
+ (not req_type and not b_tar_path and collection.startswith(('git+', 'git@')))
+ )
+
+ if is_file:
+ display.vvvv("Collection requirement '%s' is a tar artifact" % to_text(collection))
+ b_tar_path = to_bytes(collection, errors='surrogate_or_strict')
+ elif is_url:
+ display.vvvv("Collection requirement '%s' is a URL to a tar artifact" % collection)
+ try:
+ b_tar_path = _download_file(collection, b_temp_path, None, validate_certs)
+ except urllib_error.URLError as err:
+ raise AnsibleError("Failed to download collection tar from '%s': %s"
+ % (to_native(collection), to_native(err)))
+
+ if is_scm:
+ if not collection.startswith('git'):
+ collection = 'git+' + collection
+
+ name, version, path, fragment = parse_scm(collection, requirement)
+ b_tar_path = scm_archive_collection(path, name=name, version=version)
+
+ with tarfile.open(b_tar_path, mode='r') as collection_tar:
+ collection_tar.extractall(path=to_text(b_temp_path))
+
+ # Ignore requirement if it is set (it must follow semantic versioning, unlike a git version, which is any tree-ish)
+ # If the requirement was the only place version was set, requirement == version at this point
+ if requirement not in {"*", ""} and requirement != version:
+ display.warning(
+ "The collection {0} appears to be a git repository and two versions were provided: '{1}', and '{2}'. "
+ "The version {2} is being disregarded.".format(collection, version, requirement)
+ )
+ requirement = "*"
+
+ reqs = _collections_from_scm(collection, requirement, b_temp_path, force, parent)
+ for req in reqs:
+ collection_info = get_collection_info_from_req(dep_map, req)
+ update_dep_map_collection_info(dep_map, existing_collections, collection_info, parent, requirement)
+ else:
+ if b_tar_path:
+ req = CollectionRequirement.from_tar(b_tar_path, force, parent=parent)
+ collection_info = get_collection_info_from_req(dep_map, req)
+ else:
+ validate_collection_name(collection)
+
+ display.vvvv("Collection requirement '%s' is the name of a collection" % collection)
+ if collection in dep_map:
+ collection_info = dep_map[collection]
+ collection_info.add_requirement(parent, requirement)
+ else:
+ apis = [source] if source else apis
+ collection_info = CollectionRequirement.from_name(collection, apis, requirement, force, parent=parent,
+ allow_pre_release=allow_pre_release)
+
+ update_dep_map_collection_info(dep_map, existing_collections, collection_info, parent, requirement)
+
+
+def get_collection_info_from_req(dep_map, collection):
+ collection_name = to_text(collection)
+ if collection_name in dep_map:
+ collection_info = dep_map[collection_name]
+ collection_info.add_requirement(None, collection.latest_version)
+ else:
+ collection_info = collection
+ return collection_info
+
+
+def update_dep_map_collection_info(dep_map, existing_collections, collection_info, parent, requirement):
+ existing = [c for c in existing_collections if to_text(c) == to_text(collection_info)]
+ if existing and not collection_info.force:
+ # Test that the installed collection fits the requirement
+ existing[0].add_requirement(parent, requirement)
+ collection_info = existing[0]
+
+ dep_map[to_text(collection_info)] = collection_info
+
+
+def parse_scm(collection, version):
+ if ',' in collection:
+ collection, version = collection.split(',', 1)
+ elif version == '*' or not version:
+ version = 'HEAD'
+
+ if collection.startswith('git+'):
+ path = collection[4:]
+ else:
+ path = collection
+
+ path, fragment = urldefrag(path)
+ fragment = fragment.strip(os.path.sep)
+
+ if path.endswith(os.path.sep + '.git'):
+ name = path.split(os.path.sep)[-2]
+ elif '://' not in path and '@' not in path:
+ name = path
+ else:
+ name = path.split('/')[-1]
+ if name.endswith('.git'):
+ name = name[:-4]
+
+ return name, version, path, fragment
+
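+# Illustrative example (hypothetical repository): parse_scm('git+https://github.com/org/repo.git#sub/dir,devel', '*')
+# returns ('repo', 'devel', 'https://github.com/org/repo.git', 'sub/dir').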
+
+def _download_file(url, b_path, expected_hash, validate_certs, headers=None):
+ urlsplit = os.path.splitext(to_text(url.rsplit('/', 1)[1]))
+ b_file_name = to_bytes(urlsplit[0], errors='surrogate_or_strict')
+ b_file_ext = to_bytes(urlsplit[1], errors='surrogate_or_strict')
+ b_file_path = tempfile.NamedTemporaryFile(dir=b_path, prefix=b_file_name, suffix=b_file_ext, delete=False).name
+
+ display.display("Downloading %s to %s" % (url, to_text(b_path)))
+ # Galaxy redirects downloads to S3, which rejects the request if an Authorization header is attached, so don't forward that header on the redirect
+ resp = open_url(to_native(url, errors='surrogate_or_strict'), validate_certs=validate_certs, headers=headers,
+ unredirected_headers=['Authorization'], http_agent=user_agent())
+
+ with open(b_file_path, 'wb') as download_file:
+ actual_hash = _consume_file(resp, download_file)
+
+ if expected_hash:
+ display.vvvv("Validating downloaded file hash %s with expected hash %s" % (actual_hash, expected_hash))
+ if expected_hash != actual_hash:
+ raise AnsibleError("Mismatch artifact hash with downloaded file")
+
+ return b_file_path
+
+
+def _extract_tar_dir(tar, dirname, b_dest):
+ """ Extracts a directory from a collection tar. """
+ member_names = [to_native(dirname, errors='surrogate_or_strict')]
+
+ # Create list of members with and without trailing separator
+ if not member_names[-1].endswith(os.path.sep):
+ member_names.append(member_names[-1] + os.path.sep)
+
+ # Try each of the member names and stop at the first one we are able to retrieve successfully
+ for member in member_names:
+ try:
+ tar_member = tar.getmember(member)
+ except KeyError:
+ continue
+ break
+ else:
+ # If we still can't find the member, raise a nice error.
+ raise AnsibleError("Unable to extract '%s' from collection" % to_native(member, errors='surrogate_or_strict'))
+
+ b_dir_path = os.path.join(b_dest, to_bytes(dirname, errors='surrogate_or_strict'))
+
+ b_parent_path = os.path.dirname(b_dir_path)
+ try:
+ os.makedirs(b_parent_path, mode=0o0755)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ if tar_member.type == tarfile.SYMTYPE:
+ b_link_path = to_bytes(tar_member.linkname, errors='surrogate_or_strict')
+ if not _is_child_path(b_link_path, b_dest, link_name=b_dir_path):
+ raise AnsibleError("Cannot extract symlink '%s' in collection: path points to location outside of "
+ "collection '%s'" % (to_native(dirname), b_link_path))
+
+ os.symlink(b_link_path, b_dir_path)
+
+ else:
+ if not os.path.isdir(b_dir_path):
+ os.mkdir(b_dir_path, 0o0755)
+
+
+def _extract_tar_file(tar, filename, b_dest, b_temp_path, expected_hash=None):
+ """ Extracts a file from a collection tar. """
+ with _get_tar_file_member(tar, filename) as (tar_member, tar_obj):
+ if tar_member.type == tarfile.SYMTYPE:
+ actual_hash = _consume_file(tar_obj)
+
+ else:
+ with tempfile.NamedTemporaryFile(dir=b_temp_path, delete=False) as tmpfile_obj:
+ actual_hash = _consume_file(tar_obj, tmpfile_obj)
+
+ if expected_hash and actual_hash != expected_hash:
+ raise AnsibleError("Checksum mismatch for '%s' inside collection at '%s'"
+ % (to_native(filename, errors='surrogate_or_strict'), to_native(tar.name)))
+
+ b_dest_filepath = os.path.abspath(os.path.join(b_dest, to_bytes(filename, errors='surrogate_or_strict')))
+ b_parent_dir = os.path.dirname(b_dest_filepath)
+ if not _is_child_path(b_parent_dir, b_dest):
+ raise AnsibleError("Cannot extract tar entry '%s' as it will be placed outside the collection directory"
+ % to_native(filename, errors='surrogate_or_strict'))
+
+ if not os.path.exists(b_parent_dir):
+ # Galaxy does not appear to validate that every file entry has a corresponding dir ftype entry. This
+ # check makes sure we create the parent directory even if it wasn't listed in the metadata.
+ os.makedirs(b_parent_dir, mode=0o0755)
+
+ if tar_member.type == tarfile.SYMTYPE:
+ b_link_path = to_bytes(tar_member.linkname, errors='surrogate_or_strict')
+ if not _is_child_path(b_link_path, b_dest, link_name=b_dest_filepath):
+ raise AnsibleError("Cannot extract symlink '%s' in collection: path points to location outside of "
+ "collection '%s'" % (to_native(filename), b_link_path))
+
+ os.symlink(b_link_path, b_dest_filepath)
+
+ else:
+ shutil.move(to_bytes(tmpfile_obj.name, errors='surrogate_or_strict'), b_dest_filepath)
+
+ # Default to rw-r--r-- and only add execute if the tar file has execute.
+ tar_member = tar.getmember(to_native(filename, errors='surrogate_or_strict'))
+ new_mode = 0o644
+ if stat.S_IMODE(tar_member.mode) & stat.S_IXUSR:
+ new_mode |= 0o0111
+
+ os.chmod(b_dest_filepath, new_mode)
+
+
+def _get_tar_file_member(tar, filename):
+ n_filename = to_native(filename, errors='surrogate_or_strict')
+ try:
+ member = tar.getmember(n_filename)
+ except KeyError:
+ raise AnsibleError("Collection tar at '%s' does not contain the expected file '%s'." % (
+ to_native(tar.name),
+ n_filename))
+
+ return _tarfile_extract(tar, member)
+
+
+def _get_json_from_tar_file(b_path, filename):
+ file_contents = ''
+
+ with tarfile.open(b_path, mode='r') as collection_tar:
+ with _get_tar_file_member(collection_tar, filename) as (dummy, tar_obj):
+ bufsize = 65536
+ data = tar_obj.read(bufsize)
+ while data:
+ file_contents += to_text(data)
+ data = tar_obj.read(bufsize)
+
+ return json.loads(file_contents)
+
+
+def _get_tar_file_hash(b_path, filename):
+ with tarfile.open(b_path, mode='r') as collection_tar:
+ with _get_tar_file_member(collection_tar, filename) as (dummy, tar_obj):
+ return _consume_file(tar_obj)
+
+
+def _is_child_path(path, parent_path, link_name=None):
+ """ Checks that path is a path within the parent_path specified. """
+ b_path = to_bytes(path, errors='surrogate_or_strict')
+
+ if link_name and not os.path.isabs(b_path):
+ # If link_name is specified, path is the source of the link and we need to resolve the absolute path.
+ b_link_dir = os.path.dirname(to_bytes(link_name, errors='surrogate_or_strict'))
+ b_path = os.path.abspath(os.path.join(b_link_dir, b_path))
+
+ b_parent_path = to_bytes(parent_path, errors='surrogate_or_strict')
+ return b_path == b_parent_path or b_path.startswith(b_parent_path + to_bytes(os.path.sep))
+
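+# Illustrative examples (hypothetical paths): _is_child_path('/coll/roles/x', '/coll') is True, while
+# _is_child_path('/outside/file', '/coll') is False.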
+
+def _consume_file(read_from, write_to=None):
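+ # Read the source in 64 KiB chunks, optionally copying each chunk to write_to, while building the
+ # SHA-256 digest that is returned as a hex string.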
+ bufsize = 65536
+ sha256_digest = sha256()
+ data = read_from.read(bufsize)
+ while data:
+ if write_to is not None:
+ write_to.write(data)
+ write_to.flush()
+ sha256_digest.update(data)
+ data = read_from.read(bufsize)
+
+ return sha256_digest.hexdigest()
+
+
+def get_galaxy_metadata_path(b_path):
+ return os.path.join(b_path, b'galaxy.yml')
diff --git a/lib/ansible/galaxy/data/apb/.travis.yml b/lib/ansible/galaxy/data/apb/.travis.yml
new file mode 100644
index 00000000..44c0ba40
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/.travis.yml
@@ -0,0 +1,25 @@
+---
+services: docker
+sudo: required
+language: python
+python:
+ - '2.7'
+
+env:
+ - OPENSHIFT_VERSION=v3.9.0
+ - KUBERNETES_VERSION=v1.9.0
+
+script:
+ # Configure test values
+ - export apb_name=APB_NAME
+
+ # Download test shim.
+ - wget -O ${PWD}/apb-test.sh https://raw.githubusercontent.com/ansibleplaybookbundle/apb-test-shim/master/apb-test.sh
+ - chmod +x ${PWD}/apb-test.sh
+
+ # Run tests.
+ - ${PWD}/apb-test.sh
+
+# Uncomment to allow travis to notify galaxy
+# notifications:
+# webhooks: https://galaxy.ansible.com/api/v1/notifications/
diff --git a/lib/ansible/galaxy/data/apb/Dockerfile.j2 b/lib/ansible/galaxy/data/apb/Dockerfile.j2
new file mode 100644
index 00000000..4d99a8b0
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/Dockerfile.j2
@@ -0,0 +1,9 @@
+FROM ansibleplaybookbundle/apb-base
+
+LABEL "com.redhat.apb.spec"=\
+""
+
+COPY playbooks /opt/apb/actions
+COPY . /opt/ansible/roles/{{ role_name }}
+RUN chmod -R g=u /opt/{ansible,apb}
+USER apb
diff --git a/lib/ansible/galaxy/data/apb/Makefile.j2 b/lib/ansible/galaxy/data/apb/Makefile.j2
new file mode 100644
index 00000000..ebeaa61f
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/Makefile.j2
@@ -0,0 +1,21 @@
+DOCKERHOST = DOCKERHOST
+DOCKERORG = DOCKERORG
+IMAGENAME = {{ role_name }}
+TAG = latest
+USER=$(shell id -u)
+PWD=$(shell pwd)
+build_and_push: apb_build docker_push apb_push
+
+.PHONY: apb_build
+apb_build:
+ docker run --rm --privileged -v $(PWD):/mnt:z -v $(HOME)/.kube:/.kube -v /var/run/docker.sock:/var/run/docker.sock -u $(USER) docker.io/ansibleplaybookbundle/apb-tools:latest prepare
+ docker build -t $(DOCKERHOST)/$(DOCKERORG)/$(IMAGENAME):$(TAG) .
+
+.PHONY: docker_push
+docker_push:
+ docker push $(DOCKERHOST)/$(DOCKERORG)/$(IMAGENAME):$(TAG)
+
+.PHONY: apb_push
+apb_push:
+ docker run --rm --privileged -v $(PWD):/mnt:z -v $(HOME)/.kube:/.kube -v /var/run/docker.sock:/var/run/docker.sock -u $(USER) docker.io/ansibleplaybookbundle/apb-tools:latest push
+
diff --git a/lib/ansible/galaxy/data/apb/README.md b/lib/ansible/galaxy/data/apb/README.md
new file mode 100644
index 00000000..2e350a03
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/README.md
@@ -0,0 +1,38 @@
+APB Name
+=========
+
+A brief description of the APB goes here.
+
+Requirements
+------------
+
+Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
+
+APB Variables
+--------------
+
+A description of the settable variables for this APB should go here, including any variables that are in defaults/main.yml, vars/main.yml, apb.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (i.e. hostvars, group vars, etc.) should be mentioned here as well.
+
+Dependencies
+------------
+
+A list of other APBs/roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+
+Example Playbook
+----------------
+
+Including an example of how to use your APB (for instance, with variables passed in as parameters) is always nice for users too:
+
+ - hosts: servers
+ roles:
+ - { role: username.rolename, x: 42 }
+
+License
+-------
+
+BSD
+
+Author Information
+------------------
+
+An optional section for the role authors to include contact information, or a website (HTML is not allowed).
diff --git a/lib/ansible/galaxy/data/apb/apb.yml.j2 b/lib/ansible/galaxy/data/apb/apb.yml.j2
new file mode 100644
index 00000000..f9688019
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/apb.yml.j2
@@ -0,0 +1,13 @@
+version: '1.0.0'
+name: {{ role_name }}
+description: {{ description }}
+bindable: False
+async: optional
+metadata:
+ displayName: {{ role_name }}
+plans:
+ - name: default
+ description: This default plan deploys {{ role_name }}
+ free: True
+ metadata: {}
+ parameters: []
diff --git a/lib/ansible/galaxy/data/apb/defaults/main.yml.j2 b/lib/ansible/galaxy/data/apb/defaults/main.yml.j2
new file mode 100644
index 00000000..3818e64c
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/defaults/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# defaults file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/apb/files/.git_keep b/lib/ansible/galaxy/data/apb/files/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/files/.git_keep
diff --git a/lib/ansible/galaxy/data/apb/handlers/main.yml.j2 b/lib/ansible/galaxy/data/apb/handlers/main.yml.j2
new file mode 100644
index 00000000..3f4c4967
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/handlers/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# handlers file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/apb/meta/main.yml.j2 b/lib/ansible/galaxy/data/apb/meta/main.yml.j2
new file mode 100644
index 00000000..862f8ef8
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/meta/main.yml.j2
@@ -0,0 +1,44 @@
+galaxy_info:
+ author: {{ author }}
+ description: {{ description }}
+ company: {{ company }}
+
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: {{ issue_tracker_url }}
+
+ # Choose a valid license ID from https://spdx.org - some suggested licenses:
+ # - BSD-3-Clause (default)
+ # - MIT
+ # - GPL-2.0-or-later
+ # - GPL-3.0-only
+ # - Apache-2.0
+ # - CC-BY-4.0
+ license: {{ license }}
+
+ #
+ # platforms is a list of platforms, and each platform has a name and a list of versions.
+ #
+ # platforms:
+ # - name: Fedora
+ # versions:
+ # - all
+ # - 25
+ # - name: SomePlatform
+ # versions:
+ # - all
+ # - 1.0
+ # - 7
+ # - 99.99
+
+ galaxy_tags:
+ - apb
+ # List tags for your role here, one per line. A tag is a keyword that describes
+ # and categorizes the role. Users find roles by searching for tags.
+ #
+ # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
+ # Maximum 20 tags per role.
+
+dependencies: []
+ # List your role dependencies here, one per line. Be sure to remove the '[]' above,
+ # if you add dependencies to this list.
diff --git a/lib/ansible/galaxy/data/apb/playbooks/deprovision.yml.j2 b/lib/ansible/galaxy/data/apb/playbooks/deprovision.yml.j2
new file mode 100644
index 00000000..19527310
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/playbooks/deprovision.yml.j2
@@ -0,0 +1,8 @@
+- name: "{{ role_name }} playbook to deprovision the application"
+ hosts: localhost
+ gather_facts: false
+ connection: local
+ vars:
+ apb_action: deprovision
+ roles:
+ - role: {{ role_name }}
diff --git a/lib/ansible/galaxy/data/apb/playbooks/provision.yml.j2 b/lib/ansible/galaxy/data/apb/playbooks/provision.yml.j2
new file mode 100644
index 00000000..7b08605e
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/playbooks/provision.yml.j2
@@ -0,0 +1,8 @@
+- name: "{{ role_name }} playbook to provision the application"
+ hosts: localhost
+ gather_facts: false
+ connection: local
+ vars:
+ apb_action: provision
+ roles:
+ - role: {{ role_name }}
diff --git a/lib/ansible/galaxy/data/apb/tasks/main.yml.j2 b/lib/ansible/galaxy/data/apb/tasks/main.yml.j2
new file mode 100644
index 00000000..a9880650
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/tasks/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# tasks file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/apb/templates/.git_keep b/lib/ansible/galaxy/data/apb/templates/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/templates/.git_keep
diff --git a/lib/ansible/galaxy/data/apb/tests/ansible.cfg b/lib/ansible/galaxy/data/apb/tests/ansible.cfg
new file mode 100644
index 00000000..2f74f1b2
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/tests/ansible.cfg
@@ -0,0 +1,2 @@
+[defaults]
+inventory=./inventory
diff --git a/lib/ansible/galaxy/data/apb/tests/inventory b/lib/ansible/galaxy/data/apb/tests/inventory
new file mode 100644
index 00000000..ea69cbf1
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/tests/inventory
@@ -0,0 +1,3 @@
+localhost
+
+
diff --git a/lib/ansible/galaxy/data/apb/tests/test.yml.j2 b/lib/ansible/galaxy/data/apb/tests/test.yml.j2
new file mode 100644
index 00000000..fb14f85c
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/tests/test.yml.j2
@@ -0,0 +1,7 @@
+---
+- hosts: localhost
+ gather_facts: no
+ connection: local
+ tasks:
+
+ # Add tasks and assertions for testing the service here.
diff --git a/lib/ansible/galaxy/data/apb/vars/main.yml.j2 b/lib/ansible/galaxy/data/apb/vars/main.yml.j2
new file mode 100644
index 00000000..092d511a
--- /dev/null
+++ b/lib/ansible/galaxy/data/apb/vars/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# vars file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/collections_galaxy_meta.yml b/lib/ansible/galaxy/data/collections_galaxy_meta.yml
new file mode 100644
index 00000000..75137234
--- /dev/null
+++ b/lib/ansible/galaxy/data/collections_galaxy_meta.yml
@@ -0,0 +1,110 @@
+# Copyright (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# key: The name of the key as defined in galaxy.yml
+# description: Comment/info on the key to be used as the generated doc and auto generated skeleton galaxy.yml file
+# required: Whether the key is required (default is no)
+# type: The type of value that can be set, aligns to the values in the plugin formatter
+---
+- key: namespace
+ description:
+ - The namespace of the collection.
+ - This can be a company/brand/organization or product namespace under which all content lives.
+ - May only contain alphanumeric lowercase characters and underscores. Namespaces cannot start with underscores or
+ numbers and cannot contain consecutive underscores.
+ required: yes
+ type: str
+
+- key: name
+ description:
+ - The name of the collection.
+ - Has the same character restrictions as C(namespace).
+ required: yes
+ type: str
+
+- key: version
+ description:
+ - The version of the collection.
+ - Must be compatible with semantic versioning.
+ required: yes
+ type: str
+
+- key: readme
+ description:
+ - The path to the Markdown (.md) readme file.
+ - This path is relative to the root of the collection.
+ required: yes
+ type: str
+
+- key: authors
+ description:
+ - A list of the collection's content authors.
+ - Can be just the name or in the format 'Full Name <email> (url) @nicks:irc/im.site#channel'.
+ required: yes
+ type: list
+
+- key: description
+ description:
+ - A short summary description of the collection.
+ type: str
+
+- key: license
+ description:
+ - Either a single license or a list of licenses for content inside of a collection.
+ - Ansible Galaxy currently only accepts L(SPDX,https://spdx.org/licenses/) licenses.
+ - This key is mutually exclusive with C(license_file).
+ type: list
+
+- key: license_file
+ description:
+ - The path to the license file for the collection.
+ - This path is relative to the root of the collection.
+ - This key is mutually exclusive with C(license).
+ type: str
+
+- key: tags
+ description:
+ - A list of tags you want to associate with the collection for indexing/searching.
+ - A tag name has the same character requirements as C(namespace) and C(name).
+ type: list
+
+- key: dependencies
+ description:
+ - Collections that this collection requires to be installed for it to be usable.
+ - The key of the dict is the collection label C(namespace.name).
+ - The value is a version range
+ L(specifiers,https://python-semanticversion.readthedocs.io/en/latest/#requirement-specification).
+ - Multiple version range specifiers can be set and are separated by C(,).
+ type: dict
+
+- key: repository
+ description:
+ - The URL of the originating SCM repository.
+ type: str
+
+- key: documentation
+ description:
+ - The URL to any online docs.
+ type: str
+
+- key: homepage
+ description:
+ - The URL to the homepage of the collection/project.
+ type: str
+
+- key: issues
+ description:
+ - The URL to the collection issue tracker.
+ type: str
+
+- key: build_ignore
+ description:
+ - A list of file glob-like patterns used to filter any files or directories
+ that should not be included in the build artifact.
+ - A pattern is matched against the path of the file or directory relative to the collection root directory.
+ - This uses C(fnmatch) to match the files or directories.
+ - Some directories and files like C(galaxy.yml), C(*.pyc), C(*.retry), and
+ C(.git) are always filtered.
+ type: list
+ version_added: '2.10'
diff --git a/lib/ansible/galaxy/data/container/.travis.yml b/lib/ansible/galaxy/data/container/.travis.yml
new file mode 100644
index 00000000..a3370b7d
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/.travis.yml
@@ -0,0 +1,45 @@
+language: python
+dist: trusty
+sudo: required
+
+services:
+ - docker
+
+before_install:
+ - sudo apt-add-repository 'deb http://archive.ubuntu.com/ubuntu trusty-backports universe'
+ - sudo apt-get update -qq
+ - sudo apt-get install -y -o Dpkg::Options::="--force-confold" --force-yes docker-engine
+
+install:
+ # Install the latest Ansible Container and Ansible
+ - pip install git+https://github.com/ansible/ansible-container.git
+ - pip install ansible
+
+script:
+ # Make sure docker is functioning
+ - docker version
+ - docker-compose version
+ - docker info
+
+ # Create an Ansible Container project
+ - mkdir -p tests
+ - cd tests
+ - ansible-container init
+
+ # Install the role into the project
+ - echo "Installing and testing git+https://github.com/${TRAVIS_REPO_SLUG},${TRAVIS_COMMIT}"
+ - ansible-container install git+https://github.com/${TRAVIS_REPO_SLUG},${TRAVIS_COMMIT}
+
+ # Build the service image
+ - ansible-container build
+
+ # Start the service
+ - ansible-container run -d
+ - docker ps
+
+ # Run tests
+ - ansible-playbook test.yml
+
+notifications:
+ email: false
+ webhooks: https://galaxy.ansible.com/api/v1/notifications/
diff --git a/lib/ansible/galaxy/data/container/README.md b/lib/ansible/galaxy/data/container/README.md
new file mode 100644
index 00000000..1b66bdb5
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/README.md
@@ -0,0 +1,49 @@
+# Role Name
+
+Adds a <SERVICE_NAME> service to your [Ansible Container](https://github.com/ansible/ansible-container) project. Run the following commands
+to install the service:
+
+```
+# Set the working directory to your Ansible Container project root
+$ cd myproject
+
+# Install the service
+$ ansible-container install <USERNAME.ROLE_NAME>
+```
+
+## Requirements
+
+- [Ansible Container](https://github.com/ansible/ansible-container)
+- An existing Ansible Container project. To create a project, simply run the following:
+ ```
+ # Create an empty project directory
+ $ mkdir myproject
+
+ # Set the working directory to the new directory
+ $ cd myproject
+
+ # Initialize the project
+ $ ansible-container init
+ ```
+
+- Continue listing any prerequisites here...
+
+
+## Role Variables
+
+A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set
+via parameters to the role. Any variables that are read from other roles and/or the global scope (i.e. hostvars, group vars, etc.) should be mentioned here as well.
+
+## Dependencies
+
+A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+
+## License
+
+BSD
+
+## Author Information
+
+An optional section for the role authors to include contact information, or a website (HTML is not allowed).
+
+
diff --git a/lib/ansible/galaxy/data/container/defaults/main.yml.j2 b/lib/ansible/galaxy/data/container/defaults/main.yml.j2
new file mode 100644
index 00000000..3818e64c
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/defaults/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# defaults file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/container/files/.git_keep b/lib/ansible/galaxy/data/container/files/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/files/.git_keep
diff --git a/lib/ansible/galaxy/data/container/handlers/main.yml.j2 b/lib/ansible/galaxy/data/container/handlers/main.yml.j2
new file mode 100644
index 00000000..3f4c4967
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/handlers/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# handlers file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/container/meta/container.yml.j2 b/lib/ansible/galaxy/data/container/meta/container.yml.j2
new file mode 100644
index 00000000..f033d341
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/meta/container.yml.j2
@@ -0,0 +1,11 @@
+# Add your Ansible Container service definitions here.
+# For example:
+ #
+ # web:
+ # image: ubuntu:trusty
+ # ports:
+ # - "80:80"
+ # command: ['/usr/bin/dumb-init', '/usr/sbin/apache2ctl', '-D', 'FOREGROUND']
+ # dev_overrides:
+ # environment:
+ # - "DEBUG=1"
diff --git a/lib/ansible/galaxy/data/container/meta/main.yml.j2 b/lib/ansible/galaxy/data/container/meta/main.yml.j2
new file mode 100644
index 00000000..72fc9a22
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/meta/main.yml.j2
@@ -0,0 +1,52 @@
+galaxy_info:
+ author: {{ author }}
+ description: {{ description }}
+ company: {{ company }}
+
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: {{ issue_tracker_url }}
+
+ # Choose a valid license ID from https://spdx.org - some suggested licenses:
+ # - BSD-3-Clause (default)
+ # - MIT
+ # - GPL-2.0-or-later
+ # - GPL-3.0-only
+ # - Apache-2.0
+ # - CC-BY-4.0
+ license: {{ license }}
+
+ min_ansible_container_version: 0.2.0
+
+ # If Ansible is required outside of the build container, provide the minimum version:
+ # min_ansible_version:
+
+ #
+ # Provide a list of supported platforms, and for each platform a list of versions.
+ # If you don't wish to enumerate all versions for a particular platform, use 'all'.
+ # To view available platforms and versions (or releases), visit:
+ # https://galaxy.ansible.com/api/v1/platforms/
+ #
+ # platforms:
+ # - name: Fedora
+ # versions:
+ # - all
+ # - 25
+ # - name: SomePlatform
+ # versions:
+ # - all
+ # - 1.0
+ # - 7
+ # - 99.99
+
+ galaxy_tags:
+ - container
+ # List tags for your role here, one per line. A tag is a keyword that describes
+ # and categorizes the role. Users find roles by searching for tags.
+ #
+ # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
+ # Maximum 20 tags per role.
+
+dependencies: []
+ # List your role dependencies here, one per line. Be sure to remove the '[]' above,
+ # if you add dependencies to this list.
diff --git a/lib/ansible/galaxy/data/container/tasks/main.yml.j2 b/lib/ansible/galaxy/data/container/tasks/main.yml.j2
new file mode 100644
index 00000000..a9880650
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/tasks/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# tasks file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/container/templates/.git_keep b/lib/ansible/galaxy/data/container/templates/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/templates/.git_keep
diff --git a/lib/ansible/galaxy/data/container/tests/ansible.cfg b/lib/ansible/galaxy/data/container/tests/ansible.cfg
new file mode 100644
index 00000000..2f74f1b2
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/tests/ansible.cfg
@@ -0,0 +1,2 @@
+[defaults]
+inventory=./inventory
diff --git a/lib/ansible/galaxy/data/container/tests/inventory b/lib/ansible/galaxy/data/container/tests/inventory
new file mode 100644
index 00000000..ea69cbf1
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/tests/inventory
@@ -0,0 +1,3 @@
+localhost
+
+
diff --git a/lib/ansible/galaxy/data/container/tests/test.yml.j2 b/lib/ansible/galaxy/data/container/tests/test.yml.j2
new file mode 100644
index 00000000..fb14f85c
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/tests/test.yml.j2
@@ -0,0 +1,7 @@
+---
+- hosts: localhost
+ gather_facts: no
+ connection: local
+ tasks:
+
+ # Add tasks and assertions for testing the service here.
diff --git a/lib/ansible/galaxy/data/container/vars/main.yml.j2 b/lib/ansible/galaxy/data/container/vars/main.yml.j2
new file mode 100644
index 00000000..092d511a
--- /dev/null
+++ b/lib/ansible/galaxy/data/container/vars/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# vars file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/default/collection/README.md.j2 b/lib/ansible/galaxy/data/default/collection/README.md.j2
new file mode 100644
index 00000000..5e516220
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/collection/README.md.j2
@@ -0,0 +1,3 @@
+# Ansible Collection - {{ namespace }}.{{ collection_name }}
+
+Documentation for the collection.
diff --git a/lib/ansible/galaxy/data/default/collection/docs/.git_keep b/lib/ansible/galaxy/data/default/collection/docs/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/collection/docs/.git_keep
diff --git a/lib/ansible/galaxy/data/default/collection/galaxy.yml.j2 b/lib/ansible/galaxy/data/default/collection/galaxy.yml.j2
new file mode 100644
index 00000000..a95008fc
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/collection/galaxy.yml.j2
@@ -0,0 +1,11 @@
+### REQUIRED
+{% for option in required_config %}
+{{ option.description | comment_ify }}
+{{ {option.key: option.value} | to_nice_yaml }}
+{% endfor %}
+
+### OPTIONAL but strongly recommended
+{% for option in optional_config %}
+{{ option.description | comment_ify }}
+{{ {option.key: option.value} | to_nice_yaml }}
+{% endfor %}
diff --git a/lib/ansible/galaxy/data/default/collection/plugins/README.md.j2 b/lib/ansible/galaxy/data/default/collection/plugins/README.md.j2
new file mode 100644
index 00000000..7c006cfa
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/collection/plugins/README.md.j2
@@ -0,0 +1,31 @@
+# Collections Plugins Directory
+
+This directory can be used to ship various plugins inside an Ansible collection. Each plugin is placed in a folder
+named after its plugin type. The directory can also include the `module_utils` and `modules` directories, which
+contain module utilities and modules respectively.
+
+Here is an example directory of the majority of plugins currently supported by Ansible:
+
+```
+└── plugins
+ ├── action
+ ├── become
+ ├── cache
+ ├── callback
+ ├── cliconf
+ ├── connection
+ ├── filter
+ ├── httpapi
+ ├── inventory
+ ├── lookup
+ ├── module_utils
+ ├── modules
+ ├── netconf
+ ├── shell
+ ├── strategy
+ ├── terminal
+ ├── test
+ └── vars
+```
+
+A full list of plugin types can be found at [Working With Plugins]({{ ansible_plugin_list_dir }}).
diff --git a/lib/ansible/galaxy/data/default/collection/roles/.git_keep b/lib/ansible/galaxy/data/default/collection/roles/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/collection/roles/.git_keep
diff --git a/lib/ansible/galaxy/data/default/role/.travis.yml b/lib/ansible/galaxy/data/default/role/.travis.yml
new file mode 100644
index 00000000..36bbf620
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/.travis.yml
@@ -0,0 +1,29 @@
+---
+language: python
+python: "2.7"
+
+# Use the new container infrastructure
+sudo: false
+
+# Install ansible
+addons:
+ apt:
+ packages:
+ - python-pip
+
+install:
+ # Install ansible
+ - pip install ansible
+
+ # Check ansible version
+ - ansible --version
+
+ # Create ansible.cfg with correct roles_path
+ - printf '[defaults]\nroles_path=../' >ansible.cfg
+
+script:
+ # Basic role syntax check
+ - ansible-playbook tests/test.yml -i tests/inventory --syntax-check
+
+notifications:
+ webhooks: https://galaxy.ansible.com/api/v1/notifications/
\ No newline at end of file
diff --git a/lib/ansible/galaxy/data/default/role/README.md b/lib/ansible/galaxy/data/default/role/README.md
new file mode 100644
index 00000000..225dd44b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/README.md
@@ -0,0 +1,38 @@
+Role Name
+=========
+
+A brief description of the role goes here.
+
+Requirements
+------------
+
+Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
+
+Role Variables
+--------------
+
+A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (i.e. hostvars, group vars, etc.) should be mentioned here as well.
+
+Dependencies
+------------
+
+A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+
+Example Playbook
+----------------
+
+Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
+
+ - hosts: servers
+ roles:
+ - { role: username.rolename, x: 42 }
+
+License
+-------
+
+BSD
+
+Author Information
+------------------
+
+An optional section for the role authors to include contact information, or a website (HTML is not allowed).
diff --git a/lib/ansible/galaxy/data/default/role/defaults/main.yml.j2 b/lib/ansible/galaxy/data/default/role/defaults/main.yml.j2
new file mode 100644
index 00000000..3818e64c
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/defaults/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# defaults file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/default/role/files/.git_keep b/lib/ansible/galaxy/data/default/role/files/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/files/.git_keep
diff --git a/lib/ansible/galaxy/data/default/role/handlers/main.yml.j2 b/lib/ansible/galaxy/data/default/role/handlers/main.yml.j2
new file mode 100644
index 00000000..3f4c4967
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/handlers/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# handlers file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/default/role/meta/main.yml.j2 b/lib/ansible/galaxy/data/default/role/meta/main.yml.j2
new file mode 100644
index 00000000..4891a68b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/meta/main.yml.j2
@@ -0,0 +1,55 @@
+galaxy_info:
+ author: {{ author }}
+ description: {{ description }}
+ company: {{ company }}
+
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: {{ issue_tracker_url }}
+
+ # Choose a valid license ID from https://spdx.org - some suggested licenses:
+ # - BSD-3-Clause (default)
+ # - MIT
+ # - GPL-2.0-or-later
+ # - GPL-3.0-only
+ # - Apache-2.0
+ # - CC-BY-4.0
+ license: {{ license }}
+
+ min_ansible_version: {{ min_ansible_version }}
+
+ # If this is a Container Enabled role, provide the minimum Ansible Container version.
+ # min_ansible_container_version:
+
+ #
+ # Provide a list of supported platforms, and for each platform a list of versions.
+ # If you don't wish to enumerate all versions for a particular platform, use 'all'.
+ # To view available platforms and versions (or releases), visit:
+ # https://galaxy.ansible.com/api/v1/platforms/
+ #
+ # platforms:
+ # - name: Fedora
+ # versions:
+ # - all
+ # - 25
+ # - name: SomePlatform
+ # versions:
+ # - all
+ # - 1.0
+ # - 7
+ # - 99.99
+
+ galaxy_tags: []
+ # List tags for your role here, one per line. A tag is a keyword that describes
+ # and categorizes the role. Users find roles by searching for tags. Be sure to
+ # remove the '[]' above, if you add tags to this list.
+ #
+ # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
+ # Maximum 20 tags per role.
+
+dependencies: []
+ # List your role dependencies here, one per line. Be sure to remove the '[]' above,
+ # if you add dependencies to this list.
+{% for dependency in dependencies %}
+ #- {{ dependency }}
+{%- endfor %}
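For context, `ansible-galaxy init` renders templates like the one above with Jinja2, filling in values such as `author` and `license`. A minimal sketch of that rendering step, using plain `jinja2` directly rather than the actual ansible-galaxy code path, with made-up values:

```python
from jinja2 import Environment

# Illustrative values; the real ones come from ansible-galaxy init arguments and defaults.
inject = {
    'author': 'your name',
    'description': 'your role description',
    'company': 'your company (optional)',
    'issue_tracker_url': 'http://example.com/issue/tracker',
    'license': 'license (GPL-2.0-or-later, MIT, etc)',
    'min_ansible_version': '2.4',
    'dependencies': [],
}

with open('meta/main.yml.j2') as f:
    template = f.read()

print(Environment(keep_trailing_newline=True).from_string(template).render(**inject))
```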
diff --git a/lib/ansible/galaxy/data/default/role/tasks/main.yml.j2 b/lib/ansible/galaxy/data/default/role/tasks/main.yml.j2
new file mode 100644
index 00000000..a9880650
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/tasks/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# tasks file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/default/role/templates/.git_keep b/lib/ansible/galaxy/data/default/role/templates/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/templates/.git_keep
diff --git a/lib/ansible/galaxy/data/default/role/tests/inventory b/lib/ansible/galaxy/data/default/role/tests/inventory
new file mode 100644
index 00000000..878877b0
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/tests/inventory
@@ -0,0 +1,2 @@
+localhost
+
diff --git a/lib/ansible/galaxy/data/default/role/tests/test.yml.j2 b/lib/ansible/galaxy/data/default/role/tests/test.yml.j2
new file mode 100644
index 00000000..0c40f95a
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/tests/test.yml.j2
@@ -0,0 +1,5 @@
+---
+- hosts: localhost
+ remote_user: root
+ roles:
+ - {{ role_name }}
diff --git a/lib/ansible/galaxy/data/default/role/vars/main.yml.j2 b/lib/ansible/galaxy/data/default/role/vars/main.yml.j2
new file mode 100644
index 00000000..092d511a
--- /dev/null
+++ b/lib/ansible/galaxy/data/default/role/vars/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# vars file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/network/.travis.yml b/lib/ansible/galaxy/data/network/.travis.yml
new file mode 100644
index 00000000..36bbf620
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/.travis.yml
@@ -0,0 +1,29 @@
+---
+language: python
+python: "2.7"
+
+# Use the new container infrastructure
+sudo: false
+
+# Install ansible
+addons:
+ apt:
+ packages:
+ - python-pip
+
+install:
+ # Install ansible
+ - pip install ansible
+
+ # Check ansible version
+ - ansible --version
+
+ # Create ansible.cfg with correct roles_path
+ - printf '[defaults]\nroles_path=../' >ansible.cfg
+
+script:
+ # Basic role syntax check
+ - ansible-playbook tests/test.yml -i tests/inventory --syntax-check
+
+notifications:
+ webhooks: https://galaxy.ansible.com/api/v1/notifications/ \ No newline at end of file
diff --git a/lib/ansible/galaxy/data/network/README.md b/lib/ansible/galaxy/data/network/README.md
new file mode 100644
index 00000000..84533c63
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/README.md
@@ -0,0 +1,38 @@
+Role Name
+=========
+
+A brief description of the role goes here.
+
+Requirements
+------------
+
+Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses any vendor-specific SDKs or modules with specific dependencies, it may be a good idea to mention in this section which packages are required.
+
+Role Variables
+--------------
+
+A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (i.e. hostvars, group vars, etc.) should be mentioned here as well.
+
+Dependencies
+------------
+
+A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+
+Example Playbook
+----------------
+
+Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
+
+ - hosts: servers
+ roles:
+ - { role: username.rolename, x: 42 }
+
+License
+-------
+
+BSD
+
+Author Information
+------------------
+
+An optional section for the role authors to include contact information, or a website (HTML is not allowed).
diff --git a/lib/ansible/galaxy/data/network/cliconf_plugins/example.py.j2 b/lib/ansible/galaxy/data/network/cliconf_plugins/example.py.j2
new file mode 100644
index 00000000..02f234ac
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/cliconf_plugins/example.py.j2
@@ -0,0 +1,40 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+try:
+ from ansible.plugins.cliconf import CliconfBase
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/cliconf/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/cliconf/junos.py
+ """
+except ImportError:
+ raise AnsibleError("Cliconf Plugin [ {{ role_name }} ]: Dependency not satisfied")
+
+
+class Cliconf(CliconfBase):
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/cliconf/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/cliconf/junos.py
+ """
+ raise AnsibleError("Cliconf Plugin [ {{ role_name }} ]: Not implemented")
diff --git a/lib/ansible/galaxy/data/network/defaults/main.yml.j2 b/lib/ansible/galaxy/data/network/defaults/main.yml.j2
new file mode 100644
index 00000000..3818e64c
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/defaults/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# defaults file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/network/files/.git_keep b/lib/ansible/galaxy/data/network/files/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/files/.git_keep
diff --git a/lib/ansible/galaxy/data/network/library/example_command.py.j2 b/lib/ansible/galaxy/data/network/library/example_command.py.j2
new file mode 100644
index 00000000..0f3dac2d
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/library/example_command.py.j2
@@ -0,0 +1,66 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+
+### Documentation
+DOCUMENTATION = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_command.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_command.py
+"""
+
+EXAMPLES = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_command.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_command.py
+"""
+
+
+RETURN = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_command.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_command.py
+"""
+
+#### Imports
+try:
+ from ansible.module_utils.basic import AnsibleModule
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_command.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_command.py
+ """
+except ImportError:
+ raise AnsibleError("[ {{ role_name }}_command ]: Dependency not satisfied")
+
+#### Implementation
+def main():
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_command.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_command.py
+ """
+ raise AnsibleError(" [ {{ role_name }}_command ]: Not Implemented")
+
+#### Entrypoint
+if __name__ == '__main__':
+ main()
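The stub above only raises a placeholder error. A fleshed-out module would construct an `AnsibleModule` and return results through `exit_json`; here is a minimal, self-contained sketch of that shape (the `commands` option and the echoed results are illustrative, not part of the template):

```python
from ansible.module_utils.basic import AnsibleModule


def main():
    # Declare the options this module accepts; 'commands' is a hypothetical example option.
    module = AnsibleModule(
        argument_spec=dict(
            commands=dict(type='list', required=True),
        ),
        supports_check_mode=True,
    )

    # A real network module would send the commands over the device connection;
    # this sketch just echoes them back so it stays self-contained.
    results = ['(output of %s would go here)' % command for command in module.params['commands']]

    module.exit_json(changed=False, stdout=results)


if __name__ == '__main__':
    main()
```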
diff --git a/lib/ansible/galaxy/data/network/library/example_config.py.j2 b/lib/ansible/galaxy/data/network/library/example_config.py.j2
new file mode 100644
index 00000000..2c2c72be
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/library/example_config.py.j2
@@ -0,0 +1,66 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+
+### Documentation
+DOCUMENTATION = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_config.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_config.py
+"""
+
+EXAMPLES = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_config.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_config.py
+"""
+
+
+RETURN = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_config.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_config.py
+"""
+
+### Imports
+try:
+ from ansible.module_utils.basic import AnsibleModule
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_config.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_config.py
+ """
+except ImportError:
+ raise AnsibleError("[ {{ role_name }}_config ]: Dependency not satisfied")
+
+### Implementation
+def main():
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_config.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_config.py
+ """
+ raise AnsibleError(" [ {{ role_name }}_config ]: Not Implemented")
+
+### Entrypoint
+if __name__ == '__main__':
+ main()
diff --git a/lib/ansible/galaxy/data/network/library/example_facts.py.j2 b/lib/ansible/galaxy/data/network/library/example_facts.py.j2
new file mode 100644
index 00000000..9f7608c3
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/library/example_facts.py.j2
@@ -0,0 +1,66 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+
+### Documentation
+DOCUMENTATION = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_facts.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_facts.py
+"""
+
+EXAMPLES = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_facts.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_facts.py
+"""
+
+
+RETURN = """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_facts.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_facts.py
+"""
+
+### Imports
+try:
+ from ansible.module_utils.basic import AnsibleModule
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_facts.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_facts.py
+ """
+except ImportError:
+ raise AnsibleError("[ {{ role_name }}_facts ]: Dependency not satisfied")
+
+### Implementation
+def main():
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/iosxr/iosxr_facts.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/network/junos/junos_facts.py
+ """
+ raise AnsibleError(" [ {{ role_name }}_facts ]: Not Implemented")
+
+### Entrypoint
+if __name__ == '__main__':
+ main()
diff --git a/lib/ansible/galaxy/data/network/meta/main.yml.j2 b/lib/ansible/galaxy/data/network/meta/main.yml.j2
new file mode 100644
index 00000000..d0184ae8
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/meta/main.yml.j2
@@ -0,0 +1,52 @@
+galaxy_info:
+ author: {{ author }}
+ description: {{ description }}
+ company: {{ company }}
+
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: {{ issue_tracker_url }}
+
+ # Choose a valid license ID from https://spdx.org - some suggested licenses:
+ # - BSD-3-Clause (default)
+ # - MIT
+ # - GPL-2.0-or-later
+ # - GPL-3.0-only
+ # - Apache-2.0
+ # - CC-BY-4.0
+ license: {{ license }}
+
+ min_ansible_version: {{ min_ansible_version }}
+
+ # If this is a Container Enabled role, provide the minimum Ansible Container version.
+ # min_ansible_container_version:
+
+ #
+ # platforms is a list of platforms, and each platform has a name and a list of versions.
+ #
+ # platforms:
+ # - name: VYOS
+ # versions:
+ # - all
+ # - 25
+ # - name: SomePlatform
+ # versions:
+ # - all
+ # - 1.0
+ # - 7
+ # - 99.99
+
+ galaxy_tags: []
+ # List tags for your role here, one per line. A tag is a keyword that describes
+ # and categorizes the role. Users find roles by searching for tags. Be sure to
+ # remove the '[]' above, if you add tags to this list.
+ #
+ # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
+ # Maximum 20 tags per role.
+
+dependencies: []
+ # List your role dependencies here, one per line. Be sure to remove the '[]' above,
+ # if you add dependencies to this list.
+{%- for dependency in dependencies %}
+ #- {{ dependency }}
+{%- endfor %}
diff --git a/lib/ansible/galaxy/data/network/module_utils/example.py.j2 b/lib/ansible/galaxy/data/network/module_utils/example.py.j2
new file mode 100644
index 00000000..9bf2d3f6
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/module_utils/example.py.j2
@@ -0,0 +1,40 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+### Imports
+try:
+ from ansible.module_utils.basic import env_fallback, return_values
+ from ansible.module_utils.connection import Connection
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/network/iosxr/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/network/junos//junos.py
+ """
+except ImportError:
+ raise AnsibleError("Netconf Plugin [ {{ role_name }} ]: Dependency not satisfied")
+
+### Implementation
+"""
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/network/iosxr/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/network/junos//junos.py
+"""
diff --git a/lib/ansible/galaxy/data/network/netconf_plugins/example.py.j2 b/lib/ansible/galaxy/data/network/netconf_plugins/example.py.j2
new file mode 100644
index 00000000..e3a1ce61
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/netconf_plugins/example.py.j2
@@ -0,0 +1,40 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+try:
+    from ansible.plugins.netconf import NetconfBase
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/netconf/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/netconf/junos.py
+ """
+except ImportError:
+ raise AnsibleError("Netconf Plugin [ {{ role_name }} ]: Dependency not satisfied")
+
+
+class Netconf(NetconfBase):
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/netconf/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/netconf/junos.py
+ """
+ raise AnsibleError("Netconf Plugin [ {{ role_name }} ]: Not implemented")
diff --git a/lib/ansible/galaxy/data/network/tasks/main.yml.j2 b/lib/ansible/galaxy/data/network/tasks/main.yml.j2
new file mode 100644
index 00000000..a9880650
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/tasks/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# tasks file for {{ role_name }}
diff --git a/lib/ansible/galaxy/data/network/templates/.git_keep b/lib/ansible/galaxy/data/network/templates/.git_keep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/templates/.git_keep
diff --git a/lib/ansible/galaxy/data/network/terminal_plugins/example.py.j2 b/lib/ansible/galaxy/data/network/terminal_plugins/example.py.j2
new file mode 100644
index 00000000..621a140c
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/terminal_plugins/example.py.j2
@@ -0,0 +1,40 @@
+#
+# (c) 2018 Red Hat Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+from __future__ import (absolute_import, division, print_function)
+from ansible.errors import AnsibleError
+__metaclass__ = type
+
+try:
+ from ansible.plugins.terminal import TerminalBase
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/terminal/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/terminal/junos.py
+ """
+except ImportError:
+ raise AnsibleError("Terminal Plugin [ {{ role_name }} ]: Dependency not satisfied")
+
+
+class TerminalModule(TerminalBase):
+ """
+ Examples:
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/terminal/iosxr.py
+ https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/terminal/junos.py
+ """
+ raise AnsibleError("Terminal Plugin [ {{ role_name }} ]: Not implemented")
diff --git a/lib/ansible/galaxy/data/network/tests/inventory b/lib/ansible/galaxy/data/network/tests/inventory
new file mode 100644
index 00000000..878877b0
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/tests/inventory
@@ -0,0 +1,2 @@
+localhost
+
diff --git a/lib/ansible/galaxy/data/network/tests/test.yml.j2 b/lib/ansible/galaxy/data/network/tests/test.yml.j2
new file mode 100644
index 00000000..11284eb5
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/tests/test.yml.j2
@@ -0,0 +1,14 @@
+---
+- hosts: localhost
+ connection: network_cli
+ gather_facts: False
+
+ roles:
+ - {{ role_name }}
+
+- hosts: localhost
+ connection: netconf
+ gather_facts: False
+
+ roles:
+ - {{ role_name }}
diff --git a/lib/ansible/galaxy/data/network/vars/main.yml.j2 b/lib/ansible/galaxy/data/network/vars/main.yml.j2
new file mode 100644
index 00000000..092d511a
--- /dev/null
+++ b/lib/ansible/galaxy/data/network/vars/main.yml.j2
@@ -0,0 +1,2 @@
+---
+# vars file for {{ role_name }}
diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
new file mode 100644
index 00000000..7de44ded
--- /dev/null
+++ b/lib/ansible/galaxy/role.py
@@ -0,0 +1,399 @@
+########################################################################
+#
+# (C) 2015, Brian Coca <bcoca@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+########################################################################
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import errno
+import datetime
+import os
+import tarfile
+import tempfile
+import yaml
+from distutils.version import LooseVersion
+from shutil import rmtree
+
+from ansible import context
+from ansible.errors import AnsibleError
+from ansible.galaxy.user_agent import user_agent
+from ansible.module_utils._text import to_native, to_text
+from ansible.module_utils.urls import open_url
+from ansible.playbook.role.requirement import RoleRequirement
+from ansible.utils.display import Display
+
+display = Display()
+
+
+class GalaxyRole(object):
+
+ SUPPORTED_SCMS = set(['git', 'hg'])
+ META_MAIN = (os.path.join('meta', 'main.yml'), os.path.join('meta', 'main.yaml'))
+ META_INSTALL = os.path.join('meta', '.galaxy_install_info')
+ META_REQUIREMENTS = (os.path.join('meta', 'requirements.yml'), os.path.join('meta', 'requirements.yaml'))
+ ROLE_DIRS = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')
+
+ def __init__(self, galaxy, api, name, src=None, version=None, scm=None, path=None):
+
+ self._metadata = None
+ self._requirements = None
+ self._install_info = None
+ self._validate_certs = not context.CLIARGS['ignore_certs']
+
+ display.debug('Validate TLS certificates: %s' % self._validate_certs)
+
+ self.galaxy = galaxy
+ self.api = api
+
+ self.name = name
+ self.version = version
+ self.src = src or name
+ self.scm = scm
+ self.paths = [os.path.join(x, self.name) for x in galaxy.roles_paths]
+
+ if path is not None:
+ if not path.endswith(os.path.join(os.path.sep, self.name)):
+ path = os.path.join(path, self.name)
+ else:
+ # Look for a meta/main.ya?ml inside the potential role dir in case
+            # the role name is the same as the parent directory of the role.
+ #
+ # Example:
+ # ./roles/testing/testing/meta/main.yml
+ for meta_main in self.META_MAIN:
+ if os.path.exists(os.path.join(path, name, meta_main)):
+ path = os.path.join(path, self.name)
+ break
+ self.path = path
+ else:
+ # use the first path by default
+ self.path = os.path.join(galaxy.roles_paths[0], self.name)
+
+ def __repr__(self):
+ """
+ Returns "rolename (version)" if version is not null
+ Returns "rolename" otherwise
+ """
+ if self.version:
+ return "%s (%s)" % (self.name, self.version)
+ else:
+ return self.name
+
+ def __eq__(self, other):
+ return self.name == other.name
+
+ @property
+ def metadata(self):
+ """
+ Returns role metadata
+ """
+ if self._metadata is None:
+ for path in self.paths:
+ for meta_main in self.META_MAIN:
+ meta_path = os.path.join(path, meta_main)
+ if os.path.isfile(meta_path):
+ try:
+ with open(meta_path, 'r') as f:
+ self._metadata = yaml.safe_load(f)
+ except Exception:
+ display.vvvvv("Unable to load metadata for %s" % self.name)
+ return False
+ break
+
+ return self._metadata
+
+ @property
+ def install_info(self):
+ """
+ Returns role install info
+ """
+ if self._install_info is None:
+
+ info_path = os.path.join(self.path, self.META_INSTALL)
+ if os.path.isfile(info_path):
+                try:
+                    with open(info_path, 'r') as f:
+                        self._install_info = yaml.safe_load(f)
+                except Exception:
+                    display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
+                    return False
+ return self._install_info
+
+ @property
+ def _exists(self):
+ for path in self.paths:
+ if os.path.isdir(path):
+ return True
+
+ return False
+
+ def _write_galaxy_install_info(self):
+ """
+ Writes a YAML-formatted file to the role's meta/ directory
+ (named .galaxy_install_info) which contains some information
+ we can use later for commands like 'list' and 'info'.
+ """
+
+ info = dict(
+ version=self.version,
+ install_date=datetime.datetime.utcnow().strftime("%c"),
+ )
+ if not os.path.exists(os.path.join(self.path, 'meta')):
+ os.makedirs(os.path.join(self.path, 'meta'))
+ info_path = os.path.join(self.path, self.META_INSTALL)
+ with open(info_path, 'w+') as f:
+ try:
+ self._install_info = yaml.safe_dump(info, f)
+ except Exception:
+ return False
+
+ return True
+
+ def remove(self):
+ """
+ Removes the specified role from the roles path.
+ There is a sanity check to make sure there's a meta/main.yml file at this
+ path so the user doesn't blow away random directories.
+ """
+ if self.metadata:
+ try:
+ rmtree(self.path)
+ return True
+ except Exception:
+ pass
+
+ return False
+
+ def fetch(self, role_data):
+ """
+ Downloads the archived role to a temp location based on role data
+ """
+ if role_data:
+
+ # first grab the file and save it to a temp location
+ if "github_user" in role_data and "github_repo" in role_data:
+ archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
+ else:
+ archive_url = self.src
+
+ display.display("- downloading role from %s" % archive_url)
+
+ try:
+ url_file = open_url(archive_url, validate_certs=self._validate_certs, http_agent=user_agent())
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
+ data = url_file.read()
+ while data:
+ temp_file.write(data)
+ data = url_file.read()
+ temp_file.close()
+ return temp_file.name
+ except Exception as e:
+ display.error(u"failed to download the file: %s" % to_text(e))
+
+ return False
+
+ def install(self):
+
+ if self.scm:
+ # create tar file from scm url
+ tmp_file = RoleRequirement.scm_archive_role(keep_scm_meta=context.CLIARGS['keep_scm_meta'], **self.spec)
+ elif self.src:
+ if os.path.isfile(self.src):
+ tmp_file = self.src
+ elif '://' in self.src:
+ role_data = self.src
+ tmp_file = self.fetch(role_data)
+ else:
+ role_data = self.api.lookup_role_by_name(self.src)
+ if not role_data:
+ raise AnsibleError("- sorry, %s was not found on %s." % (self.src, self.api.api_server))
+
+ if role_data.get('role_type') == 'APP':
+ # Container Role
+ display.warning("%s is a Container App role, and should only be installed using Ansible "
+ "Container" % self.name)
+
+ role_versions = self.api.fetch_role_related('versions', role_data['id'])
+ if not self.version:
+ # convert the version names to LooseVersion objects
+ # and sort them to get the latest version. If there
+ # are no versions in the list, we'll grab the head
+ # of the master branch
+ if len(role_versions) > 0:
+ loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
+ try:
+ loose_versions.sort()
+ except TypeError:
+ raise AnsibleError(
+ 'Unable to compare role versions (%s) to determine the most recent version due to incompatible version formats. '
+ 'Please contact the role author to resolve versioning conflicts, or specify an explicit role version to '
+ 'install.' % ', '.join([v.vstring for v in loose_versions])
+ )
+ self.version = to_text(loose_versions[-1])
+ elif role_data.get('github_branch', None):
+ self.version = role_data['github_branch']
+ else:
+ self.version = 'master'
+ elif self.version != 'master':
+ if role_versions and to_text(self.version) not in [a.get('name', None) for a in role_versions]:
+ raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version,
+ self.name,
+ role_versions))
+
+ # check if there's a source link for our role_version
+ for role_version in role_versions:
+ if role_version['name'] == self.version and 'source' in role_version:
+ self.src = role_version['source']
+
+ tmp_file = self.fetch(role_data)
+
+ else:
+ raise AnsibleError("No valid role data found")
+
+ if tmp_file:
+
+ display.debug("installing from %s" % tmp_file)
+
+ if not tarfile.is_tarfile(tmp_file):
+ raise AnsibleError("the downloaded file does not appear to be a valid tar archive.")
+ else:
+ role_tar_file = tarfile.open(tmp_file, "r")
+ # verify the role's meta file
+ meta_file = None
+ members = role_tar_file.getmembers()
+ # next find the metadata file
+ for member in members:
+ for meta_main in self.META_MAIN:
+ if meta_main in member.name:
+ # Look for parent of meta/main.yml
+ # Due to possibility of sub roles each containing meta/main.yml
+ # look for shortest length parent
+ meta_parent_dir = os.path.dirname(os.path.dirname(member.name))
+ if not meta_file:
+ archive_parent_dir = meta_parent_dir
+ meta_file = member
+ else:
+ if len(meta_parent_dir) < len(archive_parent_dir):
+ archive_parent_dir = meta_parent_dir
+ meta_file = member
+ if not meta_file:
+ raise AnsibleError("this role does not appear to have a meta/main.yml file.")
+ else:
+ try:
+ self._metadata = yaml.safe_load(role_tar_file.extractfile(meta_file))
+ except Exception:
+ raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")
+
+ # we strip off any higher-level directories for all of the files contained within
+                # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the other
+                # hand, do not have a parent directory at all.
+ installed = False
+ while not installed:
+ display.display("- extracting %s to %s" % (self.name, self.path))
+ try:
+ if os.path.exists(self.path):
+ if not os.path.isdir(self.path):
+ raise AnsibleError("the specified roles path exists and is not a directory.")
+ elif not context.CLIARGS.get("force", False):
+ raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
+ else:
+ # using --force, remove the old path
+ if not self.remove():
+ raise AnsibleError("%s doesn't appear to contain a role.\n please remove this directory manually if you really "
+ "want to put the role here." % self.path)
+ else:
+ os.makedirs(self.path)
+
+ # now we do the actual extraction to the path
+ for member in members:
+ # we only extract files, and remove any relative path
+ # bits that might be in the file for security purposes
+ # and drop any containing directory, as mentioned above
+ if member.isreg() or member.issym():
+ n_member_name = to_native(member.name)
+ n_archive_parent_dir = to_native(archive_parent_dir)
+ n_parts = n_member_name.replace(n_archive_parent_dir, "", 1).split(os.sep)
+ n_final_parts = []
+ for n_part in n_parts:
+ if n_part != '..' and '~' not in n_part and '$' not in n_part:
+ n_final_parts.append(n_part)
+ member.name = os.path.join(*n_final_parts)
+ role_tar_file.extract(member, to_native(self.path))
+
+ # write out the install info file for later use
+ self._write_galaxy_install_info()
+ installed = True
+ except OSError as e:
+ error = True
+ if e.errno == errno.EACCES and len(self.paths) > 1:
+ current = self.paths.index(self.path)
+ if len(self.paths) > current:
+ self.path = self.paths[current + 1]
+ error = False
+ if error:
+ raise AnsibleError("Could not update files in %s: %s" % (self.path, to_native(e)))
+
+ # return the parsed yaml metadata
+ display.display("- %s was installed successfully" % str(self))
+ if not (self.src and os.path.isfile(self.src)):
+ try:
+ os.unlink(tmp_file)
+ except (OSError, IOError) as e:
+ display.warning(u"Unable to remove tmp file (%s): %s" % (tmp_file, to_text(e)))
+ return True
+
+ return False
+
+ @property
+ def spec(self):
+ """
+ Returns role spec info
+ {
+ 'scm': 'git',
+ 'src': 'http://git.example.com/repos/repo.git',
+ 'version': 'v1.0',
+ 'name': 'repo'
+ }
+ """
+ return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
+
+ @property
+ def requirements(self):
+ """
+ Returns role requirements
+ """
+ if self._requirements is None:
+ self._requirements = []
+ for meta_requirements in self.META_REQUIREMENTS:
+ meta_path = os.path.join(self.path, meta_requirements)
+ if os.path.isfile(meta_path):
+                    try:
+                        with open(meta_path, 'r') as f:
+                            self._requirements = yaml.safe_load(f)
+                    except Exception:
+                        display.vvvvv("Unable to load requirements for %s" % self.name)
+
+ break
+
+ return self._requirements
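For orientation, `GalaxyRole` is the object that `ansible-galaxy role install` drives. A rough sketch of how it might be exercised programmatically; `galaxy` and `api` stand in for the `Galaxy` and `GalaxyAPI` objects the CLI normally constructs, and `context.CLIARGS` must already be populated (e.g. for `ignore_certs` and `force`), so treat this as an outline rather than a drop-in snippet:

```python
from ansible.galaxy.role import GalaxyRole


def install_role(galaxy, api, name, version=None):
    # `galaxy` and `api` are assumed to be pre-built ansible.galaxy.Galaxy / GalaxyAPI objects.
    role = GalaxyRole(galaxy, api, name, version=version)
    if role.install():                 # downloads, extracts and writes meta/.galaxy_install_info
        print("installed %s" % role)   # __repr__ renders as "name (version)"
        return role.metadata           # parsed meta/main.yml, if it could be loaded
    return None
```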
diff --git a/lib/ansible/galaxy/token.py b/lib/ansible/galaxy/token.py
new file mode 100644
index 00000000..7231c8f9
--- /dev/null
+++ b/lib/ansible/galaxy/token.py
@@ -0,0 +1,180 @@
+########################################################################
+#
+# (C) 2015, Chris Houseknecht <chouse@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+########################################################################
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import base64
+import os
+import json
+from stat import S_IRUSR, S_IWUSR
+
+import yaml
+
+from ansible import constants as C
+from ansible.galaxy.user_agent import user_agent
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.module_utils.urls import open_url
+from ansible.utils.display import Display
+
+display = Display()
+
+
+class NoTokenSentinel(object):
+ """ Represents an ansible.cfg server with not token defined (will ignore cmdline and GALAXY_TOKEN_PATH. """
+ def __new__(cls, *args, **kwargs):
+ return cls
+
+
+class KeycloakToken(object):
+    '''A token granted by a Keycloak server.
+
+    For example sso.redhat.com, as used by cloud.redhat.com
+    (i.e. Automation Hub).'''
+
+ token_type = 'Bearer'
+
+ def __init__(self, access_token=None, auth_url=None, validate_certs=True):
+ self.access_token = access_token
+ self.auth_url = auth_url
+ self._token = None
+ self.validate_certs = validate_certs
+
+ def _form_payload(self):
+ return 'grant_type=refresh_token&client_id=cloud-services&refresh_token=%s' % self.access_token
+
+ def get(self):
+ if self._token:
+ return self._token
+
+ # - build a request to POST to auth_url
+ # - body is form encoded
+        # - 'refresh_token' is the offline token stored in ansible.cfg
+ # - 'grant_type' is 'refresh_token'
+ # - 'client_id' is 'cloud-services'
+ # - should probably be based on the contents of the
+ # offline_ticket's JWT payload 'aud' (audience)
+ # or 'azp' (Authorized party - the party to which the ID Token was issued)
+ payload = self._form_payload()
+
+ resp = open_url(to_native(self.auth_url),
+ data=payload,
+ validate_certs=self.validate_certs,
+ method='POST',
+ http_agent=user_agent())
+
+ # TODO: handle auth errors
+
+ data = json.loads(to_text(resp.read(), errors='surrogate_or_strict'))
+
+ # - extract 'access_token'
+ self._token = data.get('access_token')
+
+ return self._token
+
+ def headers(self):
+ headers = {}
+ headers['Authorization'] = '%s %s' % (self.token_type, self.get())
+ return headers
+
+
+class GalaxyToken(object):
+    ''' Class for storing and retrieving a local galaxy token '''
+
+ token_type = 'Token'
+
+ def __init__(self, token=None):
+ self.b_file = to_bytes(C.GALAXY_TOKEN_PATH, errors='surrogate_or_strict')
+ # Done so the config file is only opened when set/get/save is called
+ self._config = None
+ self._token = token
+
+ @property
+ def config(self):
+ if self._config is None:
+ self._config = self._read()
+
+ # Prioritise the token passed into the constructor
+ if self._token:
+ self._config['token'] = None if self._token is NoTokenSentinel else self._token
+
+ return self._config
+
+ def _read(self):
+ action = 'Opened'
+ if not os.path.isfile(self.b_file):
+            # token file not found, create it and chmod u+rw
+ open(self.b_file, 'w').close()
+ os.chmod(self.b_file, S_IRUSR | S_IWUSR) # owner has +rw
+ action = 'Created'
+
+ with open(self.b_file, 'r') as f:
+ config = yaml.safe_load(f)
+
+ display.vvv('%s %s' % (action, to_text(self.b_file)))
+
+ return config or {}
+
+ def set(self, token):
+ self._token = token
+ self.save()
+
+ def get(self):
+ return self.config.get('token', None)
+
+ def save(self):
+ with open(self.b_file, 'w') as f:
+ yaml.safe_dump(self.config, f, default_flow_style=False)
+
+ def headers(self):
+ headers = {}
+ token = self.get()
+ if token:
+ headers['Authorization'] = '%s %s' % (self.token_type, self.get())
+ return headers
+
+
+class BasicAuthToken(object):
+ token_type = 'Basic'
+
+ def __init__(self, username, password=None):
+ self.username = username
+ self.password = password
+ self._token = None
+
+ @staticmethod
+ def _encode_token(username, password):
+ token = "%s:%s" % (to_text(username, errors='surrogate_or_strict'),
+ to_text(password, errors='surrogate_or_strict', nonstring='passthru') or '')
+ b64_val = base64.b64encode(to_bytes(token, encoding='utf-8', errors='surrogate_or_strict'))
+ return to_text(b64_val)
+
+ def get(self):
+ if self._token:
+ return self._token
+
+ self._token = self._encode_token(self.username, self.password)
+
+ return self._token
+
+ def headers(self):
+ headers = {}
+ headers['Authorization'] = '%s %s' % (self.token_type, self.get())
+ return headers
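All three token classes expose the same small surface: `get()` returns the credential and `headers()` builds the `Authorization` header that the Galaxy client attaches to API requests. A minimal sketch with placeholder credentials:

```python
from ansible.galaxy.token import BasicAuthToken
from ansible.module_utils.urls import open_url

token = BasicAuthToken('example-user', 'example-password')  # placeholder credentials
headers = token.headers()  # {'Authorization': 'Basic <base64 of "example-user:example-password">'}

# The Galaxy API client passes headers like these to open_url when it talks to the server.
resp = open_url('https://galaxy.ansible.com/api/', headers=headers)
```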
diff --git a/lib/ansible/galaxy/user_agent.py b/lib/ansible/galaxy/user_agent.py
new file mode 100644
index 00000000..c860bcdb
--- /dev/null
+++ b/lib/ansible/galaxy/user_agent.py
@@ -0,0 +1,23 @@
+# Copyright: (c) 2019, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import platform
+import sys
+
+from ansible.module_utils.ansible_release import __version__ as ansible_version
+
+
+def user_agent():
+ """Returns a user agent used by ansible-galaxy to include the Ansible version, platform and python version."""
+
+ python_version = sys.version_info
+ return u"ansible-galaxy/{ansible_version} ({platform}; python:{py_major}.{py_minor}.{py_micro})".format(
+ ansible_version=ansible_version,
+ platform=platform.system(),
+ py_major=python_version.major,
+ py_minor=python_version.minor,
+ py_micro=python_version.micro,
+ )
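Both the API client and the token code pass this string to `open_url` via `http_agent`. A quick sketch of what the helper returns; the exact version numbers depend on the local environment:

```python
from ansible.galaxy.user_agent import user_agent

# Prints something like "ansible-galaxy/2.10.0 (Linux; python:3.8.5)" -- values vary by environment.
print(user_agent())
```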