ansible/test/units/galaxy/test_collection.py
Toshio Kuratomi 741b6e65ef [stable-2.9] Galaxy publish fix (#63580)
* Handle galaxy v2/v3 API diffs for artifact publish response

For publishing a collection artifact
(POST /v3/collections/artifacts/), the response
format is different between v2 and v3.

For v2 galaxy, the 'task' url returned is
a full url with scheme:

        {"task": "https://galaxy-dev.ansible.com/api/v2/collection-imports/35573/"}

For v3 galaxy, the task url is relative:

        {"task": "/api/automation-hub/v3/imports/collections/838d1308-a8f4-402c-95cb-7823f3806cd8/"}

So check which API we are using and update the task url appropriately.
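
A minimal sketch of the idea (not the actual GalaxyAPI code; the helper name
_full_task_url is hypothetical, and the stdlib import is used here only for
illustration):

        from urllib.parse import urlsplit

        def _full_task_url(api_server, task_url):
            # v2 already returns a full URL with scheme and host.
            if task_url.startswith(('http://', 'https://')):
                return task_url
            # v3 returns a path from the server root, so anchor it to the
            # scheme/host of the configured server rather than joining it
            # onto whatever extra path api_server may carry.
            parts = urlsplit(api_server)
            return '%s://%s%s' % (parts.scheme, parts.netloc, task_url)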

* Use full url for all wait_for_import messages

Update unit tests to parameterize the expected
responses and urls.
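
For illustration only (placeholder URLs, reusing the hypothetical
_full_task_url helper sketched above; not the shipped tests), the expected
values could be parameterized roughly like this:

        import pytest

        @pytest.mark.parametrize('task_response, expected_uri', [
            # v2: the response already carries the full import URL.
            ({'task': 'https://galaxy.server.com/api/v2/collection-imports/1234/'},
             'https://galaxy.server.com/api/v2/collection-imports/1234/'),
            # v3: the response carries a path that must be anchored to the host.
            ({'task': '/api/automation-hub/v3/imports/collections/1234/'},
             'https://galaxy.server.com/api/automation-hub/v3/imports/collections/1234/'),
        ])
        def test_task_url_is_absolute(task_response, expected_uri):
            assert _full_task_url('https://galaxy.server.com/api/', task_response['task']) == expected_uri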

* update explanatory comment

* Rename n_url to full_url.

* Fix issue with overwrite of the complete path

* Fixes overwrite of the complete path in case there's extra path stored
  in self.api_server
* Normalizes the input to the wait_import_task function so it receives
  the same value on both v2 and v3

Builds on #63523

* Update unittests for new call signature

* Add changelog for ansible-galaxy publish API fixes.
(cherry picked from commit 4cad7e4)

Co-authored-by: Toshio Kuratomi <a.badger@gmail.com>
2019-10-16 18:24:40 -07:00


# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import os
import pytest
import tarfile
import uuid

from hashlib import sha256
from io import BytesIO

from units.compat.mock import MagicMock

from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.galaxy import api, collection, token
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.utils import context_objects as co
from ansible.utils.display import Display
from ansible.utils.hashing import secure_hash_s


@pytest.fixture(autouse='function')
def reset_cli_args():
    co.GlobalCLIArgs._Singleton__instance = None
    yield
    co.GlobalCLIArgs._Singleton__instance = None


@pytest.fixture()
def collection_input(tmp_path_factory):
    ''' Creates a collection skeleton directory for build tests '''
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    namespace = 'ansible_namespace'
    collection = 'collection'
    skeleton = os.path.join(os.path.dirname(os.path.split(__file__)[0]), 'cli', 'test_data', 'collection_skeleton')

    galaxy_args = ['ansible-galaxy', 'collection', 'init', '%s.%s' % (namespace, collection),
                   '-c', '--init-path', test_dir, '--collection-skeleton', skeleton]
    GalaxyCLI(args=galaxy_args).run()
    collection_dir = os.path.join(test_dir, namespace, collection)
    output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Output'))

    return collection_dir, output_dir


@pytest.fixture()
def collection_artifact(monkeypatch, tmp_path_factory):
    ''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
    mock_open = MagicMock()
    monkeypatch.setattr(collection, 'open_url', mock_open)

    mock_uuid = MagicMock()
    mock_uuid.return_value.hex = 'uuid'
    monkeypatch.setattr(uuid, 'uuid4', mock_uuid)

    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
    input_file = to_text(tmp_path / 'collection.tar.gz')

    with tarfile.open(input_file, 'w:gz') as tfile:
        b_io = BytesIO(b"\x00\x01\x02\x03")
        tar_info = tarfile.TarInfo('test')
        tar_info.size = 4
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    return input_file, mock_open


@pytest.fixture()
def galaxy_yml(request, tmp_path_factory):
    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
    with open(b_galaxy_yml, 'wb') as galaxy_obj:
        galaxy_obj.write(to_bytes(request.param))

    yield b_galaxy_yml


@pytest.fixture()
def tmp_tarfile(tmp_path_factory):
    ''' Creates a temporary tar file for _extract_tar_file tests '''
    filename = u'ÅÑŚÌβŁÈ'
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-%s Collections' % to_native(filename)))
    tar_file = os.path.join(temp_dir, to_bytes('%s.tar.gz' % filename))
    data = os.urandom(8)

    with tarfile.open(tar_file, 'w:gz') as tfile:
        b_io = BytesIO(data)
        tar_info = tarfile.TarInfo(filename)
        tar_info.size = len(data)
        tar_info.mode = 0o0644
        tfile.addfile(tarinfo=tar_info, fileobj=b_io)

    sha256_hash = sha256()
    sha256_hash.update(data)

    with tarfile.open(tar_file, 'r') as tfile:
        yield temp_dir, tfile, filename, sha256_hash.hexdigest()


@pytest.fixture()
def galaxy_server():
    context.CLIARGS._store = {'ignore_certs': False}
    galaxy_api = api.GalaxyAPI(None, 'test_server', 'https://galaxy.ansible.com',
                               token=token.GalaxyToken(token='key'))
    return galaxy_api


def test_build_collection_no_galaxy_yaml():
    fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
    expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)

    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(fake_path, 'output', False)


def test_build_existing_output_file(collection_input):
    input_dir, output_dir = collection_input

    existing_output_dir = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    os.makedirs(existing_output_dir)

    expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
               % to_native(existing_output_dir)
    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(input_dir, output_dir, False)


def test_build_existing_output_without_force(collection_input):
    input_dir, output_dir = collection_input

    existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    with open(existing_output, 'w+') as out_file:
        out_file.write("random garbage")
        out_file.flush()

    expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
               % to_native(existing_output)
    with pytest.raises(AnsibleError, match=expected):
        collection.build_collection(input_dir, output_dir, False)


def test_build_existing_output_with_force(collection_input):
    input_dir, output_dir = collection_input

    existing_output = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    with open(existing_output, 'w+') as out_file:
        out_file.write("random garbage")
        out_file.flush()

    collection.build_collection(input_dir, output_dir, True)

    # Verify the file was replaced with an actual tar file
    assert tarfile.is_tarfile(existing_output)


@pytest.mark.parametrize('galaxy_yml', [b'namespace: value: broken'], indirect=True)
def test_invalid_yaml_galaxy_file(galaxy_yml):
    expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_yml)

    with pytest.raises(AnsibleError, match=expected):
        collection._get_galaxy_yml(galaxy_yml)


@pytest.mark.parametrize('galaxy_yml', [b'namespace: test_namespace'], indirect=True)
def test_missing_required_galaxy_key(galaxy_yml):
    expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
               "readme, version" % to_native(galaxy_yml)

    with pytest.raises(AnsibleError, match=expected):
        collection._get_galaxy_yml(galaxy_yml)


@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
invalid: value"""], indirect=True)
def test_warning_extra_keys(galaxy_yml, monkeypatch):
    display_mock = MagicMock()
    monkeypatch.setattr(Display, 'warning', display_mock)

    collection._get_galaxy_yml(galaxy_yml)

    assert display_mock.call_count == 1
    assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s': invalid"\
        % to_text(galaxy_yml)


@pytest.mark.parametrize('galaxy_yml', [b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md"""], indirect=True)
def test_defaults_galaxy_yml(galaxy_yml):
    actual = collection._get_galaxy_yml(galaxy_yml)

    assert actual['namespace'] == 'namespace'
    assert actual['name'] == 'collection'
    assert actual['authors'] == ['Jordan']
    assert actual['version'] == '0.1.0'
    assert actual['readme'] == 'README.md'
    assert actual['description'] is None
    assert actual['repository'] is None
    assert actual['documentation'] is None
    assert actual['homepage'] is None
    assert actual['issues'] is None
    assert actual['tags'] == []
    assert actual['dependencies'] == {}
    assert actual['license_ids'] == []


@pytest.mark.parametrize('galaxy_yml', [(b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license: MIT"""), (b"""
namespace: namespace
name: collection
authors: Jordan
version: 0.1.0
readme: README.md
license:
- MIT""")], indirect=True)
def test_galaxy_yml_list_value(galaxy_yml):
    actual = collection._get_galaxy_yml(galaxy_yml)
    assert actual['license_ids'] == ['MIT']


def test_build_ignore_files_and_folders(collection_input, monkeypatch):
    input_dir = collection_input[0]

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_display)

    git_folder = os.path.join(input_dir, '.git')
    retry_file = os.path.join(input_dir, 'ansible.retry')

    os.makedirs(git_folder)
    with open(retry_file, 'w+') as ignore_file:
        ignore_file.write('random')
        ignore_file.flush()

    actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection')

    assert actual['format'] == 1
    for manifest_entry in actual['files']:
        assert manifest_entry['name'] not in ['.git', 'ansible.retry', 'galaxy.yml']

    expected_msgs = [
        "Skipping '%s' for collection build" % to_text(retry_file),
        "Skipping '%s' for collection build" % to_text(git_folder),
    ]
    assert mock_display.call_count == 2
    assert mock_display.mock_calls[0][1][0] in expected_msgs
    assert mock_display.mock_calls[1][1][0] in expected_msgs


def test_build_ignore_older_release_in_root(collection_input, monkeypatch):
    input_dir = collection_input[0]

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'vvv', mock_display)

    # This is expected to be ignored because it is in the root collection dir.
    release_file = os.path.join(input_dir, 'namespace-collection-0.0.0.tar.gz')

    # This is not expected to be ignored because it is not in the root collection dir.
    fake_release_file = os.path.join(input_dir, 'plugins', 'namespace-collection-0.0.0.tar.gz')

    for filename in [release_file, fake_release_file]:
        with open(filename, 'w+') as file_obj:
            file_obj.write('random')
            file_obj.flush()

    actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection')

    assert actual['format'] == 1

    plugin_release_found = False
    for manifest_entry in actual['files']:
        assert manifest_entry['name'] != 'namespace-collection-0.0.0.tar.gz'
        if manifest_entry['name'] == 'plugins/namespace-collection-0.0.0.tar.gz':
            plugin_release_found = True

    assert plugin_release_found

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Skipping '%s' for collection build" % to_text(release_file)


def test_build_ignore_symlink_target_outside_collection(collection_input, monkeypatch):
    input_dir, outside_dir = collection_input

    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_display)

    link_path = os.path.join(input_dir, 'plugins', 'connection')
    os.symlink(outside_dir, link_path)

    actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection')
    for manifest_entry in actual['files']:
        assert manifest_entry['name'] != 'plugins/connection'

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == "Skipping '%s' as it is a symbolic link to a directory outside " \
        "the collection" % to_text(link_path)


def test_build_copy_symlink_target_inside_collection(collection_input):
    input_dir = collection_input[0]

    os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
    roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')

    roles_target = os.path.join(input_dir, 'roles', 'linked')
    roles_target_tasks = os.path.join(roles_target, 'tasks')
    os.makedirs(roles_target_tasks)
    with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
        tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
        tasks_main.flush()

    os.symlink(roles_target, roles_link)

    actual = collection._build_files_manifest(to_bytes(input_dir), 'namespace', 'collection')

    linked_entries = [e for e in actual['files'] if e['name'].startswith('playbooks/roles/linked')]
    assert len(linked_entries) == 3
    assert linked_entries[0]['name'] == 'playbooks/roles/linked'
    assert linked_entries[0]['ftype'] == 'dir'
    assert linked_entries[1]['name'] == 'playbooks/roles/linked/tasks'
    assert linked_entries[1]['ftype'] == 'dir'
    assert linked_entries[2]['name'] == 'playbooks/roles/linked/tasks/main.yml'
    assert linked_entries[2]['ftype'] == 'file'
    assert linked_entries[2]['chksum_sha256'] == '9c97a1633c51796999284c62236b8d5462903664640079b80c37bf50080fcbc3'


def test_build_with_symlink_inside_collection(collection_input):
    input_dir, output_dir = collection_input

    os.makedirs(os.path.join(input_dir, 'playbooks', 'roles'))
    roles_link = os.path.join(input_dir, 'playbooks', 'roles', 'linked')
    file_link = os.path.join(input_dir, 'docs', 'README.md')

    roles_target = os.path.join(input_dir, 'roles', 'linked')
    roles_target_tasks = os.path.join(roles_target, 'tasks')
    os.makedirs(roles_target_tasks)
    with open(os.path.join(roles_target_tasks, 'main.yml'), 'w+') as tasks_main:
        tasks_main.write("---\n- hosts: localhost\n tasks:\n - ping:")
        tasks_main.flush()

    os.symlink(roles_target, roles_link)
    os.symlink(os.path.join(input_dir, 'README.md'), file_link)

    collection.build_collection(input_dir, output_dir, False)

    output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
    assert tarfile.is_tarfile(output_artifact)

    with tarfile.open(output_artifact, mode='r') as actual:
        members = actual.getmembers()

        linked_members = [m for m in members if m.path.startswith('playbooks/roles/linked/tasks')]
        assert len(linked_members) == 2
        assert linked_members[0].name == 'playbooks/roles/linked/tasks'
        assert linked_members[0].isdir()

        assert linked_members[1].name == 'playbooks/roles/linked/tasks/main.yml'
        assert linked_members[1].isreg()

        linked_task = actual.extractfile(linked_members[1].name)
        actual_task = secure_hash_s(linked_task.read())
        linked_task.close()

        assert actual_task == 'f4dcc52576b6c2cd8ac2832c52493881c4e54226'

        linked_file = [m for m in members if m.path == 'docs/README.md']
        assert len(linked_file) == 1
        assert linked_file[0].isreg()

        linked_file_obj = actual.extractfile(linked_file[0].name)
        actual_file = secure_hash_s(linked_file_obj.read())
        linked_file_obj.close()

        assert actual_file == '63444bfc766154e1bc7557ef6280de20d03fcd81'


def test_publish_no_wait(galaxy_server, collection_artifact, monkeypatch):
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    artifact_path, mock_open = collection_artifact
    fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'

    mock_publish = MagicMock()
    mock_publish.return_value = fake_import_uri
    monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish)

    collection.publish_collection(artifact_path, galaxy_server, False, 0)

    assert mock_publish.call_count == 1
    assert mock_publish.mock_calls[0][1][0] == artifact_path

    assert mock_display.call_count == 1
    assert mock_display.mock_calls[0][1][0] == \
        "Collection has been pushed to the Galaxy server %s %s, not waiting until import has completed due to " \
        "--no-wait being set. Import task results can be found at %s" % (galaxy_server.name, galaxy_server.api_server,
                                                                         fake_import_uri)


def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    artifact_path, mock_open = collection_artifact
    fake_import_uri = 'https://galaxy.server.com/api/v2/import/1234'

    mock_publish = MagicMock()
    mock_publish.return_value = fake_import_uri
    monkeypatch.setattr(galaxy_server, 'publish_collection', mock_publish)

    mock_wait = MagicMock()
    monkeypatch.setattr(galaxy_server, 'wait_import_task', mock_wait)

    collection.publish_collection(artifact_path, galaxy_server, True, 0)

    assert mock_publish.call_count == 1
    assert mock_publish.mock_calls[0][1][0] == artifact_path

    assert mock_wait.call_count == 1
    assert mock_wait.mock_calls[0][1][0] == '1234'

    assert mock_display.mock_calls[0][1][0] == "Collection has been published to the Galaxy server test_server %s" \
        % galaxy_server.api_server


def test_find_existing_collections(tmp_path_factory, monkeypatch):
    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
    collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
    fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
    fake_collection2 = os.path.join(test_dir, 'namespace4')
    os.makedirs(collection1)
    os.makedirs(collection2)
    os.makedirs(os.path.split(fake_collection1)[0])

    open(fake_collection1, 'wb+').close()
    open(fake_collection2, 'wb+').close()

    collection1_manifest = json.dumps({
        'collection_info': {
            'namespace': 'namespace1',
            'name': 'collection1',
            'version': '1.2.3',
            'authors': ['Jordan Borean'],
            'readme': 'README.md',
            'dependencies': {},
        },
        'format': 1,
    })
    with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj:
        manifest_obj.write(to_bytes(collection1_manifest))

    mock_warning = MagicMock()
    monkeypatch.setattr(Display, 'warning', mock_warning)

    actual = collection._find_existing_collections(test_dir)

    assert len(actual) == 2
    for actual_collection in actual:
        assert actual_collection.skip is True

        if str(actual_collection) == 'namespace1.collection1':
            assert actual_collection.namespace == 'namespace1'
            assert actual_collection.name == 'collection1'
            assert actual_collection.b_path == to_bytes(collection1)
            assert actual_collection.api is None
            assert actual_collection.versions == set(['1.2.3'])
            assert actual_collection.latest_version == '1.2.3'
            assert actual_collection.dependencies == {}
        else:
            assert actual_collection.namespace == 'namespace2'
            assert actual_collection.name == 'collection2'
            assert actual_collection.b_path == to_bytes(collection2)
            assert actual_collection.api is None
            assert actual_collection.versions == set(['*'])
            assert actual_collection.latest_version == '*'
            assert actual_collection.dependencies == {}

    assert mock_warning.call_count == 1
    assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, cannot " \
                                               "detect version." % to_text(collection2)


def test_download_file(tmp_path_factory, monkeypatch):
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))

    data = b"\x00\x01\x02\x03"
    sha256_hash = sha256()
    sha256_hash.update(data)

    mock_open = MagicMock()
    mock_open.return_value = BytesIO(data)
    monkeypatch.setattr(collection, 'open_url', mock_open)

    expected = os.path.join(temp_dir, b'file')
    actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)

    assert actual.startswith(expected)
    assert os.path.isfile(actual)
    with open(actual, 'rb') as file_obj:
        assert file_obj.read() == data

    assert mock_open.call_count == 1
    assert mock_open.mock_calls[0][1][0] == 'http://google.com/file'


def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
    temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))

    data = b"\x00\x01\x02\x03"

    mock_open = MagicMock()
    mock_open.return_value = BytesIO(data)
    monkeypatch.setattr(collection, 'open_url', mock_open)

    expected = "Mismatch artifact hash with downloaded file"
    with pytest.raises(AnsibleError, match=expected):
        collection._download_file('http://google.com/file', temp_dir, 'bad', True)


def test_extract_tar_file_invalid_hash(tmp_tarfile):
    temp_dir, tfile, filename, dummy = tmp_tarfile

    expected = "Checksum mismatch for '%s' inside collection at '%s'" % (to_native(filename), to_native(tfile.name))
    with pytest.raises(AnsibleError, match=expected):
        collection._extract_tar_file(tfile, filename, temp_dir, temp_dir, "fakehash")


def test_extract_tar_file_missing_member(tmp_tarfile):
    temp_dir, tfile, dummy, dummy = tmp_tarfile

    expected = "Collection tar at '%s' does not contain the expected file 'missing'." % to_native(tfile.name)
    with pytest.raises(AnsibleError, match=expected):
        collection._extract_tar_file(tfile, 'missing', temp_dir, temp_dir)


def test_extract_tar_file_missing_parent_dir(tmp_tarfile):
    temp_dir, tfile, filename, checksum = tmp_tarfile
    output_dir = os.path.join(temp_dir, b'output')
    output_file = os.path.join(output_dir, to_bytes(filename))

    collection._extract_tar_file(tfile, filename, output_dir, temp_dir, checksum)
    # Verify the member was extracted even though the parent directory did not pre-exist.
    assert os.path.isfile(output_file)