Overhaul ansible-test test path handling. (#61416)

* Remove .keep files from test/results/ dirs.

* Remove classification of test/results/ dir.

* Add results_relative to data context.

* Use variables in delegation paths.

* Standardize file writing and results paths.

* Fix issues reported by PyCharm.

* Clean up invocation of coverage command.

It now runs through the injector.

* Hack to allow intercept_command in cover.py.

* Simplify git ignore for test results.

* Use test result tmp dir instead of cache dir.

* Remove old .pytest_cache reference.

* Fix unit test docker delegation.

* Show HTML report link.

* Clean up more results references.

* Move import sanity test output to .tmp dir.

* Exclude test results dir from coverage.

* Fix import sanity test lib paths.

* Fix hard-coded import test paths.

* Fix most hard-coded integration test paths.

* Fix PyCharm warnings.

* Fix import placement.

* Fix integration test dir path.

* Fix Shippable scripts.

* Fix Shippable matrix check.

* Overhaul key pair management.
Matt Clay 2019-08-27 23:40:06 -07:00 committed by GitHub
parent bf108ee7bf
commit f5d829392a
38 changed files with 390 additions and 304 deletions
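The changes below converge on one pattern: writers resolve their destination from a named results category instead of hard-coding test/results paths. A minimal standalone sketch of that pattern, with simplified stand-ins for the helpers this commit adds to util_common.py (os.makedirs replaces ansible-test's make_dirs here, and the category is a plain string rather than a ResultType):

import json
import os

RESULTS_ROOT = os.path.join('test', 'results')  # assumed content-relative results root

def write_text_file(path, content, create_directories=False):
    """Write text content to the given path, optionally creating missing directories."""
    if create_directories:
        os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'wb') as file:
        file.write(content.encode('utf-8'))

def write_json_test_results(category, name, content):
    """Write JSON results under the category directory, e.g. test/results/data/."""
    path = os.path.join(RESULTS_ROOT, category, name)
    text = json.dumps(content, sort_keys=True, indent=4, ensure_ascii=False) + '\n'
    write_text_file(path, text, create_directories=True)

write_json_test_results('data', 'example.json', dict(targets=[]))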

.gitignore

@@ -79,14 +79,7 @@ ansible.egg-info/
# Release directory
packaging/release/ansible_release
/.cache/
/test/results/coverage/*=coverage.*
/test/results/coverage/coverage*
/test/results/reports/coverage*.xml
/test/results/reports/coverage*/
/test/results/bot/*.json
/test/results/junit/*.xml
/test/results/logs/*.log
/test/results/data/*.json
/test/results/
/test/integration/cloud-config-aws.yml
/test/integration/inventory.networking
/test/integration/inventory.winrm

test/cache/.keep


@@ -16,6 +16,9 @@ def main():
import traceback
import warnings
import_dir = os.environ['SANITY_IMPORT_DIR']
minimal_dir = os.environ['SANITY_MINIMAL_DIR']
try:
import importlib.util
imp = None # pylint: disable=invalid-name
@@ -266,9 +269,6 @@ def main():
filepath = os.path.relpath(warning.filename)
lineno = warning.lineno
import_dir = 'test/runner/.tox/import/'
minimal_dir = 'test/runner/.tox/minimal-'
if filepath.startswith('../') or filepath.startswith(minimal_dir):
# The warning occurred outside our source tree.
# The best we can do is to report the file which was tested that triggered the warning.
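The importer now takes both directories from the environment rather than hard-coding .tox paths. Roughly, with illustrative fallback values (the real script requires the variables, which the sanity import test exports further below):

import os

import_dir = os.environ.get('SANITY_IMPORT_DIR', 'test/results/.tmp/sanity/import/')
minimal_dir = os.environ.get('SANITY_MINIMAL_DIR', 'test/results/.tmp/sanity/import/minimal-py')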


@@ -21,6 +21,7 @@ from .util import (
from .util_common import (
run_command,
ResultType,
)
from .config import (
@@ -82,7 +83,7 @@ def ansible_environment(args, color=True, ansible_config=None):
if args.debug:
env.update(dict(
ANSIBLE_DEBUG='true',
ANSIBLE_LOG_PATH=os.path.join(data_context().results, 'logs', 'debug.log'),
ANSIBLE_LOG_PATH=os.path.join(ResultType.LOGS.name, 'debug.log'),
))
if data_context().content.collection:


@@ -276,7 +276,7 @@ class PathMapper:
if ext == '.cs':
return self.get_csharp_module_utils_usage(path)
if path.startswith('test/integration/targets/'):
if is_subdir(path, data_context().content.integration_targets_path):
return self.get_integration_target_usage(path)
return []
@@ -338,7 +338,8 @@ class PathMapper:
:rtype: list[str]
"""
target_name = path.split('/')[3]
dependents = [os.path.join('test/integration/targets/%s/' % target) for target in sorted(self.integration_dependencies.get(target_name, set()))]
dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep
for target in sorted(self.integration_dependencies.get(target_name, set()))]
return dependents
@@ -620,22 +621,10 @@ class PathMapper:
if path.startswith('test/ansible_test/'):
return minimal # these tests are not invoked from ansible-test
if path.startswith('test/cache/'):
return minimal
if path.startswith('test/results/'):
return minimal
if path.startswith('test/legacy/'):
return minimal
if path.startswith('test/env/'):
return minimal
if path.startswith('test/integration/roles/'):
return minimal
if path.startswith('test/integration/targets/'):
if is_subdir(path, data_context().content.integration_targets_path):
if not os.path.exists(path):
return minimal
@@ -655,25 +644,8 @@ class PathMapper:
FOCUSED_TARGET: True,
}
if path.startswith('test/integration/'):
if dirname == 'test/integration':
if self.prefixes.get(name) == 'network' and ext == '.yaml':
return minimal # network integration test playbooks are not used by ansible-test
if filename == 'network-all.yaml':
return minimal # network integration test playbook not used by ansible-test
if filename == 'platform_agnostic.yaml':
return minimal # network integration test playbook not used by ansible-test
if filename.startswith('inventory.') and filename.endswith('.template'):
return minimal # ansible-test does not use these inventory templates
if filename == 'inventory':
return {
'integration': self.integration_all_target,
}
if is_subdir(path, data_context().content.integration_path):
if dirname == data_context().content.integration_path:
for command in (
'integration',
'windows-integration',


@@ -888,7 +888,7 @@ def complete_network_testcase(prefix, parsed_args, **_):
if len(parsed_args.include) != 1:
return []
test_dir = 'test/integration/targets/%s/tests' % parsed_args.include[0]
test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')
connection_dirs = data_context().content.get_dirs(test_dir)
for connection_dir in connection_dirs:


@@ -5,7 +5,6 @@ __metaclass__ = type
import abc
import atexit
import datetime
import json
import time
import os
import platform
@@ -23,10 +22,14 @@ from ..util import (
load_plugins,
ABC,
to_bytes,
make_dirs,
ANSIBLE_TEST_CONFIG_ROOT,
)
from ..util_common import (
write_json_test_results,
ResultType,
)
from ..target import (
TestTarget,
)
@@ -158,17 +161,14 @@ def cloud_init(args, targets):
)
if not args.explain and results:
results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % (
args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))))
result_name = '%s-%s.json' % (
args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
data = dict(
clouds=results,
)
make_dirs(os.path.dirname(results_path))
with open(results_path, 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True, indent=4))
write_json_test_results(ResultType.DATA, result_name, data)
class CloudBase(ABC):
@@ -280,8 +280,6 @@ class CloudBase(ABC):
class CloudProvider(CloudBase):
"""Base class for cloud provider plugins. Sets up cloud resources before delegation."""
TEST_DIR = 'test/integration'
def __init__(self, args, config_extension='.ini'):
"""
:type args: IntegrationConfig
@@ -291,7 +289,7 @@ class CloudProvider(CloudBase):
self.remove_config = False
self.config_static_name = 'cloud-config-%s%s' % (self.platform, config_extension)
self.config_static_path = os.path.join(self.TEST_DIR, self.config_static_name)
self.config_static_path = os.path.join(data_context().content.integration_path, self.config_static_name)
self.config_template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, '%s.template' % self.config_static_name)
self.config_extension = config_extension
@@ -352,8 +350,8 @@ class CloudProvider(CloudBase):
"""
prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0]
with tempfile.NamedTemporaryFile(dir=self.TEST_DIR, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd:
filename = os.path.join(self.TEST_DIR, os.path.basename(config_fd.name))
with tempfile.NamedTemporaryFile(dir=data_context().content.integration_path, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd:
filename = os.path.join(data_context().content.integration_path, os.path.basename(config_fd.name))
self.config_path = filename
self.remove_config = True


@@ -3,7 +3,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
from . import (
CloudProvider,
@@ -14,10 +13,8 @@ from . import (
from ..util import (
find_executable,
display,
ApplicationError,
is_shippable,
ConfigParser,
SubprocessError,
)
from ..docker_util import (
@@ -32,10 +29,6 @@ from ..core_ci import (
AnsibleCoreCI,
)
from ..http import (
HttpClient,
)
class VcenterProvider(CloudProvider):
"""VMware vcenter/esx plugin. Sets up cloud resources for tests."""


@@ -14,7 +14,6 @@ from .util import (
generate_pip_command,
get_docker_completion,
ApplicationError,
INTEGRATION_DIR_RELATIVE,
)
from .util_common import (
@@ -247,7 +246,7 @@ class IntegrationConfig(TestConfig):
def get_ansible_config(self): # type: () -> str
"""Return the path to the Ansible config for the given config."""
ansible_config_relative_path = os.path.join(INTEGRATION_DIR_RELATIVE, '%s.cfg' % self.command)
ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command)
ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path)
if not os.path.exists(ansible_config_path):
@@ -327,6 +326,7 @@ class CoverageConfig(EnvironmentConfig):
self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set() # type: t.FrozenSet[str]
self.all = args.all if 'all' in args else False # type: bool
self.stub = args.stub if 'stub' in args else False # type: bool
self.coverage = False # temporary work-around to support intercept_command in cover.py
class CoverageReportConfig(CoverageConfig):


@@ -28,6 +28,8 @@ from .util import (
from .util_common import (
run_command,
write_json_file,
ResultType,
)
from .config import (
@@ -492,10 +494,7 @@ class AnsibleCoreCI:
config = self.save()
make_dirs(os.path.dirname(self.path))
with open(self.path, 'w') as instance_fd:
instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
write_json_file(self.path, config, create_directories=True)
def save(self):
"""
@@ -559,47 +558,81 @@ class SshKey:
"""
:type args: EnvironmentConfig
"""
cache_dir = os.path.join(data_context().content.root, 'test/cache')
key_pair = self.get_key_pair()
self.key = os.path.join(cache_dir, self.KEY_NAME)
self.pub = os.path.join(cache_dir, self.PUB_NAME)
if not key_pair:
key_pair = self.generate_key_pair(args)
key_dst = os.path.relpath(self.key, data_context().content.root)
pub_dst = os.path.relpath(self.pub, data_context().content.root)
if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
base_dir = os.path.expanduser('~/.ansible/test/')
key = os.path.join(base_dir, self.KEY_NAME)
pub = os.path.join(base_dir, self.PUB_NAME)
if not args.explain:
make_dirs(base_dir)
if not os.path.isfile(key) or not os.path.isfile(pub):
run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])
self.key = key
self.pub = pub
key, pub = key_pair
key_dst, pub_dst = self.get_in_tree_key_pair_paths()
def ssh_key_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
"""Add the SSH keys to the payload file list."""
"""
Add the SSH keys to the payload file list.
They are either outside the source tree or in the cache dir which is ignored by default.
"""
if data_context().content.collection:
working_path = data_context().content.collection.directory
else:
working_path = ''
files.append((key, os.path.join(working_path, key_dst)))
files.append((pub, os.path.join(working_path, pub_dst)))
files.append((key, os.path.join(working_path, os.path.relpath(key_dst, data_context().content.root))))
files.append((pub, os.path.join(working_path, os.path.relpath(pub_dst, data_context().content.root))))
data_context().register_payload_callback(ssh_key_callback)
self.key, self.pub = key, pub
if args.explain:
self.pub_contents = None
else:
with open(self.pub, 'r') as pub_fd:
self.pub_contents = pub_fd.read().strip()
def get_in_tree_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
"""Return the ansible-test SSH key pair paths from the content tree."""
temp_dir = ResultType.TMP.path
key = os.path.join(temp_dir, self.KEY_NAME)
pub = os.path.join(temp_dir, self.PUB_NAME)
return key, pub
def get_source_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
"""Return the ansible-test SSH key pair paths for the current user."""
base_dir = os.path.expanduser('~/.ansible/test/')
key = os.path.join(base_dir, self.KEY_NAME)
pub = os.path.join(base_dir, self.PUB_NAME)
return key, pub
def get_key_pair(self): # type: () -> t.Optional[t.Tuple[str, str]]
"""Return the ansible-test SSH key pair paths if present, otherwise return None."""
key, pub = self.get_in_tree_key_pair_paths()
if os.path.isfile(key) and os.path.isfile(pub):
return key, pub
key, pub = self.get_source_key_pair_paths()
if os.path.isfile(key) and os.path.isfile(pub):
return key, pub
return None
def generate_key_pair(self, args): # type: (EnvironmentConfig) -> t.Tuple[str, str]
"""Generate an SSH key pair for use by all ansible-test invocations for the current user."""
key, pub = self.get_source_key_pair_paths()
if not args.explain:
make_dirs(os.path.dirname(key))
if not os.path.isfile(key) or not os.path.isfile(pub):
run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])
return key, pub
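The lookup order above reduces to: prefer an in-tree key pair (written under the results .tmp directory for delegated runs), fall back to the per-user pair, and generate the per-user pair only when neither exists. A standalone sketch of that resolution, with illustrative file names and paths:

import os

KEY_NAME = 'id_rsa'  # hypothetical names for illustration
PUB_NAME = 'id_rsa.pub'

def find_key_pair(in_tree_dir, source_dir):
    """Return (key, pub) from the first directory containing both files, else None."""
    for base in (in_tree_dir, source_dir):
        key = os.path.join(base, KEY_NAME)
        pub = os.path.join(base, PUB_NAME)
        if os.path.isfile(key) and os.path.isfile(pub):
            return key, pub
    return None

pair = find_key_pair('test/results/.tmp', os.path.expanduser('~/.ansible/test'))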
class InstanceConnection:
"""Container for remote instance status and connection details."""


@@ -18,6 +18,8 @@ from xml.dom import (
minidom,
)
from . import types as t
from .target import (
walk_module_targets,
walk_compile_targets,
@@ -34,7 +36,8 @@ from .util import (
)
from .util_common import (
run_command,
intercept_command,
ResultType,
)
from .config import (
@@ -57,6 +60,7 @@ from .data import (
COVERAGE_GROUPS = ('command', 'target', 'environment', 'version')
COVERAGE_CONFIG_PATH = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'coveragerc')
COVERAGE_OUTPUT_FILE_NAME = 'coverage'
def command_coverage_combine(args):
@@ -74,9 +78,9 @@ def _command_coverage_combine_python(args):
"""
coverage = initialize_coverage(args)
modules = dict((t.module, t.path) for t in list(walk_module_targets()) if t.path.endswith('.py'))
modules = dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py'))
coverage_dir = os.path.join(data_context().results, 'coverage')
coverage_dir = ResultType.COVERAGE.path
coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
if '=coverage.' in f and '=python' in f]
@@ -140,7 +144,7 @@ def _command_coverage_combine_python(args):
invalid_path_count = 0
invalid_path_chars = 0
coverage_file = os.path.join(data_context().results, 'coverage', 'coverage')
coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
for group in sorted(groups):
arc_data = groups[group]
@@ -322,9 +326,7 @@ def command_coverage_report(args):
if args.omit:
options.extend(['--omit', args.omit])
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
run_command(args, env=env, cmd=['coverage', 'report', '--rcfile', COVERAGE_CONFIG_PATH] + options)
run_coverage(args, output_file, 'report', options)
def command_coverage_html(args):
@@ -339,10 +341,10 @@ def command_coverage_html(args):
display.info("Skipping output file %s in html generation" % output_file, verbosity=3)
continue
dir_name = os.path.join(data_context().results, 'reports', os.path.basename(output_file))
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
run_command(args, env=env, cmd=['coverage', 'html', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-d', dir_name])
dir_name = os.path.join(ResultType.REPORTS.path, os.path.basename(output_file))
run_coverage(args, output_file, 'html', ['-i', '-d', dir_name])
display.info('HTML report generated: file:///%s' % os.path.join(dir_name, 'index.html'))
def command_coverage_xml(args):
@@ -352,7 +354,7 @@ def command_coverage_xml(args):
output_files = command_coverage_combine(args)
for output_file in output_files:
xml_name = os.path.join(data_context().results, 'reports', '%s.xml' % os.path.basename(output_file))
xml_name = os.path.join(ResultType.REPORTS.path, '%s.xml' % os.path.basename(output_file))
if output_file.endswith('-powershell'):
report = _generage_powershell_xml(output_file)
@@ -363,9 +365,7 @@ def command_coverage_xml(args):
with open(xml_name, 'w') as xml_fd:
xml_fd.write(pretty)
else:
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
run_command(args, env=env, cmd=['coverage', 'xml', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-o', xml_name])
run_coverage(args, output_file, 'xml', ['-i', '-o', xml_name])
def command_coverage_erase(args):
@@ -374,7 +374,7 @@ def command_coverage_erase(args):
"""
initialize_coverage(args)
coverage_dir = os.path.join(data_context().results, 'coverage')
coverage_dir = ResultType.COVERAGE.path
for name in os.listdir(coverage_dir):
if not name.startswith('coverage') and '=coverage.' not in name:
@@ -440,13 +440,13 @@ def _command_coverage_combine_powershell(args):
:type args: CoverageConfig
:rtype: list[str]
"""
coverage_dir = os.path.join(data_context().results, 'coverage')
coverage_dir = ResultType.COVERAGE.path
coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
if '=coverage.' in f and '=powershell' in f]
def _default_stub_value(line_count):
def _default_stub_value(lines):
val = {}
for line in range(line_count):
for line in range(lines):
val[line] = 0
return val
@@ -504,7 +504,7 @@ def _command_coverage_combine_powershell(args):
invalid_path_count = 0
invalid_path_chars = 0
coverage_file = os.path.join(data_context().results, 'coverage', 'coverage')
coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
for group in sorted(groups):
coverage_data = groups[group]
@@ -543,7 +543,7 @@ def _command_coverage_combine_powershell(args):
def _generage_powershell_xml(coverage_file):
"""
:type input_path: str
:type coverage_file: str
:rtype: Element
"""
with open(coverage_file, 'rb') as coverage_fd:
@@ -669,7 +669,7 @@ def _add_cobertura_package(packages, package_name, package_data):
def _generate_powershell_output_report(args, coverage_file):
"""
:type args: CoverageConfig
:type args: CoverageReportConfig
:type coverage_file: str
:rtype: str
"""
@@ -756,3 +756,13 @@ def _generate_powershell_output_report(args, coverage_file):
report = '{0}\n{1}\n{2}\n{1}\n{3}'.format(header, line_break, "\n".join(lines), totals)
return report
def run_coverage(args, output_file, command, cmd): # type: (CoverageConfig, str, str, t.List[str]) -> None
"""Run the coverage cli tool with the specified options."""
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
cmd = ['python', '-m', 'coverage', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd
intercept_command(args, target_name='coverage', env=env, cmd=cmd, disable_coverage=True)
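Running the coverage CLI through intercept_command means the injector supplies the interpreter, while disable_coverage=True keeps ansible-test from collecting coverage of the coverage invocation itself (the CoverageConfig work-around earlier in this commit exists to permit that call). A simplified stand-in using subprocess instead of the injector:

import os
import subprocess

def run_coverage_sketch(output_file, command, extra_args):
    """Invoke the coverage CLI against one data file, e.g. command='html'."""
    env = dict(os.environ, COVERAGE_FILE=output_file)
    cmd = ['python', '-m', 'coverage', command, '--rcfile', 'coveragerc'] + extra_args
    subprocess.check_call(cmd, env=env)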


@@ -17,6 +17,10 @@ from .util import (
remove_tree,
)
from .util_common import (
write_text_file,
)
from .data import (
data_context,
)
@@ -45,8 +49,7 @@ def coverage_setup(args): # type: (TestConfig) -> None
else:
args.coverage_config_base_path = tempfile.mkdtemp()
with open(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), 'w') as coverage_config_path_fd:
coverage_config_path_fd.write(coverage_config)
write_text_file(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), coverage_config)
def coverage_cleanup(args): # type: (TestConfig) -> None
@@ -81,6 +84,7 @@ omit =
*/pyshared/*
*/pytest
*/AnsiballZ_*.py
*/test/results/*
'''
return coverage_config
@@ -110,7 +114,7 @@ include =
%s/*
omit =
*/test/runner/.tox/*
*/test/results/*
''' % data_context().content.root
else:
coverage_config += '''


@@ -72,7 +72,8 @@ class DataContext:
content = self.__create_content_layout(layout_providers, source_providers, current_path, True)
self.content = content # type: ContentLayout
self.results = os.path.join(self.content.root, 'test', 'results')
self.results_relative = os.path.join('test', 'results')
self.results = os.path.join(self.content.root, self.results_relative)
def create_collection_layouts(self): # type: () -> t.List[ContentLayout]
"""


@@ -50,6 +50,7 @@ from .util import (
from .util_common import (
run_command,
ResultType,
)
from .docker_util import (
@@ -241,6 +242,8 @@ def delegate_docker(args, exclude, require, integration_targets):
else:
content_root = install_root
remote_results_root = os.path.join(content_root, data_context().results_relative)
cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
if isinstance(args, TestConfig):
@@ -321,19 +324,12 @@ def delegate_docker(args, exclude, require, integration_targets):
# also disconnect from the network once requirements have been installed
if isinstance(args, UnitsConfig):
writable_dirs = [
os.path.join(install_root, '.pytest_cache'),
os.path.join(content_root, ResultType.JUNIT.relative_path),
os.path.join(content_root, ResultType.COVERAGE.relative_path),
]
if content_root != install_root:
writable_dirs.append(os.path.join(content_root, 'test/results/junit'))
writable_dirs.append(os.path.join(content_root, 'test/results/coverage'))
docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
if content_root == install_root:
docker_exec(args, test_id, ['find', os.path.join(content_root, 'test/results/'), '-type', 'd', '-exec', 'chmod', '777', '{}', '+'])
docker_exec(args, test_id, ['chmod', '755', '/root'])
docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
@@ -353,10 +349,16 @@ def delegate_docker(args, exclude, require, integration_targets):
try:
docker_exec(args, test_id, cmd, options=cmd_options)
finally:
local_test_root = os.path.dirname(data_context().results)
remote_test_root = os.path.dirname(remote_results_root)
remote_results_name = os.path.basename(remote_results_root)
remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
docker_exec(args, test_id, ['tar', 'czf', '/root/results.tgz', '-C', os.path.join(content_root, 'test'), 'results'])
docker_get(args, test_id, '/root/results.tgz', local_result_fd.name)
run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', 'test'])
docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '-C', remote_test_root, remote_results_name])
docker_get(args, test_id, remote_temp_file, local_result_fd.name)
run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
finally:
if httptester_id:
docker_rm(args, httptester_id)
@@ -470,8 +472,14 @@ def delegate_remote(args, exclude, require, integration_targets):
download = False
if download and content_root:
manage.ssh('rm -rf /tmp/results && cp -a %s/test/results /tmp/results && chmod -R a+r /tmp/results' % content_root)
manage.download('/tmp/results', 'test')
local_test_root = os.path.dirname(data_context().results)
remote_results_root = os.path.join(content_root, data_context().results_relative)
remote_results_name = os.path.basename(remote_results_root)
remote_temp_path = os.path.join('/tmp', remote_results_name)
manage.ssh('rm -rf {0} && cp -a {1} {0} && chmod -R a+r {0}'.format(remote_temp_path, remote_results_root))
manage.download(remote_temp_path, local_test_root)
finally:
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
core_ci.stop()
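Both delegation paths now derive their archive and download locations from results_relative instead of the literal test/results, so collection layouts work as well. A hedged sketch of the path derivation shared by the docker and remote cases above (the example arguments are hypothetical):

import os

def delegation_result_paths(local_results, content_root, results_relative):
    """Derive the paths used when pulling results back from a container or remote."""
    remote_results_root = os.path.join(content_root, results_relative)
    return dict(
        local_test_root=os.path.dirname(local_results),
        remote_test_root=os.path.dirname(remote_results_root),
        remote_results_name=os.path.basename(remote_results_root),
    )

paths = delegation_result_paths('/src/ansible/test/results', '/root/ansible', 'test/results')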


@@ -26,6 +26,12 @@ from .util import (
get_available_python_versions,
)
from .util_common import (
write_json_test_results,
write_json_file,
ResultType,
)
from .git import (
Git,
)
@@ -47,10 +53,6 @@ from .test import (
TestTimeout,
)
from .data import (
data_context,
)
from .executor import (
SUPPORTED_PYTHON_VERSIONS,
)
@@ -122,8 +124,7 @@ def show_dump_env(args):
show_dict(data, verbose)
if args.dump and not args.explain:
with open(os.path.join(data_context().results, 'bot', 'data-environment.json'), 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True))
write_json_test_results(ResultType.BOT, 'data-environment.json', data)
def set_timeout(args):
@@ -151,8 +152,7 @@ def set_timeout(args):
deadline=deadline,
)
with open(TIMEOUT_PATH, 'w') as timeout_fd:
json.dump(data, timeout_fd, indent=4, sort_keys=True)
write_json_file(TIMEOUT_PATH, data)
elif os.path.exists(TIMEOUT_PATH):
os.remove(TIMEOUT_PATH)


@@ -56,7 +56,6 @@ from .util import (
find_python,
get_docker_completion,
get_remote_completion,
COVERAGE_OUTPUT_NAME,
cmd_quote,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_DATA_ROOT,
@@ -71,6 +70,9 @@ from .util_common import (
intercept_command,
named_temporary_file,
run_command,
write_text_file,
write_json_test_results,
ResultType,
)
from .docker_util import (
@@ -128,9 +130,7 @@ from .integration import (
integration_test_environment,
integration_test_config_file,
setup_common_temp_dir,
INTEGRATION_VARS_FILE_RELATIVE,
get_inventory_relative_path,
INTEGRATION_DIR_RELATIVE,
check_inventory,
delegate_inventory,
)
@@ -198,8 +198,8 @@ def install_command_requirements(args, python_version=None):
:type python_version: str | None
"""
if not args.explain:
make_dirs(os.path.join(data_context().results, 'coverage'))
make_dirs(os.path.join(data_context().results, 'data'))
make_dirs(ResultType.COVERAGE.path)
make_dirs(ResultType.DATA.path)
if isinstance(args, ShellConfig):
if args.raw:
@@ -322,12 +322,9 @@ Author-email: info@ansible.com
License: GPLv3+
''' % get_ansible_version()
os.mkdir(egg_info_path)
pkg_info_path = os.path.join(egg_info_path, 'PKG-INFO')
with open(pkg_info_path, 'w') as pkg_info_fd:
pkg_info_fd.write(pkg_info.lstrip())
write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True)
def generate_pip_install(pip, command, packages=None):
@@ -394,7 +391,7 @@ def command_network_integration(args):
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
if args.inventory:
inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory)
inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
else:
inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
@@ -445,8 +442,7 @@ def command_network_integration(args):
display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
if not args.explain:
with open(inventory_path, 'w') as inventory_fd:
inventory_fd.write(inventory)
write_text_file(inventory_path, inventory)
success = False
@@ -576,7 +572,7 @@ def command_windows_integration(args):
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
if args.inventory:
inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory)
inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
else:
inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
@@ -620,8 +616,7 @@ def command_windows_integration(args):
display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
if not args.explain:
with open(inventory_path, 'w') as inventory_fd:
inventory_fd.write(inventory)
write_text_file(inventory_path, inventory)
use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets)
# if running under Docker delegation, the httptester may have already been started
@@ -681,9 +676,9 @@ def command_windows_integration(args):
pre_target = forward_ssh_ports
post_target = cleanup_ssh_ports
def run_playbook(playbook, playbook_vars):
def run_playbook(playbook, run_playbook_vars): # type: (str, t.Dict[str, t.Any]) -> None
playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(playbook_vars)]
command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)]
if args.verbosity:
command.append('-%s' % ('v' * args.verbosity))
@@ -716,7 +711,7 @@ def command_windows_integration(args):
for filename in os.listdir(local_temp_path):
with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
coverage_zip.extractall(os.path.join(data_context().results, 'coverage'))
coverage_zip.extractall(ResultType.COVERAGE.path)
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
for instance in instances:
@@ -882,7 +877,7 @@ def command_integration_filter(args, # type: TIntegrationConfig
cloud_init(args, internal_targets)
vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
if os.path.exists(vars_file_src):
def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
@@ -895,7 +890,7 @@ def command_integration_filter(args, # type: TIntegrationConfig
else:
working_path = ''
files.append((vars_file_src, os.path.join(working_path, INTEGRATION_VARS_FILE_RELATIVE)))
files.append((vars_file_src, os.path.join(working_path, data_context().content.integration_vars_path)))
data_context().register_payload_callback(integration_config_callback)
@@ -1086,23 +1081,22 @@ def command_integration_filtered(args, targets, all_targets, inventory_path, pre
finally:
if not args.explain:
if args.coverage:
coverage_temp_path = os.path.join(common_temp_path, COVERAGE_OUTPUT_NAME)
coverage_save_path = os.path.join(data_context().results, 'coverage')
coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
coverage_save_path = ResultType.COVERAGE.path
for filename in os.listdir(coverage_temp_path):
shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))
remove_tree(common_temp_path)
results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % (
args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))))
result_name = '%s-%s.json' % (
args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
data = dict(
targets=results,
)
with open(results_path, 'w') as results_fd:
results_fd.write(json.dumps(data, sort_keys=True, indent=4))
write_json_test_results(ResultType.DATA, result_name, data)
if failed:
raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
@@ -1286,7 +1280,7 @@ def integration_environment(args, target, test_dir, inventory_path, ansible_conf
callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else [])
integration = dict(
JUNIT_OUTPUT_DIR=os.path.join(data_context().results, 'junit'),
JUNIT_OUTPUT_DIR=ResultType.JUNIT.path,
ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))),
ANSIBLE_TEST_CI=args.metadata.ci_provider,
ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''),


@@ -5,6 +5,8 @@ __metaclass__ = type
import ast
import os
from . import types as t
from .util import (
display,
ApplicationError,
@@ -35,13 +37,8 @@ def get_python_module_utils_imports(compile_targets):
for target in compile_targets:
imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)
def recurse_import(import_name, depth=0, seen=None):
"""Recursively expand module_utils imports from module_utils files.
:type import_name: str
:type depth: int
:type seen: set[str] | None
:rtype set[str]
"""
def recurse_import(import_name, depth=0, seen=None): # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str]
"""Recursively expand module_utils imports from module_utils files."""
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
if seen is None:


@@ -27,17 +27,16 @@ from ..util import (
display,
make_dirs,
COVERAGE_CONFIG_NAME,
COVERAGE_OUTPUT_NAME,
MODE_DIRECTORY,
MODE_DIRECTORY_WRITE,
MODE_FILE,
INTEGRATION_DIR_RELATIVE,
INTEGRATION_VARS_FILE_RELATIVE,
to_bytes,
)
from ..util_common import (
named_temporary_file,
write_text_file,
ResultType,
)
from ..coverage_util import (
@@ -73,12 +72,11 @@ def setup_common_temp_dir(args, path):
coverage_config = generate_coverage_config(args)
with open(coverage_config_path, 'w') as coverage_config_fd:
coverage_config_fd.write(coverage_config)
write_text_file(coverage_config_path, coverage_config)
os.chmod(coverage_config_path, MODE_FILE)
coverage_output_path = os.path.join(path, COVERAGE_OUTPUT_NAME)
coverage_output_path = os.path.join(path, ResultType.COVERAGE.name)
os.mkdir(coverage_output_path)
os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
@@ -153,7 +151,7 @@ def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str
NetworkIntegrationConfig: 'inventory.networking',
} # type: t.Dict[t.Type[IntegrationConfig], str]
return os.path.join(INTEGRATION_DIR_RELATIVE, inventory_names[type(args)])
return os.path.join(data_context().content.integration_path, inventory_names[type(args)])
def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None
@@ -202,10 +200,10 @@ def integration_test_environment(args, target, inventory_path_src):
if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.')
integration_dir = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE)
integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path)
inventory_path = inventory_path_src
ansible_config = ansible_config_src
vars_file = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
yield IntegrationEnvironment(integration_dir, inventory_path, ansible_config, vars_file)
return
@@ -237,11 +235,11 @@ def integration_test_environment(args, target, inventory_path_src):
files_needed = get_files_needed(target_dependencies)
integration_dir = os.path.join(temp_dir, INTEGRATION_DIR_RELATIVE)
integration_dir = os.path.join(temp_dir, data_context().content.integration_path)
ansible_config = os.path.join(temp_dir, ansible_config_relative)
vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE)
vars_file = os.path.join(temp_dir, INTEGRATION_VARS_FILE_RELATIVE)
vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path)
file_copies = [
(ansible_config_src, ansible_config),
@@ -253,8 +251,10 @@ def integration_test_environment(args, target, inventory_path_src):
file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed]
integration_targets_relative_path = data_context().content.integration_targets_path
directory_copies = [
(os.path.join(INTEGRATION_DIR_RELATIVE, 'targets', target.name), os.path.join(integration_dir, 'targets', target.name))
(os.path.join(integration_targets_relative_path, target.name), os.path.join(temp_dir, integration_targets_relative_path, target.name))
for target in target_dependencies
]


@@ -11,6 +11,10 @@ from .util import (
is_shippable,
)
from .util_common import (
write_json_file,
)
from .diff import (
parse_diff,
FileDiff,
@@ -72,8 +76,7 @@ class Metadata:
display.info('>>> Metadata: %s\n%s' % (path, data), verbosity=3)
with open(path, 'w') as data_fd:
json.dump(data, data_fd, sort_keys=True, indent=4)
write_json_file(path, data)
@staticmethod
def from_file(path):


@@ -81,6 +81,7 @@ class ContentLayout(Layout):
paths, # type: t.List[str]
plugin_paths, # type: t.Dict[str, str]
collection=None, # type: t.Optional[CollectionDetail]
integration_path=None, # type: t.Optional[str]
unit_path=None, # type: t.Optional[str]
unit_module_path=None, # type: t.Optional[str]
unit_module_utils_path=None, # type: t.Optional[str]
@@ -89,6 +90,9 @@
self.plugin_paths = plugin_paths
self.collection = collection
self.integration_path = integration_path
self.integration_targets_path = os.path.join(integration_path, 'targets')
self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml')
self.unit_path = unit_path
self.unit_module_path = unit_module_path
self.unit_module_utils_path = unit_module_utils_path
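Both layout providers below pass the same integration_path value; the targets and vars paths are derived from it, so ansible core and collections share one code path. A trimmed-down sketch of the derivation:

import os

class ContentLayoutSketch:
    """Stand-in: callers pass only integration_path, the rest is derived."""
    def __init__(self, integration_path):
        self.integration_path = integration_path
        self.integration_targets_path = os.path.join(integration_path, 'targets')
        self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml')

layout = ContentLayoutSketch('test/integration')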


@@ -31,6 +31,7 @@ class AnsibleLayout(LayoutProvider):
return ContentLayout(root,
paths,
plugin_paths=plugin_paths,
integration_path='test/integration',
unit_path='test/units',
unit_module_path='test/units/modules',
unit_module_utils_path='test/units/module_utils',


@@ -44,6 +44,7 @@ class CollectionLayout(LayoutProvider):
namespace=collection_namespace,
root=collection_root,
),
integration_path='test/integration',
unit_path='test/unit',
unit_module_path='test/unit/plugins/modules',
unit_module_utils_path='test/unit/plugins/module_utils',


@@ -24,7 +24,6 @@ from ..util import (
display,
find_python,
parse_to_list_of_dict,
make_dirs,
is_subdir,
ANSIBLE_LIB_ROOT,
)
@@ -32,6 +31,8 @@ from ..util import (
from ..util_common import (
intercept_command,
run_command,
write_text_file,
ResultType,
)
from ..ansible_util import (
@@ -75,8 +76,10 @@ class ImportTest(SanityMultipleVersion):
env = ansible_environment(args, color=False)
temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
# create a clean virtual environment to minimize the available imports beyond the python standard library
virtual_environment_path = os.path.abspath('test/runner/.tox/minimal-py%s' % python_version.replace('.', ''))
virtual_environment_path = os.path.join(temp_root, 'minimal-py%s' % python_version.replace('.', ''))
virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
remove_tree(virtual_environment_path)
@@ -96,7 +99,7 @@ class ImportTest(SanityMultipleVersion):
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
# create a minimal python library
python_path = os.path.abspath('test/runner/.tox/import/lib')
python_path = os.path.join(temp_root, 'lib')
ansible_path = os.path.join(python_path, 'ansible')
ansible_init = os.path.join(ansible_path, '__init__.py')
ansible_link = os.path.join(ansible_path, 'module_utils')
@@ -104,10 +107,7 @@
if not args.explain:
remove_tree(ansible_path)
make_dirs(ansible_path)
with open(ansible_init, 'w'):
pass
write_text_file(ansible_init, '', create_directories=True)
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'module_utils'), ansible_link)
@@ -116,21 +116,22 @@
# the __init__.py files are needed only for Python 2.x
# the empty modules directory is required for the collection loader to generate the synthetic packages list
make_dirs(os.path.join(ansible_path, 'utils'))
with open(os.path.join(ansible_path, 'utils/__init__.py'), 'w'):
pass
write_text_file(os.path.join(ansible_path, 'utils/__init__.py'), '', create_directories=True)
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'collection_loader.py'), os.path.join(ansible_path, 'utils', 'collection_loader.py'))
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'singleton.py'), os.path.join(ansible_path, 'utils', 'singleton.py'))
make_dirs(os.path.join(ansible_path, 'modules'))
with open(os.path.join(ansible_path, 'modules/__init__.py'), 'w'):
pass
write_text_file(os.path.join(ansible_path, 'modules/__init__.py'), '', create_directories=True)
# activate the virtual environment
env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
env['PYTHONPATH'] = python_path
env.update(
SANITY_IMPORT_DIR=os.path.relpath(temp_root, data_context().content.root) + os.path.sep,
SANITY_MINIMAL_DIR=os.path.relpath(virtual_environment_path, data_context().content.root) + os.path.sep,
)
# make sure coverage is available in the virtual environment if needed
if args.coverage:
run_command(args, generate_pip_install(['pip'], 'sanity.import', packages=['setuptools']), env=env)
@@ -163,9 +164,11 @@ class ImportTest(SanityMultipleVersion):
results = parse_to_list_of_dict(pattern, ex.stdout)
relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep
results = [SanityMessage(
message=r['message'],
path=r['path'],
path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
line=int(r['line']),
column=int(r['column']),
) for r in results]
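The remapping above strips the sanity temp prefix from reported paths so messages point back at the original source files. A minimal sketch (the example path is hypothetical):

import os

def remap_import_path(path, relative_temp_root):
    """Strip the temp prefix, e.g. 'test/results/.tmp/sanity/import/'."""
    if path.startswith(relative_temp_root):
        return os.path.relpath(path, relative_temp_root)
    return path

print(remap_import_path('test/results/.tmp/sanity/import/lib/ansible/example.py',
                        'test/results/.tmp/sanity/import/'))  # lib/ansible/example.py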


@@ -2,7 +2,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import textwrap
import re
import os
@@ -37,8 +36,9 @@ from ..util import (
display,
)
from ..data import (
data_context,
from ..util_common import (
write_json_test_results,
ResultType,
)
@@ -180,8 +180,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
self.check_changes(args, results)
with open(os.path.join(data_context().results, 'bot', 'data-sanity-ci.json'), 'w') as results_fd:
json.dump(results, results_fd, sort_keys=True, indent=4)
write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results)
messages = []


@@ -228,7 +228,7 @@ def walk_integration_targets():
"""
:rtype: collections.Iterable[IntegrationTarget]
"""
path = 'test/integration/targets'
path = data_context().content.integration_targets_path
modules = frozenset(target.module for target in walk_module_targets())
paths = data_context().content.get_dirs(path)
prefixes = load_integration_prefixes()
@@ -241,7 +241,7 @@ def load_integration_prefixes():
"""
:rtype: dict[str, str]
"""
path = 'test/integration'
path = data_context().content.integration_path
file_paths = sorted(f for f in data_context().content.get_files(path) if os.path.splitext(os.path.basename(f))[0] == 'target-prefixes')
prefixes = {}
@@ -306,7 +306,7 @@ def analyze_integration_target_dependencies(integration_targets):
:type integration_targets: list[IntegrationTarget]
:rtype: dict[str,set[str]]
"""
real_target_root = os.path.realpath('test/integration/targets') + '/'
real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/'
role_targets = [target for target in integration_targets if target.type == 'role']
hidden_role_target_names = set(target.name for target in role_targets if 'hidden/' in target.aliases)
@@ -595,10 +595,12 @@ class IntegrationTarget(CompletionTarget):
if self.type not in ('script', 'role'):
groups.append('hidden')
targets_relative_path = data_context().content.integration_targets_path
# Collect file paths before group expansion to avoid including the directories.
# Ignore references to test targets, as those must be defined using `needs/target/*` or other target references.
self.needs_file = tuple(sorted(set('/'.join(g.split('/')[2:]) for g in groups if
g.startswith('needs/file/') and not g.startswith('needs/file/test/integration/targets/'))))
g.startswith('needs/file/') and not g.startswith('needs/file/%s/' % targets_relative_path))))
for group in itertools.islice(groups, 0, len(groups)):
if '/' in group:


@@ -3,25 +3,24 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import json
import os
from . import types as t
from .util import (
display,
make_dirs,
to_bytes,
)
from .util_common import (
write_text_test_results,
write_json_test_results,
ResultType,
)
from .config import (
TestConfig,
)
from .data import (
data_context,
)
def calculate_best_confidence(choices, metadata):
"""
@@ -118,23 +117,22 @@ class TestResult:
:type args: TestConfig
"""
def create_path(self, directory, extension):
def create_result_name(self, extension):
"""
:type directory: str
:type extension: str
:rtype: str
"""
path = os.path.join(data_context().results, directory, 'ansible-test-%s' % self.command)
name = 'ansible-test-%s' % self.command
if self.test:
path += '-%s' % self.test
name += '-%s' % self.test
if self.python_version:
path += '-python-%s' % self.python_version
name += '-python-%s' % self.python_version
path += extension
name += extension
return path
return name
def save_junit(self, args, test_case, properties=None):
"""
@@ -143,8 +141,6 @@ class TestResult:
:type properties: dict[str, str] | None
:rtype: str | None
"""
path = self.create_path('junit', '.xml')
test_suites = [
self.junit.TestSuite(
name='ansible-test',
@@ -159,8 +155,7 @@
if args.explain:
return
with open(path, 'wb') as xml:
xml.write(to_bytes(report))
write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), report)
class TestTimeout(TestResult):
@@ -207,10 +202,7 @@ One or more of the following situations may be responsible:
</testsuites>
''' % (timestamp, message, output)
path = self.create_path('junit', '.xml')
with open(path, 'w') as junit_fd:
junit_fd.write(xml.lstrip())
write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), xml.lstrip())
class TestSuccess(TestResult):
@@ -335,16 +327,10 @@ class TestFailure(TestResult):
],
)
path = self.create_path('bot', '.json')
if args.explain:
return
make_dirs(os.path.dirname(path))
with open(path, 'w') as bot_fd:
json.dump(bot_data, bot_fd, indent=4, sort_keys=True)
bot_fd.write('\n')
write_json_test_results(ResultType.BOT, self.create_result_name('.json'), bot_data)
def populate_confidence(self, metadata):
"""


@@ -17,6 +17,7 @@ try:
Tuple,
Type,
TypeVar,
Union,
)
except ImportError:
pass


@@ -15,6 +15,7 @@ from ..util import (
from ..util_common import (
intercept_command,
ResultType,
)
from ..ansible_util import (
@@ -98,7 +99,7 @@ def command_units(args):
'yes' if args.color else 'no',
'-p', 'no:cacheprovider',
'-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
'--junit-xml', os.path.join(data_context().results, 'junit', 'python%s-units.xml' % version),
'--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-units.xml' % version),
]
if not data_context().content.collection:


@@ -62,7 +62,6 @@ except AttributeError:
MAXFD = -1
COVERAGE_CONFIG_NAME = 'coveragerc'
COVERAGE_OUTPUT_NAME = 'coverage'
ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -82,9 +81,6 @@ if not os.path.exists(ANSIBLE_LIB_ROOT):
ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data')
ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config')
INTEGRATION_DIR_RELATIVE = 'test/integration'
INTEGRATION_VARS_FILE_RELATIVE = os.path.join(INTEGRATION_DIR_RELATIVE, 'integration_config.yml')
# Modes are set to allow all users the same level of access.
# This permits files to be used in tests that change users.
# The only exception is write access to directories for the user creating them.
@@ -801,8 +797,8 @@ def get_available_port():
def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]]
"""Returns the set of types that are concrete subclasses of the given type."""
subclasses = set()
queue = [class_type]
subclasses = set() # type: t.Set[t.Type[C]]
queue = [class_type] # type: t.List[t.Type[C]]
while queue:
parent = queue.pop()


@@ -4,15 +4,17 @@ __metaclass__ = type
import atexit
import contextlib
import json
import os
import shutil
import tempfile
import textwrap
from . import types as t
from .util import (
common_environment,
COVERAGE_CONFIG_NAME,
COVERAGE_OUTPUT_NAME,
display,
find_python,
is_shippable,
@@ -22,6 +24,7 @@ from .util import (
raw_command,
to_bytes,
ANSIBLE_TEST_DATA_ROOT,
make_dirs,
)
from .data import (
@@ -29,6 +32,47 @@ from .data import (
)
class ResultType:
"""Test result type."""
BOT = None # type: ResultType
COVERAGE = None # type: ResultType
DATA = None # type: ResultType
JUNIT = None # type: ResultType
LOGS = None # type: ResultType
REPORTS = None # type: ResultType
TMP = None # type: ResultType
@staticmethod
def _populate():
ResultType.BOT = ResultType('bot')
ResultType.COVERAGE = ResultType('coverage')
ResultType.DATA = ResultType('data')
ResultType.JUNIT = ResultType('junit')
ResultType.LOGS = ResultType('logs')
ResultType.REPORTS = ResultType('reports')
ResultType.TMP = ResultType('.tmp')
def __init__(self, name): # type: (str) -> None
self.name = name
@property
def relative_path(self): # type: () -> str
"""The content relative path to the results."""
return os.path.join(data_context().results_relative, self.name)
@property
def path(self): # type: () -> str
"""The absolute path to the results."""
return os.path.join(data_context().results, self.name)
def __str__(self): # type: () -> str
return self.name
# noinspection PyProtectedMember
ResultType._populate() # pylint: disable=protected-access
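Each category resolves two paths: one relative to the content root (for use inside payloads and containers) and one absolute (for local reads and writes). A standalone sketch with hypothetical roots standing in for the data context:

import os

class ResultTypeSketch:
    """Stand-in showing how a category resolves its two paths."""
    def __init__(self, name, results_relative='test/results', results_root='/src/ansible/test/results'):
        self.name = name
        self.relative_path = os.path.join(results_relative, name)  # content-relative
        self.path = os.path.join(results_root, name)               # absolute

junit = ResultTypeSketch('junit')
print(junit.relative_path)  # test/results/junit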
class CommonConfig:
"""Configuration common to all commands."""
def __init__(self, args, command):
@@ -75,6 +119,33 @@ def named_temporary_file(args, prefix, suffix, directory, content):
yield tempfile_fd.name
def write_json_test_results(category, name, content): # type: (ResultType, str, t.Union[t.List[t.Any], t.Dict[str, t.Any]]) -> None
"""Write the given json content to the specified test results path, creating directories as needed."""
path = os.path.join(category.path, name)
write_json_file(path, content, create_directories=True)
def write_text_test_results(category, name, content): # type: (ResultType, str, str) -> None
"""Write the given text content to the specified test results path, creating directories as needed."""
path = os.path.join(category.path, name)
write_text_file(path, content, create_directories=True)
def write_json_file(path, content, create_directories=False): # type: (str, t.Union[t.List[t.Any], t.Dict[str, t.Any]], bool) -> None
"""Write the given json content to the specified path, optionally creating missing directories."""
text_content = json.dumps(content, sort_keys=True, indent=4, ensure_ascii=False) + '\n'
write_text_file(path, text_content, create_directories=create_directories)
def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None
"""Write the given text content to the specified path, optionally creating missing directories."""
if create_directories:
make_dirs(os.path.dirname(path))
with open(to_bytes(path), 'wb') as file:
file.write(to_bytes(content))
def get_python_path(args, interpreter):
"""
:type args: TestConfig
@@ -126,8 +197,7 @@ def get_python_path(args, interpreter):
execv(python, [python] + argv[1:])
''' % (interpreter, interpreter)).lstrip()
with open(injected_interpreter, 'w') as python_fd:
python_fd.write(code)
write_text_file(injected_interpreter, code)
os.chmod(injected_interpreter, MODE_FILE_EXECUTE)
@@ -173,7 +243,7 @@ def get_coverage_environment(args, target_name, version, temp_path, module_cover
raise Exception('No temp path and no coverage config base path. Check for missing coverage_context usage.')
config_file = os.path.join(coverage_config_base_path, COVERAGE_CONFIG_NAME)
coverage_file = os.path.join(coverage_output_base_path, COVERAGE_OUTPUT_NAME, '%s=%s=%s=%s=coverage' % (
coverage_file = os.path.join(coverage_output_base_path, ResultType.COVERAGE.name, '%s=%s=%s=%s=coverage' % (
args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version))
if not args.explain and not os.path.exists(config_file):


@@ -94,7 +94,13 @@ def fail(message, output): # type: (str, str) -> NoReturn
</testsuites>
''' % (timestamp, message, output)
with open('test/results/junit/check-matrix.xml', 'w') as junit_fd:
path = 'shippable/testresults/check-matrix.xml'
dir_path = os.path.dirname(path)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
with open(path, 'w') as junit_fd:
junit_fd.write(xml.lstrip())
sys.stderr.write(message + '\n')


@@ -73,6 +73,7 @@ find lib/ansible/modules -type d -empty -print -delete
function cleanup
{
if [ -d test/results/coverage/ ]; then
if find test/results/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then
# for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy
if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then
@@ -117,11 +118,19 @@ function cleanup
done
fi
fi
fi
rmdir shippable/testresults/
if [ -d test/results/junit/ ]; then
cp -a test/results/junit/ shippable/testresults/
fi
if [ -d test/results/data/ ]; then
cp -a test/results/data/ shippable/testresults/
fi
if [ -d test/results/bot/ ]; then
cp -aT test/results/bot/ shippable/testresults/
fi
}
trap cleanup EXIT