2016-11-30 05:21:53 +00:00
|
|
|
"""Execute Ansible tests."""
|
|
|
|
|
|
|
|
from __future__ import absolute_import, print_function
|
|
|
|
|
2017-05-11 11:05:21 +00:00
|
|
|
import json
|
2016-11-30 05:21:53 +00:00
|
|
|
import os
|
2017-08-23 18:09:50 +00:00
|
|
|
import collections
|
2017-08-25 22:14:47 +00:00
|
|
|
import datetime
|
2017-03-08 08:47:21 +00:00
|
|
|
import re
|
2016-11-30 05:21:53 +00:00
|
|
|
import tempfile
|
|
|
|
import time
|
|
|
|
import textwrap
|
|
|
|
import functools
|
2017-10-20 15:48:01 +00:00
|
|
|
import pipes
|
2018-09-18 15:37:14 +00:00
|
|
|
import sys
|
2017-07-06 07:47:28 +00:00
|
|
|
import hashlib
|
2018-10-04 04:41:27 +00:00
|
|
|
import difflib
|
|
|
|
import filecmp
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
import lib.pytar
|
|
|
|
import lib.thread
|
|
|
|
|
|
|
|
from lib.core_ci import (
|
|
|
|
AnsibleCoreCI,
|
2017-01-18 20:51:24 +00:00
|
|
|
SshKey,
|
2016-11-30 05:21:53 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
from lib.manage_ci import (
|
|
|
|
ManageWindowsCI,
|
2017-01-08 07:36:35 +00:00
|
|
|
ManageNetworkCI,
|
2016-11-30 05:21:53 +00:00
|
|
|
)
|
|
|
|
|
2017-05-05 08:23:00 +00:00
|
|
|
from lib.cloud import (
|
|
|
|
cloud_filter,
|
|
|
|
cloud_init,
|
|
|
|
get_cloud_environment,
|
|
|
|
get_cloud_platforms,
|
|
|
|
)
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
from lib.util import (
|
|
|
|
ApplicationWarning,
|
|
|
|
ApplicationError,
|
|
|
|
SubprocessError,
|
|
|
|
display,
|
|
|
|
run_command,
|
2017-10-26 07:21:46 +00:00
|
|
|
intercept_command,
|
2016-11-30 05:21:53 +00:00
|
|
|
remove_tree,
|
|
|
|
make_dirs,
|
|
|
|
is_shippable,
|
2017-03-15 19:17:42 +00:00
|
|
|
is_binary_file,
|
2017-05-11 11:05:21 +00:00
|
|
|
find_executable,
|
|
|
|
raw_command,
|
2017-11-15 20:00:10 +00:00
|
|
|
get_coverage_path,
|
2018-05-09 16:24:39 +00:00
|
|
|
get_available_port,
|
2018-09-18 15:37:14 +00:00
|
|
|
generate_pip_command,
|
|
|
|
find_python,
|
2018-09-21 21:44:05 +00:00
|
|
|
get_docker_completion,
|
2018-05-09 16:24:39 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
from lib.docker_util import (
|
|
|
|
docker_pull,
|
|
|
|
docker_run,
|
|
|
|
get_docker_container_id,
|
|
|
|
get_docker_container_ip,
|
2016-11-30 05:21:53 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
from lib.ansible_util import (
|
|
|
|
ansible_environment,
|
|
|
|
)
|
|
|
|
|
|
|
|
from lib.target import (
|
|
|
|
IntegrationTarget,
|
|
|
|
walk_external_targets,
|
|
|
|
walk_internal_targets,
|
|
|
|
walk_posix_integration_targets,
|
|
|
|
walk_network_integration_targets,
|
|
|
|
walk_windows_integration_targets,
|
|
|
|
walk_units_targets,
|
|
|
|
)
|
|
|
|
|
|
|
|
from lib.changes import (
|
|
|
|
ShippableChanges,
|
|
|
|
LocalChanges,
|
|
|
|
)
|
|
|
|
|
|
|
|
from lib.git import (
|
|
|
|
Git,
|
|
|
|
)
|
|
|
|
|
|
|
|
from lib.classification import (
|
|
|
|
categorize_changes,
|
|
|
|
)
|
|
|
|
|
2017-07-06 23:14:44 +00:00
|
|
|
from lib.config import (
|
|
|
|
TestConfig,
|
|
|
|
EnvironmentConfig,
|
|
|
|
IntegrationConfig,
|
|
|
|
NetworkIntegrationConfig,
|
|
|
|
PosixIntegrationConfig,
|
|
|
|
ShellConfig,
|
|
|
|
UnitsConfig,
|
|
|
|
WindowsIntegrationConfig,
|
|
|
|
)
|
|
|
|
|
2018-04-12 23:15:28 +00:00
|
|
|
from lib.metadata import (
|
|
|
|
ChangeDescription,
|
|
|
|
)
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
# Python versions tests can be run under.
SUPPORTED_PYTHON_VERSIONS = (
    '2.6',
    '2.7',
    '3.5',
    '3.6',
    '3.7',
)

# Hostnames associated with the httptester container.
# NOTE(review): presumably these are the names the container serves and tests resolve; confirm against inject_httptester (not visible in this chunk).
HTTPTESTER_HOSTS = (
    'ansible.http.tests',
    'sni1.ansible.http.tests',
    'fail.ansible.http.tests',
)
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
|
2017-01-24 18:31:39 +00:00
|
|
|
def check_startup():
    """Perform startup sanity checks before any command runs."""
    check_legacy_modules()
|
|
|
|
|
|
|
|
|
|
|
|
def check_legacy_modules():
    """Detect conflicts with legacy core/extras module directories to avoid problems later."""
    for legacy_name in ('core', 'extras'):
        legacy_path = 'lib/ansible/modules/%s' % legacy_name

        for root, _, file_names in os.walk(legacy_path):
            # the directory shouldn't exist, but if it does, it must contain no files
            if file_names:
                raise ApplicationError('Files prohibited in "%s". '
                                       'These are most likely legacy modules from version 2.2 or earlier.' % root)
|
|
|
|
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
def create_shell_command(command):
    """Wrap the given command in /usr/bin/env, forwarding selected environment variables.

    :type command: list[str]
    :rtype: list[str]
    """
    passthrough_vars = (
        'TERM',  # keep interactive shells usable
    )

    env_args = ['%s=%s' % (name, os.environ[name]) for name in passthrough_vars if name in os.environ]

    return ['/usr/bin/env'] + env_args + list(command)
|
|
|
|
|
|
|
|
|
2018-09-18 15:37:14 +00:00
|
|
|
def install_command_requirements(args, python_version=None):
    """Install pip requirements for the current command, then verify they do not conflict.

    :type args: EnvironmentConfig
    :type python_version: str | None
    """
    # egg-info metadata must exist before tests import ansible from the source tree
    generate_egg_info(args)

    if not args.requirements:
        return

    # shell sessions do not install any test requirements
    if isinstance(args, ShellConfig):
        return

    packages = []

    # optional extras depending on the requested test features
    if isinstance(args, TestConfig):
        if args.coverage:
            packages.append('coverage')
        if args.junit:
            packages.append('junit-xml')

    if not python_version:
        python_version = args.python_version

    pip = generate_pip_command(find_python(python_version))

    # base requirements for the command itself, plus any optional packages selected above
    commands = [generate_pip_install(pip, args.command, packages=packages)]

    # integration runs also need the per-cloud-platform requirements files
    if isinstance(args, IntegrationConfig):
        for cloud_platform in get_cloud_platforms(args):
            commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))

    # generate_pip_install returns None when there is nothing to install
    commands = [cmd for cmd in commands if cmd]

    # only look for changes when more than one requirements file is needed
    detect_pip_changes = len(commands) > 1

    # first pass to install requirements, changes expected unless environment is already set up
    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

    if not changes:
        return  # no changes means we can stop early

    # second pass to check for conflicts in requirements, changes are not expected here
    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

    if not changes:
        return  # no changes means no conflicts

    raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
                           '\n'.join((' '.join(pipes.quote(c) for c in cmd) for cmd in changes)))
|
|
|
|
|
|
|
|
|
2017-10-26 07:21:46 +00:00
|
|
|
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
    """Run the given pip commands, returning the ones that changed the installed package set.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :type commands: list[list[str]]
    :type detect_pip_changes: bool
    :rtype: list[list[str]]
    """
    changes = []

    # snapshot the installed package list once up front; reused as the "before" state of the first command
    after_list = pip_list(args, pip) if detect_pip_changes else None

    for cmd in commands:
        if not cmd:
            continue

        # the previous command's "after" state is this command's "before" state
        before_list = after_list

        try:
            run_command(args, cmd)
        except SubprocessError as ex:
            if ex.status != 2:
                raise

            # If pip is too old it won't understand the arguments we passed in, so we'll need to upgrade it.

            # Installing "coverage" on ubuntu 16.04 fails with the error:
            # AttributeError: 'Requirement' object has no attribute 'project_name'
            # See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
            # Upgrading pip works around the issue.
            run_command(args, pip + ['install', '--upgrade', 'pip'])
            run_command(args, cmd)

        # re-snapshot to see whether this command changed anything
        after_list = pip_list(args, pip) if detect_pip_changes else None

        if before_list != after_list:
            changes.append(cmd)

    return changes
|
|
|
|
|
|
|
|
|
2017-10-26 07:21:46 +00:00
|
|
|
def pip_list(args, pip):
    """Return the captured stdout of `pip list` for the given pip command.

    :type args: EnvironmentConfig
    :type pip: list[str]
    :rtype: str
    """
    output, _ = run_command(args, pip + ['list'], capture=True)

    return output
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def generate_egg_info(args):
    """Generate the ansible.egg-info metadata unless it already exists.

    :type args: EnvironmentConfig
    """
    # a previous run (or an install) may have created the metadata already
    if os.path.isdir('lib/ansible.egg-info'):
        return

    # capture output unless running at high verbosity
    run_command(args, [args.python_executable, 'setup.py', 'egg_info'], capture=args.verbosity < 3)
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
2017-10-26 07:21:46 +00:00
|
|
|
def generate_pip_install(pip, command, packages=None):
    """Build a pip install command line for the given test command, or None if nothing needs installing.

    :type pip: list[str]
    :type command: str
    :type packages: list[str] | None
    :rtype: list[str] | None
    """
    constraints = 'test/runner/requirements/constraints.txt'
    requirements = 'test/runner/requirements/%s.txt' % command

    install_args = []

    # only reference the requirements file when it exists and is non-empty
    if os.path.exists(requirements) and os.path.getsize(requirements):
        install_args.extend(['-r', requirements])

    if packages:
        install_args.extend(packages)

    if not install_args:
        return None

    return pip + ['install', '--disable-pip-version-check', '-c', constraints] + install_args
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def command_shell(args):
    """Open an interactive shell in the test environment.

    :type args: ShellConfig
    """
    if args.delegate:
        raise Delegate()

    install_command_requirements(args)

    if args.inject_httptester:
        inject_httptester(args)

    shell_cmd = create_shell_command(['bash', '-i'])
    run_command(args, shell_cmd)
|
|
|
|
|
|
|
|
|
|
|
|
def command_posix_integration(args):
    """Run POSIX integration tests.

    :type args: PosixIntegrationConfig
    """
    all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
    selected_targets = command_integration_filter(args, all_targets)

    command_integration_filtered(args, selected_targets, all_targets)
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def command_network_integration(args):
    """Run network integration tests, provisioning remote instances when --platform is used.

    :type args: NetworkIntegrationConfig
    """
    default_filename = 'test/integration/inventory.networking'

    if args.inventory:
        filename = os.path.join('test/integration', args.inventory)
    else:
        filename = default_filename

    # without --platform an existing inventory file is required
    if not args.explain and not args.platform and not os.path.exists(filename):
        if args.inventory:
            filename = os.path.abspath(filename)

        raise ApplicationError(
            'Inventory not found: %s\n'
            'Use --inventory to specify the inventory path.\n'
            'Use --platform to provision resources and generate an inventory file.\n'
            'See also inventory template: %s.template' % (filename, default_filename)
        )

    all_targets = tuple(walk_network_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
    instances = []  # type: list [lib.thread.WrappedThread]

    if args.platform:
        get_coverage_path(args)  # initialize before starting threads

        # instance configs were stored in metadata by network_init (or before delegation)
        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        for platform_version in args.platform:
            platform, version = platform_version.split('/', 1)
            config = configs.get(platform_version)

            # platforms skipped during provisioning have no config entry
            if not config:
                continue

            # connect to each provisioned instance in its own thread
            instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until every connection thread has finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = network_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)

        if not args.explain:
            with open(filename, 'w') as inventory_fd:
                inventory_fd.write(inventory)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets)
        success = True
    finally:
        # tear down remote instances according to the --remote-terminate policy
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
2017-10-26 07:21:46 +00:00
|
|
|
def network_init(args, internal_targets):
    """Provision remote network instances for the requested platforms, storing their configs in metadata.

    :type args: NetworkIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.platform:
        return

    # instance configs may already be present (e.g. populated before delegation); don't provision twice
    if args.metadata.instance_config is not None:
        return

    # all 'network/<platform>/' aliases exercised by the selected targets
    platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))

    instances = []  # type: list [lib.thread.WrappedThread]

    # generate an ssh key (if needed) up front once, instead of for each instance
    SshKey(args)

    for platform_version in args.platform:
        platform, version = platform_version.split('/', 1)
        platform_target = 'network/%s/' % platform

        if platform_target not in platform_targets:
            # nothing selected targets this platform, so provisioning it would be wasted work
            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                platform_version, platform))
            continue

        # provision each instance in its own thread
        instance = lib.thread.WrappedThread(functools.partial(network_start, args, platform, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    # poll until every provisioning thread has finished
    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    # collect each thread's result (the saved instance config) into shared metadata
    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
|
|
|
|
|
|
|
|
|
|
|
|
def network_start(args, platform, version):
    """Start provisioning a remote network instance and return its saved configuration.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    """
    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
    core_ci.start()

    # the saved value is later fed back into network_run via args.metadata.instance_config
    return core_ci.save()
|
|
|
|
|
|
|
|
|
|
|
|
def network_run(args, platform, version, config):
    """Reconnect to a provisioned network instance and wait until it is reachable.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    # load the existing instance from its saved config instead of provisioning a new one
    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    core_ci.load(config)
    core_ci.wait()

    manager = ManageNetworkCI(core_ci)
    manager.wait()

    return core_ci
|
|
|
|
|
|
|
|
|
|
|
|
def network_inventory(remotes):
    """Render an inventory file body for the given network remotes.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    groups = dict((remote.platform, []) for remote in remotes)
    net = []

    for remote in remotes:
        host_vars = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
            ansible_network_os=remote.platform,
            ansible_connection='local'
        )

        rendered_vars = ' '.join('%s="%s"' % (name, host_vars[name]) for name in sorted(host_vars))

        # dots are not valid in inventory host names
        groups[remote.platform].append('%s %s' % (remote.name.replace('.', '-'), rendered_vars))

        net.append(remote.platform)

    # expose every platform group as a child of the "net" group
    groups['net:children'] = net

    sections = []

    for group in groups:
        sections.append(textwrap.dedent("""
        [%s]
        %s
        """) % (group, '\n'.join(groups[group])))

    return ''.join(sections)
|
|
|
|
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
def command_windows_integration(args):
    """Run Windows integration tests, provisioning remote instances when --windows is used.

    :type args: WindowsIntegrationConfig
    """
    filename = 'test/integration/inventory.winrm'

    # without --windows an existing inventory file is required
    if not args.explain and not args.windows and not os.path.isfile(filename):
        raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)

    all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
    internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
    instances = []  # type: list [lib.thread.WrappedThread]

    if args.windows:
        get_coverage_path(args)  # initialize before starting threads

        # instance configs were stored in metadata by windows_init (or before delegation)
        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)

        for version in args.windows:
            config = configs['windows/%s' % version]

            # connect to each provisioned instance in its own thread
            instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version, config))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        # poll until every connection thread has finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)

        if not args.explain:
            with open(filename, 'w') as inventory_fd:
                inventory_fd.write(inventory)

    success = False

    try:
        command_integration_filtered(args, internal_targets, all_targets)
        success = True
    finally:
        # tear down remote instances according to the --remote-terminate policy
        if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
            for instance in instances:
                instance.result.stop()
|
2017-10-26 07:21:46 +00:00
|
|
|
|
|
|
|
|
2018-09-21 18:38:22 +00:00
|
|
|
# noinspection PyUnusedLocal
def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
    """Provision Windows instances for the requested versions, storing their configs in metadata.

    :type args: WindowsIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.windows:
        return

    # instance configs may already be present (e.g. populated before delegation); don't provision twice
    if args.metadata.instance_config is not None:
        return

    threads = []  # type: list [lib.thread.WrappedThread]

    for version in args.windows:
        # provision each instance in its own thread
        thread = lib.thread.WrappedThread(functools.partial(windows_start, args, version))
        thread.daemon = True
        thread.start()
        threads.append(thread)

    # poll until every provisioning thread has finished
    while any(thread.is_alive() for thread in threads):
        time.sleep(1)

    args.metadata.instance_config = [thread.wait_for_result() for thread in threads]
|
|
|
|
|
|
|
|
|
|
|
|
def windows_start(args, version):
    """Start provisioning a remote Windows instance and return its saved configuration.

    :type args: WindowsIntegrationConfig
    :type version: str
    """
    core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
    core_ci.start()

    # the saved value is later fed back into windows_run via args.metadata.instance_config
    return core_ci.save()
|
|
|
|
|
|
|
|
|
|
|
|
def windows_run(args, version, config):
    """Reconnect to a provisioned Windows instance and wait until it is reachable.

    :type args: WindowsIntegrationConfig
    :type version: str
    :type config: dict[str, str]
    :rtype: AnsibleCoreCI
    """
    # load the existing instance from its saved config instead of provisioning a new one
    core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
    core_ci.load(config)
    core_ci.wait()

    manager = ManageWindowsCI(core_ci)
    manager.wait()

    return core_ci
|
|
|
|
|
|
|
|
|
|
|
|
def windows_inventory(remotes):
    """Render a winrm inventory file body for the given Windows remotes.

    :type remotes: list[AnsibleCoreCI]
    :rtype: str
    """
    host_lines = []

    for remote in remotes:
        host_vars = dict(
            ansible_host=remote.connection.hostname,
            ansible_user=remote.connection.username,
            ansible_password=remote.connection.password,
            ansible_port=remote.connection.port,
        )

        rendered_vars = ' '.join('%s="%s"' % (name, host_vars[name]) for name in sorted(host_vars))

        # slashes are not valid in inventory host names
        host_lines.append('%s %s' % (remote.name.replace('/', '_'), rendered_vars))

    template = """
    [windows]
    %s

    [windows:vars]
    ansible_connection=winrm
    ansible_winrm_server_cert_validation=ignore

    # support winrm connection tests (temporary solution, does not support testing enable/disable of pipelining)
    [winrm:children]
    windows

    # support winrm binary module tests (temporary solution)
    [testhost_binary_modules:children]
    windows
    """

    return textwrap.dedent(template) % ('\n'.join(host_lines))
|
|
|
|
|
|
|
|
|
2017-10-26 07:21:46 +00:00
|
|
|
def command_integration_filter(args, targets, init_callback=None):
    """Select the integration targets to run, applying change detection and environment filters.

    :type args: IntegrationConfig
    :type targets: collections.Iterable[IntegrationTarget]
    :type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
    :rtype: tuple[IntegrationTarget]
    """
    targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
    changes = get_changes_filter(args)

    # special behavior when the --changed-all-target target is selected based on changes
    if args.changed_all_target in changes:
        # act as though the --changed-all-target target was in the include list
        if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
            args.include.append(args.changed_all_target)
            args.delegate_args += ['--include', args.changed_all_target]
        # act as though the --changed-all-target target was in the exclude list
        elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
            args.exclude.append(args.changed_all_target)

    require = args.require + changes
    exclude = args.exclude

    internal_targets = walk_internal_targets(targets, args.include, exclude, require)
    environment_exclude = get_integration_filter(args, internal_targets)

    environment_exclude += cloud_filter(args, internal_targets)

    # re-filter if the environment excluded anything from the initial selection
    if environment_exclude:
        exclude += environment_exclude
        internal_targets = walk_internal_targets(targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(t.name == args.start_at for t in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    # give callers (e.g. network/windows provisioning) a chance to act on the final selection
    if init_callback:
        init_callback(args, internal_targets)

    cloud_init(args, internal_targets)

    if args.delegate:
        raise Delegate(require=require, exclude=exclude, integration_targets=internal_targets)

    install_command_requirements(args)

    return internal_targets
|
|
|
|
|
|
|
|
|
2017-08-23 18:09:50 +00:00
|
|
|
def command_integration_filtered(args, targets, all_targets):
    """Run the already-filtered integration targets, with setup, retry and result recording.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    setup_errors = []
    setup_targets_executed = set()

    # validate setup aliases before running anything
    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    test_dir = os.path.expanduser('~/ansible_testing')

    # make sure sshd is responding before running targets that need it
    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    if args.inject_httptester:
        inject_httptester(args)

    start_at_task = args.start_at_task

    results = {}

    # environment snapshot carried over from the previous iteration (taken after its target ran)
    current_environment = None  # type: EnvironmentDescription | None

    for target in targets_iter:
        # skip ahead to --start-at if requested
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        if args.list_targets:
            print(target.name)
            continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        cloud_environment = get_cloud_environment(args, target)

        # reuse the post-target snapshot from the previous iteration when available
        original_environment = current_environment if current_environment else EnvironmentDescription(args)
        current_environment = None

        display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)

        try:
            while tries:
                tries -= 1

                try:
                    if cloud_environment:
                        cloud_environment.setup_once()

                    run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, False)

                    start_time = time.time()

                    run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, True)

                    if not args.explain:
                        # create a fresh test directory for each test target
                        remove_tree(test_dir)
                        make_dirs(test_dir)

                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        # --start-at-task only applies to the first role target run
                        start_at_task = None

                    end_time = time.time()

                    results[target.name] = dict(
                        name=target.name,
                        type=target.type,
                        aliases=target.aliases,
                        modules=target.modules,
                        run_time_seconds=int(end_time - start_time),
                        setup_once=target.setup_once,
                        setup_always=target.setup_always,
                        coverage=args.coverage,
                        coverage_label=args.coverage_label,
                        python_version=args.python_version,
                    )

                    break
                except SubprocessError:
                    if cloud_environment:
                        cloud_environment.on_failure(target, tries)

                    # don't retry if the failure corrupted the environment
                    if not original_environment.validate(target.name, throw=False):
                        raise

                    if not tries:
                        raise

                    display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                    display.verbosity = args.verbosity = 6

            # snapshot the environment after the target to detect anything it changed
            start_time = time.time()
            current_environment = EnvironmentDescription(args)
            end_time = time.time()

            EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)

            results[target.name]['validation_seconds'] = int(end_time - start_time)

            passed.append(target)
        except Exception as ex:
            failed.append(target)

            if args.continue_on_error:
                display.error(ex)
                continue

            display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

            raise
        finally:
            # restore verbosity in case a retry raised it
            display.verbosity = args.verbosity = verbosity

    if not args.explain:
        # timestamped results file, e.g. integration-2018-01-01-00-00-00.json
        results_path = 'test/results/data/%s-%s.json' % (args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

        data = dict(
            targets=results,
        )

        with open(results_path, 'w') as results_fd:
            results_fd.write(json.dumps(data, sort_keys=True, indent=4))

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
|
2018-05-09 16:24:39 +00:00
|
|
|
def start_httptester(args):
    """Start the httptester container and compute the SSH port forwards for it.

    :type args: EnvironmentConfig
    :rtype: str, list[str]
    """
    # Ports are forwarded remote -> localhost -> container.
    # The localhost hop is only needed when ansible-test itself is not running inside a docker container.
    port_maps = [
        dict(
            remote=8080,
            container=80,
        ),
        dict(
            remote=8443,
            container=443,
        ),
    ]

    container_id = get_docker_container_id()

    if not container_id:
        for port_map in port_maps:
            port_map['localhost'] = get_available_port()
    else:
        display.info('Running in docker container: %s' % container_id, verbosity=1)

    docker_pull(args, args.httptester)

    # only entries that acquired a localhost port need publishing
    published = dict((port_map['localhost'], port_map['container']) for port_map in port_maps if 'localhost' in port_map)
    httptester_id = run_httptester(args, published)

    if container_id:
        container_host = get_docker_container_ip(args, httptester_id)
        display.info('Found httptester container address: %s' % container_host, verbosity=1)
    else:
        container_host = 'localhost'

    ssh_options = []

    for port_map in port_maps:
        forward = '%d:%s:%d' % (port_map['remote'], container_host, port_map.get('localhost', port_map['container']))
        ssh_options.extend(['-R', forward])

    return httptester_id, ssh_options
|
|
|
|
|
|
|
|
|
|
|
|
def run_httptester(args, ports=None):
    """Launch the httptester docker container and return its container id.

    :type args: EnvironmentConfig
    :type ports: dict[int, int] | None
    :rtype: str
    """
    options = ['--detach']

    if ports:
        for host_port, container_port in ports.items():
            options.extend(['-p', '%d:%d' % (host_port, container_port)])

    httptester_id, _ = docker_run(args, args.httptester, options=options)

    # docker_run produces no real output under --explain, so substitute a placeholder id
    if args.explain:
        return 'httptester_id'

    return httptester_id.strip()
|
|
|
|
|
|
|
|
|
|
|
|
def inject_httptester(args):
    """Inject httptester hostnames into /etc/hosts and redirect ports 80/443 to the httptester forwards.

    :type args: CommonConfig
    """
    # marker comment used to detect whether the hosts entries were already added
    comment = ' # ansible-test httptester\n'
    append_lines = ['127.0.0.1 %s%s' % (host, comment) for host in HTTPTESTER_HOSTS]

    with open('/etc/hosts', 'r+') as hosts_fd:
        original_lines = hosts_fd.readlines()

        # readlines() leaves the file position at EOF, so writelines() appends
        if not any(line.endswith(comment) for line in original_lines):
            hosts_fd.writelines(append_lines)

    # determine which forwarding mechanism to use
    pfctl = find_executable('pfctl', required=False)
    iptables = find_executable('iptables', required=False)

    if pfctl:
        # BSD/macOS: use pf redirection rules
        kldload = find_executable('kldload', required=False)

        if kldload:
            try:
                run_command(args, ['kldload', 'pf'], capture=True)
            except SubprocessError:
                pass  # already loaded

        rules = '''
rdr pass inet proto tcp from any to any port 80 -> 127.0.0.1 port 8080
rdr pass inet proto tcp from any to any port 443 -> 127.0.0.1 port 8443
'''
        cmd = ['pfctl', '-ef', '-']

        try:
            run_command(args, cmd, capture=True, data=rules)
        except SubprocessError:
            pass  # non-zero exit status on success

    elif iptables:
        # Linux: redirect loopback traffic with iptables NAT rules
        ports = [
            (80, 8080),
            (443, 8443),
        ]

        for src, dst in ports:
            rule = ['-o', 'lo', '-p', 'tcp', '--dport', str(src), '-j', 'REDIRECT', '--to-port', str(dst)]

            try:
                # check for existing rule
                cmd = ['iptables', '-t', 'nat', '-C', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
            except SubprocessError:
                # append rule when it does not exist
                cmd = ['iptables', '-t', 'nat', '-A', 'OUTPUT'] + rule
                run_command(args, cmd, capture=True)
    else:
        raise ApplicationError('No supported port forwarding mechanism detected.')
|
|
|
|
|
|
|
|
|
2017-08-23 18:09:50 +00:00
|
|
|
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, always):
    """Run the named setup targets, skipping ones already executed unless always is set.

    :type args: IntegrationConfig
    :type test_dir: str
    :type target_names: list[str]
    :type targets_dict: dict[str, IntegrationTarget]
    :type targets_executed: set[str]
    :type always: bool
    """
    for name in target_names:
        if name in targets_executed and not always:
            continue

        setup_target = targets_dict[name]

        if not args.explain:
            # each test target gets a freshly created test directory
            remove_tree(test_dir)
            make_dirs(test_dir)

        if setup_target.script_path:
            command_integration_script(args, setup_target)
        else:
            command_integration_role(args, setup_target, None)

        targets_executed.add(name)
|
|
|
|
|
|
|
|
|
2017-05-05 08:23:00 +00:00
|
|
|
def integration_environment(args, target, cmd):
    """Build the environment variables for running an integration test target.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type cmd: list[str]
    :rtype: dict[str, str]
    """
    env = ansible_environment(args)

    if args.inject_httptester:
        env['HTTPTESTER'] = '1'

    if args.debug_strategy:
        env['ANSIBLE_STRATEGY'] = 'debug'

    if 'non_local/' in target.aliases:
        if args.coverage:
            display.warning('Skipping coverage reporting for non-local test: %s' % target.name)

        env['ANSIBLE_TEST_REMOTE_INTERPRETER'] = ''

    env['JUNIT_OUTPUT_DIR'] = os.path.abspath('test/results/junit')
    env['ANSIBLE_CALLBACK_WHITELIST'] = 'junit'
    env['ANSIBLE_TEST_CI'] = args.metadata.ci_provider

    cloud_environment = get_cloud_environment(args, target)

    if cloud_environment:
        # cloud providers may add credentials to env and arguments to cmd
        cloud_environment.configure_environment(env, cmd)

    return env
|
|
|
|
|
|
|
|
|
|
|
|
def command_integration_script(args, target):
    """Execute a script-based integration test target.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    """
    display.info('Running %s integration test script' % target.name)

    script = os.path.basename(target.script_path)
    cmd = ['./%s' % script]

    if args.verbosity:
        cmd.append('-%s' % ('v' * args.verbosity))

    env = integration_environment(args, target, cmd)

    intercept_command(args, cmd, target_name=target.name, env=env, cwd=target.path)
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def command_integration_role(args, target, start_at_task):
    """Run a role-based integration test target via a generated throwaway playbook.

    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type start_at_task: str | None
    """
    display.info('Running %s integration test role' % target.name)

    vars_file = 'integration_config.yml'

    # select inventory, host pattern and fact gathering per test type
    if isinstance(args, WindowsIntegrationConfig):
        inventory = 'inventory.winrm'
        hosts = 'windows'
        gather_facts = False
    elif isinstance(args, NetworkIntegrationConfig):
        inventory = args.inventory or 'inventory.networking'
        # network targets are named <platform>_<name>; run against the platform group
        hosts = target.name[:target.name.find('_')]
        gather_facts = False
    else:
        inventory = 'inventory'
        hosts = 'testhost'
        gather_facts = True

    cloud_environment = get_cloud_environment(args, target)

    if cloud_environment:
        # cloud providers may redirect the play to their own host group
        hosts = cloud_environment.inventory_hosts or hosts

    playbook = '''
- hosts: %s
  gather_facts: %s
  roles:
    - { role: %s }
''' % (hosts, gather_facts, target.name)

    # the playbook file must exist on disk while ansible-playbook runs, hence the context manager
    with tempfile.NamedTemporaryFile(dir='test/integration', prefix='%s-' % target.name, suffix='.yml') as pb_fd:
        pb_fd.write(playbook.encode('utf-8'))
        pb_fd.flush()

        filename = os.path.basename(pb_fd.name)

        display.info('>>> Playbook: %s\n%s' % (filename, playbook.strip()), verbosity=3)

        cmd = ['ansible-playbook', filename, '-i', inventory, '-e', '@%s' % vars_file]

        if start_at_task:
            cmd += ['--start-at-task', start_at_task]

        if args.tags:
            cmd += ['--tags', args.tags]

        if args.skip_tags:
            cmd += ['--skip-tags', args.skip_tags]

        if args.diff:
            cmd += ['--diff']

        if isinstance(args, NetworkIntegrationConfig):
            if args.testcase:
                cmd += ['-e', 'testcase=%s' % args.testcase]

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        env = integration_environment(args, target, cmd)
        cwd = 'test/integration'

        env['ANSIBLE_ROLES_PATH'] = os.path.abspath('test/integration/targets')

        intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd)
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def command_units(args):
    """Run unit tests with pytest for each selected python version.

    :type args: UnitsConfig
    """
    changes = get_changes_filter(args)
    require = args.require + changes
    include, exclude = walk_external_targets(walk_units_targets(), args.include, args.exclude, require)

    if not include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    # phase 1: install requirements and build the command for every version
    version_commands = []

    for version in SUPPORTED_PYTHON_VERSIONS:
        # run all versions unless version given, in which case run only that version
        if args.python and version != args.python_version:
            continue

        if args.requirements_mode != 'skip':
            install_command_requirements(args, version)

        env = ansible_environment(args)

        cmd = [
            'pytest',
            '--boxed',
            '-r', 'a',
            '-n', 'auto',
            '--color',
            'yes' if args.color else 'no',
            '--junit-xml',
            'test/results/junit/python%s-units.xml' % version,
        ]

        if args.collect_only:
            cmd.append('--collect-only')

        if args.verbosity:
            cmd.append('-' + ('v' * args.verbosity))

        if exclude:
            cmd += ['--ignore=%s' % target.path for target in exclude]

        cmd += [target.path for target in include]

        version_commands.append((version, cmd, env))

    # with --requirements-mode only, installing requirements is the whole job
    if args.requirements_mode == 'only':
        sys.exit()

    # phase 2: run the prepared command for each version
    for version, command, env in version_commands:
        display.info('Unit test with Python %s' % version)

        try:
            intercept_command(args, command, target_name='units', env=env, python_version=version)
        except SubprocessError as ex:
            # pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
            if ex.status != 5:
                raise
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_changes_filter(args):
    """Return the list of test targets triggered by detected changes.

    :type args: TestConfig
    :rtype: list[str]
    """
    paths = detect_changes(args)

    if not args.metadata.change_description:
        args.metadata.change_description = categorize_changes(args, paths, args.command) if paths else ChangeDescription()

    if paths is None:
        # change detection not enabled, do not filter targets
        return []

    if not paths:
        raise NoChangesDetected()

    triggered_targets = args.metadata.change_description.targets

    if triggered_targets is None:
        raise NoTestsForChanges()

    return triggered_targets
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
def detect_changes(args):
    """Collect the list of changed files, or None when change detection is disabled.

    :type args: TestConfig
    :rtype: list[str] | None
    """
    if args.changed and is_shippable():
        display.info('Shippable detected, collecting parameters from environment.')
        paths = detect_changes_shippable(args)
    elif args.changed_from or args.changed_path:
        paths = args.changed_path or []

        if args.changed_from:
            with open(args.changed_from, 'r') as changes_fd:
                paths.extend(changes_fd.read().splitlines())
    elif args.changed:
        paths = detect_changes_local(args)
    else:
        return None  # change detection not enabled

    if paths is None:
        return None  # act as though change detection not enabled, do not filter targets

    display.info('Detected changes in %d file(s).' % len(paths))

    for changed_path in paths:
        display.info(changed_path, verbosity=1)

    return paths
|
|
|
|
|
|
|
|
|
|
|
|
def detect_changes_shippable(args):
    """Initialize change detection on Shippable.

    :type args: TestConfig
    :rtype: list[str] | None
    """
    git = Git(args)
    result = ShippableChanges(args, git)

    if result.is_pr:
        job_type = 'pull request'
    else:
        job_type = 'tag' if result.is_tag else 'merge commit'

    display.info('Processing %s for branch %s commit %s' % (job_type, result.branch, result.commit))

    if not args.metadata.changes:
        args.metadata.populate_changes(result.diff)

    return result.paths
|
|
|
|
|
|
|
|
|
|
|
|
def detect_changes_local(args):
    """Collect changed file paths from the local git checkout, honoring the tracked/untracked/committed/staged/unstaged option flags.

    :type args: TestConfig
    :rtype: list[str]
    """
    git = Git(args)
    result = LocalChanges(args, git)

    display.info('Detected branch %s forked from %s at commit %s' % (
        result.current_branch, result.fork_branch, result.fork_point))

    # warn about each change category the current options exclude
    if result.untracked and not args.untracked:
        display.warning('Ignored %s untracked file(s). Use --untracked to include them.' %
                        len(result.untracked))

    if result.committed and not args.committed:
        display.warning('Ignored %s committed change(s). Omit --ignore-committed to include them.' %
                        len(result.committed))

    if result.staged and not args.staged:
        display.warning('Ignored %s staged change(s). Omit --ignore-staged to include them.' %
                        len(result.staged))

    if result.unstaged and not args.unstaged:
        display.warning('Ignored %s unstaged change(s). Omit --ignore-unstaged to include them.' %
                        len(result.unstaged))

    # union of all change categories enabled by the options
    names = set()

    if args.tracked:
        names |= set(result.tracked)
    if args.untracked:
        names |= set(result.untracked)
    if args.committed:
        names |= set(result.committed)
    if args.staged:
        names |= set(result.staged)
    if args.unstaged:
        names |= set(result.unstaged)

    if not args.metadata.changes:
        args.metadata.populate_changes(result.diff)

        # untracked files have no diff; record their full line ranges instead
        for path in result.untracked:
            if is_binary_file(path):
                args.metadata.changes[path] = ((0, 0),)
                continue

            with open(path, 'r') as source_fd:
                line_count = len(source_fd.read().splitlines())

            args.metadata.changes[path] = ((1, line_count),)

    return sorted(names)
|
|
|
|
|
|
|
|
|
|
|
|
def get_integration_filter(args, targets):
    """Select and apply the exclusion filter for the configured test environment.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    if args.tox:
        # tox has the same exclusions as the local environment
        filter_func = get_integration_local_filter
    elif args.docker:
        filter_func = get_integration_docker_filter
    elif args.remote:
        filter_func = get_integration_remote_filter
    else:
        filter_func = get_integration_local_filter

    return filter_func(args, targets)
|
|
|
|
|
|
|
|
|
2018-04-12 23:15:28 +00:00
|
|
|
def common_integration_filter(args, targets, exclude):
    """Apply the exclusions shared by all integration test environments.

    Targets aliased disabled/, unsupported/ or unstable/ are excluded unless the
    matching --allow-* option is given or the target was explicitly included by
    its prefixed alias.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type exclude: list[str]
    """
    override_disabled = set(target for target in args.include if target.startswith('disabled/'))

    if not args.allow_disabled:
        _exclude_marked_targets(targets, exclude, override_disabled, 'disabled/', '--allow-disabled')

    override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))

    if not args.allow_unsupported:
        _exclude_marked_targets(targets, exclude, override_unsupported, 'unsupported/', '--allow-unsupported')

    override_unstable = set(target for target in args.include if target.startswith('unstable/'))

    if args.allow_unstable_changed:
        # targets focused by the detected changes are implicitly allowed to be unstable
        override_unstable |= set(args.metadata.change_description.focused_targets or [])

    if not args.allow_unstable:
        _exclude_marked_targets(targets, exclude, override_unstable, 'unstable/', '--allow-unstable')


def _exclude_marked_targets(targets, exclude, overrides, skip, option):
    """Exclude targets carrying the skip alias unless individually overridden, warning about each exclusion.

    :type targets: tuple[IntegrationTarget]
    :type exclude: list[str]
    :type overrides: set[str]
    :type skip: str
    :type option: str
    """
    override = [target.name for target in targets if overrides & set(target.aliases)]
    skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]

    if skipped:
        exclude.extend(skipped)
        display.warning('Excluding tests marked "%s" which require %s or prefixing with "%s": %s'
                        % (skip.rstrip('/'), option, skip, ', '.join(skipped)))
|
|
|
|
|
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
def get_integration_local_filter(args, targets):
    """Build the exclusion list for running integration tests on the local system.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    if not args.allow_root and os.getuid() != 0:
        skip = 'needs/root/'
        skipped = [target.name for target in targets if skip in target.aliases]

        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    if not args.allow_destructive:
        override_destructive = set(target for target in args.include if target.startswith('destructive/'))
        skip = 'destructive/'
        override = [target.name for target in targets if override_destructive & set(target.aliases)]
        skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]

        if skipped:
            exclude.extend(skipped)
            display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    python_version = 3 if args.python_version.startswith('3') else 2

    skip = 'skip/python%d/' % python_version
    skipped = [target.name for target in targets if skip in target.aliases]

    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))

    return exclude
|
|
|
|
|
|
|
|
|
|
|
|
def get_integration_docker_filter(args, targets):
    """Build the exclusion list for running integration tests under docker.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    exclude = []

    common_integration_filter(args, targets, exclude)

    skip = 'skip/docker/'
    skipped = [target.name for target in targets if skip in target.aliases]
    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
                        % (skip.rstrip('/'), ', '.join(skipped)))

    if not args.docker_privileged:
        skip = 'needs/privileged/'
        skipped = [target.name for target in targets if skip in target.aliases]
        if skipped:
            exclude.append(skip)
            display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
                            % (skip.rstrip('/'), ', '.join(skipped)))

    python_version = 2  # images are expected to default to python 2 unless otherwise specified

    # use an empty dict as the fallback so images missing from the completion data
    # keep the default python version instead of raising AttributeError on None
    python_version = int(get_docker_completion().get(args.docker_raw, {}).get('python', str(python_version)))

    if args.python:  # specifying a numeric --python option overrides the default python
        if args.python.startswith('3'):
            python_version = 3
        elif args.python.startswith('2'):
            python_version = 2

    skip = 'skip/python%d/' % python_version
    skipped = [target.name for target in targets if skip in target.aliases]
    if skipped:
        exclude.append(skip)
        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))

    return exclude
|
|
|
|
|
|
|
|
|
|
|
|
def get_integration_remote_filter(args, targets):
    """Build the exclusion list for running integration tests on a remote instance.

    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :rtype: list[str]
    """
    # remote names look like platform/version; only the platform matters here
    platform = args.remote.split('/', 1)[0]

    exclude = []

    common_integration_filter(args, targets, exclude)

    platform_skip = 'skip/%s/' % platform
    unsupported = [target.name for target in targets if platform_skip in target.aliases]

    if unsupported:
        exclude.append(platform_skip)
        display.warning('Excluding tests marked "%s" which are not supported on %s: %s'
                        % (platform_skip.rstrip('/'), platform, ', '.join(unsupported)))

    python_version = 2  # remotes are expected to default to python 2

    python_skip = 'skip/python%d/' % python_version
    unsupported = [target.name for target in targets if python_skip in target.aliases]

    if unsupported:
        exclude.append(python_skip)
        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
                        % (python_skip.rstrip('/'), python_version, ', '.join(unsupported)))

    return exclude
|
|
|
|
|
|
|
|
|
2017-05-11 11:05:21 +00:00
|
|
|
class EnvironmentDescription(object):
    """Description of current running environment.

    Snapshots python/pip paths, their reported versions and the known_hosts
    hash so a before/after comparison can detect tests that alter the
    environment.
    """
    def __init__(self, args):
        """Initialize snapshot of environment configuration.
        :type args: IntegrationConfig
        """
        self.args = args

        # under --explain nothing real runs, so capture an empty snapshot
        if self.args.explain:
            self.data = {}
            return

        warnings = []

        # '' covers the bare python/pip executables; also probe major-only versions (e.g. python3)
        versions = ['']
        versions += SUPPORTED_PYTHON_VERSIONS
        versions += list(set(v.split('.')[0] for v in SUPPORTED_PYTHON_VERSIONS))

        python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
        pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
        program_versions = dict((v, self.get_version([python_paths[v], 'test/runner/versions.py'], warnings)) for v in sorted(python_paths) if python_paths[v])
        pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
        known_hosts_hash = self.get_hash(os.path.expanduser('~/.ssh/known_hosts'))

        for version in sorted(versions):
            self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)

        for warning in warnings:
            display.warning(warning, unique=True)

        self.data = dict(
            python_paths=python_paths,
            pip_paths=pip_paths,
            program_versions=program_versions,
            pip_interpreters=pip_interpreters,
            known_hosts_hash=known_hosts_hash,
            warnings=warnings,
        )

    @staticmethod
    def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
        """Warn when the pip found for a version does not belong to the matching python interpreter.

        :type version: str
        :param python_paths: dict[str, str]
        :param pip_paths: dict[str, str]
        :param pip_interpreters: dict[str, str]
        :param warnings: list[str]
        """
        python_label = 'Python%s' % (' %s' % version if version else '')

        pip_path = pip_paths.get(version)
        python_path = python_paths.get(version)

        if not python_path and not pip_path:
            # neither python or pip is present for this version
            return

        if not python_path:
            warnings.append('A %s interpreter was not found, yet a matching pip was found at "%s".' % (python_label, pip_path))
            return

        if not pip_path:
            warnings.append('A %s interpreter was found at "%s", yet a matching pip was not found.' % (python_label, python_path))
            return

        pip_shebang = pip_interpreters.get(version)

        # extract the interpreter path from the pip script's shebang line
        match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)

        if not match:
            warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
            return

        # resolve symlinks so equivalent interpreters compare equal
        pip_interpreter = os.path.realpath(match.group('command'))
        python_interpreter = os.path.realpath(python_path)

        if pip_interpreter == python_interpreter:
            return

        # distinct paths may still be copies of the same binary
        try:
            identical = filecmp.cmp(pip_interpreter, python_interpreter)
        except OSError:
            identical = False

        if identical:
            return

        warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
            python_label, pip_path, pip_interpreter, python_interpreter))

    def __str__(self):
        """
        :rtype: str
        """
        return json.dumps(self.data, sort_keys=True, indent=4)

    def validate(self, target_name, throw):
        """Compare this snapshot against a freshly captured one.

        :type target_name: str
        :type throw: bool
        :rtype: bool
        """
        current = EnvironmentDescription(self.args)

        return self.check(self, current, target_name, throw)

    @staticmethod
    def check(original, current, target_name, throw):
        """Compare two snapshots, reporting (or raising on) any difference.

        :type original: EnvironmentDescription
        :type current: EnvironmentDescription
        :type target_name: str
        :type throw: bool
        :rtype: bool
        """
        # snapshots compare via their sorted JSON serialization
        original_json = str(original)
        current_json = str(current)

        if original_json == current_json:
            return True

        unified_diff = '\n'.join(difflib.unified_diff(
            a=original_json.splitlines(),
            b=current_json.splitlines(),
            fromfile='original.json',
            tofile='current.json',
            lineterm='',
        ))

        message = ('Test target "%s" has changed the test environment!\n'
                   'If these changes are necessary, they must be reverted before the test finishes.\n'
                   '>>> Original Environment\n'
                   '%s\n'
                   '>>> Current Environment\n'
                   '%s\n'
                   '>>> Environment Diff\n'
                   '%s'
                   % (target_name, original_json, current_json, unified_diff))

        if throw:
            raise ApplicationError(message)

        display.error(message)

        return False

    @staticmethod
    def get_version(command, warnings):
        """Run the given version-reporting command, returning its output lines or None on failure.

        :type command: list[str]
        :type warnings: list[str]
        :rtype: list[str]
        """
        try:
            stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
        except SubprocessError as ex:
            warnings.append(u'%s' % ex)
            return None  # all failures are equal, we don't care why it failed, only that it did

        return [line.strip() for line in ((stdout or '').strip() + (stderr or '').strip()).splitlines()]

    @staticmethod
    def get_shebang(path):
        """Return the first line of the script at path, stripped.

        :type path: str
        :rtype: str
        """
        with open(path) as script_fd:
            return script_fd.readline().strip()

    @staticmethod
    def get_hash(path):
        """Return the md5 hex digest of the file at path, or None when it does not exist.

        :type path: str
        :rtype: str | None
        """
        if not os.path.exists(path):
            return None

        file_hash = hashlib.md5()

        with open(path, 'rb') as file_fd:
            file_hash.update(file_fd.read())

        return file_hash.hexdigest()
|
|
|
|
|
2017-05-11 11:05:21 +00:00
|
|
|
|
2016-11-30 05:21:53 +00:00
|
|
|
class NoChangesDetected(ApplicationWarning):
    """Warning raised when change detection ran but found no changes."""
    def __init__(self):
        message = 'No changes detected.'
        super(NoChangesDetected, self).__init__(message)
|
|
|
|
|
|
|
|
|
|
|
|
class NoTestsForChanges(ApplicationWarning):
    """Warning raised when changes were detected but no tests were triggered by them."""
    def __init__(self):
        message = 'No tests found for detected changes.'
        super(NoTestsForChanges, self).__init__(message)
|
|
|
|
|
|
|
|
|
|
|
|
class Delegate(Exception):
    """Trigger command delegation."""
    def __init__(self, exclude=None, require=None, integration_targets=None):
        """
        :type exclude: list[str] | None
        :type require: list[str] | None
        :type integration_targets: tuple[IntegrationTarget] | None
        """
        super(Delegate, self).__init__()

        # Fall back to empty containers when the caller supplies nothing (or an empty value).
        self.exclude = exclude if exclude else []
        self.require = require if require else []
        self.integration_targets = integration_targets if integration_targets else tuple()
|
2016-11-30 05:21:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
class AllTargetsSkipped(ApplicationWarning):
    """Warning raised when every selected target was skipped."""
    def __init__(self):
        message = 'All targets skipped.'
        super(AllTargetsSkipped, self).__init__(message)
|