Ansible Config part2 (#27448)

* Ansible Config part2

- made dump_me nicer, added a note that it is not for production use
- moved internal key removal function to vars
- carry tracebacks in errors; we can now show tracebacks for plugins at -vvv
- show inventory plugin tracebacks at -vvv
- minor fixes to cg groups plugin
- draft config from plugin docs (see the sketch after this list)
- made search path warning 'saner' (top level dirs only)
- correctly display config entries and other settings
- removed unneeded code
- commented out some connection-plugin-specific entries in base.yml
- also deprecated sudo/su
- updated ssh conn docs
- shared get option method for connection plugins
- note about needing eval for defaults
- tailored YAML extension handling
- updated strategy entry
- for connection plugins, options load on plugin load
- allow for long types in definitions
- better display in ansible-doc
- cleaned up/updated source docs and base.yml
- added many descriptions
- deprecated include toggles, as include itself is deprecated
- draft backwards compat get_config
- fixes to ansible-config, added --only-changed
- some code reorg
- switched to short license headers
- show the default in ansible-doc's --type help
- pushed module_utils details to -vvvvv
- work w/o config file
- PEPE ATE!
- moved loader to its own file
- fixed rhn_register test
- fixed boto requirement in make tests
- I ate Pepe
- fixed dynamic eval of defaults
- better doc code

skip ipaddr filter tests when missing netaddr
removed devnull string from config
better become resolution
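
A rough sketch of the resulting API, for orientation (names come from this diff; the constant and plugin names in the snippet are illustrative, and the snippet itself is not part of the commit):

    from ansible import constants as C

    # base settings are resolved in precedence order: play vars (when passed in),
    # then env vars, then the ini file, then the definition's default
    value, origin = C.config.get_config_value_and_origin('DEFAULT_TIMEOUT')

    # plugin settings: definitions are registered from the plugin's DOCUMENTATION
    # on plugin load, then resolved per plugin (connection plugins share a
    # get_option() method for this)
    defs = C.config.get_configuration_definitions('connection', 'ssh')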

* killed extra space with extreme prejudice

because it's an affront against all that is holy that 2 spaces touch each other!

Shippable is timing out on some images, but merging as it passes on most
Brian Coca 2017-08-15 16:38:59 -04:00 committed by GitHub
parent 8b617aaef5
commit f921369445
53 changed files with 1859 additions and 1569 deletions

View file

@@ -45,7 +45,7 @@ from ansible.module_utils.six import PY3
from ansible.module_utils.six.moves import cPickle
from ansible.module_utils.connection import send_data, recv_data
from ansible.playbook.play_context import PlayContext
from ansible.plugins import connection_loader
from ansible.plugins.loader import connection_loader
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.errors import AnsibleConnectionFailure
from ansible.utils.display import Display

View file

@@ -29,7 +29,7 @@ from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins import get_all_plugin_loaders
from ansible.plugins.loader import get_all_plugin_loaders
try:
from __main__ import display
@@ -105,6 +105,9 @@ class AdHocCLI(CLI):
(sshpass, becomepass) = self.ask_passwords()
passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
# dynamically load any plugins
get_all_plugin_loaders()
loader, inventory, variable_manager = self._play_prereqs(self.options)
no_hosts = False
@@ -138,13 +141,6 @@
if self.options.module_name in ('include', 'include_role'):
raise AnsibleOptionsError("'%s' is not a valid action for ad-hoc commands" % self.options.module_name)
# dynamically load any plugins from the playbook directory
for name, obj in get_all_plugin_loaders():
if obj.subdir:
plugin_path = os.path.join('.', obj.subdir)
if os.path.isdir(plugin_path):
obj.add_directory(plugin_path)
play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

View file

@@ -26,8 +26,7 @@ import sys
import yaml
from ansible.cli import CLI
from ansible.config.data import Setting
from ansible.config.manager import ConfigManager
from ansible.config.manager import ConfigManager, Setting
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils._text import to_native, to_text
from ansible.parsing.yaml.dumper import AnsibleDumper
@@ -68,6 +67,8 @@ class ConfigCLI(CLI):
if self.action == "list":
self.parser.set_usage("usage: %prog list [options] ")
if self.action == "dump":
self.parser.add_option('--only-changed', dest='only_changed', action='store_true',
help="Only show configurations that have changed from the default")
self.parser.set_usage("usage: %prog dump [options] [-c ansible.cfg]")
elif self.action == "view":
self.parser.set_usage("usage: %prog view [options] [-c ansible.cfg] ")
@@ -154,14 +155,15 @@
'''
list all current config definitions, read from the base configuration, and show env and config file setting names
'''
self.pager(to_text(yaml.dump(self.config.initial_defs, Dumper=AnsibleDumper), errors='surrogate_or_strict'))
self.pager(to_text(yaml.dump(self.config.get_configuration_definitions(), Dumper=AnsibleDumper), errors='surrogate_or_strict'))
def execute_dump(self):
'''
Shows the current settings, merges ansible.cfg if specified
'''
# FIXME: deal with plugins, not just base config
text = []
defaults = self.config.initial_defs.copy()
defaults = self.config.get_configuration_definitions().copy()
for setting in self.config.data.get_settings():
if setting.name in defaults:
defaults[setting.name] = setting
@@ -176,6 +178,7 @@
else:
color = 'green'
msg = "%s(%s) = %s" % (setting, 'default', defaults[setting].get('default'))
if not self.options.only_changed or color == 'yellow':
text.append(stringc(msg, color))
self.pager(to_text('\n'.join(text), errors='surrogate_or_strict'))
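
(Usage sketch, not part of the diff: ansible-config dump prints every setting, coloring non-default values yellow and defaults green; with the new --only-changed flag, only the yellow, i.e. changed, entries are emitted.)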

View file

@@ -44,7 +44,7 @@ from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins import module_loader
from ansible.plugins.loader import module_loader
from ansible.utils import plugin_docs
from ansible.utils.color import stringc

View file

@@ -30,7 +30,7 @@ from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import string_types
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, connection_loader, strategy_loader, PluginLoader
from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, connection_loader, strategy_loader, PluginLoader
from ansible.utils import plugin_docs
try:
from __main__ import display
@@ -66,7 +66,8 @@ class DocCLI(CLI):
self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_plugins',
help='Show documentation for all plugins')
self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
help='Choose which plugin type', choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])
help='Choose which plugin type (defaults to "module")',
choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])
super(DocCLI, self).parse()
@@ -99,6 +100,10 @@ class DocCLI(CLI):
for i in self.options.module_path.split(os.pathsep):
loader.add_directory(i)
# save only top level paths for errors
search_paths = DocCLI.print_paths(loader)
loader._paths = None # reset so we can use subdirs below
# list plugins for type
if self.options.list_dir:
paths = loader._get_paths()
@@ -125,7 +130,7 @@
# if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True)
if filename is None:
display.warning("%s %s not found in %s\n" % (plugin_type, plugin, DocCLI.print_paths(loader)))
display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
continue
if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
@@ -255,7 +260,7 @@
# Uses a list to get the order right
ret = []
for i in finder._get_paths():
for i in finder._get_paths(subdirs=False):
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
@@ -288,6 +293,9 @@
return "\n".join(text)
def _dump_yaml(self, struct, indent):
return CLI.tty_ify('\n'.join([indent + line for line in yaml.dump(struct, default_flow_style=False, Dumper=AnsibleDumper).split('\n')]))
def add_fields(self, text, fields, limit, opt_indent):
for o in sorted(fields):
@@ -322,123 +330,109 @@
del opt['choices']
default = ''
if 'default' in opt or not required:
default = "[Default: " + str(opt.pop('default', '(null)')) + "]"
default = "[Default: %s" % str(opt.pop('default', '(null)')) + "]"
text.append(textwrap.fill(CLI.tty_ify(aliases + choices + default), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'options' in opt:
text.append(opt_indent + "options:\n")
self.add_fields(text, opt['options'], limit, opt_indent + opt_indent)
text.append('')
del opt['options']
text.append("%soptions:\n" % opt_indent)
self.add_fields(text, opt.pop('options'), limit, opt_indent + opt_indent)
if 'spec' in opt:
text.append(opt_indent + "spec:\n")
self.add_fields(text, opt['spec'], limit, opt_indent + opt_indent)
text.append('')
del opt['spec']
text.append("%sspec:\n" % opt_indent)
self.add_fields(text, opt.pop('spec'), limit, opt_indent + opt_indent)
for conf in ('config', 'env_vars', 'host_vars'):
if conf in opt:
text.append(textwrap.fill(CLI.tty_ify("%s: " % conf), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
for entry in opt[conf]:
if isinstance(entry, dict):
pre = " -"
for key in entry:
text.append(textwrap.fill(CLI.tty_ify("%s %s: %s" % (pre, key, entry[key])),
limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
pre = " "
else:
text.append(textwrap.fill(CLI.tty_ify(" - %s" % entry), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
del opt[conf]
conf = {}
for config in ('env', 'ini', 'yaml', 'vars'):
if config in opt and opt[config]:
conf[config] = opt.pop(config)
# unspecified keys
for k in opt:
if conf:
text.append(self._dump_yaml({'set_via': conf}, opt_indent))
for k in sorted(opt):
if k.startswith('_'):
continue
if isinstance(opt[k], string_types):
text.append(textwrap.fill(CLI.tty_ify("%s: %s" % (k, opt[k])), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
elif isinstance(opt[k], (list, dict)):
text.append(textwrap.fill(CLI.tty_ify("%s: %s" % (k, yaml.dump(opt[k], Dumper=AnsibleDumper, default_flow_style=False))),
limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
text.append('%s%s: %s' % (opt_indent, k, textwrap.fill(CLI.tty_ify(opt[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
elif isinstance(opt[k], (list, tuple)):
text.append(CLI.tty_ify('%s%s: %s' % (opt_indent, k, ', '.join(opt[k]))))
else:
display.vv("Skipping %s key cuase we don't know how to handle eet" % k)
text.append(self._dump_yaml({k: opt[k]}, opt_indent))
text.append('')
def get_man_text(self, doc):
IGNORE = frozenset(['module', 'docuri', 'version_added', 'short_description', 'now_date'])
opt_indent = " "
text = []
text.append("> %s (%s)\n" % (doc[self.options.type].upper(), doc['filename']))
text.append("> %s (%s)\n" % (doc[self.options.type].upper(), doc.pop('filename')))
pad = display.columns * 0.20
limit = max(display.columns - int(pad), 70)
if isinstance(doc['description'], list):
desc = " ".join(doc['description'])
desc = " ".join(doc.pop('description'))
else:
desc = doc['description']
desc = doc.pop('description')
text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent=" ", subsequent_indent=" "))
text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'deprecated' in doc and doc['deprecated'] is not None and len(doc['deprecated']) > 0:
text.append("DEPRECATED: \n%s\n" % doc['deprecated'])
text.append("DEPRECATED: \n%s\n" % doc.pop('deprecated'))
if 'action' in doc and doc['action']:
if doc.pop('action', False):
text.append(" * note: %s\n" % "This module has a corresponding action plugin.")
if 'options' in doc and doc['options']:
text.append("Options (= is mandatory):\n")
self.add_fields(text, doc['options'], limit, opt_indent)
text.append("OPTIONS (= is mandatory):\n")
self.add_fields(text, doc.pop('options'), limit, opt_indent)
text.append('')
if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
text.append("Notes:")
text.append("NOTES:")
for note in doc['notes']:
text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=" * ", subsequent_indent=opt_indent))
text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
text.append('')
del doc['notes']
if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
req = ", ".join(doc['requirements'])
text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))
if 'examples' in doc and len(doc['examples']) > 0:
text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
for ex in doc['examples']:
text.append("%s\n" % (ex['code']))
req = ", ".join(doc.pop('requirements'))
text.append("REQUIREMENTS:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))
if 'plainexamples' in doc and doc['plainexamples'] is not None:
text.append("EXAMPLES:\n")
text.append("EXAMPLES:")
if isinstance(doc['plainexamples'], string_types):
text.append(doc['plainexamples'])
text.append(doc.pop('plainexamples').strip())
else:
text.append(yaml.dump(doc['plainexamples'], indent=2, default_flow_style=False))
text.append(yaml.dump(doc.pop('plainexamples'), indent=2, default_flow_style=False))
text.append('')
if 'returndocs' in doc and doc['returndocs'] is not None:
text.append("RETURN VALUES:\n")
if isinstance(doc['returndocs'], string_types):
text.append(doc['returndocs'])
text.append(doc.pop('returndocs'))
else:
text.append(yaml.dump(doc['returndocs'], indent=2, default_flow_style=False))
text.append(yaml.dump(doc.pop('returndocs'), indent=2, default_flow_style=False))
text.append('')
maintainers = set()
if 'author' in doc:
if isinstance(doc['author'], string_types):
maintainers.add(doc['author'])
else:
maintainers.update(doc['author'])
# Control rest of keys on verbosity (3 == full, 0 only adds small list)
rest = []
if self.options.verbosity >= 3:
rest = doc
elif 'author' in doc:
rest = ['author']
if 'maintainers' in doc:
if isinstance(doc['maintainers'], string_types):
maintainers.add(doc['maintainers'])
# Generic handler
for k in sorted(rest):
if k in IGNORE or not doc[k]:
continue
if isinstance(doc[k], string_types):
text.append('%s: %s' % (k.upper(), textwrap.fill(CLI.tty_ify(doc[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
elif isinstance(doc[k], (list, tuple)):
text.append('%s: %s' % (k.upper(), ', '.join(doc[k])))
else:
maintainers.update(doc['maintainers'])
text.append('MAINTAINERS: ' + ', '.join(maintainers))
text.append(self._dump_yaml({k.upper(): doc[k]}, opt_indent))
text.append('')
if 'metadata' in doc and doc['metadata']:
text.append("METADATA:")
for k in doc['metadata']:
if isinstance(k, list):
text.append("\t%s: %s" % (k.capitalize(), ", ".join(doc['metadata'][k])))
else:
text.append("\t%s: %s" % (k.capitalize(), doc['metadata'][k]))
text.append('')
return "\n".join(text)

View file

@@ -31,7 +31,7 @@ import time
from ansible.cli import CLI
from ansible.errors import AnsibleOptionsError
from ansible.module_utils._text import to_native
from ansible.plugins import module_loader
from ansible.plugins.loader import module_loader
from ansible.utils.cmd_functions import run_cmd
try:

View file

@@ -1,27 +1,10 @@
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import namedtuple
Setting = namedtuple('Setting','name value origin')
class ConfigData(object):
@@ -59,3 +42,4 @@ class ConfigData(object):
if plugin.name not in self._plugins[plugin.type]:
self._plugins[plugin.type][plugin.name] = {}
self._plugins[plugin.type][plugin.name][setting.name] = setting

View file

@@ -1,19 +1,5 @@
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
@@ -24,7 +10,9 @@ import sys
import tempfile
import yaml
from ansible.config.data import ConfigData, Setting
from collections import namedtuple
from ansible.config.data import ConfigData
from ansible.errors import AnsibleOptionsError, AnsibleError
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import configparser
@@ -34,131 +22,169 @@ from ansible.parsing.quoting import unquote
from ansible.utils.path import unfrackpath
from ansible.utils.path import makedirs_safe
Plugin = namedtuple('Plugin','name type')
Setting = namedtuple('Setting','name value origin')
# FIXME: see if we can unify in module_utils with similar function used by argspec
def ensure_type(value, value_type):
''' return a configuration variable with casting
:arg value: The value to ensure correct typing of
:kwarg value_type: The type of the value. This can be any of the following strings:
:boolean: sets the value to a True or False value
:integer: Sets the value to an integer or raises a ValueError
:float: Sets the value to a float or raises a ValueError
:list: Treats the value as a comma separated list. Split the value
and return it as a python list.
:none: Sets the value to None
:path: Expands any environment variables and tildes in the value.
:tmp_path: Create a unique temporary directory inside of the directory
specified by value and return its path.
:pathlist: Treat the value as a typical PATH string. (On POSIX, this
means colon separated strings.) Split the value and then expand
each part for environment variables and tildes.
'''
if value_type:
value_type = value_type.lower()
if value_type in ('boolean', 'bool'):
value = boolean(value, strict=False)
elif value:
if value_type in ('integer', 'int'):
value = int(value)
elif value_type == 'float':
value = float(value)
elif value_type == 'list':
if isinstance(value, string_types):
value = [x.strip() for x in value.split(',')]
elif value_type == 'none':
if value == "None":
value = None
elif value_type == 'path':
value = resolve_path(value)
elif value_type in ('tmp', 'temppath', 'tmppath'):
value = resolve_path(value)
if not os.path.exists(value):
makedirs_safe(value, 0o700)
prefix = 'ansible-local-%s' % os.getpid()
value = tempfile.mkdtemp(prefix=prefix, dir=value)
elif value_type == 'pathlist':
if isinstance(value, string_types):
value = [resolve_path(x) for x in value.split(os.pathsep)]
# defaults to string types
elif isinstance(value, string_types):
value = unquote(value)
return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
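# Editor's sketch of ensure_type() behavior above (not part of the diff):
#   ensure_type('yes', 'bool')        -> True
#   ensure_type('a, b ,c', 'list')    -> ['a', 'b', 'c']
#   ensure_type('~/.ansible', 'path') -> expanded, normalized absolute path
#   ensure_type('None', 'none')       -> None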
# FIXME: see if this can live in utils/path
def resolve_path(path):
''' resolve relative or 'variable' paths '''
if '{{CWD}}' in path: # allow users to force CWD using 'magic' {{CWD}}
path = path.replace('{{CWD}}', os.getcwd())
return unfrackpath(path, follow=False)
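# e.g. (sketch, not part of the diff): with cwd /home/user/project,
# resolve_path('{{CWD}}/plugins') -> '/home/user/project/plugins'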
def get_ini_config(p, entries):
''' returns the value of last ini entry found '''
value = None
if p is not None:
for entry in entries:
try:
value = p.get(entry.get('section','defaults'), entry.get('key',''), raw=True)
except:
pass
return value
class ConfigManager(object):
def __init__(self, conf_file=None):
self.data = ConfigData()
#FIXME: make dynamic?
bconfig_def = to_bytes('%s/data/config.yml' % os.path.dirname(__file__))
if os.path.exists(bconfig_def):
with open(bconfig_def, 'rb') as config_def:
self.initial_defs = yaml.safe_load(config_def)
else:
raise AnsibleError("Missing base configuration definition file (bad install?): %s" % to_native(bconfig_def))
# FIXME: generic file type?
def get_config_type(cfile):
ftype = None
if conf_file is None:
# set config using ini
conf_file = self.find_ini_config_file()
ftype = 'ini'
else:
ext = os.path.splitext(conf_file)[-1]
if cfile is not None:
ext = os.path.splitext(cfile)[-1]
if ext in ('.ini', '.cfg'):
ftype = 'ini'
elif ext in ('.yaml', '.yml'):
ftype = 'yaml'
else:
raise AnsibleOptionsError("Unsupported configuration file extension: \n{0}".format(ext))
raise AnsibleOptionsError("Unsupported configuration file extension for %s: %s" % (cfile, to_native(ext)))
self.parse_config(conf_file, ftype)
return ftype
def parse_config(self, cfile, ftype):
# FIXME: can move to module_utils for use for ini plugins also?
def get_ini_config_value(p, entry):
''' returns the value of last ini entry found '''
value = None
if p is not None:
try:
value = p.get(entry.get('section','defaults'), entry.get('key',''), raw=True)
except: # FIXME: actually report issues here
pass
return value
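# Illustrative entry shape for get_ini_config_value() (not part of the diff):
#   {'section': 'ssh_connection', 'key': 'ssh_executable'} reads [ssh_connection] ssh_executable;
#   'section' falls back to 'defaults' when omitted.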
class ConfigManager(object):
UNABLE = []
DEPRECATED = []
def __init__(self, conf_file=None):
self._base_defs = {}
self._plugins = {}
self._parser = None
self._config_file = conf_file
self.data = ConfigData()
#FIXME: make dynamic? scan for more? make its own method?
# Create configuration definitions from source
bconfig_def = to_bytes('%s/base.yml' % os.path.dirname(__file__))
if os.path.exists(bconfig_def):
with open(bconfig_def, 'rb') as config_def:
self._base_defs = yaml.safe_load(config_def)
else:
raise AnsibleError("Missing base configuration definition file (bad install?): %s" % to_native(bconfig_def))
if self._config_file is None:
# set config using ini
self._config_file = self._find_ini_config_file()
if self._config_file:
if os.path.exists(self._config_file):
# initialize parser and read config
self._parse_config_file()
# update constants
self.update_config_data()
def _parse_config_file(self, cfile=None):
''' return flat configuration settings from file(s) '''
# TODO: take list of files with merge/nomerge
parser = None
if cfile:
if cfile is None:
cfile = self._config_file
ftype = get_config_type(cfile)
if cfile is not None:
if ftype == 'ini':
parser = configparser.ConfigParser()
self._parser = configparser.ConfigParser()
try:
parser.read(cfile)
self._parser.read(cfile)
except configparser.Error as e:
raise AnsibleOptionsError("Error reading config file: \n{0}".format(e))
elif ftype == 'yaml':
with open(cfile, 'rb') as config_stream:
parser = yaml.safe_load(config_stream)
raise AnsibleOptionsError("Error reading config file (%s): %s" % (cfile, to_native(e)))
# FIXME: this should eventually handle yaml config files
#elif ftype == 'yaml':
# with open(cfile, 'rb') as config_stream:
# self._parser = yaml.safe_load(config_stream)
else:
raise AnsibleOptionsError("Unsupported configuration file type: \n{0}".format(ftype))
self.update_config(cfile, self.initial_defs, parser, ftype)
def update_config(self, configfile, defs, parser, ftype):
# update the constant for config file
self.data.update_setting(Setting('CONFIG_FILE', configfile, ''))
origin = None
# env and config defs can have several entries, ordered in list from lowest to highest precedence
for config in self.initial_defs:
value = None
# env vars are highest precedence
if defs[config].get('env'):
try:
for env_var in defs[config]['env']:
env_value = os.environ.get(env_var.get('name'), None)
if env_value is not None: # only set if env var is defined
value = env_value
origin = 'env: %s' % env_var.get('name')
except:
sys.stderr.write("Error while loading environment configs for %s\n" % config)
# try config file entries next
if value is None and defs[config].get(ftype):
if ftype == 'ini':
# load from ini config
try:
value = get_ini_config(parser, defs[config]['ini'])
origin = configfile
except Exception as e:
sys.stderr.write("Error while loading ini config %s: %s" % (configfile, str(e)))
elif ftype == 'yaml':
# FIXME: break down key from defs (. notation???)
key = 'name'
value = parser.get(key)
origin = configfile
# set default if we got here w/o a value
if value is None:
value = defs[config].get('default')
origin = 'default'
# ensure correct type
try:
value = self.ensure_type(value, defs[config].get('value_type'))
except:
sys.stderr.write("Unable to set correct type for %s, skipping" % config)
continue
# set the constant
self.data.update_setting(Setting(config, value, origin))
raise AnsibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype))
def find_ini_config_file(self):
''' Load config file, in order (first found is used): ENV, CWD, HOME, /etc/ansible '''
def _find_yaml_config_files(self):
''' Load YAML config files in order, check merge flags, keep origin of settings '''
pass
def _find_ini_config_file(self):
''' Load INI config file, in order (first found is used): ENV, CWD, HOME, /etc/ansible '''
# FIXME: eventually deprecate ini configs
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
@@ -180,57 +206,163 @@ class ConfigManager(object):
return path
def ensure_type(self, value, value_type):
''' return a configuration variable with casting
:arg value: The value to ensure correct typing of
:kwarg value_type: The type of the value. This can be any of the following strings:
:boolean: sets the value to a True or False value
:integer: Sets the value to an integer or raises a ValueError
:float: Sets the value to a float or raises a ValueError
:list: Treats the value as a comma separated list. Split the value
and return it as a python list.
:none: Sets the value to None
:path: Expands any environment variables and tildes in the value.
:tmp_path: Create a unique temporary directory inside of the directory
specified by value and return its path.
:pathlist: Treat the value as a typical PATH string. (On POSIX, this
means colon separated strings.) Split the value and then expand
each part for environment variables and tildes.
'''
if value_type == 'boolean':
value = boolean(value, strict=False)
def get_configuration_definitions(self, plugin_type=None, name=None):
''' just list the possible settings, either the base settings or those for a specific plugin type or plugin '''
elif value:
if value_type == 'integer':
value = int(value)
ret = {}
if plugin_type is None:
ret = self._base_defs
elif name is None:
ret = self._plugins.get(plugin_type, {})
else:
ret = {name: self._plugins.get(plugin_type, {}).get(name, {})}
elif value_type == 'float':
value = float(value)
return ret
elif value_type == 'list':
if isinstance(value, string_types):
value = [x.strip() for x in value.split(',')]
def _loop_entries(self, container, entry_list):
''' shared code for value entry assignment '''
elif value_type == 'none':
if value == "None":
value = None
origin = None
for entry in entry_list:
name = entry.get('name')
temp_value = container.get(name, None)
if temp_value is not None: # only set if the entry is defined in the container
value = temp_value
origin = name
elif value_type == 'path':
value = resolve_path(value)
# deal with deprecation of setting source, if used
#FIXME: if entry.get('deprecated'):
elif value_type == 'tmppath':
value = resolve_path(value)
if not os.path.exists(value):
makedirs_safe(value, 0o700)
prefix = 'ansible-local-%s' % os.getpid()
value = tempfile.mkdtemp(prefix=prefix, dir=value)
return value, origin
elif value_type == 'pathlist':
if isinstance(value, string_types):
value = [resolve_path(x) for x in value.split(os.pathsep)]
def get_config_value(self, config, cfile=None, plugin_type=None, plugin_name=None, variables=None):
''' wrapper '''
value, _drop = self.get_config_value_and_origin(config, cfile=cfile, plugin_type=plugin_type, plugin_name=plugin_name, variables=variables)
return value
elif isinstance(value, string_types):
value = unquote(value)
def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, variables=None):
''' Given a config key, figure out the actual value and report the origin of the setting '''
return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
if cfile is None:
cfile = self._config_file
# Note: sources that are lists listed in low to high precedence (last one wins)
value = None
defs = {}
if plugin_type is None:
defs = self._base_defs
elif plugin_name is None:
defs = self._plugins[plugin_type]
else:
defs = self._plugins[plugin_type][plugin_name]
# Use 'variable overrides' if present, highest precedence, but only present when querying running play
if variables:
value, origin = self._loop_entries(variables, defs[config]['vars'])
origin = 'var: %s' % origin
# env vars are next precedence
if value is None and defs[config].get('env'):
value, origin = self._loop_entries(os.environ, defs[config]['env'])
origin = 'env: %s' % origin
# try config file entries next, if we have one
if value is None and cfile is not None:
ftype = get_config_type(cfile)
if ftype and defs[config].get(ftype):
if ftype == 'ini':
# load from ini config
try: # FIXME: generalize _loop_entries to allow for files also
for ini_entry in defs[config]['ini']:
value = get_ini_config_value(self._parser, ini_entry)
origin = cfile
#FIXME: if ini_entry.get('deprecated'):
except Exception as e:
sys.stderr.write("Error while loading ini config %s: %s" % (cfile, to_native(e)))
elif ftype == 'yaml':
pass # FIXME: implement; also break down key from defs (dot notation?)
origin = cfile
'''
# for plugins, try using existing constants, this is for backwards compatibility
if plugin_name and defs[config].get('constants'):
value, origin = self._loop_entries(self.data, defs[config]['constants'])
origin = 'constant: %s' % origin
'''
# set default if we got here w/o a value
if value is None:
value = defs[config].get('default')
origin = 'default'
# FIXME: moved eval to constants as this does not have access to previous vars
if plugin_type is None and isinstance(value, string_types) and (value.startswith('eval(') and value.endswith(')')):
return value, origin
#default_value = defs[config].get('default')
#if plugin_type is None and isinstance(default_value, string_types) and (default_value.startswith('eval(') and default_value.endswith(')')):
# try:
# eval_string = default_value.replace('eval(', '', 1)[:-1]
# value = eval(eval_string) # FIXME: safe eval?
# except:
# value = default_value
#else:
# value = default_value
# ensure correct type
try:
value = ensure_type(value, defs[config].get('type'))
except Exception as e:
self.UNABLE.append(config)
# deal with deprecation of the setting
if defs[config].get('deprecated') and origin != 'default':
self.DEPRECATED.append((config, defs[config].get('deprecated')))
return value, origin
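# Editor's sketch of the resolution above (constant name illustrative, not part of the diff):
#   value, origin = C.config.get_config_value_and_origin('DEFAULT_TIMEOUT')
# checks play vars (if passed), then env vars, then the ini file, then the default;
# 'origin' records which source supplied the value.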
def update_plugin_config(self, plugin_type, name, defs):
''' really: update constants '''
# no sense?
self.initialize_plugin_configuration_definitions(plugin_type, name, defs)
self.update_config_data(defs)
def initialize_plugin_configuration_definitions(self, plugin_type, name, defs):
if plugin_type not in self._plugins:
self._plugins[plugin_type] = {}
self._plugins[plugin_type][name] = defs
def update_config_data(self, defs=None, configfile=None):
''' really: update constants '''
if defs is None:
defs = self._base_defs
if configfile is None:
configfile = self._config_file
if not isinstance(defs, dict):
raise AnsibleOptionsError("Invalid configuration definition type: %s for %s" % (type(defs), defs))
# update the constant for config file
self.data.update_setting(Setting('CONFIG_FILE', configfile, ''))
origin = None
# env and config defs can have several entries, ordered in list from lowest to highest precedence
for config in defs:
if not isinstance(defs[config], dict):
raise AnsibleOptionsError("Invalid configuration definition '%s': type is %s" % (to_native(config), type(defs[config])))
# get value and origin
value, origin = self.get_config_value_and_origin(config, configfile)
# set the constant
self.data.update_setting(Setting(config, value, origin))
# FIXME: find better way to do this by passing back to where display is available
if self.UNABLE:
sys.stderr.write("Unable to set correct type for:\n\t%s\n" % '\n\t'.join(self.UNABLE))
if self.DEPRECATED:
for k, reason in self.DEPRECATED:
sys.stderr.write("[DEPRECATED] %s: %(why)s. It will be removed in %(version)s. As alternative %(alternative)s", (k, reason))

View file

@@ -1,46 +1,58 @@
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os # used to set lang
from string import ascii_letters, digits
from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean, BOOLEANS_TRUE
from ansible.module_utils.six import string_types
from ansible.config.manager import ConfigManager
_config = ConfigManager()
# Generate constants from config
for setting in _config.data.get_settings():
vars()[setting.name] = setting.value
def _deprecated(msg):
''' display is not guaranteed here, nor is it guaranteed to be the full class, but try anyway; fall back to sys.stderr.write '''
try:
from __main__ import display
display.deprecated(msg, version='2.8')
except:
import sys
sys.stderr.write('[DEPRECATED] %s, to be removed in 2.8\n' % msg)
def mk_boolean(value):
''' moved to module_utils'''
# We don't have a display here so we can't call deprecated
# display.deprecated('ansible.constants.mk_boolean() is deprecated. Use ansible.module_utils.parsing.convert_bool.boolean() instead', version='2.8')
_deprecated('ansible.constants.mk_boolean() is deprecated. Use ansible.module_utils.parsing.convert_bool.boolean() instead')
return boolean(value, strict=False)
def get_config(parser, section, key, env_var, default_value, value_type=None, expand_relative_paths=False):
''' kept for backwards compatibility, but deprecated '''
_deprecated('ansible.constants.get_config() is deprecated. There is new config API, see porting docs.')
# ### CONSTANTS ### yes, actual ones
import os
value = None
# small reconstruction of the old code env/ini/default
value = os.environ.get(env_var, None)
if value is None:
try:
value = config.get_ini_config(parser, [{'key': key, 'section': section}])
except:
pass
if value is None:
value = default_value
try:
value = config.ensure_type(value, value_type)
except:
pass
return value
### CONSTANTS ### yes, actual ones
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
BECOME_METHODS = ['sudo', 'su', 'pbrun', 'pfexec', 'doas', 'dzdo', 'ksu', 'runas', 'pmrun']
BECOME_ERROR_STRINGS = {
@@ -79,3 +91,22 @@ RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
TREE_DIR = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
### POPULATE SETTINGS FROM CONFIG ###
config = ConfigManager()
# Generate constants from config
for setting in config.data.get_settings():
# FIXME: find better way to do in manager class and/or ensure types
if isinstance(setting.value, string_types) and (setting.value.startswith('eval(') and setting.value.endswith(')')):
try:
eval_string = setting.value.replace('eval(', '', 1)[:-1]
vars()[setting.name] = eval(eval_string) # FIXME: safe eval?
continue
except:
pass
vars()[setting.name] = setting.value
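
To illustrate the eval() convention above (a hypothetical definition, not part of the diff): a base.yml default written as a literal eval(<expr>) string is evaluated once while constants are populated, so the constant holds the expression's result rather than the literal string.

    # hypothetical base.yml entry:
    # DEFAULT_PASSWORD_CHARS:
    #   default: eval(ascii_letters + digits + '.,:-_')
    # would populate C.DEFAULT_PASSWORD_CHARS with the characters themselves.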

View file

@@ -20,6 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import Sequence
import traceback
import sys
from ansible.errors.yaml_strings import (
YAML_COMMON_DICT_ERROR,
@@ -68,6 +70,8 @@ class AnsibleError(Exception):
self.message += '\nexception type: %s' % to_native(type(orig_exc))
self.message += '\nexception: %s' % to_native(orig_exc)
self.tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
def __str__(self):
return self.message

View file

@@ -36,7 +36,7 @@ from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import module_utils_loader, ps_module_utils_loader
from ansible.plugins.loader import module_utils_loader, ps_module_utils_loader
from ansible.plugins.shell.powershell import async_watchdog, async_wrapper, become_wrapper, leaf_exec, exec_wrapper
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
@@ -579,7 +579,7 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
zf.writestr(os.path.join("ansible/module_utils",
py_module_file_name), py_module_cache[py_module_name][0])
display.vvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
# Add the names of the files we're scheduling to examine in the loop to
# py_module_names so that we don't re-examine them in the next pass

View file

@@ -731,6 +731,7 @@ class TaskExecutor:
conn_type = self._play_context.connection
connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
self._play_context.set_options_from_plugin(connection)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)

View file

@@ -31,7 +31,7 @@ from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text
from ansible.playbook.block import Block
from ansible.playbook.play_context import PlayContext
from ansible.plugins import callback_loader, strategy_loader, module_loader
from ansible.plugins.loader import callback_loader, strategy_loader, module_loader
from ansible.plugins.callback import CallbackBase
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int

View file

@@ -30,7 +30,7 @@ from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import PluginLoader
from ansible.plugins.loader import PluginLoader
from ansible.utils.path import unfrackpath
try:
@@ -260,14 +260,15 @@ class InventoryManager(object):
display.vvv(u'Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
break
except AnsibleParserError as e:
failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' % (to_text(source), plugin_name, to_text(e)))
failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
else:
display.debug(u'%s did not meet %s requirements' % (to_text(source), plugin_name))
else:
if failures:
# only if no plugin processed files should we show errors.
for fail in failures:
display.warning(fail)
display.warning(u'\n* Failed to parse %s with %s inventory plugin: %s' % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
display.vvv(fail['exc'].tb)
if not parsed:
display.warning(u"Unable to parse %s as an inventory source" % to_text(source))

View file

@@ -23,7 +23,7 @@ from ansible.errors import AnsibleParserError, AnsibleError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv, split_args
from ansible.plugins import module_loader, action_loader
from ansible.plugins.loader import module_loader, action_loader
from ansible.template import Templar

View file

@@ -0,0 +1,83 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def read_docstring(filename, verbose=True, ignore_errors=True):
"""
Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
"""
data = {
'doc': None,
'plainexamples': None,
'returndocs': None,
'metadata': None
}
string_to_vars = {
'DOCUMENTATION': 'doc',
'EXAMPLES': 'plainexamples',
'RETURN': 'returndocs',
'ANSIBLE_METADATA': 'metadata'
}
try:
M = ast.parse(''.join(open(filename)))
try:
display.debug('Attempting to parse the first docstring as YAML docs')
docstring = yaml.load(M.body[0].value.s)
for string in string_to_vars.keys():
if string in docstring:
data[string_to_vars[string]] = docstring[string]
display.debug('assigned :%s' % string_to_vars[string])
except Exception as e:
display.debug('failed docstring parsing: %s' % str(e))
if 'doc' not in data or not data['doc']:
display.debug('Fallback to vars parsing')
for child in M.body:
if isinstance(child, ast.Assign):
for t in child.targets:
try:
theid = t.id
except AttributeError:
# skip; errors can happen when trying to use the normal code
display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
continue
if theid in string_to_vars:
varkey = string_to_vars[theid]
if isinstance(child.value, ast.Dict):
data[varkey] = ast.literal_eval(child.value)
else:
if theid in ['DOCUMENTATION', 'ANSIBLE_METADATA']:
# string should be yaml
data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
else:
# not yaml, should be a simple string
data[varkey] = child.value.s
display.debug('assigned :%s' % varkey)
except:
if verbose:
display.error("unable to parse %s" % filename)
if not ignore_errors:
raise
return data
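# Usage sketch (editor's illustration; the import path is assumed, not shown in this diff):
#   from ansible.parsing.plugin_docs import read_docstring
#   data = read_docstring('lib/ansible/plugins/connection/ssh.py')
#   data['doc'], data['plainexamples'], data['returndocs'] and data['metadata'] are then populated.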

View file

@@ -26,7 +26,7 @@ from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_text
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
from ansible.plugins import get_all_plugin_loaders
from ansible.plugins.loader import get_all_plugin_loaders
try:
from __main__ import display

View file

@@ -197,9 +197,10 @@ class Base(with_metaclass(BaseMeta, object)):
self.vars = dict()
def dump_me(self, depth=0):
''' this is never called from production code; it is here to be used when debugging, as a 'complex print' '''
if depth == 0:
print("DUMPING OBJECT ------------------------------------------------------")
print("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
display.debug("DUMPING OBJECT ------------------------------------------------------")
display.debug("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
if hasattr(self, '_parent') and self._parent:
self._parent.dump_me(depth + 2)
dep_chain = self._parent.get_dep_chain()

View file

@@ -36,6 +36,7 @@ from ansible.module_utils._text import to_bytes
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.plugins import get_plugin_class
from ansible.utils.ssh_functions import check_for_controlpersist
@@ -54,31 +55,47 @@ __all__ = ['PlayContext']
# in variable names.
MAGIC_VARIABLE_MAPPING = dict(
accelerate_port=('ansible_accelerate_port', ),
# base
connection=('ansible_connection', ),
module_compression=('ansible_module_compression', ),
shell=('ansible_shell_type', ),
executable=('ansible_shell_executable', ),
remote_tmp_dir=('ansible_remote_tmp', ),
# connection common
remote_addr=('ansible_ssh_host', 'ansible_host'),
remote_user=('ansible_ssh_user', 'ansible_user'),
remote_tmp_dir=('ansible_remote_tmp', ),
port=('ansible_ssh_port', 'ansible_port'),
timeout=('ansible_ssh_timeout', 'ansible_timeout'),
ssh_executable=('ansible_ssh_executable', ),
accelerate_port=('ansible_accelerate_port', ),
password=('ansible_ssh_pass', 'ansible_password'),
private_key_file=('ansible_ssh_private_key_file', 'ansible_private_key_file'),
port=('ansible_ssh_port', 'ansible_port'),
pipelining=('ansible_ssh_pipelining', 'ansible_pipelining'),
shell=('ansible_shell_type', ),
timeout=('ansible_ssh_timeout', 'ansible_timeout'),
private_key_file=('ansible_ssh_private_key_file', 'ansible_private_key_file'),
# networking modules
network_os=('ansible_network_os', ),
# ssh TODO: remove
ssh_executable=('ansible_ssh_executable', ),
ssh_common_args=('ansible_ssh_common_args', ),
sftp_extra_args=('ansible_sftp_extra_args', ),
scp_extra_args=('ansible_scp_extra_args', ),
ssh_extra_args=('ansible_ssh_extra_args', ),
ssh_transfer_method=('ansible_ssh_transfer_method', ),
# docker TODO: remove
docker_extra_args=('ansible_docker_extra_args', ),
# become
become=('ansible_become', ),
become_method=('ansible_become_method', ),
become_user=('ansible_become_user', ),
become_pass=('ansible_become_password', 'ansible_become_pass'),
become_exe=('ansible_become_exe', ),
become_flags=('ansible_become_flags', ),
ssh_common_args=('ansible_ssh_common_args', ),
docker_extra_args=('ansible_docker_extra_args', ),
sftp_extra_args=('ansible_sftp_extra_args', ),
scp_extra_args=('ansible_scp_extra_args', ),
ssh_extra_args=('ansible_ssh_extra_args', ),
ssh_transfer_method=('ansible_ssh_transfer_method', ),
# deprecated
sudo=('ansible_sudo', ),
sudo_user=('ansible_sudo_user', ),
sudo_pass=('ansible_sudo_password', 'ansible_sudo_pass'),
@@ -89,10 +106,9 @@ MAGIC_VARIABLE_MAPPING = dict(
su_pass=('ansible_su_password', 'ansible_su_pass'),
su_exe=('ansible_su_exe', ),
su_flags=('ansible_su_flags', ),
executable=('ansible_shell_executable', ),
module_compression=('ansible_module_compression', ),
)
# TODO: needs to be configurable
b_SU_PROMPT_LOCALIZATIONS = [
to_bytes('Password'),
to_bytes('암호'),
@@ -135,7 +151,7 @@ TASK_ATTRIBUTE_OVERRIDES = (
'become_method',
'become_flags',
'connection',
'docker_extra_args',
'docker_extra_args', # TODO: remove
'delegate_to',
'no_log',
'remote_user',
@ -143,6 +159,11 @@ TASK_ATTRIBUTE_OVERRIDES = (
RESET_VARS = (
'ansible_connection',
'ansible_user',
'ansible_host',
'ansible_port',
# TODO: ???
'ansible_docker_extra_args',
'ansible_ssh_host',
'ansible_ssh_pass',
@@ -151,9 +172,6 @@ RESET_VARS = (
'ansible_ssh_private_key_file',
'ansible_ssh_pipelining',
'ansible_ssh_executable',
'ansible_user',
'ansible_host',
'ansible_port',
)
@@ -165,47 +183,59 @@ class PlayContext(Base):
connection/authentication information.
'''
# base
_module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
_shell = FieldAttribute(isa='string')
_executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
# connection fields, some are inherited from Base:
# (connection, port, remote_user, environment, no_log)
_docker_extra_args = FieldAttribute(isa='string')
_remote_addr = FieldAttribute(isa='string')
_remote_tmp_dir = FieldAttribute(isa='string', default=C.DEFAULT_REMOTE_TMP)
_password = FieldAttribute(isa='string')
_private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
_timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
_shell = FieldAttribute(isa='string')
_network_os = FieldAttribute(isa='string')
_connection_user = FieldAttribute(isa='string')
_private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
_pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_PIPELINING)
# networking modules
_network_os = FieldAttribute(isa='string')
# docker FIXME: remove these
_docker_extra_args = FieldAttribute(isa='string')
# ssh # FIXME: remove these
_ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
_ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
_ssh_common_args = FieldAttribute(isa='string')
_sftp_extra_args = FieldAttribute(isa='string')
_scp_extra_args = FieldAttribute(isa='string')
_ssh_extra_args = FieldAttribute(isa='string')
_ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
_ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)
# ???
_connection_lockfd = FieldAttribute(isa='int')
_pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
# accelerate FIXME: remove as soon as deprecation period expires
_accelerate = FieldAttribute(isa='bool', default=False)
_accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
_accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
_executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
_module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
# privilege escalation fields
_become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
_become_pass = FieldAttribute(isa='string')
_become_exe = FieldAttribute(isa='string')
_become_flags = FieldAttribute(isa='string')
_become_exe = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_EXE)
_become_flags = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_FLAGS)
_prompt = FieldAttribute(isa='string')
# backwards compatibility fields for sudo/su
_sudo_exe = FieldAttribute(isa='string')
_sudo_flags = FieldAttribute(isa='string')
# DEPRECATED: backwards compatibility fields for sudo/su
_sudo_exe = FieldAttribute(isa='string', default=C.DEFAULT_SUDO_EXE)
_sudo_flags = FieldAttribute(isa='string', default=C.DEFAULT_SUDO_FLAGS)
_sudo_pass = FieldAttribute(isa='string')
_su_exe = FieldAttribute(isa='string')
_su_flags = FieldAttribute(isa='string')
_su_exe = FieldAttribute(isa='string', default=C.DEFAULT_SU_EXE)
_su_flags = FieldAttribute(isa='string', default=C.DEFAULT_SU_FLAGS)
_su_pass = FieldAttribute(isa='string')
# general flags
@@ -277,6 +307,22 @@
if play.force_handlers is not None:
self.force_handlers = play.force_handlers
def set_options_from_plugin(self, plugin):
# generic derived from connection plugin
# get options for plugins
options = C.config.get_configuration_definitions(get_plugin_class(plugin), plugin._load_name)
for option in options:
if option:
flag = options[option].get('name')
if flag:
setattr(self, flag, plugin.get_option(flag))
# TODO: made irrelevant by the above
# get ssh options FIXME: make these common to all connections
# for flag in ('ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args'):
# setattr(self, flag, getattr(options, flag, ''))
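# Sketch of the effect (illustrative option name, not part of the diff): a connection
# plugin documenting an option with name: remote_user gets mirrored onto the play
# context as self.remote_user = plugin.get_option('remote_user').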
def set_options(self, options):
'''
Configures this connection information instance with data from
@@ -291,12 +337,10 @@
self.check_mode = boolean(options.check, strict=False)
# get ssh options FIXME: make these common to all connections
for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
setattr(self, flag, getattr(options, flag, ''))
# general flags (should we move out?)
for flag in ['connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
# for flag in ('connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff'):
# should only be 'non plugin' flags
for flag in ('connection', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff'):
attribute = getattr(options, flag, False)
if attribute:
setattr(self, flag, attribute)
@@ -492,22 +536,18 @@
command = success_cmd
# set executable to use for the privilege escalation method, with various overrides
exe = (
self.become_exe or
getattr(self, '%s_exe' % self.become_method, None) or
C.DEFAULT_BECOME_EXE or
getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or
self.become_method
)
exe = self.become_method
for myexe in (getattr(self, '%s_exe' % self.become_method, None), self.become_exe):
if myexe:
exe = myexe
break
# set flags to use for the privilege escalation method, with various overrides
flags = (
self.become_flags or
getattr(self, '%s_flags' % self.become_method, None) or
C.DEFAULT_BECOME_FLAGS or
getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or
''
)
flags = ''
for myflag in (getattr(self, '%s_flags' % self.become_method, None), self.become_flags):
if myflag is not None:
flags = myflag
break
if self.become_method == 'sudo':
# If we have a password, we run sudo with a randomly-generated

View file

@@ -31,7 +31,7 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins import get_all_plugin_loaders
from ansible.plugins.loader import get_all_plugin_loaders
from ansible.utils.vars import combine_vars

View file

@@ -26,7 +26,7 @@ from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
from ansible.plugins import lookup_loader
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become

View file

@@ -21,18 +21,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import os
import os.path
import sys
import warnings
from collections import defaultdict
from abc import ABCMeta
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.six import with_metaclass
try:
from __main__ import display
@@ -46,537 +38,11 @@ PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]
def get_plugin_class(obj):
return obj.__class__.__name__.lower().replace('module', '')
class PluginLoader:
class AnsiblePlugin(with_metaclass(ABCMeta, object)):
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.subdir = subdir
self.aliases = aliases
if config and not isinstance(config, list):
config = [config]
elif not config:
config = []
self.config = config
if class_name not in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if class_name not in PATH_CACHE:
PATH_CACHE[class_name] = None
if class_name not in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name=self.class_name,
base_class=self.base_class,
package=self.package,
config=self.config,
subdir=self.subdir,
aliases=self.aliases,
_extra_dirs=self._extra_dirs,
_searched_paths=self._searched_paths,
PATH_CACHE=PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
)
def format_paths(self, paths):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in paths:
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def print_paths(self):
return self.format_paths(self._get_paths())
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir, followlinks=True):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root, x))
return results
def _get_package_paths(self, subdirs=True):
''' Gets the path of a Python package '''
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
for parent_mod in parts:
m = getattr(m, parent_mod)
self.package_path = os.path.dirname(m.__file__)
if subdirs:
return self._all_directories(self.package_path)
return [self.package_path]
def _get_paths(self, subdirs=True):
''' Return a list of paths to search for plugins in '''
# FIXME: This is potentially buggy if subdirs is sometimes True and
# sometimes False. In current usage, everything calls this with
# subdirs=True except for module_utils_loader which always calls it
# with subdirs=False. So there currently isn't a problem with this
# caching.
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
for path in self.config:
path = os.path.realpath(os.path.expanduser(path))
if subdirs:
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
# Note package path always gets added last so that every other type of
# path is searched before it.
ret.extend(self._get_package_paths(subdirs=subdirs))
# HACK: because powershell modules are in the same directory
# hierarchy as other modules we have to process them last. This is
# because powershell only works on windows but the other modules work
# anywhere (possibly including windows if the correct language
# interpreter is installed). the non-powershell modules can have any
# file extension and thus powershell modules are picked up in that.
# The non-hack way to fix this is to have powershell modules be
# a different PluginLoader/ModuleLoader. But that requires changing
# other things too (known thing to change would be PATHS_CACHE,
# PLUGIN_PATHS_CACHE, and MODULE_CACHE. Since those three dicts key
# on the class_name and neither regular modules nor powershell modules
# would have class_names, they would not work as written.
reordered_paths = []
win_dirs = []
for path in ret:
if path.endswith('windows'):
win_dirs.append(path)
else:
reordered_paths.append(path)
reordered_paths.extend(win_dirs)
# cache and return the result
self._paths = reordered_paths
return reordered_paths
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, mod_type='', ignore_deprecated=False):
''' Find a plugin named name '''
if mod_type:
suffix = mod_type
elif self.class_name:
# Ansible plugins that run in the controller process (most plugins)
suffix = '.py'
else:
# Only Ansible Modules. Ansible modules can be any executable so
# they can have any suffix
suffix = ''
# The particular cache to look for modules within. This matches the
# requested mod_type
pull_cache = self._plugin_path_cache[suffix]
try:
return pull_cache[name]
except KeyError:
# Cache miss. Now let's find the plugin
pass
# TODO: Instead of using the self._paths cache (PATH_CACHE) and
# self._searched_paths we could use an iterator. Before enabling that
# we need to make sure we don't want to add additional directories
# (add_directory()) once we start using the iterator. Currently, it
# looks like _get_paths() never forces a cache refresh so if we expect
# additional directories to be added later, it is buggy.
for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
display.warning("Error accessing plugin paths: %s" % to_text(e))
for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
full_name = os.path.basename(full_path)
# HACK: We have no way of executing python byte
# compiled files as ansible modules so specifically exclude them
# FIXME: I believe this is only correct for modules and
# module_utils. For all other plugins, .pyc and .pyo should
# be valid
if full_path.endswith(('.pyc', '.pyo')):
continue
splitname = os.path.splitext(full_name)
base_name = splitname[0]
try:
extension = splitname[1]
except IndexError:
extension = ''
# Module found, now enter it into the caches that match
# this file
if base_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][base_name] = full_path
if full_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][full_name] = full_path
if base_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][base_name] = full_path
if full_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][full_name] = full_path
self._searched_paths.add(path)
try:
return pull_cache[name]
except KeyError:
# Didn't find the plugin in this directory. Load modules from
# the next one
pass
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
alias_name = '_' + name
# We've already cached all the paths at this point
if alias_name in pull_cache:
if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
display.deprecated('%s is kept for backwards compatibility '
'but usage is discouraged. The module '
'documentation details page may explain '
'more about this rationale.' %
name.lstrip('_'))
return pull_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def _load_module_source(self, name, path):
# avoid collisions across plugins
full_name = '.'.join([self.package, name])
if full_name in sys.modules:
# Avoids double loading, See https://github.com/ansible/ansible/issues/13110
return sys.modules[full_name]
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
with open(path, 'rb') as module_file:
module = imp.load_source(full_name, path, module_file)
return module
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
found_in_cache = True
class_only = kwargs.pop('class_only', False)
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
found_in_cache = False
obj = getattr(self._module_cache[path], self.class_name)
if self.base_class:
# The import path is hardcoded and should be the right place,
# so we are not expecting an ImportError.
module = __import__(self.package, fromlist=[self.base_class])
# Check whether this obj has the required base class.
try:
plugin_class = getattr(module, self.base_class)
except AttributeError:
return None
if not issubclass(obj, plugin_class):
return None
self._display_plugin_load(self.class_name, name, self._searched_paths, path,
found_in_cache=found_in_cache, class_only=class_only)
if not class_only:
try:
obj = obj(*args, **kwargs)
except TypeError as e:
if "abstract" in e.args[0]:
# Abstract Base Class. The found plugin file does not
# fully implement the defined interface.
return None
raise
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
setattr(obj, '_load_name', name)
return obj
def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
msg = 'Loading %s \'%s\' from %s' % (class_name, os.path.basename(name), path)
if len(searched_paths) > 1:
msg = '%s (searched paths: %s)' % (msg, self.format_paths(searched_paths))
if found_in_cache or class_only:
msg = '%s (found_in_cache=%s, class_only=%s)' % (msg, found_in_cache, class_only)
display.debug(msg)
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
path_only = kwargs.pop('path_only', False)
class_only = kwargs.pop('class_only', False)
all_matches = []
found_in_cache = True
for i in self._get_paths():
all_matches.extend(glob.glob(os.path.join(i, "*.py")))
for path in sorted(all_matches, key=lambda match: os.path.basename(match)):
name, _ = os.path.splitext(path)
if '__init__' in name:
continue
if path_only:
yield path
continue
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
found_in_cache = False
try:
obj = getattr(self._module_cache[path], self.class_name)
except AttributeError as e:
display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
continue
if self.base_class:
# The import path is hardcoded and should be the right place,
# so we are not expecting an ImportError.
module = __import__(self.package, fromlist=[self.base_class])
# Check whether this obj has the required base class.
try:
plugin_class = getattr(module, self.base_class)
except AttributeError:
continue
if not issubclass(obj, plugin_class):
continue
self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
if not class_only:
try:
obj = obj(*args, **kwargs)
except TypeError as e:
display.warning("Skipping plugin (%s) as it seems to be incomplete: %s" % (path, to_text(e)))
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
setattr(obj, '_load_name', name)
yield obj
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connection',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
module_utils_loader = PluginLoader(
'',
'ansible.module_utils',
C.DEFAULT_MODULE_UTILS_PATH,
'module_utils',
)
# NB: dedicated loader is currently necessary because PS module_utils expects "with subdir" lookup where
# regular module_utils doesn't. This can be revisited once we have more granular loaders.
ps_module_utils_loader = PluginLoader(
'',
'ansible.module_utils',
C.DEFAULT_MODULE_UTILS_PATH,
'module_utils',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
test_loader = PluginLoader(
'TestModule',
'ansible.plugins.test',
C.DEFAULT_TEST_PLUGIN_PATH,
'test_plugins'
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategy',
C.DEFAULT_STRATEGY_PLUGIN_PATH,
'strategy_plugins',
required_base_class='StrategyBase',
)
terminal_loader = PluginLoader(
'TerminalModule',
'ansible.plugins.terminal',
'terminal_plugins',
'terminal_plugins'
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
cliconf_loader = PluginLoader(
'Cliconf',
'ansible.plugins.cliconf',
'cliconf_plugins',
'cliconf_plugins',
required_base_class='CliconfBase'
)
netconf_loader = PluginLoader(
'Netconf',
'ansible.plugins.netconf',
'netconf_plugins',
'netconf_plugins',
required_base_class='NetconfBase'
)
def get_option(self, option):
return C.get_plugin_option(get_plugin_class(self), self.name, option)
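
What remains in this file after the move is the shared plugin machinery: get_plugin_class() derives a config "plugin type" from the class name, which AnsiblePlugin.get_option() then uses to query the config manager. A worked example of the derivation:

# Worked example of get_plugin_class(): lowercase the class name and
# strip 'module', so loader class names map onto plugin-type names.
examples = {
    'Connection': 'connection',
    'ShellModule': 'shell',
    'ActionModule': 'action',
}
for class_name, expected in examples.items():
    assert class_name.lower().replace('module', '') == expected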


@@ -40,6 +40,7 @@ from ansible.parsing.utils.jsonify import jsonify
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
from ansible.release import __version__
from ansible.utils.unsafe_proxy import wrap_var
from ansible.vars.manager import remove_internal_keys
try:
@@ -743,7 +744,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
# remove internal keys
self._remove_internal_keys(data)
remove_internal_keys(data)
# cleanup tmp?
if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
@@ -766,17 +767,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _remove_internal_keys(self, data):
for key in list(data.keys()):
if key.startswith('_ansible_') and key != '_ansible_parsed' or key in C.INTERNAL_RESULT_KEYS:
display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
del data[key]
# remove bad/empty internal keys
for key in ['warnings', 'deprecations']:
if key in data and not data[key]:
del data[key]
def _clean_returned_data(self, data):
remove_keys = set()
fact_keys = set(data.keys())
@@ -817,7 +807,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.warning("Removed restricted key from module data: %s = %s" % (r_key, r_val))
del data[r_key]
self._remove_internal_keys(data)
remove_internal_keys(data)
def _parse_returned_data(self, res):
try:


@@ -26,7 +26,7 @@ from ansible import constants as C
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils.junos import junos_argument_spec
from ansible.module_utils.six import iteritems
from ansible.plugins import connection_loader, module_loader
from ansible.plugins.loader import connection_loader, module_loader
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.connection import Connection


@@ -26,7 +26,7 @@ from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
from ansible.plugins.action import ActionBase
from ansible.plugins import connection_loader
from ansible.plugins.loader import connection_loader
class ActionModule(ActionBase):


@@ -27,7 +27,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import with_metaclass
from ansible.module_utils._text import to_bytes
from ansible.plugins import cache_loader
from ansible.plugins.loader import cache_loader
try:
from __main__ import display


@@ -15,6 +15,17 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
DOCUMENTATION:
callback: json
short_description: Ansible screen output as JSON
version_added: "2.2"
description:
- This callback converts all events into JSON output
type: stdout
plugin_api_version: "2.0"
'''
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
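
The docstring above is more than documentation: with this change, plugin options and metadata become parseable from the plugin file itself. A hedged sketch using the new read_docstring helper (the path is illustrative):

# Hedged sketch: pull the structured DOCUMENTATION straight out of a
# plugin file; read_docstring() returns a dict whose 'doc' key holds
# the parsed YAML.
from ansible.parsing.plugin_docs import read_docstring

info = read_docstring('lib/ansible/plugins/callback/json.py')  # illustrative path
print(info['doc']['short_description'])  # -> Ansible screen output as JSON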


@@ -23,14 +23,15 @@ import fcntl
import gettext
import os
import shlex
from abc import ABCMeta, abstractmethod, abstractproperty
from abc import abstractmethod, abstractproperty
from functools import wraps
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types, with_metaclass
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import shell_loader
from ansible.plugins import AnsiblePlugin
from ansible.plugins.loader import shell_loader
try:
from __main__ import display
@@ -53,7 +54,7 @@ def ensure_connect(func):
return wrapped
class ConnectionBase(with_metaclass(ABCMeta, object)):
class ConnectionBase(AnsiblePlugin):
'''
A base class for connections to contain common code.
'''
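
With ConnectionBase now deriving from AnsiblePlugin, connection plugins share one get_option() path instead of bespoke play_context lookups. A hedged sketch of what a subclass gains (the option name is assumed to be declared in the plugin's DOCUMENTATION, as in the ssh docs further below):

# Hedged sketch, not a real plugin: options declared in DOCUMENTATION
# become reachable through the inherited get_option().
from ansible.plugins.connection import ConnectionBase

class Connection(ConnectionBase):
    transport = 'example'

    def _connect(self):
        # resolved by the config manager from vars/env/ini/defaults
        self.port = self.get_option('port')
        return self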


@@ -29,16 +29,24 @@ DOCUMENTATION:
author: Tomas Tomecek (ttomecek@redhat.com)
version_added: 2.4
options:
remote_addr:
description:
- The ID of the container you want to access.
default: inventory_hostname
config:
vars:
- name: ansible_host
remote_user:
description:
- User (specified via name or ID) used to execute commands inside the container.
config:
ini:
- section: defaults
key: remote_user
env_vars:
- ANSIBLE_REMOTE_USER
host_vars:
- ansible_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
"""
from __future__ import (absolute_import, division, print_function)


@@ -25,7 +25,7 @@ import json
from ansible import constants as C
from ansible.errors import AnsibleConnectionFailure, AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins import netconf_loader
from ansible.plugins.loader import netconf_loader
from ansible.plugins.connection import ConnectionBase, ensure_connect
from ansible.utils.jsonrpc import Rpc


@@ -31,8 +31,7 @@ from ansible import constants as C
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.six import BytesIO, binary_type
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import cliconf_loader
from ansible.plugins import terminal_loader
from ansible.plugins.loader import cliconf_loader, terminal_loader
from ansible.plugins.connection.paramiko_ssh import Connection as _Connection
from ansible.utils.jsonrpc import Rpc


@@ -26,73 +26,122 @@ DOCUMENTATION:
author: ansible (@core)
version_added: historical
options:
_host:
host:
description: Hostname/ip to connect to.
default: inventory_hostname
host_vars:
- ansible_host
- ansible_ssh_host
_host_key_checking:
type: bool
vars:
- name: ansible_host
- name: ansible_ssh_host
host_key_checking:
constants:
- name: HOST_KEY_CHECKING
description: Determines if ssh should check host keys
config:
type: boolean
ini:
- section: defaults
key: 'host_key_checking'
env_vars:
- ANSIBLE_HOST_KEY_CHECKING
_password:
env:
- name: ANSIBLE_HOST_KEY_CHECKING
password:
description: Authentication password for the C(remote_user). Can be supplied as CLI option.
host_vars:
- ansible_password
- ansible_ssh_pass
_ssh_args:
vars:
- name: ansible_password
- name: ansible_ssh_pass
ssh_args:
description: Arguments to pass to all ssh cli tools
default: '-C -o ControlMaster=auto -o ControlPersist=60s'
config:
ini:
- section: 'ssh_connection'
key: 'ssh_args'
env_vars:
- ANSIBLE_SSH_ARGS
_ssh_common_args:
description: Common extra args for ssh CLI tools
host_vars:
- ansible_ssh_common_args
_scp_extra_args:
env:
- name: ANSIBLE_SSH_ARGS
ssh_common_args:
description: Common extra args for all ssh CLI tools
vars:
- name: ansible_ssh_common_args
ssh_executable:
default: ssh
description:
- This defines the location of the ssh binary. It defaults to `ssh` which will use the first ssh binary available in $PATH.
- This option is usually not required; it might be useful when access to system ssh is restricted,
or when using ssh wrappers to connect to remote hosts.
env: [{name: ANSIBLE_SSH_EXECUTABLE}]
ini:
- {key: ssh_executable, section: ssh_connection}
yaml: {key: ssh_connection.ssh_executable}
const:
- name: ANSIBLE_SSH_EXECUTABLE
version_added: "2.2"
scp_extra_args:
description: Extra args exclusive to the 'scp' CLI
host_vars:
- ansible_scp_extra_args
_sftp_extra_args:
vars:
- name: ansible_scp_extra_args
sftp_extra_args:
description: Extra args exclusive to the 'sftp' CLI
host_vars:
- ansible_sftp_extra_args
_ssh_extra_args:
vars:
- name: ansible_sftp_extra_args
ssh_extra_args:
description: Extra args exclusive to the 'ssh' CLI
host_vars:
- ansible_ssh_extra_args
vars:
- name: ansible_ssh_extra_args
ssh_retries:
# constant: ANSIBLE_SSH_RETRIES
description: Number of attempts to connect.
default: 3
env:
- name: ANSIBLE_SSH_RETRIES
ini:
- section: connection
key: retries
- section: ssh_connection
key: retries
port:
description: Remote port to connect to.
type: int
config:
default: 22
ini:
- section: defaults
key: remote_port
default: 22
env_vars:
- ANSIBLE_REMOTE_PORT
host_vars:
- ansible_port
- ansible_ssh_port
env:
- name: ANSIBLE_REMOTE_PORT
vars:
- name: ansible_port
- name: ansible_ssh_port
remote_user:
description:
- User name with which to login to the remote server, normally set by the remote_user keyword.
- If no user is supplied, Ansible will let the ssh client binary choose the user as it normally would.
config:
ini:
- section: defaults
key: remote_user
env_vars:
- ANSIBLE_REMOTE_USER
host_vars:
- ansible_user
- ansible_ssh_user
env:
- name: ANSIBLE_REMOTE_USER
vars:
- name: ansible_user
- name: ansible_ssh_user
pipelining:
default: ANSIBLE_PIPELINING
description:
- Pipelining reduces the number of SSH operations required to execute a module on the remote server,
by executing many Ansible modules without actual file transfer.
- This can result in a very significant performance improvement when enabled.
- However this conflicts with privilege escalation (become).
For example, when using sudo operations you must first disable 'requiretty' in the sudoers file for the target hosts,
which is why this feature is disabled by default.
env: [{name: ANSIBLE_SSH_PIPELINING}]
ini:
- {key: pipelining, section: ssh_connection}
type: boolean
vars: [{name: ansible_ssh_pipelining}]
# TODO:
# ANSIBLE_SSH_RETRIES
# self._play_context.private_key_file
# ANSIBLE_SSH_CONTROL_PATH
# ANSIBLE_SSH_CONTROL_PATH_DIR
# DEFAULT_SFTP_BATCH_MODE
# DEFAULT_SCP_IF_SSH
'''
from __future__ import (absolute_import, division, print_function)
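
Each option above now declares its own lookup sources. A minimal sketch of the resolution such a definition implies, assuming the usual Ansible precedence (inventory/play vars over environment over ini over the coded default); the ini step is elided:

import os

# Sketch of resolving one option definition from the DOCUMENTATION above.
# Assumption: vars beat env, env beats ini, ini beats the default.
def resolve_option(definition, hostvars):
    for v in definition.get('vars', []):        # e.g. ansible_port
        if v['name'] in hostvars:
            return hostvars[v['name']]
    for e in definition.get('env', []):         # e.g. ANSIBLE_REMOTE_PORT
        if e['name'] in os.environ:
            return os.environ[e['name']]
    # ini lookup elided: would consult the listed section/key pairs
    return definition.get('default')

port_def = {
    'default': 22,
    'env': [{'name': 'ANSIBLE_REMOTE_PORT'}],
    'vars': [{'name': 'ansible_port'}, {'name': 'ansible_ssh_port'}],
}
assert resolve_option(port_def, {'ansible_port': 2222}) == 2222
assert resolve_option(port_def, {}) in (22, os.environ.get('ANSIBLE_REMOTE_PORT'))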


@@ -25,10 +25,9 @@ DOCUMENTATION:
- Uses a YAML configuration file to identify groups and the Jinja2 expressions that qualify a host for membership.
- Only variables already in inventory are available for expressions (no facts).
- Failed expressions will be ignored (assumes vars were missing).
EXAMPLES:
# inventory.config file in YAML format
plugin: constructed_groups
groups:
EXAMPLES: | # inventory.config file in YAML format
plugin: constructed_groups
groups:
# simple name matching
webservers: inventory_hostname.startswith('web')
@@ -77,7 +76,7 @@ class InventoryModule(BaseInventoryPlugin):
def parse(self, inventory, loader, path, cache=False):
''' parses the inventory file '''
super(InventoryModule, self).parse(inventory, loader, path)
super(InventoryModule, self).parse(inventory, loader, path, cache=True)
try:
data = self.loader.load_from_file(path)
@@ -94,19 +93,19 @@ class InventoryModule(BaseInventoryPlugin):
for host in inventory.hosts:
# get available variables to templar
hostvars = host.get_vars()
if host.name in inventory.cache: # adds facts if cache is active
hostvars = combine_vars(hostvars, inventory.cache[host.name])
hostvars = inventory.hosts[host].get_vars()
if host in inventory.cache: # adds facts if cache is active
hostvars = combine_vars(hostvars, inventory.cache[host])
templar.set_available_variables(hostvars)
# process each 'group entry'
for group_name, expression in data.get('groups', {}):
conditional = u"{%% if %s %%} True {%% else %%} False {%% endif %%}" % expression
for group_name in data.get('groups', {}):
conditional = u"{%% if %s %%} True {%% else %%} False {%% endif %%}" % data['groups'][group_name]
result = templar.template(conditional)
if result and bool(result):
# ensure group exists
inventory.add_group(group_name)
# add host to group
inventory.add_child(group_name, host.name)
inventory.add_child(group_name, host)
except Exception as e:
raise AnsibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
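
The templating step above is easiest to see in isolation; a minimal standalone sketch using jinja2 directly (Ansible's Templar adds variable handling on top):

# Each group expression is wrapped in an if/else template and rendered
# against that host's variables.
from jinja2 import Environment

expression = "inventory_hostname.startswith('web')"
conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % expression

rendered = Environment().from_string(conditional).render(inventory_hostname='web01')
assert rendered.strip() == 'True'   # -> add host to the 'webservers' group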


@@ -21,17 +21,19 @@ DOCUMENTATION:
version_added: "2.4"
short_description: Uses a specific YAML file as an inventory source.
description:
- YAML based inventory, starts with the 'all' group and has hosts/vars/children entries.
- "YAML based inventory, starts with the 'all' group and has hosts/vars/children entries."
- Host entries can have sub-entries defined, which will be treated as variables.
- Vars entries are normal group vars.
- Children are 'child groups', which can also have their own vars/hosts/children and so on.
- File MUST have a valid extension: yaml, yml, json.
- "Children are 'child groups', which can also have their own vars/hosts/children and so on."
- File MUST have a valid extension, defined in configuration
notes:
- It takes the place of the previously hardcoded YAML inventory.
- To function it requires being whitelisted in configuration.
options:
_yaml_extensions:
yaml_extensions:
description: list of 'valid' extensions for files containing YAML
type: list
default: ['.yaml', '.yml', '.json']
EXAMPLES:
all: # keys must be unique, i.e. only one 'hosts' per group
hosts:
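
A hedged sketch of the extension gate the yaml_extensions option above implies (the helper name is illustrative, not the plugin's actual method):

import os

# Only files whose extension appears in the configurable list are parsed.
def acceptable_file(path, yaml_extensions=('.yaml', '.yml', '.json')):
    return os.path.splitext(path)[1] in yaml_extensions

assert acceptable_file('inventory.yml')
assert not acceptable_file('inventory.ini')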


@@ -0,0 +1,588 @@
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import os
import os.path
import sys
import warnings
from collections import defaultdict
from ansible import constants as C
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
from ansible.module_utils._text import to_text
from ansible.parsing.plugin_docs import read_docstring
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.subdir = subdir
self.aliases = aliases
if config and not isinstance(config, list):
config = [config]
elif not config:
config = []
self.config = config
if class_name not in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if class_name not in PATH_CACHE:
PATH_CACHE[class_name] = None
if class_name not in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name=self.class_name,
base_class=self.base_class,
package=self.package,
config=self.config,
subdir=self.subdir,
aliases=self.aliases,
_extra_dirs=self._extra_dirs,
_searched_paths=self._searched_paths,
PATH_CACHE=PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
)
def format_paths(self, paths):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in paths:
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def print_paths(self):
return self.format_paths(self._get_paths(subdirs=False))
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir, followlinks=True):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root, x))
return results
def _get_package_paths(self, subdirs=True):
''' Gets the path of a Python package '''
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
for parent_mod in parts:
m = getattr(m, parent_mod)
self.package_path = os.path.dirname(m.__file__)
if subdirs:
return self._all_directories(self.package_path)
return [self.package_path]
def _get_paths(self, subdirs=True):
''' Return a list of paths to search for plugins in '''
# FIXME: This is potentially buggy if subdirs is sometimes True and sometimes False.
# In current usage, everything calls this with subdirs=True except for module_utils_loader and ansible-doc
# which always calls it with subdirs=False. So there currently isn't a problem with this caching.
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
for path in self.config:
path = os.path.realpath(os.path.expanduser(path))
if subdirs:
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
# Note package path always gets added last so that every other type of
# path is searched before it.
ret.extend(self._get_package_paths(subdirs=subdirs))
# HACK: because powershell modules are in the same directory
# hierarchy as other modules we have to process them last. This is
# because powershell only works on windows but the other modules work
# anywhere (possibly including windows if the correct language
# interpreter is installed). the non-powershell modules can have any
# file extension and thus powershell modules are picked up in that.
# The non-hack way to fix this is to have powershell modules be
# a different PluginLoader/ModuleLoader. But that requires changing
# other things too (known thing to change would be PATHS_CACHE,
# PLUGIN_PATHS_CACHE, and MODULE_CACHE. Since those three dicts key
# on the class_name and neither regular modules nor powershell modules
# would have class_names, they would not work as written.
reordered_paths = []
win_dirs = []
for path in ret:
if path.endswith('windows'):
win_dirs.append(path)
else:
reordered_paths.append(path)
reordered_paths.extend(win_dirs)
# cache and return the result
self._paths = reordered_paths
return reordered_paths
def _load_config_defs(self, name, path):
''' Reads plugin docs to find configuration setting definitions, to push to config manager for later use '''
# plugins w/o class name don't support config
if self.class_name and self.class_name in ('Connection',):
# FIXME: expand from just connection
type_name = get_plugin_class(self)
dstring = read_docstring(path, verbose=False, ignore_errors=False)
if dstring.get('doc', False):
if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict):
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['doc']['options'])
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, mod_type='', ignore_deprecated=False):
''' Find a plugin named name '''
if mod_type:
suffix = mod_type
elif self.class_name:
# Ansible plugins that run in the controller process (most plugins)
suffix = '.py'
else:
# Only Ansible Modules. Ansible modules can be any executable so
# they can have any suffix
suffix = ''
# The particular cache to look for modules within. This matches the
# requested mod_type
pull_cache = self._plugin_path_cache[suffix]
try:
return pull_cache[name]
except KeyError:
# Cache miss. Now let's find the plugin
pass
# TODO: Instead of using the self._paths cache (PATH_CACHE) and
# self._searched_paths we could use an iterator. Before enabling that
# we need to make sure we don't want to add additional directories
# (add_directory()) once we start using the iterator. Currently, it
# looks like _get_paths() never forces a cache refresh so if we expect
# additional directories to be added later, it is buggy.
for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
display.warning("Error accessing plugin paths: %s" % to_text(e))
for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
full_name = os.path.basename(full_path)
# HACK: We have no way of executing python byte compiled files as ansible modules so specifically exclude them
# FIXME: I believe this is only correct for modules and module_utils.
# For all other plugins, .pyc and .pyo should be valid
if full_path.endswith(('.pyc', '.pyo')):
continue
splitname = os.path.splitext(full_name)
base_name = splitname[0]
try:
extension = splitname[1]
except IndexError:
extension = ''
# Module found, now enter it into the caches that match this file
if base_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][base_name] = full_path
if full_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][full_name] = full_path
if base_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][base_name] = full_path
if full_name not in self._plugin_path_cache[extension]:
self._plugin_path_cache[extension][full_name] = full_path
self._searched_paths.add(path)
try:
return pull_cache[name]
except KeyError:
# Didn't find the plugin in this directory. Load modules from the next one
pass
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
alias_name = '_' + name
# We've already cached all the paths at this point
if alias_name in pull_cache:
if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
# FIXME: this is not always the case, some are just aliases
display.deprecated('%s is kept for backwards compatibility but usage is discouraged. '
'The module documentation details page may explain more about this rationale.' % name.lstrip('_'))
return pull_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def _load_module_source(self, name, path):
# avoid collisions across plugins
full_name = '.'.join([self.package, name])
if full_name in sys.modules:
# Avoids double loading, See https://github.com/ansible/ansible/issues/13110
return sys.modules[full_name]
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
with open(path, 'rb') as module_file:
module = imp.load_source(full_name, path, module_file)
return module
def _update_object(self, obj, name, path):
# load plugin config data
self._load_config_defs(name, path)
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
setattr(obj, '_load_name', name)
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
found_in_cache = True
class_only = kwargs.pop('class_only', False)
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
found_in_cache = False
obj = getattr(self._module_cache[path], self.class_name)
if self.base_class:
# The import path is hardcoded and should be the right place,
# so we are not expecting an ImportError.
module = __import__(self.package, fromlist=[self.base_class])
# Check whether this obj has the required base class.
try:
plugin_class = getattr(module, self.base_class)
except AttributeError:
return None
if not issubclass(obj, plugin_class):
return None
self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
if not class_only:
try:
obj = obj(*args, **kwargs)
except TypeError as e:
if "abstract" in e.args[0]:
# Abstract Base Class. The found plugin file does not
# fully implement the defined interface.
return None
raise
self._update_object(obj, name, path)
return obj
def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
msg = 'Loading %s \'%s\' from %s' % (class_name, os.path.basename(name), path)
if len(searched_paths) > 1:
msg = '%s (searched paths: %s)' % (msg, self.format_paths(searched_paths))
if found_in_cache or class_only:
msg = '%s (found_in_cache=%s, class_only=%s)' % (msg, found_in_cache, class_only)
display.debug(msg)
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
path_only = kwargs.pop('path_only', False)
class_only = kwargs.pop('class_only', False)
all_matches = []
found_in_cache = True
for i in self._get_paths():
all_matches.extend(glob.glob(os.path.join(i, "*.py")))
for path in sorted(all_matches, key=lambda match: os.path.basename(match)):
name = os.path.basename(os.path.splitext(path)[0])
if '__init__' in name:
continue
if path_only:
yield path
continue
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
found_in_cache = False
try:
obj = getattr(self._module_cache[path], self.class_name)
except AttributeError as e:
display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
continue
if self.base_class:
# The import path is hardcoded and should be the right place,
# so we are not expecting an ImportError.
module = __import__(self.package, fromlist=[self.base_class])
# Check whether this obj has the required base class.
try:
plugin_class = getattr(module, self.base_class)
except AttributeError:
continue
if not issubclass(obj, plugin_class):
continue
self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
if not class_only:
try:
obj = obj(*args, **kwargs)
except TypeError as e:
display.warning("Skipping plugin (%s) as it seems to be incomplete: %s" % (path, to_text(e)))
self._update_object(obj, name, path)
yield obj
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connection',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
module_utils_loader = PluginLoader(
'',
'ansible.module_utils',
C.DEFAULT_MODULE_UTILS_PATH,
'module_utils',
)
# NB: dedicated loader is currently necessary because PS module_utils expects "with subdir" lookup where
# regular module_utils doesn't. This can be revisited once we have more granular loaders.
ps_module_utils_loader = PluginLoader(
'',
'ansible.module_utils',
C.DEFAULT_MODULE_UTILS_PATH,
'module_utils',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
test_loader = PluginLoader(
'TestModule',
'ansible.plugins.test',
C.DEFAULT_TEST_PLUGIN_PATH,
'test_plugins'
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategy',
C.DEFAULT_STRATEGY_PLUGIN_PATH,
'strategy_plugins',
required_base_class='StrategyBase',
)
terminal_loader = PluginLoader(
'TerminalModule',
'ansible.plugins.terminal',
'terminal_plugins',
'terminal_plugins'
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
cliconf_loader = PluginLoader(
'Cliconf',
'ansible.plugins.cliconf',
'cliconf_plugins',
'cliconf_plugins',
required_base_class='CliconfBase'
)
netconf_loader = PluginLoader(
'Netconf',
'ansible.plugins.netconf',
'netconf_plugins',
'netconf_plugins',
required_base_class='NetconfBase'
)
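
Taken together, the module-level loaders above are the new import surface (ansible.plugins.loader), and get() now also feeds a connection plugin's documented options to the config manager via _load_config_defs(). A hedged usage sketch mirroring the strategy code further below:

import os
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader

play_context = PlayContext()
play_context.connection = 'local'

# get() finds, imports, instantiates, and annotates the plugin; for
# connection plugins it also registers the documented options.
conn = connection_loader.get(play_context.connection, play_context, os.devnull)
print(conn._load_name, conn._original_path)  # set by _update_object()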


@@ -29,20 +29,24 @@ DOCUMENTATION:
description:
- the list of keys to look up on the etcd server
type: list
element_type: string
elements: string
required: True
_etcd_url:
description:
- Environment variable with the url for the etcd server
default: 'http://127.0.0.1:4001'
env_vars:
env:
- name: ANSIBLE_ETCD_URL
yaml:
- key: etcd.url
_etcd_version:
description:
- Environment variable with the etcd protocol version
default: 'v1'
env_vars:
env:
- name: ANSIBLE_ETCD_VERSION
yaml:
- key: etcd.version
EXAMPLES:
- name: "a value from a locally running etcd"
debug: msg={{ lookup('etcd', 'foo/bar') }}
@@ -50,10 +54,11 @@ EXAMPLES:
- name: "a values from a folder on a locally running etcd"
debug: msg={{ lookup('etcd', 'foo') }}
RETURN:
_list:
_raw:
description:
- list of values associated with input keys
type: strings
type: list
elements: string
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


@@ -40,7 +40,7 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.plugins.loader import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.template import Templar
from ansible.utils.vars import combine_vars
from ansible.vars.manager import strip_internal_keys
@@ -899,6 +899,7 @@ class StrategyBase:
msg = "ending play"
elif meta_action == 'reset_connection':
connection = connection_loader.get(play_context.connection, play_context, os.devnull)
play_context.set_options_from_plugin(connection)
if connection:
connection.reset()
msg = 'reset connection'


@@ -34,7 +34,7 @@ import time
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.included_file import IncludedFile
from ansible.plugins import action_loader
from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
from ansible.module_utils._text import to_text


@@ -38,7 +38,7 @@ from ansible.module_utils._text import to_text
from ansible.playbook.block import Block
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task import Task
from ansible.plugins import action_loader
from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar


@@ -45,7 +45,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars


@@ -24,7 +24,7 @@ import sys
from ansible import constants as C
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import builtins
from ansible.plugins import filter_loader, test_loader
from ansible.plugins.loader import filter_loader, test_loader
def safe_eval(expr, locals={}, include_exceptions=False):


@@ -20,15 +20,12 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import yaml
from collections import MutableMapping, MutableSet, MutableSequence
from ansible.module_utils.six import string_types
from ansible.parsing.metadata import extract_metadata
from ansible.parsing.plugin_docs import read_docstring
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins import fragment_loader
from ansible.plugins.loader import fragment_loader
try:
from __main__ import display
@@ -93,94 +90,13 @@ def add_fragments(doc, filename):
def get_docstring(filename, verbose=False):
"""
Search for assignment of the DOCUMENTATION and EXAMPLES variables
in the given file.
Parse DOCUMENTATION from YAML and return the YAML doc or None
together with EXAMPLES, as plain text.
DOCUMENTATION can be extended using documentation fragments
loaded by the PluginLoader from the module_docs_fragments
directory.
DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader from the module_docs_fragments directory.
"""
# FIXME: Should refactor this so that we have a docstring parsing
# function and a separate variable parsing function
# Can have a function one higher that invokes whichever is needed
#
# Should look roughly like this:
# get_plugin_doc(filename, verbose=False)
# documentation = extract_docstring(plugin_ast, identifier, verbose=False)
# if not documentation and not (filter or test):
# documentation = extract_variables(plugin_ast)
# documentation['metadata'] = extract_metadata(plugin_ast)
data = {
'doc': None,
'plainexamples': None,
'returndocs': None,
'metadata': None
}
string_to_vars = {
'DOCUMENTATION': 'doc',
'EXAMPLES': 'plainexamples',
'RETURN': 'returndocs',
}
try:
b_module_data = open(filename, 'rb').read()
M = ast.parse(b_module_data)
try:
display.debug('Attempt first docstring is yaml docs')
docstring = yaml.load(M.body[0].value.s)
for string in string_to_vars.keys():
if string in docstring:
data[string_to_vars[string]] = docstring[string]
display.debug('assigned :%s' % string_to_vars[string])
except Exception as e:
display.debug('failed docstring parsing: %s' % str(e))
if 'docs' not in data or not data['docs']:
display.debug('Fallback to vars parsing')
for child in M.body:
if isinstance(child, ast.Assign):
for t in child.targets:
try:
theid = t.id
except AttributeError:
# skip errors can happen when trying to use the normal code
display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
continue
if theid in string_to_vars:
varkey = string_to_vars[theid]
if isinstance(child.value, ast.Dict):
data[varkey] = ast.literal_eval(child.value)
else:
if theid == 'DOCUMENTATION':
# string should be yaml
data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
else:
# not yaml, should be a simple string
data[varkey] = child.value.s
display.debug('assigned :%s' % varkey)
# Metadata is per-file rather than per-plugin/function
data['metadata'] = extract_metadata(module_ast=M)[0]
data = read_docstring(filename, verbose=verbose)
# add fragments to documentation
if data['doc']:
if data.get('doc', False):
add_fragments(data['doc'], filename)
# remove version
if data['metadata']:
for x in ('version', 'metadata_version'):
if x in data['metadata']:
del data['metadata'][x]
except Exception as e:
display.error("unable to parse %s" % filename)
if verbose is True:
display.display("unable to parse %s" % filename)
raise
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
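
After the refactor, get_docstring() is a thin wrapper: read_docstring() does the parsing (YAML docstring first, AST fallback) and this function only merges fragments and trims metadata. A hedged usage sketch in the context of this file (the 'ping' lookup is illustrative):

from ansible.plugins.loader import module_loader

path = module_loader.find_plugin('ping')          # illustrative lookup
doc, plainexamples, returndocs, metadata = get_docstring(path)
print(doc['short_description'])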


@@ -37,7 +37,7 @@ from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems, string_types, text_type
from ansible.plugins import lookup_loader, vars_loader
from ansible.plugins.loader import lookup_loader, vars_loader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
@@ -86,6 +86,21 @@ def strip_internal_keys(dirty):
return clean
def remove_internal_keys(data):
'''
More nuanced version of strip_internal_keys
'''
for key in list(data.keys()):
if (key.startswith('_ansible_') and key != '_ansible_parsed') or key in C.INTERNAL_RESULT_KEYS:
display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
del data[key]
# remove bad/empty internal keys
for key in ['warnings', 'deprecations']:
if key in data and not data[key]:
del data[key]
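
A worked example of what the relocated helper strips (the warning text comes from the function above):

result = {
    'changed': True,
    '_ansible_parsed': True,    # kept: explicitly exempt
    '_ansible_no_log': False,   # dropped: internal key
    'warnings': [],             # dropped: empty
    'deprecations': [],         # dropped: empty
}
remove_internal_keys(result)
assert result == {'changed': True, '_ansible_parsed': True}
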
class VariableManager:
def __init__(self, loader=None, inventory=None):


@@ -190,8 +190,7 @@ setup(
'galaxy/data/*/*/.*',
'galaxy/data/*/*/*.*',
'galaxy/data/*/tests/inventory',
'config/data/*.yaml',
'config/data/*.yml',
'config/base.yml',
],
},
classifiers=[


@@ -21,20 +21,19 @@ def ansible_environment(args, color=True):
if not path.startswith(ansible_path + os.pathsep):
path = ansible_path + os.pathsep + path
ansible_config = '/dev/null'
if os.path.isfile('test/integration/%s.cfg' % args.command):
ansible_config = os.path.abspath('test/integration/%s.cfg' % args.command)
ansible = dict(
ANSIBLE_FORCE_COLOR='%s' % 'true' if args.color and color else 'false',
ANSIBLE_DEPRECATION_WARNINGS='false',
ANSIBLE_CONFIG=ansible_config,
ANSIBLE_HOST_KEY_CHECKING='false',
PYTHONPATH=os.path.abspath('lib'),
PAGER='/bin/cat',
PATH=path,
)
if os.path.isfile('test/integration/%s.cfg' % args.command):
ansible_config = os.path.abspath('test/integration/%s.cfg' % args.command)
ansible['ANSIBLE_CONFIG'] = ansible_config
env.update(ansible)
if args.debug:


@@ -1,4 +1,5 @@
lib/ansible/cli/config.py
lib/ansible/constants.py
lib/ansible/config/data.py
lib/ansible/config/manager.py
lib/ansible/modules/cloud/amazon/_ec2_ami_search.py


@@ -24,7 +24,7 @@ from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.task_executor import TaskExecutor
from ansible.playbook.play_context import PlayContext
from ansible.plugins import action_loader, lookup_loader
from ansible.plugins.loader import action_loader, lookup_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from units.mock.loader import DictDataLoader


@@ -16,14 +16,20 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import pytest
import boto3
import os
import json
import collections
from . placebo_fixtures import placeboify, maybe_sleep
from nose.plugins.skip import SkipTest
from ansible.modules.cloud.amazon import data_pipeline
from ansible.module_utils._text import to_text
try:
import boto3
except ImportError:
raise SkipTest("test_data_pipeline.py requires the `boto3` and `botocore` modules")
@pytest.fixture(scope='module')
def dp_setup():


@@ -3,6 +3,7 @@ import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import PropertyMock, patch, mock_open
from ansible.module_utils import basic
from ansible.module_utils._text import to_native
from ansible.module_utils.six.moves import xmlrpc_client
from ansible.modules.packaging.os import rhn_register
@@ -96,7 +97,7 @@ class TestRhnRegister(unittest.TestCase):
orig_import = __import__
with patch('__builtin__.__import__', side_effect=mock_import):
rhn = self.module.Rhn()
self.assertEqual('123456789', rhn.systemid)
self.assertEqual('123456789', to_native(rhn.systemid))
def test_without_required_parameters(self):
"""Failure must occurs when all parameters are missing"""


@@ -20,6 +20,11 @@ __metaclass__ = type
from ansible.compat.tests import unittest
from ansible.plugins.filter.ipaddr import (ipaddr, _netmask_query, nthhost, next_nth_usable,
previous_nth_usable, network_in_usable, network_in_network)
try:
import netaddr
except ImportError:
from nose.plugins.skip import SkipTest
raise SkipTest("This test requires the `netaddr` python library")
class TestIpFilter(unittest.TestCase):


@@ -29,7 +29,7 @@ from ansible.compat.tests.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
from ansible.plugins import PluginLoader
from ansible.plugins.loader import PluginLoader
from ansible.plugins.lookup import password
from ansible.utils import encrypt


@@ -24,7 +24,7 @@ import os
from ansible.compat.tests import BUILTINS, unittest
from ansible.compat.tests.mock import mock_open, patch, MagicMock
from ansible.plugins import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
from ansible.plugins.loader import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
class TestErrors(unittest.TestCase):