Add an ansible-doc JSON plugin data dump
* Used for changelog generation of new plugins
This commit is contained in:
parent
6ef2ffe310
commit
096d243526
2 changed files with 143 additions and 78 deletions
|
@ -17,6 +17,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import textwrap
|
||||
import traceback
|
||||
|
@ -55,6 +56,18 @@ class DocCLI(CLI):
|
|||
super(DocCLI, self).__init__(args)
|
||||
self.plugin_list = set()
|
||||
|
||||
self.loader_map = {
|
||||
'cache': cache_loader,
|
||||
'callback': callback_loader,
|
||||
'connection': connection_loader,
|
||||
'lookup': lookup_loader,
|
||||
'strategy': strategy_loader,
|
||||
'vars': vars_loader,
|
||||
'inventory': inventory_loader,
|
||||
'shell': shell_loader,
|
||||
'module': module_loader,
|
||||
}
|
||||
|
||||
def parse(self):
|
||||
|
||||
self.parser = CLI.base_parser(
|
||||
|
@ -72,13 +85,15 @@ class DocCLI(CLI):
|
|||
help='Show playbook snippet for specified plugin(s)')
|
||||
self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_plugins',
|
||||
help='**For internal testing only** Show documentation for all plugins.')
|
||||
self.parser.add_option("-j", "--json", action="store_true", default=False, dest='json_dump',
|
||||
help='**For internal testing only** Dump json metadata for all plugins.')
|
||||
self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
|
||||
help='Choose which plugin type (defaults to "module")',
|
||||
choices=C.DOCUMENTABLE_PLUGINS)
|
||||
super(DocCLI, self).parse()
|
||||
|
||||
if [self.options.all_plugins, self.options.list_dir, self.options.list_files, self.options.show_snippet].count(True) > 1:
|
||||
raise AnsibleOptionsError("Only one of -l, -F, -s or -a can be used at the same time.")
|
||||
if [self.options.all_plugins, self.options.json_dump, self.options.list_dir, self.options.list_files, self.options.show_snippet].count(True) > 1:
|
||||
raise AnsibleOptionsError("Only one of -l, -F, -s, -j or -a can be used at the same time.")
|
||||
|
||||
display.verbosity = self.options.verbosity
|
||||
|
||||
|
@ -87,26 +102,7 @@ class DocCLI(CLI):
|
|||
super(DocCLI, self).run()
|
||||
|
||||
plugin_type = self.options.type
|
||||
|
||||
# choose plugin type
|
||||
if plugin_type == 'cache':
|
||||
loader = cache_loader
|
||||
elif plugin_type == 'callback':
|
||||
loader = callback_loader
|
||||
elif plugin_type == 'connection':
|
||||
loader = connection_loader
|
||||
elif plugin_type == 'lookup':
|
||||
loader = lookup_loader
|
||||
elif plugin_type == 'strategy':
|
||||
loader = strategy_loader
|
||||
elif plugin_type == 'vars':
|
||||
loader = vars_loader
|
||||
elif plugin_type == 'inventory':
|
||||
loader = inventory_loader
|
||||
elif plugin_type == 'shell':
|
||||
loader = shell_loader
|
||||
else:
|
||||
loader = module_loader
|
||||
loader = self.loader_map.get(plugin_type, self.loader_map['module'])
|
||||
|
||||
# add to plugin path from command line
|
||||
if self.options.module_path:
|
||||
|
@ -122,7 +118,7 @@ class DocCLI(CLI):
|
|||
if self.options.list_files:
|
||||
paths = loader._get_paths()
|
||||
for path in paths:
|
||||
self.find_plugins(path, plugin_type)
|
||||
self.plugin_list = self.find_plugins(path, plugin_type)
|
||||
|
||||
list_text = self.get_plugin_list_filenames(loader)
|
||||
self.pager(list_text)
|
||||
|
@ -132,17 +128,27 @@ class DocCLI(CLI):
|
|||
if self.options.list_dir:
|
||||
paths = loader._get_paths()
|
||||
for path in paths:
|
||||
self.find_plugins(path, plugin_type)
|
||||
self.plugin_list = self.find_plugins(path, plugin_type)
|
||||
|
||||
self.pager(self.get_plugin_list_text(loader))
|
||||
return 0
|
||||
|
||||
# process all plugins of type
|
||||
if self.options.all_plugins:
|
||||
paths = loader._get_paths()
|
||||
for path in paths:
|
||||
self.find_plugins(path, plugin_type)
|
||||
self.args = sorted(set(self.plugin_list))
|
||||
self.args = self.get_all_plugins_of_type(plugin_type)
|
||||
|
||||
# dump plugin metadata as JSON
|
||||
if self.options.json_dump:
|
||||
plugin_data = {}
|
||||
for plugin_type in self.loader_map.keys():
|
||||
plugin_data[plugin_type] = dict()
|
||||
plugin_names = self.get_all_plugins_of_type(plugin_type)
|
||||
for plugin_name in plugin_names:
|
||||
plugin_data[plugin_type][plugin_name] = self.get_plugin_metadata(plugin_type, plugin_name)
|
||||
|
||||
self.pager(json.dumps(plugin_data, sort_keys=True, indent=4))
|
||||
|
||||
return 0
|
||||
|
||||
if len(self.args) == 0:
|
||||
raise AnsibleOptionsError("Incorrect options passed")
|
||||
|
@ -150,22 +156,78 @@ class DocCLI(CLI):
|
|||
# process command line list
|
||||
text = ''
|
||||
for plugin in self.args:
|
||||
text += self.format_plugin_doc(plugin, loader, plugin_type, search_paths)
|
||||
|
||||
if text:
|
||||
self.pager(text)
|
||||
|
||||
return 0
|
||||
|
||||
def get_all_plugins_of_type(self, plugin_type):
    """Return a sorted list of all plugin names of the given type.

    Walks every configured search path of the type's loader and
    aggregates the plugin names discovered there.

    :param plugin_type: key into ``self.loader_map`` (e.g. 'module', 'lookup').
    :returns: sorted list of unique plugin names.
    """
    loader = self.loader_map[plugin_type]
    plugin_names = set()
    for path in loader._get_paths():
        # find_plugins returns a (possibly empty) set of names found under path
        plugin_names.update(self.find_plugins(path, plugin_type))
    # plugin_names is already a set, so the original's extra set() wrapper
    # around sorted() was redundant de-duplication
    return sorted(plugin_names)
|
||||
|
||||
def get_plugin_metadata(self, plugin_type, plugin_name):
    """Return a metadata dict (name, namespace, description, version_added)
    for a single plugin, derived from its DOCUMENTATION block.

    :raises AnsibleError: when the plugin cannot be found or its docs
        cannot be parsed.
    """
    loader = self.loader_map[plugin_type]
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    filename = loader.find_plugin(plugin_name, mod_type='.py', ignore_deprecated=True, check_aliases=True)
    if filename is None:
        raise AnsibleError("unable to load {0} plugin named {1} ".format(plugin_type, plugin_name))

    verbose = self.options.verbosity > 0
    try:
        doc = get_docstring(filename, fragment_loader, verbose=verbose)[0]
    except Exception:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s at %s has a documentation error formatting or is missing documentation." %
            (plugin_type, plugin_name, filename), wrap_text=False)

    namespace = self.namespace_from_plugin_filepath(filename, plugin_name, loader.package_path)
    return {
        'name': plugin_name,
        'namespace': namespace,
        'description': doc.get('short_description', "UNKNOWN"),
        'version_added': doc.get('version_added', "UNKNOWN"),
    }
|
||||
|
||||
def namespace_from_plugin_filepath(self, filepath, plugin_name, basedir):
    """Derive a dotted namespace for *plugin_name* from its path under *basedir*.

    Example: ``/base/cloud/amazon/ec2.py`` with basedir ``/base`` yields
    ``cloud.amazon``. Returns None when the plugin sits directly in basedir.
    """
    prefix = basedir if basedir.endswith('/') else basedir + '/'
    relative = filepath.replace(prefix, '')
    without_ext = os.path.splitext(relative)[0]
    # drop the trailing plugin name, then any deprecation underscore or
    # path-separator residue at either end, before dotting the path
    namespace = without_ext.rsplit(plugin_name, 1)[0].strip('/_').replace('/', '.')
    return namespace or None
|
||||
|
||||
def format_plugin_doc(self, plugin, loader, plugin_type, search_paths):
|
||||
text = ''
|
||||
|
||||
try:
|
||||
# if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
|
||||
filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
|
||||
if filename is None:
|
||||
display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
|
||||
continue
|
||||
return
|
||||
|
||||
if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
|
||||
continue
|
||||
return
|
||||
|
||||
try:
|
||||
doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
|
||||
doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader,
|
||||
verbose=(self.options.verbosity > 0))
|
||||
except Exception:
|
||||
display.vvv(traceback.format_exc())
|
||||
display.error("%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin), wrap_text=False)
|
||||
continue
|
||||
display.error(
|
||||
"%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin),
|
||||
wrap_text=False)
|
||||
return
|
||||
|
||||
if doc is not None:
|
||||
|
||||
|
@ -190,25 +252,26 @@ class DocCLI(CLI):
|
|||
text += self.get_snippet_text(doc)
|
||||
else:
|
||||
text += self.get_man_text(doc)
|
||||
|
||||
return text
|
||||
else:
|
||||
# this typically means we couldn't even parse the docstring, not just that the YAML is busted,
|
||||
# probably a quoting issue.
|
||||
raise AnsibleError("Parsing produced an empty object.")
|
||||
except Exception as e:
|
||||
display.vvv(traceback.format_exc())
|
||||
raise AnsibleError("%s %s missing documentation (or could not parse documentation): %s\n" % (plugin_type, plugin, str(e)))
|
||||
|
||||
if text:
|
||||
self.pager(text)
|
||||
return 0
|
||||
raise AnsibleError(
|
||||
"%s %s missing documentation (or could not parse documentation): %s\n" % (plugin_type, plugin, str(e)))
|
||||
|
||||
def find_plugins(self, path, ptype):
|
||||
|
||||
display.vvvv("Searching %s for plugins" % path)
|
||||
|
||||
plugin_list = set()
|
||||
|
||||
if not os.path.exists(path):
|
||||
display.vvvv("%s does not exist" % path)
|
||||
return
|
||||
return plugin_list
|
||||
|
||||
bkey = ptype.upper()
|
||||
for plugin in os.listdir(path):
|
||||
|
@ -233,9 +296,11 @@ class DocCLI(CLI):
|
|||
plugin = plugin.lstrip('_') # remove underscore from deprecated plugins
|
||||
|
||||
if plugin not in BLACKLIST.get(bkey, ()):
|
||||
self.plugin_list.add(plugin)
|
||||
plugin_list.add(plugin)
|
||||
display.vvvv("Added %s" % plugin)
|
||||
|
||||
return plugin_list
|
||||
|
||||
def get_plugin_list_text(self, loader):
|
||||
columns = display.columns
|
||||
displace = max(len(x) for x in self.plugin_list)
|
||||
|
|
|
@ -84,7 +84,7 @@ BECOME_MISSING_STRINGS = {
|
|||
'enable': '',
|
||||
'machinectl': '',
|
||||
} # FIXME: deal with i18n
|
||||
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
|
||||
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt', '.rst')
|
||||
BOOL_TRUE = BOOLEANS_TRUE
|
||||
CONTROLER_LANG = os.getenv('LANG', 'en_US.UTF-8')
|
||||
DEFAULT_BECOME_PASS = None
|
||||
|
|
Loading…
Reference in a new issue