2013-06-27 03:01:30 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
'''
|
|
|
|
DigitalOcean external inventory script
|
|
|
|
======================================
|
|
|
|
|
|
|
|
Generates Ansible inventory of DigitalOcean Droplets.
|
|
|
|
|
|
|
|
In addition to the --list and --host options used by Ansible, there are options
|
|
|
|
for generating JSON of other DigitalOcean data. This is useful when creating
|
|
|
|
droplets. For example, --regions will return all the DigitalOcean Regions.
|
|
|
|
This information can also be easily found in the cache file, whose default
|
|
|
|
location is ./ansible-digital_ocean.cache, relative to the configured cache path.
|
|
|
|
|
|
|
|
The --pretty (-p) option pretty-prints the output for better human readability.
|
|
|
|
|
2013-07-02 19:49:43 +00:00
|
|
|
----
|
|
|
|
Although the cache stores all the information received from DigitalOcean,
|
|
|
|
the cache is not used for current droplet information (in --list, --host,
|
|
|
|
--all, and --droplets). This is so that accurate droplet information is always
|
|
|
|
found. You can force this script to use the cache with --force-cache.
|
|
|
|
|
2013-06-27 18:52:32 +00:00
|
|
|
----
|
2013-06-27 03:01:30 +00:00
|
|
|
Configuration is read from `digital_ocean.ini`, then from environment variables,
|
|
|
|
and then command-line arguments.
|
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
Most notably, the DigitalOcean API Token must be specified. It can be specified
|
|
|
|
in the INI file or with the following environment variables:
|
|
|
|
export DO_API_TOKEN='abc123' or
|
|
|
|
export DO_API_KEY='abc123'
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
Alternatively, it can be passed on the command-line with --api-token.
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2013-06-27 18:52:32 +00:00
|
|
|
If you specify DigitalOcean credentials in the INI file, a handy way to
|
|
|
|
get them into your environment (e.g., to use the digital_ocean module)
|
|
|
|
is to use the output of the --env option with export:
|
|
|
|
export $(digital_ocean.py --env)
|
|
|
|
|
|
|
|
----
|
2013-06-27 03:01:30 +00:00
|
|
|
The following groups are generated from --list:
|
|
|
|
- ID (droplet ID)
|
|
|
|
- NAME (droplet NAME)
|
|
|
|
- image_ID
|
|
|
|
- image_NAME
|
|
|
|
- distro_NAME (distribution NAME from image)
|
|
|
|
- region_NAME
|
|
|
|
- size_NAME
|
|
|
|
- status_STATUS
|
|
|
|
|
|
|
|
When run against a specific host, this script returns the following variables:
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_backup_ids
|
2013-06-27 03:01:30 +00:00
|
|
|
- do_created_at
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_disk
|
|
|
|
- do_features - list
|
2013-06-27 03:01:30 +00:00
|
|
|
- do_id
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_image - object
|
2013-06-27 03:01:30 +00:00
|
|
|
- do_ip_address
|
2015-08-02 03:21:20 +00:00
|
|
|
- do_private_ip_address
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_kernel - object
|
|
|
|
- do_locked
|
|
|
|
 - do_memory
|
2013-06-27 03:01:30 +00:00
|
|
|
- do_name
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_networks - object
|
|
|
|
- do_next_backup_window
|
|
|
|
- do_region - object
|
|
|
|
- do_size - object
|
|
|
|
- do_size_slug
|
|
|
|
- do_snapshot_ids - list
|
2013-06-27 03:01:30 +00:00
|
|
|
- do_status
|
2015-05-13 15:12:48 +00:00
|
|
|
- do_vcpus
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
-----
|
|
|
|
```
|
|
|
|
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
|
|
|
|
[--droplets] [--regions] [--images] [--sizes]
|
|
|
|
[--ssh-keys] [--domains] [--pretty]
|
2015-05-13 15:12:48 +00:00
|
|
|
[--cache-path CACHE_PATH]
|
|
|
|
[--cache-max_age CACHE_MAX_AGE]
|
|
|
|
[--force-cache]
|
|
|
|
[--refresh-cache]
|
2015-05-07 19:53:10 +00:00
|
|
|
[--api-token API_TOKEN]
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
Produce an Ansible Inventory file based on DigitalOcean credentials
|
|
|
|
|
|
|
|
optional arguments:
|
|
|
|
-h, --help show this help message and exit
|
|
|
|
--list List all active Droplets as Ansible inventory
|
|
|
|
(default: True)
|
|
|
|
--host HOST Get all Ansible inventory variables about a specific
|
|
|
|
Droplet
|
|
|
|
--all List all DigitalOcean information as JSON
|
|
|
|
--droplets List Droplets as JSON
|
|
|
|
--regions List Regions as JSON
|
|
|
|
--images List Images as JSON
|
|
|
|
--sizes List Sizes as JSON
|
|
|
|
--ssh-keys List SSH keys as JSON
|
|
|
|
--domains List Domains as JSON
|
|
|
|
--pretty, -p Pretty-print results
|
2015-05-13 15:12:48 +00:00
|
|
|
--cache-path CACHE_PATH
|
|
|
|
Path to the cache files (default: .)
|
|
|
|
--cache-max_age CACHE_MAX_AGE
|
|
|
|
Maximum age of the cached items (default: 0)
|
|
|
|
--force-cache Only use data from the cache
|
|
|
|
--refresh-cache Force refresh of cache by making API requests to
|
|
|
|
DigitalOcean (default: False - use cache files)
|
2015-05-07 19:53:10 +00:00
|
|
|
--api-token API_TOKEN, -a API_TOKEN
|
|
|
|
DigitalOcean API Token
|
2013-06-27 03:01:30 +00:00
|
|
|
```
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
|
|
|
# (c) 2013, Evan Wies <evan@neomantra.net>
|
|
|
|
#
|
|
|
|
# Inspired by the EC2 inventory plugin:
|
2015-07-10 16:59:52 +00:00
|
|
|
# https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.py
|
2013-06-27 03:01:30 +00:00
|
|
|
#
|
|
|
|
# This file is part of Ansible,
|
|
|
|
#
|
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
######################################################################
|
|
|
|
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import re
|
|
|
|
import argparse
|
|
|
|
from time import time
|
|
|
|
import ConfigParser
|
2016-02-14 16:04:26 +00:00
|
|
|
import ast
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
import json
|
|
|
|
except ImportError:
|
|
|
|
import simplejson as json
|
|
|
|
|
|
|
|
try:
|
|
|
|
from dopy.manager import DoError, DoManager
|
2015-08-27 18:58:51 +00:00
|
|
|
except ImportError as e:
|
2015-08-28 06:18:13 +00:00
|
|
|
print("failed=True msg='`dopy` library required for this script'")
|
2013-06-27 03:01:30 +00:00
|
|
|
sys.exit(1)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class DigitalOceanInventory(object):
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Main execution path
|
|
|
|
###########################################################################
|
|
|
|
|
|
|
|
def __init__(self):
|
|
|
|
''' Main execution path '''
|
|
|
|
|
|
|
|
# DigitalOceanInventory data
|
|
|
|
self.data = {} # All DigitalOcean data
|
|
|
|
self.inventory = {} # Ansible Inventory
|
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
# Define defaults
|
|
|
|
self.cache_path = '.'
|
|
|
|
self.cache_max_age = 0
|
2015-10-02 23:16:44 +00:00
|
|
|
self.use_private_network = False
|
2016-02-14 16:04:26 +00:00
|
|
|
self.group_variables = {}
|
2015-05-13 15:12:48 +00:00
|
|
|
|
2013-06-27 03:01:30 +00:00
|
|
|
# Read settings, environment variables, and CLI arguments
|
|
|
|
self.read_settings()
|
|
|
|
self.read_environment()
|
|
|
|
self.read_cli_args()
|
|
|
|
|
|
|
|
# Verify credentials were set
|
2015-05-07 19:53:10 +00:00
|
|
|
if not hasattr(self, 'api_token'):
|
2015-08-28 06:18:13 +00:00
|
|
|
print('''Could not find values for DigitalOcean api_token.
|
2015-05-07 19:53:10 +00:00
|
|
|
They must be specified via either ini file, command line argument (--api-token),
|
2015-08-28 06:18:13 +00:00
|
|
|
or environment variables (DO_API_TOKEN)''')
|
2013-06-27 03:01:30 +00:00
|
|
|
sys.exit(-1)
|
|
|
|
|
2013-06-27 18:52:32 +00:00
|
|
|
# env command, show DigitalOcean credentials
|
|
|
|
if self.args.env:
|
2015-08-28 06:18:13 +00:00
|
|
|
print("DO_API_TOKEN=%s" % self.api_token)
|
2013-06-27 18:52:32 +00:00
|
|
|
sys.exit(0)
|
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
# Manage cache
|
|
|
|
self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
|
|
|
|
self.cache_refreshed = False
|
|
|
|
|
|
|
|
if self.is_cache_valid:
|
|
|
|
self.load_from_cache()
|
|
|
|
if len(self.data) == 0:
|
|
|
|
if self.args.force_cache:
|
2015-08-28 06:18:13 +00:00
|
|
|
print('''Cache is empty and --force-cache was specified''')
|
2015-05-13 15:12:48 +00:00
|
|
|
sys.exit(-1)
|
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
self.manager = DoManager(None, self.api_token, api_version=2)
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
# Pick the json_data to print based on the CLI command
|
2015-05-07 19:53:10 +00:00
|
|
|
if self.args.droplets:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('droplets')
|
|
|
|
json_data = {'droplets': self.data['droplets']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.regions:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('regions')
|
|
|
|
json_data = {'regions': self.data['regions']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.images:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('images')
|
|
|
|
json_data = {'images': self.data['images']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.sizes:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('sizes')
|
|
|
|
json_data = {'sizes': self.data['sizes']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.ssh_keys:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('ssh_keys')
|
|
|
|
json_data = {'ssh_keys': self.data['ssh_keys']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.domains:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('domains')
|
|
|
|
json_data = {'domains': self.data['domains']}
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.all:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean()
|
|
|
|
json_data = self.data
|
2015-05-07 19:53:10 +00:00
|
|
|
elif self.args.host:
|
|
|
|
json_data = self.load_droplet_variables_for_host()
|
2013-06-27 03:01:30 +00:00
|
|
|
else: # '--list' this is last to make it default
|
2015-05-13 15:12:48 +00:00
|
|
|
self.load_from_digital_ocean('droplets')
|
2015-05-07 19:53:10 +00:00
|
|
|
self.build_inventory()
|
|
|
|
json_data = self.inventory
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2015-05-14 06:42:48 +00:00
|
|
|
if self.cache_refreshed:
|
|
|
|
self.write_to_cache()
|
|
|
|
|
2013-06-27 03:01:30 +00:00
|
|
|
if self.args.pretty:
|
2015-08-28 06:18:13 +00:00
|
|
|
print(json.dumps(json_data, sort_keys=True, indent=2))
|
2013-06-27 03:01:30 +00:00
|
|
|
else:
|
2015-08-28 06:18:13 +00:00
|
|
|
print(json.dumps(json_data))
|
2013-06-27 03:01:30 +00:00
|
|
|
# That's all she wrote...
|
|
|
|
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Script configuration
|
|
|
|
###########################################################################
|
|
|
|
|
|
|
|
def read_settings(self):
|
|
|
|
''' Reads the settings from the digital_ocean.ini file '''
|
|
|
|
config = ConfigParser.SafeConfigParser()
|
|
|
|
config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')
|
|
|
|
|
|
|
|
# Credentials
|
2015-05-07 19:53:10 +00:00
|
|
|
if config.has_option('digital_ocean', 'api_token'):
|
|
|
|
self.api_token = config.get('digital_ocean', 'api_token')
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
# Cache related
|
|
|
|
if config.has_option('digital_ocean', 'cache_path'):
|
|
|
|
self.cache_path = config.get('digital_ocean', 'cache_path')
|
|
|
|
if config.has_option('digital_ocean', 'cache_max_age'):
|
|
|
|
self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')
|
|
|
|
|
2015-10-02 23:16:44 +00:00
|
|
|
# Private IP Address
|
|
|
|
if config.has_option('digital_ocean', 'use_private_network'):
|
|
|
|
self.use_private_network = config.get('digital_ocean', 'use_private_network')
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2016-02-14 16:04:26 +00:00
|
|
|
# Group variables
|
|
|
|
if config.has_option('digital_ocean', 'group_variables'):
|
|
|
|
self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))
|
|
|
|
|
2013-06-27 03:01:30 +00:00
|
|
|
def read_environment(self):
|
|
|
|
''' Reads the settings from environment variables '''
|
|
|
|
# Setup credentials
|
2015-05-07 19:53:10 +00:00
|
|
|
if os.getenv("DO_API_TOKEN"):
|
|
|
|
self.api_token = os.getenv("DO_API_TOKEN")
|
|
|
|
if os.getenv("DO_API_KEY"):
|
|
|
|
self.api_token = os.getenv("DO_API_KEY")
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def read_cli_args(self):
|
|
|
|
''' Command line argument processing '''
|
|
|
|
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
|
|
|
|
|
|
|
|
parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
|
|
|
|
parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')
|
|
|
|
|
|
|
|
parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
|
2013-07-02 19:49:43 +00:00
|
|
|
parser.add_argument('--droplets','-d', action='store_true', help='List Droplets as JSON')
|
2013-06-27 03:01:30 +00:00
|
|
|
parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
|
|
|
|
parser.add_argument('--images', action='store_true', help='List Images as JSON')
|
|
|
|
parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
|
|
|
|
parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
|
|
|
|
parser.add_argument('--domains', action='store_true',help='List Domains as JSON')
|
|
|
|
|
|
|
|
parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results')
|
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
|
|
|
|
parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
|
|
|
|
parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
|
|
|
|
parser.add_argument('--refresh-cache','-r', action='store_true', default=False,
|
|
|
|
help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
|
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
parser.add_argument('--env','-e', action='store_true', help='Display DO_API_TOKEN')
|
|
|
|
parser.add_argument('--api-token','-a', action='store', help='DigitalOcean API Token')
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
self.args = parser.parse_args()
|
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
if self.args.api_token:
|
|
|
|
self.api_token = self.args.api_token
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2013-07-02 19:49:43 +00:00
|
|
|
# Make --list default if none of the other commands are specified
|
2015-05-07 19:53:10 +00:00
|
|
|
if (not self.args.droplets and not self.args.regions and
|
|
|
|
not self.args.images and not self.args.sizes and
|
|
|
|
not self.args.ssh_keys and not self.args.domains and
|
|
|
|
not self.args.all and not self.args.host):
|
|
|
|
self.args.list = True
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Data Management
|
|
|
|
###########################################################################
|
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
def load_from_digital_ocean(self, resource=None):
|
|
|
|
'''Get JSON from DigitalOcean API'''
|
2015-05-13 15:12:48 +00:00
|
|
|
if self.args.force_cache:
|
|
|
|
return
|
2015-05-14 06:42:48 +00:00
|
|
|
# We always get fresh droplets
|
|
|
|
if self.is_cache_valid() and not (resource=='droplets' or resource is None):
|
|
|
|
return
|
2015-05-13 15:12:48 +00:00
|
|
|
if self.args.refresh_cache:
|
|
|
|
resource=None
|
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'droplets' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['droplets'] = self.manager.all_active_droplets()
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'regions' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['regions'] = self.manager.all_regions()
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'images' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['images'] = self.manager.all_images(filter=None)
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'sizes' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['sizes'] = self.manager.sizes()
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'ssh_keys' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['ssh_keys'] = self.manager.all_ssh_keys()
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2015-05-07 19:53:10 +00:00
|
|
|
if resource == 'domains' or resource is None:
|
2015-05-13 15:12:48 +00:00
|
|
|
self.data['domains'] = self.manager.all_domains()
|
2015-05-14 06:42:48 +00:00
|
|
|
self.cache_refreshed = True
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def build_inventory(self):
|
|
|
|
'''Build Ansible inventory of droplets'''
|
|
|
|
self.inventory = {}
|
|
|
|
|
|
|
|
# add all droplets by id and name
|
|
|
|
for droplet in self.data['droplets']:
|
2015-10-02 23:16:44 +00:00
|
|
|
#when using private_networking, the API reports the private one in "ip_address".
|
|
|
|
if 'private_networking' in droplet['features'] and not self.use_private_network:
|
2015-08-02 03:21:20 +00:00
|
|
|
for net in droplet['networks']['v4']:
|
|
|
|
if net['type']=='public':
|
|
|
|
dest=net['ip_address']
|
|
|
|
else:
|
|
|
|
continue
|
|
|
|
else:
|
|
|
|
dest = droplet['ip_address']
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2016-02-14 16:04:26 +00:00
|
|
|
dest = { 'hosts': [ dest ], 'vars': self.group_variables }
|
|
|
|
|
|
|
|
self.inventory[droplet['id']] = dest
|
|
|
|
self.inventory[droplet['name']] = dest
|
|
|
|
self.inventory['region_' + droplet['region']['slug']] = dest
|
|
|
|
self.inventory['image_' + str(droplet['image']['id'])] = dest
|
|
|
|
self.inventory['size_' + droplet['size']['slug']] = dest
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2015-05-07 19:53:10 +00:00
|
|
|
image_slug = droplet['image']['slug']
|
|
|
|
if image_slug:
|
2016-02-14 16:04:26 +00:00
|
|
|
self.inventory['image_' + self.to_safe(image_slug)] = dest
|
2015-05-07 19:53:10 +00:00
|
|
|
else:
|
|
|
|
image_name = droplet['image']['name']
|
|
|
|
if image_name:
|
2016-02-14 16:04:26 +00:00
|
|
|
self.inventory['image_' + self.to_safe(image_name)] = dest
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2016-02-14 16:04:26 +00:00
|
|
|
self.inventory['distro_' + self.to_safe(droplet['image']['distribution'])] = dest
|
|
|
|
self.inventory['status_' + droplet['status']] = dest
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def load_droplet_variables_for_host(self):
|
2014-05-03 16:40:05 +00:00
|
|
|
'''Generate a JSON response to a --host call'''
|
2015-05-07 19:53:10 +00:00
|
|
|
host = int(self.args.host)
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
droplet = self.manager.show_droplet(host)
|
|
|
|
|
|
|
|
# Put all the information in a 'do_' namespace
|
|
|
|
info = {}
|
|
|
|
for k, v in droplet.items():
|
|
|
|
info['do_'+k] = v
|
|
|
|
|
|
|
|
return {'droplet': info}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Cache Management
|
|
|
|
###########################################################################
|
2013-06-27 03:01:30 +00:00
|
|
|
|
2015-05-13 15:12:48 +00:00
|
|
|
def is_cache_valid(self):
|
|
|
|
''' Determines if the cache files have expired, or if it is still valid '''
|
|
|
|
if os.path.isfile(self.cache_filename):
|
|
|
|
mod_time = os.path.getmtime(self.cache_filename)
|
|
|
|
current_time = time()
|
|
|
|
if (mod_time + self.cache_max_age) > current_time:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
|
|
def load_from_cache(self):
|
|
|
|
''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
|
|
|
|
try:
|
|
|
|
cache = open(self.cache_filename, 'r')
|
|
|
|
json_data = cache.read()
|
|
|
|
cache.close()
|
|
|
|
data = json.loads(json_data)
|
|
|
|
except IOError:
|
|
|
|
data = {'data': {}, 'inventory': {}}
|
|
|
|
|
|
|
|
self.data = data['data']
|
|
|
|
self.inventory = data['inventory']
|
|
|
|
|
|
|
|
|
|
|
|
def write_to_cache(self):
|
|
|
|
''' Writes data in JSON format to a file '''
|
|
|
|
data = { 'data': self.data, 'inventory': self.inventory }
|
|
|
|
json_data = json.dumps(data, sort_keys=True, indent=2)
|
|
|
|
|
|
|
|
cache = open(self.cache_filename, 'w')
|
|
|
|
cache.write(json_data)
|
|
|
|
cache.close()
|
2013-06-27 03:01:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Utilities
|
|
|
|
###########################################################################
|
|
|
|
|
|
|
|
def push(self, my_dict, key, element):
|
|
|
|
''' Pushed an element onto an array that may not have been defined in the dict '''
|
|
|
|
if key in my_dict:
|
2015-05-07 19:53:10 +00:00
|
|
|
my_dict[key].append(element)
|
2013-06-27 03:01:30 +00:00
|
|
|
else:
|
|
|
|
my_dict[key] = [element]
|
|
|
|
|
|
|
|
|
|
|
|
def to_safe(self, word):
|
|
|
|
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
|
|
|
|
return re.sub("[^A-Za-z0-9\-\.]", "_", word)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
###########################################################################
|
|
|
|
# Run the script
|
|
|
|
# Instantiating the class runs the whole script (parses argv, prints JSON).
# Guarding with __main__ lets the module be imported without side effects.
if __name__ == '__main__':
    DigitalOceanInventory()
|