|
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-'''
+"""
 DigitalOcean external inventory script
 ======================================
 
@@ -22,7 +22,7 @@ found. You can force this script to use the cache with --force-cache.
 
 ----
 Configuration is read from `digital_ocean.ini`, then from environment variables,
-then and command-line arguments.
+and then from command-line arguments.
 
 Most notably, the DigitalOcean API Token must be specified. It can be specified
 in the INI file or with the following environment variables:
@@ -40,6 +40,7 @@ is to use the output of the --env option with export:
 The following groups are generated from --list:
  - ID (droplet ID)
  - NAME (droplet NAME)
+ - digital_ocean
  - image_ID
  - image_NAME
  - distro_NAME (distribution NAME from image)
@@ -73,14 +74,12 @@ For each host, the following variables are registered:
 
 -----
 ```
-usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
-                        [--droplets] [--regions] [--images] [--sizes]
-                        [--ssh-keys] [--domains] [--pretty]
-                        [--cache-path CACHE_PATH]
-                        [--cache-max_age CACHE_MAX_AGE]
-                        [--force-cache]
-                        [--refresh-cache]
-                        [--api-token API_TOKEN]
+usage: digital_ocean.py [-h] [--list] [--host HOST] [--all] [--droplets]
+                        [--regions] [--images] [--sizes] [--ssh-keys]
+                        [--domains] [--tags] [--pretty]
+                        [--cache-path CACHE_PATH]
+                        [--cache-max_age CACHE_MAX_AGE] [--force-cache]
+                        [--refresh-cache] [--env] [--api-token API_TOKEN]
 
 Produce an Ansible Inventory file based on DigitalOcean credentials
 
@@ -91,65 +90,129 @@ optional arguments:
   --host HOST           Get all Ansible inventory variables about a specific
                         Droplet
   --all                 List all DigitalOcean information as JSON
-  --droplets            List Droplets as JSON
+  --droplets, -d        List Droplets as JSON
   --regions             List Regions as JSON
   --images              List Images as JSON
   --sizes               List Sizes as JSON
   --ssh-keys            List SSH keys as JSON
   --domains             List Domains as JSON
+  --tags                List Tags as JSON
   --pretty, -p          Pretty-print results
   --cache-path CACHE_PATH
                         Path to the cache files (default: .)
   --cache-max_age CACHE_MAX_AGE
                         Maximum age of the cached items (default: 0)
   --force-cache         Only use data from the cache
-  --refresh-cache       Force refresh of cache by making API requests to
+  --refresh-cache, -r   Force refresh of cache by making API requests to
                         DigitalOcean (default: False - use cache files)
+  --env, -e             Display DO_API_TOKEN
   --api-token API_TOKEN, -a API_TOKEN
                         DigitalOcean API Token
 ```
 
-'''
+"""
 
 # (c) 2013, Evan Wies <evan@neomantra.net>
+# (c) 2017, Ansible Project
+# (c) 2017, Abhijeet Kasurde <akasurde@redhat.com>
 #
 # Inspired by the EC2 inventory plugin:
 # https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.py
 #
-# This file is part of Ansible,
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
 
 ######################################################################
 
+import argparse
+import ast
 import os
-import sys
 import re
-import argparse
+import requests
+import sys
 from time import time
-import ConfigParser
-import ast
 
 try:
-    import json
+    import ConfigParser
 except ImportError:
-    import simplejson as json
+    import configparser as ConfigParser
 
-try:
-    from dopy.manager import DoManager
-except ImportError as e:
-    sys.exit("failed=True msg='`dopy` library required for this script'")
+import json
+
+
+class DoManager:
+    def __init__(self, api_token):
+        self.api_token = api_token
+        self.api_endpoint = 'https://api.digitalocean.com/v2'
+        self.headers = {'Authorization': 'Bearer {0}'.format(self.api_token),
+                        'Content-type': 'application/json'}
+        self.timeout = 60
+
+    def _url_builder(self, path):
+        if path[0] == '/':
+            path = path[1:]
+        return '%s/%s' % (self.api_endpoint, path)
+
+    def send(self, url, method='GET', data=None):
+        url = self._url_builder(url)
+        data = json.dumps(data)
+        try:
+            if method == 'GET':
+                resp_data = {}
+                incomplete = True
+                while incomplete:
+                    resp = requests.get(url, data=data, headers=self.headers, timeout=self.timeout)
+                    json_resp = resp.json()
+
+                    for key, value in json_resp.items():
+                        if isinstance(value, list) and key in resp_data:
+                            resp_data[key] += value
+                        else:
+                            resp_data[key] = value
+
+                    try:
+                        url = json_resp['links']['pages']['next']
+                    except KeyError:
+                        incomplete = False
+
+        except ValueError as e:
+            sys.exit("Unable to parse result from %s: %s" % (url, e))
+        return resp_data
+
+    def all_active_droplets(self):
+        resp = self.send('droplets/')
+        return resp['droplets']
+
+    def all_regions(self):
+        resp = self.send('regions/')
+        return resp['regions']
+
+    def all_images(self, filter_name='global'):
+        params = {'filter': filter_name}
+        resp = self.send('images/', data=params)
+        return resp['images']
+
+    def sizes(self):
+        resp = self.send('sizes/')
+        return resp['sizes']
+
+    def all_ssh_keys(self):
+        resp = self.send('account/keys')
+        return resp['ssh_keys']
+
+    def all_domains(self):
+        resp = self.send('domains/')
+        return resp['domains']
+
+    def show_droplet(self, droplet_id):
+        resp = self.send('droplets/%s' % droplet_id)
+        return resp['droplet']
+
+    def all_tags(self):
+        resp = self.send('tags')
+        return resp['tags']
 
 
 class DigitalOceanInventory(object):
@@ -159,7 +222,7 @@ class DigitalOceanInventory(object):
     ###########################################################################
 
     def __init__(self):
-        ''' Main execution path '''
+        """Main execution path """
 
        # DigitalOceanInventory data
        self.data = {}  # All DigitalOcean data
@@ -178,9 +241,9 @@ class DigitalOceanInventory(object):
 
         # Verify credentials were set
         if not hasattr(self, 'api_token'):
-            sys.stderr.write('''Could not find values for DigitalOcean api_token.
-They must be specified via either ini file, command line argument (--api-token),
-or environment variables (DO_API_TOKEN)\n''')
+            msg = 'Could not find values for DigitalOcean api_token. They must be specified via either ini file, ' \
+                  'command line argument (--api-token), or environment variables (DO_API_TOKEN)\n'
+            sys.stderr.write(msg)
             sys.exit(-1)
 
         # env command, show DigitalOcean credentials
@@ -196,10 +259,10 @@ or environment variables (DO_API_TOKEN)\n''')
             self.load_from_cache()
             if len(self.data) == 0:
                 if self.args.force_cache:
-                    sys.stderr.write('''Cache is empty and --force-cache was specified\n''')
+                    sys.stderr.write('Cache is empty and --force-cache was specified\n')
                     sys.exit(-1)
 
-        self.manager = DoManager(None, self.api_token, api_version=2)
+        self.manager = DoManager(self.api_token)
 
         # Pick the json_data to print based on the CLI command
         if self.args.droplets:
@@ -220,6 +283,9 @@ or environment variables (DO_API_TOKEN)\n''')
         elif self.args.domains:
             self.load_from_digital_ocean('domains')
             json_data = {'domains': self.data['domains']}
+        elif self.args.tags:
+            self.load_from_digital_ocean('tags')
+            json_data = {'tags': self.data['tags']}
         elif self.args.all:
             self.load_from_digital_ocean()
             json_data = self.data
@@ -234,19 +300,19 @@ or environment variables (DO_API_TOKEN)\n''')
             self.write_to_cache()
 
         if self.args.pretty:
-            print(json.dumps(json_data, sort_keys=True, indent=2))
+            print(json.dumps(json_data, indent=2))
         else:
             print(json.dumps(json_data))
-        # That's all she wrote...
 
     ###########################################################################
     # Script configuration
     ###########################################################################
 
     def read_settings(self):
-        ''' Reads the settings from the digital_ocean.ini file '''
-        config = ConfigParser.SafeConfigParser()
-        config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')
+        """ Reads the settings from the digital_ocean.ini file """
+        config = ConfigParser.ConfigParser()
+        config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'digital_ocean.ini')
+        config.read(config_path)
 
         # Credentials
         if config.has_option('digital_ocean', 'api_token'):
@@ -267,7 +333,7 @@ or environment variables (DO_API_TOKEN)\n''')
             self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))
 
     def read_environment(self):
-        ''' Reads the settings from environment variables '''
+        """ Reads the settings from environment variables """
         # Setup credentials
         if os.getenv("DO_API_TOKEN"):
             self.api_token = os.getenv("DO_API_TOKEN")
@@ -275,7 +341,7 @@ or environment variables (DO_API_TOKEN)\n''')
             self.api_token = os.getenv("DO_API_KEY")
 
     def read_cli_args(self):
-        ''' Command line argument processing '''
+        """ Command line argument processing """
         parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')
 
         parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
@@ -288,6 +354,7 @@ or environment variables (DO_API_TOKEN)\n''')
         parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
         parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
         parser.add_argument('--domains', action='store_true', help='List Domains as JSON')
+        parser.add_argument('--tags', action='store_true', help='List Tags as JSON')
         parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results')
 
@@ -309,6 +376,7 @@ or environment variables (DO_API_TOKEN)\n''')
         if (not self.args.droplets and not self.args.regions and
                 not self.args.images and not self.args.sizes and
                 not self.args.ssh_keys and not self.args.domains and
+                not self.args.tags and
                 not self.args.all and not self.args.host):
             self.args.list = True
 
@@ -317,7 +385,7 @@ or environment variables (DO_API_TOKEN)\n''')
     ###########################################################################
 
     def load_from_digital_ocean(self, resource=None):
-        '''Get JSON from DigitalOcean API'''
+        """Get JSON from DigitalOcean API """
         if self.args.force_cache and os.path.isfile(self.cache_filename):
             return
         # We always get fresh droplets
@@ -333,7 +401,7 @@ or environment variables (DO_API_TOKEN)\n''')
             self.data['regions'] = self.manager.all_regions()
             self.cache_refreshed = True
         if resource == 'images' or resource is None:
-            self.data['images'] = self.manager.all_images(filter=None)
+            self.data['images'] = self.manager.all_images()
             self.cache_refreshed = True
         if resource == 'sizes' or resource is None:
             self.data['sizes'] = self.manager.sizes()
@@ -344,9 +412,27 @@ or environment variables (DO_API_TOKEN)\n''')
         if resource == 'domains' or resource is None:
             self.data['domains'] = self.manager.all_domains()
             self.cache_refreshed = True
+        if resource == 'tags' or resource is None:
+            self.data['tags'] = self.manager.all_tags()
+            self.cache_refreshed = True
+
+    def add_inventory_group(self, key):
+        """ Method to create group dict """
+        host_dict = {'hosts': [], 'vars': {}}
+        self.inventory[key] = host_dict
+        return
+
+    def add_host(self, group, host):
+        """ Helper method to reduce host duplication """
+        if group not in self.inventory:
+            self.add_inventory_group(group)
+
+        if host not in self.inventory[group]['hosts']:
+            self.inventory[group]['hosts'].append(host)
+        return
 
     def build_inventory(self):
-        '''Build Ansible inventory of droplets'''
+        """ Build Ansible inventory of droplets """
         self.inventory = {
             'all': {
                 'hosts': [],
@@ -357,52 +443,44 @@ or environment variables (DO_API_TOKEN)\n''')
 
         # add all droplets by id and name
         for droplet in self.data['droplets']:
-            # when using private_networking, the API reports the private one in "ip_address".
-            if 'private_networking' in droplet['features'] and not self.use_private_network:
-                for net in droplet['networks']['v4']:
-                    if net['type'] == 'public':
-                        dest = net['ip_address']
-                    else:
-                        continue
-            else:
-                dest = droplet['ip_address']
+            for net in droplet['networks']['v4']:
+                if net['type'] == 'public':
+                    dest = net['ip_address']
+                else:
+                    continue
 
             self.inventory['all']['hosts'].append(dest)
 
-            self.inventory[droplet['id']] = [dest]
-            self.inventory[droplet['name']] = [dest]
+            self.add_host(droplet['id'], dest)
+
+            self.add_host(droplet['name'], dest)
 
             # groups that are always present
-            for group in ('region_' + droplet['region']['slug'],
+            for group in ('digital_ocean',
+                          'region_' + droplet['region']['slug'],
                           'image_' + str(droplet['image']['id']),
                           'size_' + droplet['size']['slug'],
-                          'distro_' + self.to_safe(droplet['image']['distribution']),
+                          'distro_' + DigitalOceanInventory.to_safe(droplet['image']['distribution']),
                           'status_' + droplet['status']):
-                if group not in self.inventory:
-                    self.inventory[group] = {'hosts': [], 'vars': {}}
-                self.inventory[group]['hosts'].append(dest)
+                self.add_host(group, dest)
 
             # groups that are not always present
             for group in (droplet['image']['slug'],
                           droplet['image']['name']):
                 if group:
-                    image = 'image_' + self.to_safe(group)
-                    if image not in self.inventory:
-                        self.inventory[image] = {'hosts': [], 'vars': {}}
-                    self.inventory[image]['hosts'].append(dest)
+                    image = 'image_' + DigitalOceanInventory.to_safe(group)
+                    self.add_host(image, dest)
 
             if droplet['tags']:
                 for tag in droplet['tags']:
-                    if tag not in self.inventory:
-                        self.inventory[tag] = {'hosts': [], 'vars': {}}
-                    self.inventory[tag]['hosts'].append(dest)
+                    self.add_host(tag, dest)
 
             # hostvars
             info = self.do_namespace(droplet)
             self.inventory['_meta']['hostvars'][dest] = info
 
     def load_droplet_variables_for_host(self):
-        '''Generate a JSON response to a --host call'''
+        """ Generate a JSON response to a --host call """
         host = int(self.args.host)
         droplet = self.manager.show_droplet(host)
         info = self.do_namespace(droplet)
@@ -413,7 +491,7 @@ or environment variables (DO_API_TOKEN)\n''')
     ###########################################################################
 
     def is_cache_valid(self):
-        ''' Determines if the cache files have expired, or if it is still valid '''
+        """ Determines if the cache files have expired, or if it is still valid """
         if os.path.isfile(self.cache_filename):
             mod_time = os.path.getmtime(self.cache_filename)
             current_time = time()
@@ -422,11 +500,10 @@ or environment variables (DO_API_TOKEN)\n''')
             return False
 
     def load_from_cache(self):
-        ''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
+        """ Reads the data from the cache file and assigns it to member variables as Python Objects """
         try:
-            cache = open(self.cache_filename, 'r')
-            json_data = cache.read()
-            cache.close()
+            with open(self.cache_filename, 'r') as cache:
+                json_data = cache.read()
             data = json.loads(json_data)
         except IOError:
             data = {'data': {}, 'inventory': {}}
@@ -435,31 +512,24 @@ or environment variables (DO_API_TOKEN)\n''')
         self.inventory = data['inventory']
 
     def write_to_cache(self):
-        ''' Writes data in JSON format to a file '''
+        """ Writes data in JSON format to a file """
         data = {'data': self.data, 'inventory': self.inventory}
-        json_data = json.dumps(data, sort_keys=True, indent=2)
-
-        cache = open(self.cache_filename, 'w')
-        cache.write(json_data)
-        cache.close()
+        json_data = json.dumps(data, indent=2)
+        with open(self.cache_filename, 'w') as cache:
+            cache.write(json_data)
 
     ###########################################################################
     # Utilities
     ###########################################################################
 
-    def push(self, my_dict, key, element):
-        ''' Pushed an element onto an array that may not have been defined in the dict '''
-        if key in my_dict:
-            my_dict[key].append(element)
-        else:
-            my_dict[key] = [element]
-
-    def to_safe(self, word):
-        ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
-        return re.sub("[^A-Za-z0-9\-\.]", "_", word)
-
-    def do_namespace(self, data):
-        ''' Returns a copy of the dictionary with all the keys put in a 'do_' namespace '''
+    @staticmethod
+    def to_safe(word):
+        """ Converts 'bad' characters in a string to underscores so they can be used as Ansible groups """
+        return re.sub(r"[^A-Za-z0-9\-.]", "_", word)
+
+    @staticmethod
+    def do_namespace(data):
+        """ Returns a copy of the dictionary with all the keys put in a 'do_' namespace """
         info = {}
         for k, v in data.items():
             info['do_' + k] = v
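
The patch drops the `dopy` dependency entirely: the new `DoManager.send()` calls the
DigitalOcean v2 API with `requests` and follows `links.pages.next` until every page has
been merged into a single response. A minimal standalone sketch of that pagination
pattern, assuming `requests` is installed and a valid token is exported as
`DO_API_TOKEN` (the `get_all` helper and constant names below are illustrative, not
part of the patch):

```
import json
import os

import requests

API_ENDPOINT = 'https://api.digitalocean.com/v2'
HEADERS = {'Authorization': 'Bearer {0}'.format(os.environ['DO_API_TOKEN']),
           'Content-type': 'application/json'}


def get_all(path):
    """Follow links.pages.next and merge list fields, as DoManager.send() does."""
    url = '%s/%s' % (API_ENDPOINT, path)
    merged = {}
    while url:
        resp = requests.get(url, headers=HEADERS, timeout=60).json()
        for key, value in resp.items():
            if isinstance(value, list) and key in merged:
                merged[key] += value
            else:
                merged[key] = value
        # The API omits links.pages.next on the last page, which ends the loop.
        url = resp.get('links', {}).get('pages', {}).get('next')
    return merged


print(json.dumps([d['name'] for d in get_all('droplets/')['droplets']], indent=2))
```

The in-tree `DoManager` wraps exactly this loop, which is why the old `dopy` import
guard and its `failed=True` exit message could be removed.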
|
|