[in progress] Azure support.

This commit is contained in:
Luke Sneeringer
2014-07-28 13:22:58 -05:00
parent e19984d6db
commit 95567ff004
8 changed files with 324 additions and 29 deletions

View File

@@ -964,9 +964,9 @@ class InventorySourceOptionsSerializer(BaseSerializer):
def metadata(self):
    """Extend the serializer metadata with the available region choices
    for every supported cloud provider.

    Adds a ``<provider>_region_choices`` key under the ``source_regions``
    field metadata for each of azure, ec2, gce and rax.
    """
    metadata = super(InventorySourceOptionsSerializer, self).metadata()
    field_opts = metadata.get('source_regions', {})
    # Each provider exposes a get_<name>_region_choices classmethod on the
    # model; the loop replaces the previous one-line-per-provider calls so
    # new providers only need to be added to this tuple.
    for cp in ('azure', 'ec2', 'gce', 'rax'):
        get_regions = getattr(self.opts.model, 'get_%s_region_choices' % cp)
        field_opts['%s_region_choices' % cp] = get_regions()
    return metadata
def to_native(self, obj):

4
awx/main/constants.py Normal file
View File

@@ -0,0 +1,4 @@
# Copyright (c) 2014 AnsibleWorks, Inc.
# All Rights Reserved.

# Cloud providers supported as inventory sources (alphabetical order).
# Inventory sources with one of these values require a matching cloud
# credential; 'aws' is handled separately where needed (see Credential.save).
CLOUD_PROVIDERS = ('azure', 'ec2', 'gce', 'rax')

View File

@@ -14,6 +14,7 @@ from django.core.urlresolvers import reverse
# AWX
from awx.main import storage
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.utils import decrypt_field
from awx.main.models.base import *
@@ -34,6 +35,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique):
('rax', _('Rackspace')),
('vmware', _('VMWare')),
('gce', _('Google Compute Engine')),
('azure', _('Windows Azure')),
]
PASSWORD_FIELDS = ('password', 'ssh_key_data', 'ssh_key_unlock',
@@ -312,7 +314,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique):
# If update_fields has been specified, add our field names to it,
# if it hasn't been specified, then we're just doing a normal save.
update_fields = kwargs.get('update_fields', [])
cloud = self.kind in ('aws', 'rax', 'gce', 'vmware', 'azure')
cloud = self.kind in CLOUD_PROVIDERS + ('aws',)
if self.cloud != cloud:
self.cloud = cloud
if 'cloud' not in update_fields:

View File

@@ -30,6 +30,7 @@ from django.utils.timezone import now, make_aware, get_default_timezone
from django.core.cache import cache
# AWX
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.fields import AutoOneToOneField
from awx.main.models.base import *
from awx.main.models.jobs import Job
@@ -733,10 +734,11 @@ class InventorySourceOptions(BaseModel):
'''
SOURCE_CHOICES = [
('file', _('Local File, Directory or Script')),
('rax', _('Rackspace Cloud Servers')),
('ec2', _('Amazon EC2')),
('gce', _('Google Compute Engine')),
('file', _('Local File, Directory or Script')),
('rax', _('Rackspace Cloud Servers')),
('ec2', _('Amazon EC2')),
('gce', _('Google Compute Engine')),
('azure', _('Windows Azure')),
]
class Meta:
@@ -822,6 +824,19 @@ class InventorySourceOptions(BaseModel):
regions.insert(0, ('all', 'All'))
return regions
@classmethod
def get_azure_region_choices(cls):
    """Return a complete list of regions in Windows Azure, as a list of
    two-tuples, with an 'all' pseudo-region prepended.
    """
    # It's not possible to get a list of regions from Azure without
    # authenticating first (someone reading these might think there's
    # a pattern here!). Therefore, you guessed it, use a list from
    # settings.
    #
    # Note: first parameter renamed to `cls` -- this is a classmethod,
    # so `self` was misleading; behavior is unchanged.
    regions = list(getattr(settings, 'AZURE_REGION_CHOICES', []))
    regions.insert(0, ('all', 'All'))
    return regions
def clean_credential(self):
if not self.source:
return None
@@ -835,21 +850,20 @@ class InventorySourceOptions(BaseModel):
'Cloud-based inventory sources (such as %s) require '
'credentials for the matching cloud service.' % self.source
)
elif self.source in ('ec2', 'rax', 'gce'):
elif self.source in CLOUD_PROVIDERS:
raise ValidationError('Credential is required for a cloud source')
return cred
def clean_source_regions(self):
regions = self.source_regions
if self.source == 'ec2':
valid_regions = [x[0] for x in self.get_ec2_region_choices()]
region_transform = lambda x: x.strip().lower()
elif self.source == 'rax':
valid_regions = [x[0] for x in self.get_rax_region_choices()]
region_transform = lambda x: x.strip().upper()
elif self.source == 'gce':
valid_regions = [x[0] for x in self.get_gce_region_choices()]
region_transform = lambda x: x.strip().lower()
if self.source in CLOUD_PROVIDERS:
get_regions = getattr(self, 'get_%s_region_choices' % self.source)
valid_regions = [x[0] for x in get_regions()]
if self.source == 'rax':
region_transform = lambda x: x.strip().upper()
else:
region_transform = lambda x: x.strip().lower()
else:
return ''
all_region = region_transform('all')

View File

@@ -880,6 +880,9 @@ class RunInventoryUpdate(BaseTask):
env['VMWARE_HOST'] = passwords.get('source_host', '')
env['VMWARE_USER'] = passwords.get('source_username', '')
env['VMWARE_PASSWORD'] = passwords.get('source_password', '')
elif inventory_update.source == 'azure':
env['AZURE_SUBSCRIPTION_ID'] = passwords.get('source_username', '')
env['AZURE_CERT_PATH'] =
elif inventory_update.source == 'gce':
env['GCE_EMAIL'] = passwords.get('source_username', '')
env['GCE_PROJECT'] = passwords.get('source_project', '')

View File

@@ -0,0 +1,238 @@
#!/usr/bin/env python
'''
Windows Azure external inventory script
=======================================
Generates inventory that Ansible can understand by making API request to
Windows Azure using the azure python library.
NOTE: This script assumes Ansible is being executed where azure is already
installed.
pip install azure
Adapted from the ansible Linode plugin by Dan Slimmon.
'''
# (c) 2013, John Whitbeck
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
# Standard imports
import re
import sys
import argparse
import os
from urlparse import urlparse
from time import time
try:
import json
except ImportError:
import simplejson as json
try:
import azure
from azure import WindowsAzureError
from azure.servicemanagement import ServiceManagementService
except ImportError as e:
print "failed=True msg='`azure` library required for this script'"
sys.exit(1)
# Imports for ansible
import ConfigParser
class AzureInventory(object):
    """Builds an Ansible dynamic inventory from the cloud services and
    deployments in a Windows Azure subscription, with optional file-based
    caching, and prints the result as JSON on stdout.
    """

    def __init__(self):
        """Main execution path."""
        # Inventory grouped by display group
        self.inventory = {}
        # Index of deployment name -> host
        self.index = {}

        # Read settings and parse CLI arguments
        self.read_settings()
        self.read_environment()
        self.parse_cli_args()

        # Cache setting defaults.
        # NOTE(review): these assignments run *after* read_settings(), so
        # they clobber any cache_path / cache_max_age values read from the
        # ini file. They probably belong before read_settings() -- confirm.
        self.cache_path_cache = '/tmp/ansible-azure.cache'
        self.cache_path_index = '/tmp/ansible-azure.index'
        self.cache_max_age = 0

        # Initialize Azure ServiceManagementService
        # NOTE(review): if neither the ini file nor the environment supplied
        # subscription_id / cert_path, this raises AttributeError -- confirm
        # whether a friendlier error is wanted.
        self.sms = ServiceManagementService(self.subscription_id, self.cert_path)

        # Cache
        if self.args.refresh_cache:
            self.do_api_calls_update_cache()
        elif not self.is_cache_valid():
            self.do_api_calls_update_cache()

        if self.args.list_images:
            data_to_print = self.json_format_dict(self.get_images(), True)
        elif self.args.list:
            # Display list of nodes for inventory
            if len(self.inventory) == 0:
                data_to_print = self.get_inventory_from_cache()
            else:
                data_to_print = self.json_format_dict(self.inventory, True)
        print data_to_print

    def get_images(self):
        """Return the OS images whose label contains the --list-images
        substring (case-insensitive), as plain JSON-serializable dicts.
        """
        images = []
        for image in self.sms.list_os_images():
            if str(image.label).lower().find(self.args.list_images.lower()) >= 0:
                images.append(vars(image))
        # Round-trip through JSON so nested SDK objects become plain dicts.
        return json.loads(json.dumps(images, default=lambda o: o.__dict__))

    def is_cache_valid(self):
        """Determines if the cache file has expired, or if it is still valid."""
        if os.path.isfile(self.cache_path_cache):
            mod_time = os.path.getmtime(self.cache_path_cache)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                # The index cache must exist alongside the inventory cache.
                if os.path.isfile(self.cache_path_index):
                    return True
        return False

    def read_settings(self):
        """Reads the settings from the windows_azure.ini file located next
        to this script.
        """
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/windows_azure.ini')

        # Credentials related
        if config.has_option('azure', 'subscription_id'):
            self.subscription_id = config.get('azure', 'subscription_id')
        if config.has_option('azure', 'cert_path'):
            self.cert_path = config.get('azure', 'cert_path')

        # Cache related
        if config.has_option('azure', 'cache_path'):
            cache_path = config.get('azure', 'cache_path')
            self.cache_path_cache = cache_path + "/ansible-azure.cache"
            self.cache_path_index = cache_path + "/ansible-azure.index"
        if config.has_option('azure', 'cache_max_age'):
            self.cache_max_age = config.getint('azure', 'cache_max_age')

    def read_environment(self):
        '''Reads the settings from environment variables; environment
        values override the ini file.
        '''
        # Credentials
        if os.getenv("AZURE_SUBSCRIPTION_ID"):
            self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")
        if os.getenv("AZURE_CERT_PATH"):
            self.cert_path = os.getenv("AZURE_CERT_PATH")

    def parse_cli_args(self):
        """Command line argument processing"""
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Azure')
        parser.add_argument('--list', action='store_true', default=True,
                            help='List nodes (default: True)')
        parser.add_argument('--list-images', action='store',
                            help='Get all available images.')
        parser.add_argument('--refresh-cache', action='store_true', default=False,
                            help='Force refresh of cache by making API requests to Azure (default: False - use cache files)')
        self.args = parser.parse_args()

    def do_api_calls_update_cache(self):
        """Do API calls, and save data in cache files."""
        self.add_cloud_services()
        self.write_to_cache(self.inventory, self.cache_path_cache)
        self.write_to_cache(self.index, self.cache_path_index)

    def add_cloud_services(self):
        """Makes an Azure API call to get the list of cloud services."""
        try:
            for cloud_service in self.sms.list_hosted_services():
                self.add_deployments(cloud_service)
        except WindowsAzureError as e:
            print "Looks like Azure's API is down:"
            print
            print e
            sys.exit(1)

    def add_deployments(self, cloud_service):
        """Makes an Azure API call to get the list of virtual machines
        associated with a cloud service.
        """
        try:
            for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name,embed_detail=True).deployments.deployments:
                # Only inventory the active (production) deployment slot.
                if deployment.deployment_slot == "Production":
                    self.add_deployment(cloud_service, deployment)
        except WindowsAzureError as e:
            print "Looks like Azure's API is down:"
            print
            print e
            sys.exit(1)

    def add_deployment(self, cloud_service, deployment):
        """Adds a deployment to the inventory and index."""
        # The deployment URL's hostname is the address Ansible connects to.
        dest = urlparse(deployment.url).hostname

        # Add to index
        self.index[dest] = deployment.name

        # List of all azure deployments
        self.push(self.inventory, "azure", dest)

        # Inventory: Group by service name
        self.push(self.inventory, self.to_safe(cloud_service.service_name), dest)

        # Inventory: Group by region
        self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), dest)

    def push(self, my_dict, key, element):
        """Pushed an element onto an array that may not have been defined
        in the dict.
        """
        if key in my_dict:
            my_dict[key].append(element);
        else:
            my_dict[key] = [element]

    def get_inventory_from_cache(self):
        """Reads the inventory from the cache file and returns it as a JSON object."""
        # NOTE(review): file handle is never closed -- harmless for a
        # short-lived script, but worth tightening up.
        cache = open(self.cache_path_cache, 'r')
        json_inventory = cache.read()
        return json_inventory

    def load_index_from_cache(self):
        """Reads the index from the cache file and sets self.index."""
        cache = open(self.cache_path_index, 'r')
        json_index = cache.read()
        self.index = json.loads(json_index)

    def write_to_cache(self, data, filename):
        """Writes data in JSON format to a file."""
        json_data = self.json_format_dict(data, True)
        cache = open(filename, 'w')
        cache.write(json_data)
        cache.close()

    def to_safe(self, word):
        """Escapes any characters that would be invalid in an ansible group name."""
        return re.sub("[^A-Za-z0-9\-]", "_", word)

    def json_format_dict(self, data, pretty=False):
        """Converts a dict to a JSON object and dumps it as a formatted string."""
        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)


# Run the inventory build (and print the result) on execution.
AzureInventory()

View File

@@ -437,6 +437,42 @@ GCE_EXCLUDE_EMPTY_GROUPS = True
GCE_INSTANCE_ID_VAR = None
# -------------------
# -- Windows Azure --
# -------------------
# It's not possible to get zones in Azure without authenticating, so we
# provide a list here.
# Region choices for Windows Azure inventory sources, as (value, label)
# two-tuples. It's not possible to get the region list from Azure without
# authenticating, so the list is maintained here.
# NOTE(review): InventorySourceOptions.get_azure_region_choices reads
# settings.AZURE_REGION_CHOICES, not WA_REGION_CHOICES -- one of the two
# names must change for this list to be picked up; confirm which.
WA_REGION_CHOICES = [
    ('Central_US', 'US Central'),
    ('East_US_1', 'US East'),
    ('East_US_2', 'US East 2'),
    ('North_Central_US', 'US North Central'),
    ('South_Central_US', 'US South Central'),
    ('West_US', 'US West'),
    ('North_Europe', 'Europe North'),
    ('West_Europe', 'Europe West'),
    ('East_Asia_Pacific', 'Asia Pacific East'),
    # Fixed typo: was 'Southest_Asia_Pacific'.
    ('Southeast_Asia_Pacific', 'Asia Pacific Southeast'),
    ('East_Japan', 'Japan East'),
    ('West_Japan', 'Japan West'),
    ('South_Brazil', 'Brazil South'),
]
WA_REGIONS_BLACKLIST = []
# Inventory variable name/value for determining whether a host is active
# in Windows Azure.
WA_ENABLED_VAR = 'status'
WA_ENABLED_VALUE = 'running'
# Filter for allowed group and host names when importing inventory from
# Windows Azure.
WA_GROUP_FILTER = r'^.+$'
WA_HOST_FILTER = r'^.+$'
WA_EXCLUDE_EMPTY_GROUPS = True
WA_INSTANCE_ID_VAR = None
# ---------------------
# -- Activity Stream --
# ---------------------

View File

@@ -97,14 +97,10 @@ angular.module('CredentialFormDefinition', [])
'<dd>Used to check out and synchronize playbook repositories with a remote source control ' +
'management system such as Git, Subversion (svn), or Mercurial (hg). These credentials are ' +
'used on the Projects tab.</dd>\n' +
'<dt>Amazon Web Services</dt>\n' +
'<dd>Access keys for Amazon Web Services used for inventory management or deployment.</dd>\n' +
'<dt>Rackspace</dt>\n' +
'<dd>Access information for Rackspace Cloud used for inventory management or deployment.</dd>\n' +
'<dt>Google Compute Engine</dt>\n' +
'<dd>Credentials for Google Compute Engine, used for inventory management or deployment.</dd>\n' +
'<dt>VMWare</dt>\n' +
'<dd>Access information for VMWare vSphere used for inventory management or deployment.</dd>\n' +
'<dt>Others (Cloud Providers)</dt>\n' +
'<dd>Access keys for authenticating to the specific ' +
'cloud provider, usually used for inventory sync ' +
'and deployment.</dd>\n' +
'</dl>\n'
}]
},
@@ -141,7 +137,8 @@ angular.module('CredentialFormDefinition', [])
"username": {
labelBind: 'usernameLabel',
type: 'text',
ngShow: "kind.value && kind.value !== 'aws' && kind.value!=='gce'",
ngShow: "kind.value && kind.value !== 'aws' && " +
"kind.value !== 'gce'",
awRequiredWhen: {
variable: 'aws_required',
init: false
@@ -218,7 +215,8 @@ angular.module('CredentialFormDefinition', [])
"ssh_key_data": {
labelBind: 'sshKeyDataLabel',
type: 'textarea',
ngShow: "kind.value == 'ssh' || kind.value == 'scm' || kind.value == 'gce'",
ngShow: "kind.value == 'ssh' || kind.value == 'scm' || " +
"kind.value == 'gce' || kind.value == 'azure'",
awRequiredWhen: {
variable: 'key_required',
init: true
@@ -237,7 +235,7 @@ angular.module('CredentialFormDefinition', [])
ngChange: "clearPWConfirm('ssh_key_unlock_confirm')",
associated: 'ssh_key_unlock_confirm',
ask: true,
askShow: "kind.value == 'ssh'", //Only allow ask for machine credentials
askShow: "kind.value == 'ssh'", // Only allow ask for machine credentials
clear: true
},
"ssh_key_unlock_confirm": {