AC-505 Work in progress on cloud inventory sync.

This commit is contained in:
Chris Church 2013-10-07 23:21:31 -04:00
parent a968ee38f6
commit 1ffb40458c
18 changed files with 1494 additions and 296 deletions

View File

@ -446,12 +446,17 @@ class InventorySourceAccess(BaseAccess):
return obj and self.user.can_access(Group, 'read', obj.group)
def can_add(self, data):
# Automatically created from group.
# Automatically created from group or management command.
return False
def can_change(self, obj, data):
# Checks for admin or change permission on group.
return obj and self.user.can_access(Group, 'change', obj.group, None)
if obj and obj.group:
return self.user.can_access(Group, 'change', obj.group, None)
# Can't change inventory sources attached to only the inventory, since
# these are created automatically from the management command.
else:
return False
class InventoryUpdateAccess(BaseAccess):
'''

View File

@ -24,7 +24,7 @@ from django.contrib.auth.models import User
from awx.main.models import *
from awx.main.licenses import LicenseReader
LOGGER = None
logger = logging.getLogger('awx.main.commands.inventory_import')
class ImportException(BaseException):
@ -53,11 +53,11 @@ class MemGroup(object):
group_vars = os.path.join(inventory_base, 'group_vars', name)
if os.path.exists(group_vars):
LOGGER.debug("loading group_vars")
logger.debug("loading group_vars")
self.variables = yaml.load(open(group_vars).read())
def child_group_by_name(self, grp_name, loader):
LOGGER.debug("looking for child group: %s" % grp_name)
logger.debug("looking for child group: %s" % grp_name)
if grp_name == 'all':
return
# slight hack here, passing in 'self' for all_group but child=True won't use it
@ -66,14 +66,14 @@ class MemGroup(object):
for x in self.child_groups:
if x.name == grp_name:
return x
LOGGER.debug("adding child group %s to group %s" % (grp.name, self.name))
logger.debug("adding child group %s to group %s" % (grp.name, self.name))
self.child_groups.append(grp)
return grp
def add_child_group(self, grp):
assert grp.name is not 'all'
LOGGER.debug("adding child group %s to group %s" % (grp.name, self.name))
logger.debug("adding child group %s to group %s" % (grp.name, self.name))
assert type(grp) == MemGroup
if grp not in self.child_groups:
@ -82,33 +82,29 @@ class MemGroup(object):
grp.parents.append(self)
def add_host(self, host):
LOGGER.debug("adding host %s to group %s" % (host.name, self.name))
logger.debug("adding host %s to group %s" % (host.name, self.name))
assert type(host) == MemHost
if host not in self.hosts:
self.hosts.append(host)
def set_variables(self, values):
LOGGER.debug("setting variables %s on group %s" % (values, self.name))
self.variables = values
def debug_tree(self):
LOGGER.debug("describing tree of group (%s)" % self.name)
logger.debug("describing tree of group (%s)" % self.name)
LOGGER.debug("group: %s, %s" % (self.name, self.variables))
logger.debug("group: %s, %s" % (self.name, self.variables))
for x in self.child_groups:
LOGGER.debug(" child: %s" % (x.name))
logger.debug(" child: %s" % (x.name))
for x in self.hosts:
LOGGER.debug(" host: %s, %s" % (x.name, x.variables))
logger.debug(" host: %s, %s" % (x.name, x.variables))
LOGGER.debug("---")
logger.debug("---")
for x in self.child_groups:
x.debug_tree()
class MemHost(object):
def __init__(self, name, inventory_base):
LOGGER.debug("adding host name: %s" % name)
logger.debug("adding host name: %s" % name)
assert name is not None
assert inventory_base is not None
@ -117,23 +113,19 @@ class MemHost(object):
self.variables = {}
self.inventory_base = inventory_base
if name.find(":") != -1:
if ':' in name:
tokens = name.split(":")
self.name = tokens[0]
self.variables['ansible_ssh_port'] = tokens[1]
self.variables['ansible_ssh_port'] = int(tokens[1])
if "[" in name:
raise ImportException("block ranges like host[0:50].example.com are not yet supported by the importer")
host_vars = os.path.join(inventory_base, 'host_vars', name)
if os.path.exists(host_vars):
LOGGER.debug("loading host_vars")
self.variables = yaml.load(open(host_vars).read())
logger.debug("loading host_vars")
self.variables.update(yaml.load(open(host_vars).read()))
def set_variables(self, values):
LOGGER.debug("setting variables %s on host %s" % (values, self.name))
self.variables = values
class BaseLoader(object):
def __init__(self, inventory_base=None, all_group=None):
@ -141,14 +133,12 @@ class BaseLoader(object):
self.all_group = all_group
def get_host(self, name):
if ":" in name:
tokens = name.split(":")
name = tokens[0]
host_name = name.split(':')[0]
host = None
if not name in self.all_group.host_names:
if not host_name in self.all_group.host_names:
host = MemHost(name, self.inventory_base)
self.all_group.host_names[name] = host
return self.all_group.host_names[name]
self.all_group.host_names[host_name] = host
return self.all_group.host_names[host_name]
def get_group(self, name, all_group=None, child=False):
all_group = all_group or self.all_group
@ -168,10 +158,10 @@ class IniLoader(BaseLoader):
def __init__(self, inventory_base=None, all_group=None):
super(IniLoader, self).__init__(inventory_base, all_group)
LOGGER.debug("processing ini")
logger.debug("processing ini")
def load(self, src):
LOGGER.debug("loading: %s on %s" % (src, self.all_group))
logger.debug("loading: %s on %s" % (src, self.all_group))
if self.inventory_base is None:
self.inventory_base = os.path.dirname(src)
@ -182,11 +172,10 @@ class IniLoader(BaseLoader):
input_mode = 'host'
for line in lines:
if line.find("#"):
tokens = line.split("#")
line = tokens[0]
if line.startswith("["):
line = line.split('#')[0].strip()
if not line:
continue
elif line.startswith("["):
# mode change, possible new group name
line = line.replace("[","").replace("]","").lstrip().rstrip()
if line.find(":vars") != -1:
@ -202,9 +191,6 @@ class IniLoader(BaseLoader):
group = self.get_group(line)
else:
# add a host or variable to the existing group/host
line = line.lstrip().rstrip()
if line == "":
continue
tokens = shlex.split(line)
if input_mode == 'host':
@ -254,7 +240,7 @@ class ExecutableJsonLoader(BaseLoader):
def __init__(self, inventory_base=None, all_group=None):
super(ExecutableJsonLoader, self).__init__(inventory_base, all_group)
LOGGER.debug("processing executable JSON source")
logger.debug("processing executable JSON source")
self.child_group_names = {}
def command_to_json(self, cmd):
@ -274,7 +260,7 @@ class ExecutableJsonLoader(BaseLoader):
def load(self, src):
LOGGER.debug("loading %s onto %s" % (src, self.all_group))
logger.debug("loading %s onto %s" % (src, self.all_group))
if self.inventory_base is None:
self.inventory_base = os.path.dirname(src)
@ -336,9 +322,9 @@ class ExecutableJsonLoader(BaseLoader):
def load_generic(src):
LOGGER.debug("analyzing type of source")
logger.debug("analyzing type of source")
if not os.path.exists(src):
LOGGER.debug("source missing")
logger.debug("source missing")
raise CommandError("source does not exist")
if os.path.isdir(src):
all_group = MemGroup('all', src)
@ -358,216 +344,251 @@ def load_generic(src):
all_group = MemGroup('all', os.path.dirname(src))
IniLoader(None, all_group).load(src)
LOGGER.debug("loading process complete")
logger.debug("loading process complete")
return all_group
class Command(NoArgsCommand):
'''
Management command to import directory, INI, or dynamic inventory
Management command to import inventory from a directory, ini file, or
dynamic inventory script.
'''
help = 'Import or sync external inventory sources'
option_list = NoArgsCommand.option_list + (
make_option('--inventory-name', dest='inventory_name', type='str', default=None, metavar='n',
help='name of inventory source to sync'),
make_option('--inventory-id', dest='inventory_id', type='int', default=None, metavar='i',
help='inventory id to sync'),
make_option('--overwrite', dest='overwrite', action='store_true', metavar="o",
default=False, help='overwrite the destination'),
make_option('--overwrite-vars', dest='overwrite_vars', action='store_true', metavar="V",
default=False, help='overwrite (rather than merge) variables'),
make_option('--keep-vars', dest='keep_vars', action='store_true', metavar="k",
default=False, help='use database variables if set'),
make_option('--source', dest='source', type='str', default=None, metavar='s',
help='inventory directory, file, or script to load'),
make_option('--inventory-name', dest='inventory_name', type='str',
default=None, metavar='n',
help='name of inventory to sync'),
make_option('--inventory-id', dest='inventory_id', type='int',
default=None, metavar='i', help='id of inventory to sync'),
make_option('--overwrite', dest='overwrite', action='store_true',
metavar="o", default=False,
help='overwrite the destination hosts and groups'),
make_option('--overwrite-vars', dest='overwrite_vars',
action='store_true', metavar="V", default=False,
help='overwrite (rather than merge) variables'),
make_option('--keep-vars', dest='keep_vars', action='store_true',
metavar="k", default=False,
help='use database variables if set'),
make_option('--source', dest='source', type='str', default=None,
metavar='s', help='inventory directory, file, or script '
'to load'),
)
def init_logging(self):
log_levels = dict(enumerate([logging.ERROR, logging.INFO,
logging.DEBUG, 0]))
global LOGGER
LOGGER = self.logger = logging.getLogger('awx.main.commands.inventory_import')
self.logger = logging.getLogger('awx.main.commands.inventory_import')
self.logger.setLevel(log_levels.get(self.verbosity, 0))
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(message)s'))
self.logger.addHandler(handler)
self.logger.propagate = False
@transaction.commit_on_success
def handle_noargs(self, **options):
self.verbosity = int(options.get('verbosity', 1))
self.init_logging()
name = options.get('inventory_name', None)
id = options.get('inventory_id', None)
overwrite = options.get('overwrite', False)
overwrite_vars = options.get('overwrite_vars', False)
keep_vars = options.get('keep_vars', False)
source = options.get('source', None)
LOGGER.debug("name=%s" % name)
LOGGER.debug("id=%s" % id)
if name is not None and id is not None:
raise CommandError("--inventory-name and --inventory-id are mutually exclusive")
if name is None and id is None:
raise CommandError("--inventory-name or --inventory-id is required")
if (overwrite or overwrite_vars) and keep_vars:
raise CommandError("--overwrite/--overwrite-vars and --keep-vars are mutually exclusive")
if not source:
raise CommandError("--source is required")
LOGGER.debug("preparing loader")
all_group = load_generic(source)
LOGGER.debug("debugging loaded result")
all_group.debug_tree()
# now that memGroup is correct and supports JSON executables, INI, and trees
# now merge and/or overwrite with the database itself!
if id:
inventory = Inventory.objects.filter(pk=id)
def load_inventory_from_database(self):
'''
Load inventory and related objects from the database.
'''
# Load inventory object based on name or ID.
if self.inventory_id:
q = dict(id=self.inventory_id)
else:
inventory = Inventory.objects.filter(name=name)
count = inventory.count()
if count != 1:
raise CommandError("%d inventory objects matched, expected 1" % count)
inventory = inventory.all()[0]
q = dict(name=self.inventory_name)
try:
self.inventory = Inventory.objects.get(**q)
except Inventory.DoesNotExist:
raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
except Inventory.MultipleObjectsReturned:
raise CommandError('Inventory with %s = %s returned multiple results' % q.items()[0])
self.logger.info('Updating inventory %d: %s' % (self.inventory.pk,
self.inventory.name))
LOGGER.info("MODIFYING INVENTORY: %s" % inventory.name)
# Load inventory source if specified via environment variable (when
# inventory_import is called from an InventoryUpdate task).
inventory_source_id = os.getenv('INVENTORY_SOURCE_ID', None)
if inventory_source_id:
try:
self.inventory_source = InventorySource.objects.get(pk=inventory_source_id,
inventory=self.inventory)
except InventorySource.DoesNotExist:
raise CommandError('Inventory source with id=%s not found' % \
inventory_source_id)
self.inventory_update = None
# Otherwise, create a new inventory source to capture this invocation
# via command line.
else:
self.inventory_source, created = InventorySource.objects.get_or_create(
inventory=self.inventory,
group=None,
source='file',
source_path=os.path.abspath(self.source),
overwrite=self.overwrite,
overwrite_vars=self.overwrite_vars,
)
self.inventory_update = self.inventory_source.inventory_updates.create(
job_args=json.dumps(sys.argv),
job_env=dict(os.environ.items()),
job_cwd=os.getcwd(),
)
# if overwrite is set, for each host in the database but NOT in the local
# list, delete it. Delete individually so signal handlers will run.
if overwrite:
LOGGER.info("deleting any hosts not in the remote source: %s" % all_group.host_names.keys())
for host in Host.objects.exclude(name__in = all_group.host_names.keys()).filter(inventory=inventory):
# FIXME: Wait or raise error if inventory is being updated by another
# source.
def load_into_database(self):
'''
Load inventory from in-memory groups to the database, overwriting or
merging as appropriate.
'''
# If overwrite is set, for each host in the database that is NOT in
# the local list, delete it. When importing from a cloud inventory
# source attached to a specific group, only delete hosts beneath that
# group. Delete each host individually so signal handlers will run.
if self.overwrite:
self.logger.debug('deleting any hosts not in the remote source')
if self.inventory_source.group:
del_hosts = self.inventory_source.group.all_hosts
# FIXME: Also include hosts from inventory_source.managed_hosts?
else:
del_hosts = self.inventory.hosts.all()
del_hosts = del_hosts.exclude(name__in=self.all_group.host_names.keys())
for host in del_hosts:
host.delete()
# if overwrite is set, for each group in the database but NOT in the local
# list, delete it. Delete individually so signal handlers will run.
if overwrite:
LOGGER.info("deleting any groups not in the remote source")
for group in Group.objects.exclude(name__in = all_group.group_names.keys()).filter(inventory=inventory):
# If overwrite is set, for each group in the database that is NOT in
# the local list, delete it. When importing from a cloud inventory
# source attached to a specific group, only delete children of that
# group. Delete each group individually so signal handlers will run.
if self.overwrite:
self.logger.debug('deleting any groups not in the remote source')
if self.inventory_source.group:
del_groups = self.inventory_source.group.all_children
# FIXME: Also include groups from inventory_source.managed_groups?
else:
del_groups = self.inventory.groups.all()
del_groups = del_groups.exclude(name__in=self.all_group.group_names.keys())
for group in del_groups:
group.delete()
# if overwrite is set, throw away all invalid child relationships for groups
if overwrite:
LOGGER.info("clearing any child relationships to rebuild from remote source")
db_groups = Group.objects.filter(inventory=inventory)
# If overwrite is set, clear all invalid child relationships for groups
# and all invalid host memberships. When importing from a cloud
# inventory source attached to a specific group, only clear
# relationships for hosts and groups that are beneath the inventory
# source group.
if self.overwrite:
self.logger.info("clearing any child relationships to rebuild from remote source")
if self.inventory_source.group:
db_groups = self.inventory_source.group.all_children
else:
db_groups = self.inventory.groups.all()
for db_group in db_groups:
db_kids = db_group.children.all()
mem_kids = all_group.group_names[db_group.name].child_groups
mem_kid_names = [ k.name for k in mem_kids ]
removed = False
for db_kid in db_kids:
if db_kid.name not in mem_kid_names:
removed = True
LOGGER.debug("removing non-DB kid: %s" % (db_kid.name))
db_group.children.remove(db_kid)
if removed:
db_group.save()
db_kids = db_group.children.all()
mem_kids = self.all_group.group_names[db_group.name].child_groups
mem_kid_names = [ k.name for k in mem_kids ]
for db_kid in db_kids:
if db_kid.name not in mem_kid_names:
self.logger.debug("removing non-DB kid: %s" % (db_kid.name))
db_group.children.remove(db_kid)
# Update/overwrite inventory variables from "all" group.
db_variables = inventory.variables_dict
mem_variables = all_group.variables
if overwrite_vars or overwrite:
LOGGER.info('replacing inventory variables from "all" group')
db_hosts = db_group.hosts.all()
mem_hosts = self.all_group.group_names[db_group.name].hosts
mem_host_names = [ h.name for h in mem_hosts ]
for db_host in db_hosts:
if db_host.name not in mem_host_names:
self.logger.debug("removing non-DB host: %s" % (db_host.name))
db_group.hosts.remove(db_host)
# Update/overwrite variables from "all" group. If importing from a
# cloud source attached to a specific group, variables will be set on
# the base group, otherwise they will be set on the inventory.
if self.inventory_source.group:
all_obj = self.inventory_source.group
all_obj.inventory_sources.add(self.inventory_source)
else:
all_obj = self.inventory
db_variables = all_obj.variables_dict
mem_variables = self.all_group.variables
if self.overwrite_vars or self.overwrite:
self.logger.info('replacing inventory variables from "all" group')
db_variables = mem_variables
else:
LOGGER.info('updating inventory variables from "all" group')
self.logger.info('updating inventory variables from "all" group')
db_variables.update(mem_variables)
inventory.variables = json.dumps(db_variables)
inventory.save()
all_obj.variables = json.dumps(db_variables)
all_obj.save(update_fields=['variables'])
# this will be slightly inaccurate, but attribute to first superuser.
user = User.objects.filter(is_superuser=True)[0]
# FIXME: Attribute changes to superuser?
db_groups = Group.objects.filter(inventory=inventory)
db_hosts = Host.objects.filter(inventory=inventory)
db_group_names = [ g.name for g in db_groups ]
db_host_names = [ h.name for h in db_hosts ]
# for each group not in the database but in the local list, create it
for (k,v) in all_group.group_names.iteritems():
if k not in db_group_names:
variables = json.dumps(v.variables)
LOGGER.info("inserting new group %s" % k)
host = Group.objects.create(inventory=inventory, name=k, variables=variables, created_by=user,
description="imported")
host.save()
# for each host not in the database but in the local list, create it
for (k,v) in all_group.host_names.iteritems():
if k not in db_host_names:
variables = json.dumps(v.variables)
LOGGER.info("inserting new host %s" % k)
group = Host.objects.create(inventory=inventory, name=k, variables=variables, created_by=user,
description="imported")
group.save()
# if overwrite is set, clear any host membership on all hosts that should not exist
if overwrite:
LOGGER.info("purging host group memberships")
db_groups = Group.objects.filter(inventory=inventory)
for db_group in db_groups:
db_hosts = db_group.hosts.all()
mem_hosts = all_group.group_names[db_group.name].hosts
mem_host_names = [ h.name for h in mem_hosts ]
removed = False
for db_host in db_hosts:
if db_host.name not in mem_host_names:
removed = True
LOGGER.debug("removing non-DB host: %s" % (db_host.name))
db_group.hosts.remove(db_host)
if removed:
db_group.save()
# For each group in the local list, create it if it doesn't exist in
# the database. Otherwise, update/replace database variables from the
# imported data. Associate with the inventory source group if
# importing from cloud inventory source.
for k,v in self.all_group.group_names.iteritems():
variables = json.dumps(v.variables)
defaults = dict(variables=variables, description='imported')
group, created = self.inventory.groups.get_or_create(name=k,
defaults=defaults)
if created:
self.logger.info('inserting new group %s' % k)
else:
self.logger.info('updating existing group %s' % k)
db_variables = group.variables_dict
mem_variables = v.variables
if self.overwrite_vars or self.overwrite:
db_variables = mem_variables
else:
db_variables.update(mem_variables)
group.variables = json.dumps(db_variables)
group.save(update_fields=['variables'])
if self.inventory_source.group:
self.inventory_source.group.children.add(group)
group.inventory_sources.add(self.inventory_source)
# For each host in the local list, create it if it doesn't exist in
# the database. Otherwise, update/replace database variables from the
# imported data. Associate with the inventory source group if
# importing from cloud inventory source.
for k,v in self.all_group.host_names.iteritems():
variables = json.dumps(v.variables)
defaults = dict(variables=variables, description='imported')
host, created = self.inventory.hosts.get_or_create(name=k,
defaults=defaults)
if created:
self.logger.info('inserting new host %s' % k)
else:
self.logger.info('updating existing host %s' % k)
db_variables = host.variables_dict
mem_variables = v.variables
if self.overwrite_vars or self.overwrite:
db_variables = mem_variables
else:
db_variables.update(mem_variables)
host.variables = json.dumps(db_variables)
host.save(update_fields=['variables'])
if self.inventory_source.group:
self.inventory_source.group.hosts.add(host)
host.inventory_sources.add(self.inventory_source)
# for each host in a mem group, add it to the parents to which it belongs
# FIXME: confirm Django is ok with calling add twice and not making two rows
for (k,v) in all_group.group_names.iteritems():
LOGGER.info("adding parent arrangements for %s" % k)
db_group = Group.objects.get(name=k, inventory__pk=inventory.pk)
for (k,v) in self.all_group.group_names.iteritems():
self.logger.info("adding parent arrangements for %s" % k)
db_group = Group.objects.get(name=k, inventory__pk=self.inventory.pk)
mem_hosts = v.hosts
for h in mem_hosts:
db_host = Host.objects.get(name=h.name, inventory__pk=inventory.pk)
db_host = Host.objects.get(name=h.name, inventory__pk=self.inventory.pk)
db_group.hosts.add(db_host)
LOGGER.debug("*** ADDING %s to %s ***" % (db_host, db_group))
#db_group.save()
def variable_mangler(model, mem_hash, overwrite, overwrite_vars):
db_collection = model.objects.filter(inventory=inventory)
for obj in db_collection:
if obj.name in mem_hash:
mem_group = mem_hash[obj.name]
db_variables = json.loads(obj.variables)
mem_variables = mem_group.variables
if overwrite_vars or overwrite:
db_variables = mem_variables
else:
db_variables.update(mem_variables)
db_variables = json.dumps(db_variables)
obj.variables = db_variables
obj.save()
variable_mangler(Group, all_group.group_names, overwrite, overwrite_vars)
variable_mangler(Host, all_group.host_names, overwrite, overwrite_vars)
self.logger.debug("*** ADDING %s to %s ***" % (db_host, db_group))
# for each group, draw in child group arrangements
# FIXME: confirm django add behavior as above
for (k,v) in all_group.group_names.iteritems():
db_group = Group.objects.get(inventory=inventory, name=k)
for (k,v) in self.all_group.group_names.iteritems():
db_group = Group.objects.get(inventory=self.inventory, name=k)
for mem_child_group in v.child_groups:
db_child = Group.objects.get(inventory=inventory, name=mem_child_group.name)
db_child = Group.objects.get(inventory=self.inventory, name=mem_child_group.name)
db_group.children.add(db_child)
#db_group.save()
def check_license(self):
reader = LicenseReader()
license_info = reader.from_file()
available_instances = license_info.get('available_instances', 0)
@ -575,10 +596,71 @@ class Command(NoArgsCommand):
new_count = Host.objects.filter(active=True).count()
if free_instances < 0:
if license_info.get('demo', False):
raise ImportError("demo mode free license count exceeded, would bring available instances to %s, demo mode allows %s, see http://ansibleworks.com/ansibleworks-awx for licensing information" % (new_count, available_instances))
raise CommandError("demo mode free license count exceeded, would bring available instances to %s, demo mode allows %s, see http://ansibleworks.com/ansibleworks-awx for licensing information" % (new_count, available_instances))
else:
raise ImportError("number of licensed instances exceeded, would bring available instances to %s, system is licensed for %s, see http://ansibleworks.com/ansibleworks-awx for license extension information" % (new_count, available_instances))
raise CommandError("number of licensed instances exceeded, would bring available instances to %s, system is licensed for %s, see http://ansibleworks.com/ansibleworks-awx for license extension information" % (new_count, available_instances))
@transaction.commit_on_success
def handle_noargs(self, **options):
self.verbosity = int(options.get('verbosity', 1))
self.init_logging()
self.inventory_name = options.get('inventory_name', None)
self.inventory_id = options.get('inventory_id', None)
self.overwrite = bool(options.get('overwrite', False))
self.overwrite_vars = bool(options.get('overwrite_vars', False))
self.keep_vars = bool(options.get('keep_vars', False))
self.source = options.get('source', None)
# Load inventory and related objects from database.
if self.inventory_name and self.inventory_id:
raise CommandError('--inventory-name and --inventory-id are mutually exclusive')
elif not self.inventory_name and not self.inventory_id:
raise CommandError('--inventory-name or --inventory-id is required')
if (self.overwrite or self.overwrite_vars) and self.keep_vars:
raise CommandError('--overwrite/--overwrite-vars and --keep-vars are mutually exclusive')
if not self.source:
raise CommandError('--source is required')
self.load_inventory_from_database()
status, tb, exc = 'error', '', None
try:
# Update inventory update for this command line invocation.
if self.inventory_update:
self.inventory_update.status = 'running'
self.inventory_update.save()
transaction.commit()
self.logger.debug('preparing to load from %s' % self.source)
self.all_group = load_generic(self.source)
self.logger.debug('debugging loaded result:')
self.all_group.debug_tree()
# now that memGroup is correct and supports JSON executables, INI, and trees
# now merge and/or overwrite with the database itself!
self.load_into_database()
self.check_license()
LOGGER.info("inventory import complete, %s, id=%s" % (inventory.name, inventory.id))
self.logger.info("inventory import complete, %s, id=%s" % \
(self.inventory.name, self.inventory.id))
status = 'successful'
except Exception, e:
if isinstance(e, KeyboardInterrupt):
status = 'canceled'
exc = e
else:
tb = traceback.format_exc()
exc = e
if self.inventory_update:
transaction.rollback()
if self.inventory_update:
self.inventory_update = InventoryUpdate.objects.get(pk=self.inventory_update.pk)
self.inventory_update.result_traceback = tb
self.inventory_update.status = status
self.inventory_update.save(update_fields=['status', 'result_traceback'])
transaction.commit()
if exc:
raise exc

View File

@ -0,0 +1,500 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply schema changes for cloud inventory sync support.

    Adds source-tracking fields/tables (``has_inventory_sources`` flags,
    ``inventory_sources`` M2M tables, ``Host.enabled``), drops obsolete
    ``InventorySource`` columns, adds its new columns, and relaxes the
    ``InventorySource.group`` FK so a source can exist without a group.
    """
    # Adding field 'Group.has_inventory_sources'
    db.add_column(u'main_group', 'has_inventory_sources',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)

    # Adding M2M table for field inventory_sources on 'Group'
    # (south cannot auto-create M2M through tables, so build it by hand)
    m2m_table_name = db.shorten_name(u'main_group_inventory_sources')
    db.create_table(m2m_table_name, (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('group', models.ForeignKey(orm['main.group'], null=False)),
        ('inventorysource', models.ForeignKey(orm[u'main.inventorysource'], null=False))
    ))
    # Enforce one row per (group, inventory source) pair.
    db.create_unique(m2m_table_name, ['group_id', 'inventorysource_id'])

    # Adding field 'Inventory.has_inventory_sources'
    db.add_column(u'main_inventory', 'has_inventory_sources',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)

    # Adding field 'Host.enabled' (existing hosts default to enabled)
    db.add_column(u'main_host', 'enabled',
                  self.gf('django.db.models.fields.BooleanField')(default=True),
                  keep_default=False)

    # Adding field 'Host.has_inventory_sources'
    db.add_column(u'main_host', 'has_inventory_sources',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)

    # Adding M2M table for field inventory_sources on 'Host'
    m2m_table_name = db.shorten_name(u'main_host_inventory_sources')
    db.create_table(m2m_table_name, (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('host', models.ForeignKey(orm['main.host'], null=False)),
        ('inventorysource', models.ForeignKey(orm[u'main.inventorysource'], null=False))
    ))
    db.create_unique(m2m_table_name, ['host_id', 'inventorysource_id'])

    # Deleting field 'InventorySource.keep_vars'
    db.delete_column(u'main_inventorysource', 'keep_vars')

    # Deleting field 'InventorySource.overwrite_hosts'
    db.delete_column(u'main_inventorysource', 'overwrite_hosts')

    # Deleting field 'InventorySource.source_env'
    db.delete_column(u'main_inventorysource', 'source_env')

    # Adding field 'InventorySource.inventory' — nullable FK so a source may
    # be attached directly to an inventory rather than through a group.
    db.add_column(u'main_inventorysource', 'inventory',
                  self.gf('django.db.models.fields.related.ForeignKey')(default=None, related_name='inventory_sources', null=True, to=orm['main.Inventory']),
                  keep_default=False)

    # Adding field 'InventorySource.source_vars'
    db.add_column(u'main_inventorysource', 'source_vars',
                  self.gf('django.db.models.fields.TextField')(default='', blank=True),
                  keep_default=False)

    # Adding field 'InventorySource.overwrite'
    db.add_column(u'main_inventorysource', 'overwrite',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)

    # Adding field 'InventorySource.update_interval'
    db.add_column(u'main_inventorysource', 'update_interval',
                  self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
                  keep_default=False)

    # Changing field 'InventorySource.group' — now null=True (still unique),
    # since group-less, inventory-attached sources are allowed above.
    db.alter_column(u'main_inventorysource', 'group_id', self.gf('awx.main.fields.AutoOneToOneField')(null=True, to=orm['main.Group'], unique=True))
def backwards(self, orm):
    """Revert the schema changes applied by ``forwards``.

    Mirrors the forward migration in reverse: drops the
    ``has_inventory_sources`` flags and the host/group
    ``inventory_sources`` M2M tables added there, restores the old
    ``InventorySource`` columns (``keep_vars``, ``overwrite_hosts``,
    ``source_env``) that forwards deleted, removes the new columns
    (``inventory``, ``source_vars``, ``overwrite``, ``update_interval``),
    and reverts ``InventorySource.group`` to its original definition.

    NOTE(review): South migrations are auto-generated; operation order is
    significant (e.g. M2M tables must be dropped by their shortened
    names), so the statements below should not be reordered by hand.
    """
    # Deleting field 'Group.has_inventory_sources'
    db.delete_column(u'main_group', 'has_inventory_sources')
    # Removing M2M table for field inventory_sources on 'Group'
    db.delete_table(db.shorten_name(u'main_group_inventory_sources'))
    # Deleting field 'Inventory.has_inventory_sources'
    db.delete_column(u'main_inventory', 'has_inventory_sources')
    # Deleting field 'Host.enabled'
    db.delete_column(u'main_host', 'enabled')
    # Deleting field 'Host.has_inventory_sources'
    db.delete_column(u'main_host', 'has_inventory_sources')
    # Removing M2M table for field inventory_sources on 'Host'
    db.delete_table(db.shorten_name(u'main_host_inventory_sources'))
    # Adding field 'InventorySource.keep_vars'
    db.add_column(u'main_inventorysource', 'keep_vars',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)
    # Adding field 'InventorySource.overwrite_hosts'
    db.add_column(u'main_inventorysource', 'overwrite_hosts',
                  self.gf('django.db.models.fields.BooleanField')(default=False),
                  keep_default=False)
    # Adding field 'InventorySource.source_env'
    db.add_column(u'main_inventorysource', 'source_env',
                  self.gf('django.db.models.fields.TextField')(default='', blank=True),
                  keep_default=False)
    # Deleting field 'InventorySource.inventory'
    db.delete_column(u'main_inventorysource', 'inventory_id')
    # Deleting field 'InventorySource.source_vars'
    db.delete_column(u'main_inventorysource', 'source_vars')
    # Deleting field 'InventorySource.overwrite'
    db.delete_column(u'main_inventorysource', 'overwrite')
    # Deleting field 'InventorySource.update_interval'
    db.delete_column(u'main_inventorysource', 'update_interval')
    # Changing field 'InventorySource.group'
    # NOTE(review): this restores group_id to a non-null unique FK; the
    # ALTER will fail on databases where inventory-only sources left
    # rows with group_id IS NULL — confirm before migrating backwards.
    db.alter_column(u'main_inventorysource', 'group_id', self.gf('awx.main.fields.AutoOneToOneField')(default=None, unique=True, to=orm['main.Group']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.authtoken': {
'Meta': {'object_name': 'AuthToken'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'request_hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_tokens'", 'to': u"orm['auth.User']"})
},
'main.credential': {
'Meta': {'object_name': 'Credential'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'credential\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'credential\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'ssh_key_data': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'ssh_key_unlock': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'ssh_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'ssh_username': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'sudo_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'sudo_username': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credentials'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.Team']", 'blank': 'True', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credentials'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['auth.User']", 'blank': 'True', 'null': 'True'})
},
'main.group': {
'Meta': {'unique_together': "(('name', 'inventory'),)", 'object_name': 'Group'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'group\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'has_active_failures': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_inventory_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hosts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'groups'", 'blank': 'True', 'to': "orm['main.Host']"}),
'hosts_with_active_failures': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groups'", 'to': "orm['main.Inventory']"}),
'inventory_sources': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'groups'", 'blank': 'True', 'to': u"orm['main.InventorySource']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'group\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'parents': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'children'", 'blank': 'True', 'to': "orm['main.Group']"}),
'variables': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'main.host': {
'Meta': {'unique_together': "(('name', 'inventory'),)", 'object_name': 'Host'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'host\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'has_active_failures': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_inventory_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hosts'", 'to': "orm['main.Inventory']"}),
'inventory_sources': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'hosts'", 'blank': 'True', 'to': u"orm['main.InventorySource']"}),
'last_job': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hosts_as_last_job+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.Job']", 'blank': 'True', 'null': 'True'}),
'last_job_host_summary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hosts_as_last_job_summary+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['main.JobHostSummary']", 'blank': 'True', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'host\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'variables': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'main.inventory': {
'Meta': {'unique_together': "(('name', 'organization'),)", 'object_name': 'Inventory'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventory\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'has_active_failures': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_inventory_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hosts_with_active_failures': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventory\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inventories'", 'to': "orm['main.Organization']"}),
'variables': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
u'main.inventorysource': {
'Meta': {'object_name': 'InventorySource'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventorysource\', \'app_label\': u\'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'current_update': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'inventory_source_as_current_update+'", 'null': 'True', 'to': "orm['main.InventoryUpdate']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'group': ('awx.main.fields.AutoOneToOneField', [], {'related_name': "'inventory_source'", 'null': 'True', 'default': 'None', 'to': "orm['main.Group']", 'blank': 'True', 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'inventory_sources'", 'null': 'True', 'to': "orm['main.Inventory']"}),
'last_update': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'inventory_source_as_last_update+'", 'null': 'True', 'to': "orm['main.InventoryUpdate']"}),
'last_update_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventorysource\', \'app_label\': u\'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'overwrite_vars': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'source_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'source_path': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'source_regions': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'source_tags': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'source_username': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'source_vars': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '32', 'null': 'True'}),
'update_interval': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'update_on_launch': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'main.inventoryupdate': {
'Meta': {'object_name': 'InventoryUpdate'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'cancel_flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventoryupdate\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory_source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inventory_updates'", 'to': u"orm['main.InventorySource']"}),
'job_args': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'job_cwd': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'job_env': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'inventoryupdate\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'result_stdout': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'result_traceback': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20'})
},
'main.job': {
'Meta': {'object_name': 'Job'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'cancel_flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'job\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'credential': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobs'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Credential']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'extra_vars': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'forks': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'hosts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'jobs'", 'blank': 'True', 'through': u"orm['main.JobHostSummary']", 'to': "orm['main.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobs'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Inventory']"}),
'job_args': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'job_cwd': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'job_env': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'job_tags': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'job_template': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobs'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.JobTemplate']", 'blank': 'True', 'null': 'True'}),
'job_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'launch_type': ('django.db.models.fields.CharField', [], {'default': "'manual'", 'max_length': '20'}),
'limit': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'job\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'playbook': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobs'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['main.Project']"}),
'result_stdout': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'result_traceback': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20'}),
'verbosity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'})
},
'main.jobevent': {
'Meta': {'ordering': "('pk',)", 'object_name': 'JobEvent'},
'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'event_data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_events_as_primary_host'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.Host']", 'blank': 'True', 'null': 'True'}),
'hosts': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'job_events'", 'blank': 'True', 'to': "orm['main.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_events'", 'to': "orm['main.Job']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.JobEvent']", 'blank': 'True', 'null': 'True'}),
'play': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'task': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'})
},
u'main.jobhostsummary': {
'Meta': {'ordering': "('-pk',)", 'unique_together': "[('job', 'host')]", 'object_name': 'JobHostSummary'},
'changed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'dark': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'failures': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_host_summaries'", 'to': "orm['main.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_host_summaries'", 'to': "orm['main.Job']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'ok': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'skipped': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'main.jobtemplate': {
'Meta': {'object_name': 'JobTemplate'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'jobtemplate\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'credential': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_templates'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['main.Credential']", 'blank': 'True', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'extra_vars': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'forks': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'host_config_key': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_templates'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Inventory']"}),
'job_tags': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'job_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'limit': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'jobtemplate\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
'playbook': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'job_templates'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['main.Project']"}),
'verbosity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'})
},
'main.organization': {
'Meta': {'object_name': 'Organization'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'admin_of_organizations'", 'blank': 'True', 'to': u"orm['auth.User']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'organization\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'organization\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'organizations'", 'blank': 'True', 'to': u"orm['main.Project']"}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'organizations'", 'blank': 'True', 'to': u"orm['auth.User']"})
},
'main.permission': {
'Meta': {'object_name': 'Permission'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'permission\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inventory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'permissions'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Inventory']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'permission\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'permission_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'permissions'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['main.Project']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'permissions'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'permissions'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"})
},
u'main.profile': {
'Meta': {'object_name': 'Profile'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ldap_dn': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('awx.main.fields.AutoOneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
u'main.project': {
'Meta': {'object_name': 'Project'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'project\', \'app_label\': u\'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'current_update': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'project_as_current_update+'", 'null': 'True', 'to': "orm['main.ProjectUpdate']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_update': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'project_as_last_update+'", 'null': 'True', 'to': "orm['main.ProjectUpdate']"}),
'last_update_failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'local_path': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'project\', \'app_label\': u\'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
'scm_branch': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'scm_clean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scm_delete_on_next_update': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scm_delete_on_update': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scm_key_data': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'scm_key_unlock': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'scm_password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'scm_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'scm_update_on_launch': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scm_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'scm_username': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '256', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'ok'", 'max_length': '32', 'null': 'True'})
},
'main.projectupdate': {
'Meta': {'object_name': 'ProjectUpdate'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'cancel_flag': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'projectupdate\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'failed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_args': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'job_cwd': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'job_env': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'projectupdate\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'project_updates'", 'to': u"orm['main.Project']"}),
'result_stdout': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'result_traceback': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20'})
},
'main.team': {
'Meta': {'object_name': 'Team'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'team\', \'app_label\': \'main\'}(class)s_created+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': '"{\'class\': \'team\', \'app_label\': \'main\'}(class)s_modified+"', 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '512'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'teams'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['main.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'teams'", 'blank': 'True', 'to': u"orm['main.Project']"}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'teams'", 'blank': 'True', 'to': u"orm['auth.User']"})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['main']

View File

@ -44,7 +44,8 @@ __all__ = ['PrimordialModel', 'Organization', 'Team', 'Project',
'Job', 'JobHostSummary', 'JobEvent', 'AuthToken',
'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY',
'PERM_INVENTORY_CHECK', 'JOB_STATUS_CHOICES']
'PERM_INVENTORY_CHECK', 'JOB_STATUS_CHOICES',
'CLOUD_INVENTORY_SOURCES']
logger = logging.getLogger('awx.main.models')
@ -81,6 +82,8 @@ JOB_STATUS_CHOICES = [
('canceled', _('Canceled')), # The job was canceled before completion.
]
CLOUD_INVENTORY_SOURCES = ['ec2', 'rackspace']
class PrimordialModel(models.Model):
'''
common model for all object types that have these standard fields
@ -199,6 +202,11 @@ class Inventory(CommonModel):
editable=False,
help_text=_('Number of hosts in this inventory with active failures.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_('Flag indicating whether this inventory has any external inventory sources.'),
)
def get_absolute_url(self):
return reverse('main:inventory_detail', args=(self.pk,))
@ -212,6 +220,9 @@ class Inventory(CommonModel):
host.mark_inactive()
for group in self.groups.filter(active=True):
group.mark_inactive()
group.inventory_source.mark_inactive()
for inventory_source in self.inventory_sources.filter(active=True):
inventory_source.mark_inactive()
@property
def variables_dict(self):
@ -220,17 +231,20 @@ class Inventory(CommonModel):
except ValueError:
return yaml.safe_load(self.variables)
def update_has_active_failures(self, update_groups=True, update_hosts=True):
def update_computed_fields(self, update_groups=True, update_hosts=True):
if update_hosts:
for host in self.hosts.filter(active=True):
host.update_has_active_failures(update_inventory=False,
host.update_computed_fields(update_inventory=False,
update_groups=False)
if update_groups:
for group in self.groups.filter(active=True):
group.update_has_active_failures()
group.update_computed_fields()
failed_hosts = self.hosts.filter(active=True, has_active_failures=True)
hosts_with_active_failures = failed_hosts.count()
has_active_failures = bool(hosts_with_active_failures)
inv_src_qs = self.inventory_sources.filter(active=True,
source__in=CLOUD_INVENTORY_SOURCES)
has_inventory_sources = bool(inv_src_qs.count())
update_fields = []
if self.hosts_with_active_failures != hosts_with_active_failures:
self.hosts_with_active_failures = hosts_with_active_failures
@ -238,6 +252,9 @@ class Inventory(CommonModel):
if self.has_active_failures != has_active_failures:
self.has_active_failures = has_active_failures
update_fields.append('has_active_failures')
if self.has_inventory_sources != has_inventory_sources:
self.has_inventory_sources = has_inventory_sources
update_fields.append('has_inventory_sources')
if update_fields:
self.save(update_fields=update_fields)
@ -246,6 +263,10 @@ class Inventory(CommonModel):
group_pks = self.groups.values_list('pk', flat=True)
return self.groups.exclude(parents__pk__in=group_pks).distinct()
@property
def inventory_sources(self):
    # All InventorySource records attached to this inventory via its groups.
    # NOTE(review): this commit also adds a ForeignKey on InventorySource with
    # related_name='inventory_sources' pointing at Inventory — confirm whether
    # this property or the reverse FK accessor is the current one; both cannot
    # coexist under the same name.
    return InventorySource.objects.filter(group__inventory=self)
class Host(CommonModelNameNotUnique):
'''
A managed node
@ -255,24 +276,37 @@ class Host(CommonModelNameNotUnique):
app_label = 'main'
unique_together = (("name", "inventory"),)
inventory = models.ForeignKey(
'Inventory',
null=False,
related_name='hosts',
)
enabled = models.BooleanField(
default=True,
help_text=_('Is this host online and available for running jobs?'),
)
variables = models.TextField(
blank=True,
default='',
help_text=_('Variables in JSON or YAML format.'),
)
inventory = models.ForeignKey('Inventory', null=False, related_name='hosts')
last_job = models.ForeignKey('Job', blank=True, null=True, default=None, on_delete=models.SET_NULL, related_name='hosts_as_last_job+')
last_job_host_summary = models.ForeignKey('JobHostSummary', blank=True, null=True, default=None, on_delete=models.SET_NULL, related_name='hosts_as_last_job_summary+')
has_active_failures = models.BooleanField(default=False, editable=False)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_('Flag indicating whether this host was created/updated from any external inventory sources.'),
)
# FIXME: Track which inventory source(s) created this host.
#inventory_sources = models.ManyToManyField(
# 'InventorySource',
# related_name='synced_hosts',
# blank=True,
# editable=False,
#)
# Track which inventory source(s) created or modified this host.
inventory_sources = models.ManyToManyField(
'InventorySource',
related_name='hosts',
blank=True,
editable=False,
)
def __unicode__(self):
return self.name
@ -280,20 +314,38 @@ class Host(CommonModelNameNotUnique):
def get_absolute_url(self):
return reverse('main:host_detail', args=(self.pk,))
def mark_inactive(self, save=True):
    '''
    When marking hosts inactive, remove all associations to related
    inventory sources.
    '''
    super(Host, self).mark_inactive(save=save)
    # Drop the M2M links so an inactive host no longer counts toward any
    # inventory source's synced hosts.
    self.inventory_sources.clear()
def update_computed_fields(self, update_inventory=True,
                           update_groups=True):
    '''
    Update fields on this host that are computed from related objects
    (active-failure flag from the last job host summary, and whether any
    cloud inventory source manages this host), then optionally propagate
    the recomputation to the parent inventory and containing groups.
    '''
    has_active_failures = bool(self.last_job_host_summary and
                               self.last_job_host_summary.job.active and
                               self.last_job_host_summary.failed)
    inv_src_qs = self.inventory_sources.filter(active=True,
                                               source__in=CLOUD_INVENTORY_SOURCES)
    has_inventory_sources = bool(inv_src_qs.count())
    # Only save when something actually changed, and only the changed
    # fields, to avoid redundant writes and signal storms.
    update_fields = []
    if self.has_active_failures != has_active_failures:
        self.has_active_failures = has_active_failures
        update_fields.append('has_active_failures')
    if self.has_inventory_sources != has_inventory_sources:
        self.has_inventory_sources = has_inventory_sources
        update_fields.append('has_inventory_sources')
    if update_fields:
        self.save(update_fields=update_fields)
    if update_inventory:
        self.inventory.update_computed_fields(update_groups=False,
                                              update_hosts=False)
    if update_groups:
        for group in self.all_groups.filter(active=True):
            group.update_computed_fields()
@property
def variables_dict(self):
@ -341,14 +393,19 @@ class Group(CommonModelNameNotUnique):
editable=False,
help_text=_('Number of hosts in this group with active failures.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_('Flag indicating whether this group was created/updated from any external inventory sources.'),
)
# FIXME: Track which inventory source(s) created this group.
#inventory_sources = models.ManyToManyField(
# 'InventorySource',
# related_name='synced_groups',
# blank=True,
# editable=False,
#)
# Track which inventory source(s) created or modified this group.
inventory_sources = models.ManyToManyField(
'InventorySource',
related_name='groups',
blank=True,
editable=False,
)
def __unicode__(self):
return self.name
@ -359,19 +416,24 @@ class Group(CommonModelNameNotUnique):
def mark_inactive(self, save=True):
    '''
    When marking groups inactive, remove all associations to related
    groups/hosts/inventory_sources.
    '''
    super(Group, self).mark_inactive(save=save)
    # Detach from inventory sources and relatives so an inactive group
    # drops out of computed-field and traversal queries.
    self.inventory_sources.clear()
    self.parents.clear()
    self.children.clear()
    self.hosts.clear()
def update_has_active_failures(self):
def update_computed_fields(self):
failed_hosts = self.all_hosts.filter(active=True,
last_job_host_summary__job__active=True,
last_job_host_summary__failed=True)
hosts_with_active_failures = failed_hosts.count()
has_active_failures = bool(hosts_with_active_failures)
inv_src_qs = self.inventory_sources.filter(active=True,
source__in=CLOUD_INVENTORY_SOURCES)
has_inventory_sources = bool(inv_src_qs.count())
update_fields = []
if self.hosts_with_active_failures != hosts_with_active_failures:
self.hosts_with_active_failures = hosts_with_active_failures
@ -379,6 +441,9 @@ class Group(CommonModelNameNotUnique):
if self.has_active_failures != has_active_failures:
self.has_active_failures = has_active_failures
update_fields.append('has_active_failures')
if self.has_inventory_sources != has_inventory_sources:
self.has_inventory_sources = has_inventory_sources
update_fields.append('has_inventory_sources')
if update_fields:
self.save(update_fields=update_fields)
@ -452,7 +517,7 @@ class InventorySource(PrimordialModel):
# FIXME: Track inventory source for import via management command?
SOURCE_CHOICES = [
('file', _('Local File or Script')),
('file', _('Local File, Directory or Script')),
('rackspace', _('Rackspace Cloud Servers')),
('ec2', _('Amazon EC2')),
]
@ -467,9 +532,18 @@ class InventorySource(PrimordialModel):
('successful', _('Successful')),
]
inventory = models.ForeignKey(
'Inventory',
related_name='inventory_sources',
null=True,
default=None,
)
group = AutoOneToOneField(
'Group',
related_name='inventory_source',
blank=True,
null=True,
default=None,
editable=False,
)
source = models.CharField(
@ -482,8 +556,9 @@ class InventorySource(PrimordialModel):
max_length=1024,
blank=True,
default='',
editable=False,
)
source_env = models.TextField(
source_vars = models.TextField(
blank=True,
default='',
)
@ -507,18 +582,21 @@ class InventorySource(PrimordialModel):
blank=True,
default='',
)
overwrite_hosts = models.BooleanField(
overwrite = models.BooleanField(
default=False,
help_text=_('Overwrite local groups and hosts from remote inventory source.'),
)
overwrite_vars = models.BooleanField(
default=False,
)
keep_vars = models.BooleanField(
default=False,
help_text=_('Overwrite local variables from remote inventory source.'),
)
update_on_launch = models.BooleanField(
default=False,
)
update_interval = models.PositiveIntegerField(
default=0,
help_text=_('If nonzero, inventory source will be updated every N minutes.'),
)
current_update = models.ForeignKey(
'InventoryUpdate',
null=True,
@ -575,6 +653,11 @@ class InventorySource(PrimordialModel):
for field in updated_fields:
if field not in update_fields:
update_fields.append(field)
# Update inventory from group (if available).
if self.group and not self.inventory:
self.inventory = self.group.inventory
if 'inventory' not in update_fields:
update_fields.append('inventory')
# Do the actual save.
super(InventorySource, self).save(*args, **kwargs)
# After saving a new instance for the first time (to get a primary
@ -1825,7 +1908,7 @@ class JobHostSummary(models.Model):
update_fields.append('last_job_host_summary')
if update_fields:
self.host.save(update_fields=update_fields)
self.host.update_has_active_failures()
self.host.update_computed_fields()
class JobEvent(models.Model):
'''

View File

@ -21,6 +21,7 @@ from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.compat import get_concrete_model
from rest_framework import fields
from rest_framework import serializers
# AWX
@ -58,6 +59,18 @@ SUMMARIZABLE_FK_FIELDS = {
'inventory_source': ('source', 'last_updated', 'status'),
}
class ChoiceField(fields.ChoiceField):
    '''
    ChoiceField variant that exposes its available choices in the field
    metadata (e.g. for OPTIONS responses).
    '''

    def metadata(self):
        field_metadata = super(ChoiceField, self).metadata()
        if self.choices:
            field_metadata['choices'] = self.choices
        return field_metadata
# Monkeypatch REST framework to replace default ChoiceField used by
# ModelSerializer.
serializers.ChoiceField = ChoiceField
class BaseSerializer(serializers.ModelSerializer):
# add the URL and related resources
@ -166,7 +179,6 @@ class UserSerializer(BaseSerializer):
'last_name', 'email', 'is_superuser', 'password', 'ldap_dn')
def to_native(self, obj):
print obj
ret = super(UserSerializer, self).to_native(obj)
ret.pop('password', None)
ret.fields.pop('password', None)
@ -344,7 +356,6 @@ class ProjectSerializer(BaseSerializer):
except ValueError, e:
raise serializers.ValidationError((e.args or ('Invalid SCM URL',))[0])
scm_url_parts = urlparse.urlsplit(scm_url)
#print scm_url_parts
if scm_type and not any(scm_url_parts):
raise serializers.ValidationError('SCM URL is required')
return attrs
@ -439,7 +450,8 @@ class InventorySerializer(BaseSerializerWithVariables):
model = Inventory
fields = BASE_FIELDS + ('organization', 'variables',
'has_active_failures',
'hosts_with_active_failures')
'hosts_with_active_failures',
'has_inventory_sources')
def get_related(self, obj):
if obj is None:
@ -461,7 +473,8 @@ class HostSerializer(BaseSerializerWithVariables):
class Meta:
model = Host
fields = BASE_FIELDS + ('inventory', 'variables', 'has_active_failures',
fields = BASE_FIELDS + ('inventory', 'enabled', 'variables',
'has_active_failures', 'has_inventory_sources',
'last_job', 'last_job_host_summary')
def get_related(self, obj):
@ -475,6 +488,7 @@ class HostSerializer(BaseSerializerWithVariables):
all_groups = reverse('main:host_all_groups_list', args=(obj.pk,)),
job_events = reverse('main:host_job_events_list', args=(obj.pk,)),
job_host_summaries = reverse('main:host_job_host_summaries_list', args=(obj.pk,)),
#inventory_sources = reverse('main:host_inventory_sources_list', args=(obj.pk,)),
))
if obj.last_job:
res['last_job'] = reverse('main:job_detail', args=(obj.last_job.pk,))
@ -541,6 +555,7 @@ class GroupSerializer(BaseSerializerWithVariables):
job_events = reverse('main:group_job_events_list', args=(obj.pk,)),
job_host_summaries = reverse('main:group_job_host_summaries_list', args=(obj.pk,)),
inventory_source = reverse('main:inventory_source_detail', args=(obj.inventory_source.pk,)),
#inventory_sources = reverse('main:group_inventory_sources_list', args=(obj.pk,)),
))
return res
@ -557,7 +572,8 @@ class GroupTreeSerializer(GroupSerializer):
class Meta:
model = Group
fields = BASE_FIELDS + ('inventory', 'variables', 'has_active_failures',
'children')
'hosts_with_active_failures',
'has_inventory_sources', 'children')
def get_children(self, obj):
if obj is None:
@ -605,11 +621,11 @@ class InventorySourceSerializer(BaseSerializer):
class Meta:
model = InventorySource
fields = ('id', 'url', 'related', 'summary_fields', 'created',
'modified', 'group', 'source', 'source_path', 'source_env',
'source_username', 'source_password', 'source_regions',
'source_tags', 'overwrite_hosts', 'overwrite_vars',
'keep_vars', 'update_on_launch', 'last_update_failed',
'status', 'last_updated')
'modified', 'group', 'source', 'source_path',
'source_vars', 'source_username', 'source_password',
'source_regions', 'source_tags', 'overwrite',
'overwrite_vars', 'update_on_launch', 'update_interval',
'last_update_failed', 'status', 'last_updated')
def to_native(self, obj):
ret = super(InventorySourceSerializer, self).to_native(obj)
@ -617,6 +633,12 @@ class InventorySourceSerializer(BaseSerializer):
for field in InventorySource.PASSWORD_FIELDS:
if field in ret and unicode(ret[field]).startswith('$encrypted$'):
ret[field] = '$encrypted$'
# Make regions/tags into a list of strings.
for field in ('source_regions', 'source_tags'):
if field in ret:
value = ret[field]
if isinstance(value, basestring):
ret[field] = [x.strip() for x in value.split(',') if x.strip()]
return ret
def restore_object(self, attrs, instance=None):
@ -624,6 +646,7 @@ class InventorySourceSerializer(BaseSerializer):
for field in InventorySource.PASSWORD_FIELDS:
if unicode(attrs.get(field, '')).startswith('$encrypted$'):
attrs.pop(field, None)
# FIXME: Accept list of strings for regions/tags.
instance = super(InventorySourceSerializer, self).restore_object(attrs, instance)
return instance
@ -635,6 +658,8 @@ class InventorySourceSerializer(BaseSerializer):
group = reverse('main:group_detail', args=(obj.group.pk,)),
update = reverse('main:inventory_source_update_view', args=(obj.pk,)),
inventory_updates = reverse('main:inventory_source_updates_list', args=(obj.pk,)),
hosts = reverse('main:inventory_source_hosts_list', args=(obj.pk,)),
groups = reverse('main:inventory_source_groups_list', args=(obj.pk,)),
))
if obj.current_update:
res['current_update'] = reverse('main:inventory_update_detail',
@ -650,6 +675,43 @@ class InventorySourceSerializer(BaseSerializer):
d = super(InventorySourceSerializer, self).get_summary_fields(obj)
return d
def validate_source(self, attrs, source):
    '''
    Validate the inventory source type.
    '''
    # FIXME: Real validation not yet implemented; the previous draft bound
    # attrs.get(source) and self.object to locals without using them.
    return attrs
def validate_source_vars(self, attrs, source):
    '''
    Validate that source_vars is blank or parses as JSON or YAML.

    Raises serializers.ValidationError when neither parser accepts the
    value.
    '''
    # FIXME: also support key=value pairs.
    # Look the value up once; the old code used attrs[source] in the YAML
    # branch, inconsistent with attrs.get() in the JSON branch.
    value = attrs.get(source, '')
    try:
        # An empty/blank value is treated as an empty JSON object.
        json.loads(value.strip() or '{}')
        return attrs
    except ValueError:
        pass
    try:
        yaml.safe_load(value)
        return attrs
    except yaml.YAMLError:
        pass
    raise serializers.ValidationError('Must be valid JSON or YAML')
def validate_source_username(self, attrs, source):
    # FIXME: no username validation implemented yet; pass through unchanged.
    return attrs
def validate_source_password(self, attrs, source):
    # FIXME: no password validation implemented yet; pass through unchanged.
    return attrs
def validate_source_regions(self, attrs, source):
    # FIXME: no region validation implemented yet; pass through unchanged.
    return attrs
def validate_source_tags(self, attrs, source):
    # FIXME: no tag validation implemented yet; pass through unchanged.
    return attrs
class InventoryUpdateSerializer(BaseSerializer):
class Meta:

View File

@ -22,9 +22,9 @@ logger = logging.getLogger('awx.main.signals')
_inventory_updating = threading.local()
def update_inventory_has_active_failures(sender, **kwargs):
def update_inventory_computed_fields(sender, **kwargs):
'''
Signal handler and wrapper around inventory.update_has_active_failures to
Signal handler and wrapper around inventory.update_computed_fields to
prevent unnecessary recursive calls.
'''
if not getattr(_inventory_updating, 'is_updating', False):
@ -33,6 +33,10 @@ def update_inventory_has_active_failures(sender, **kwargs):
sender_name = 'group.hosts'
elif sender == Group.parents.through:
sender_name = 'group.parents'
elif sender == Host.inventory_sources.through:
sender_name = 'host.inventory_sources'
elif sender == Group.inventory_sources.through:
sender_name = 'group.inventory_sources'
else:
sender_name = unicode(sender._meta.verbose_name)
if kwargs['signal'] == post_save:
@ -45,24 +49,28 @@ def update_inventory_has_active_failures(sender, **kwargs):
sender_action = 'changed'
else:
return
logger.debug('%s %s, updating inventory has_active_failures: %r %r',
logger.debug('%s %s, updating inventory computed fields: %r %r',
sender_name, sender_action, sender, kwargs)
try:
_inventory_updating.is_updating = True
inventory = instance.inventory
update_hosts = issubclass(sender, Job)
inventory.update_has_active_failures(update_hosts=update_hosts)
inventory.update_computed_fields(update_hosts=update_hosts)
finally:
_inventory_updating.is_updating = False
# Keep inventory/group/host computed fields up to date whenever related
# objects change.  (The stale connections to the old handler name
# update_inventory_has_active_failures were removed; that function was
# renamed to update_inventory_computed_fields, so connecting it would
# raise NameError.)
post_save.connect(update_inventory_computed_fields, sender=Host)
post_delete.connect(update_inventory_computed_fields, sender=Host)
post_save.connect(update_inventory_computed_fields, sender=Group)
post_delete.connect(update_inventory_computed_fields, sender=Group)
m2m_changed.connect(update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.connect(update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.connect(update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.connect(update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.connect(update_inventory_computed_fields, sender=Job)
post_delete.connect(update_inventory_computed_fields, sender=Job)
post_save.connect(update_inventory_computed_fields, sender=InventorySource)
post_delete.connect(update_inventory_computed_fields, sender=InventorySource)
# Migrate hosts, groups to parent group(s) whenever a group is deleted or
# marked as inactive.
@ -98,6 +106,7 @@ def save_related_pks_before_group_marked_inactive(sender, **kwargs):
instance._saved_parents_pks = set(instance.parents.values_list('pk', flat=True))
instance._saved_hosts_pks = set(instance.hosts.values_list('pk', flat=True))
instance._saved_children_pks = set(instance.children.values_list('pk', flat=True))
instance._saved_inventory_source_pk = instance.inventory_source.pk
@receiver(post_save, sender=Group)
def migrate_children_from_inactive_group_to_parent_groups(sender, **kwargs):
@ -117,6 +126,10 @@ def migrate_children_from_inactive_group_to_parent_groups(sender, **kwargs):
child_group, parent_group, instance)
parent_group.children.add(child_group)
parent_group.children.remove(instance)
inventory_source_pk = getattr(instance, '_saved_inventory_source_pk', None)
if inventory_source_pk:
inventory_source = InventorySource.objects.get(pk=inventory_source_pk)
inventory_source.mark_inactive()
# Update host pointers to last_job and last_job_host_summary when a job is
# marked inactive or deleted.

View File

@ -25,6 +25,7 @@ from celery import Task
# Django
from django.conf import settings
from django.db import transaction
from django.utils.timezone import now
# AWX
@ -116,7 +117,12 @@ class BaseTask(Task):
return env
def build_safe_env(self, instance, **kwargs):
    '''
    Return a copy of the task environment with credential-like values
    masked, suitable for storing/displaying.
    '''
    # NOTE: the stale early "return self.build_env(...)" left over from the
    # previous implementation made the masking below unreachable; removed.
    hidden_re = re.compile('API|TOKEN|KEY|SECRET|PASS')
    env = self.build_env(instance, **kwargs)
    for k, v in env.items():
        # Mask any variable whose name suggests a credential, preserving
        # the value's length.
        if hidden_re.search(k):
            env[k] = '*' * len(str(v))
    return env
def build_args(self, instance, **kwargs):
raise NotImplementedError
@ -174,6 +180,9 @@ class BaseTask(Task):
updates['result_stdout'] = logfile.getvalue()
last_stdout_update = time.time()
instance = self.update_model(instance.pk, **updates)
# Commit transaction needed when running unit tests. FIXME: Is it
# needed or breaks anything for normal operation?
transaction.commit()
if instance.cancel_flag:
child.close(True)
canceled = True
@ -729,18 +738,16 @@ class RunInventoryUpdate(BaseTask):
inventory = inventory_source.group.inventory
args = ['awx-manage', 'inventory_import']
args.extend(['--inventory-id', str(inventory.pk)])
if inventory_source.overwrite_hosts:
if inventory_source.overwrite:
args.append('--overwrite')
if inventory_source.overwrite_vars:
args.append('--overwrite-vars')
if inventory_source.keep_vars:
args.append('--keep-vars')
args.append('--source')
if inventory_source.source == 'ec2':
ec2_path = self.get_path_to('..', 'plugins', 'inventory', 'ec2.py')
args.append(ec2_path)
elif inventory_source.source == 'rackspace':
rax_path = self.get_path_to('..', 'plugins', 'inventory', 'rax.py')
rax_path = self.get_path_to('..', 'plugins', 'inventory', 'rax2.py')
args.append(rax_path)
elif inventory_source.source == 'file':
args.append(inventory_source.source_path)

View File

@ -18,6 +18,11 @@ separate API request for each host. Refer to
[Tuning the External Inventory Script](http://www.ansibleworks.com/docs/api.html#tuning-the-external-inventory-script)
for more information on this feature.
_(New in AWX 1.4)_ By default, the inventory script will only return hosts that
are enabled in the inventory. This feature allows disabled hosts to be skipped
when running jobs without removing them from the inventory. Specify a query
string of `?all=1` to return all hosts, including disabled ones.
## Host Response
Make a GET request to this resource with a query string similar to

View File

@ -37,13 +37,15 @@ TEST_PLAYBOOK = '''- hosts: test-group
'''
TEST_INVENTORY_INI = '''\
# Some comment about blah blah blah...
[webservers]
web1.example.com ansible_ssh_host=w1.example.net
web2.example.com
web3.example.com
web3.example.com:1022
[webservers:vars]
webvar=blah
[webservers:vars] # Comment on a section
webvar=blah # Comment on an option
[dbservers]
db1.example.com
@ -434,13 +436,42 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
ini_file.close()
self._temp_files.append(self.ini_path)
def create_test_dir(self, hostnames=None):
    '''
    Create a temporary inventory directory containing the test INI file,
    a group_vars/all file, and (when hostnames are given) one host_vars
    file per hostname.  The directory is registered for cleanup.
    '''
    # (Removed the duplicated stale "def create_test_dir(self):" line left
    # over from the old signature.)
    hostnames = hostnames or []
    self.inv_dir = tempfile.mkdtemp()
    self._temp_project_dirs.append(self.inv_dir)
    self.create_test_ini(self.inv_dir)
    group_vars = os.path.join(self.inv_dir, 'group_vars')
    os.makedirs(group_vars)
    file(os.path.join(group_vars, 'all'), 'wb').write(TEST_GROUP_VARS)
    if hostnames:
        host_vars = os.path.join(self.inv_dir, 'host_vars')
        os.makedirs(host_vars)
        for hostname in hostnames:
            # Each host_vars file sets test_hostname so the import can be
            # verified per-host.
            test_host_vars = '''test_hostname: %s''' % hostname
            file(os.path.join(host_vars, hostname), 'wb').write(test_host_vars)
def check_adhoc_inventory_source(self, inventory, except_host_pks=None,
                                 except_group_pks=None):
    """Verify that the inventory_import management command created an
    "ad hoc" inventory source (attached directly to the inventory, with
    no group) plus one successful inventory update for it.

    except_host_pks / except_group_pks: primary keys of hosts/groups
    that were NOT created by the import and therefore are not expected
    to be linked to the new inventory source.
    """
    # Check that management command created a new inventory source and
    # related inventory update.
    inventory_sources = inventory.inventory_sources.filter(group=None)
    self.assertEqual(inventory_sources.count(), 1)
    inventory_source = inventory_sources[0]
    self.assertEqual(inventory_source.source, 'file')
    self.assertEqual(inventory_source.inventory_updates.count(), 1)
    inventory_update = inventory_source.inventory_updates.all()[0]
    self.assertEqual(inventory_update.status, 'successful')
    # Every imported host and group must be linked back to the source.
    for host in inventory.hosts.all():
        if host.pk in (except_host_pks or []):
            continue
        source_pks = host.inventory_sources.values_list('pk', flat=True)
        self.assertTrue(inventory_source.pk in source_pks)
    for group in inventory.groups.all():
        if group.pk in (except_group_pks or []):
            continue
        source_pks = group.inventory_sources.values_list('pk', flat=True)
        self.assertTrue(inventory_source.pk in source_pks)
def test_invalid_options(self):
inventory_id = self.inventories[0].pk
@ -491,14 +522,14 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
inventory_id=invalid_id,
source=self.ini_path)
self.assertTrue(isinstance(result, CommandError), result)
self.assertTrue('matched' in str(result))
self.assertTrue('found' in str(result))
# Invalid inventory name.
invalid_name = 'invalid inventory name'
result, stdout, stderr = self.run_command('inventory_import',
inventory_name=invalid_name,
source=self.ini_path)
self.assertTrue(isinstance(result, CommandError), result)
self.assertTrue('matched' in str(result))
self.assertTrue('found' in str(result))
def test_ini_file(self, source=None):
inv_src = source or self.ini_path
@ -532,6 +563,10 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
if host.name == 'web1.example.com':
self.assertEqual(host.variables_dict,
{'ansible_ssh_host': 'w1.example.net'})
elif host.name in ('db1.example.com', 'db2.example.com') and source and os.path.isdir(source):
self.assertEqual(host.variables_dict, {'test_hostname': host.name})
elif host.name == 'web3.example.com':
self.assertEqual(host.variables_dict, {'ansible_ssh_port': 1022})
else:
self.assertEqual(host.variables_dict, {})
for group in new_inv.groups.all():
@ -553,11 +588,106 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
host_names = set(['web1.example.com','web2.example.com',
'web3.example.com'])
self.assertEqual(hosts, host_names)
self.check_adhoc_inventory_source(new_inv)
def test_dir_with_ini_file(self):
self.create_test_dir()
self.create_test_dir(hostnames=['db1.example.com', 'db2.example.com'])
self.test_ini_file(self.inv_dir)
def test_merge_from_ini_file(self, overwrite=False, overwrite_vars=False):
    """Import the test INI file into an inventory that already has its
    own hosts/groups/variables and verify merge vs. overwrite behavior.

    overwrite: hosts/groups/variables not present in the source are
        removed/replaced.
    overwrite_vars: only variables are replaced; extra hosts and groups
        are kept.
    """
    # Pre-populate an inventory with data NOT present in the INI source:
    # an inventory variable ('varc'), an 'lb.example.com' host, an
    # 'lbservers' group (child of 'servers'), and a group variable.
    new_inv_vars = json.dumps({'varc': 'C'})
    new_inv = self.organizations[0].inventories.create(name='inv123',
                                                       variables=new_inv_vars)
    lb_host_vars = json.dumps({'lbvar': 'ni!'})
    lb_host = new_inv.hosts.create(name='lb.example.com',
                                   variables=lb_host_vars)
    lb_group = new_inv.groups.create(name='lbservers')
    servers_group_vars = json.dumps({'vard': 'D'})
    servers_group = new_inv.groups.create(name='servers',
                                          variables=servers_group_vars)
    servers_group.children.add(lb_group)
    lb_group.hosts.add(lb_host)
    # Run the inventory_import management command against the INI file.
    result, stdout, stderr = self.run_command('inventory_import',
                                              inventory_id=new_inv.pk,
                                              source=self.ini_path,
                                              overwrite=overwrite,
                                              overwrite_vars=overwrite_vars)
    self.assertEqual(result, None)
    # Check that inventory is populated as expected.
    new_inv = Inventory.objects.get(pk=new_inv.pk)
    # Groups: the pre-existing 'lbservers' group survives a merge but is
    # removed by a full overwrite.
    expected_group_names = set(['servers', 'dbservers', 'webservers',
                                'lbservers'])
    if overwrite:
        expected_group_names.remove('lbservers')
    group_names = set(new_inv.groups.values_list('name', flat=True))
    self.assertEqual(expected_group_names, group_names)
    # Hosts: same rule for the pre-existing 'lb.example.com' host.
    expected_host_names = set(['web1.example.com', 'web2.example.com',
                               'web3.example.com', 'db1.example.com',
                               'db2.example.com', 'lb.example.com'])
    if overwrite:
        expected_host_names.remove('lb.example.com')
    host_names = set(new_inv.hosts.values_list('name', flat=True))
    self.assertEqual(expected_host_names, host_names)
    # Inventory variables: 'varc' is kept on a plain merge, dropped
    # whenever variables are overwritten.
    expected_inv_vars = {'vara': 'A', 'varc': 'C'}
    if overwrite or overwrite_vars:
        expected_inv_vars.pop('varc')
    self.assertEqual(new_inv.variables_dict, expected_inv_vars)
    for host in new_inv.hosts.all():
        if host.name == 'web1.example.com':
            self.assertEqual(host.variables_dict,
                             {'ansible_ssh_host': 'w1.example.net'})
        elif host.name == 'web3.example.com':
            self.assertEqual(host.variables_dict, {'ansible_ssh_port': 1022})
        elif host.name == 'lb.example.com':
            self.assertEqual(host.variables_dict, {'lbvar': 'ni!'})
        else:
            self.assertEqual(host.variables_dict, {})
    for group in new_inv.groups.all():
        if group.name == 'servers':
            # Pre-existing 'vard' survives only a plain merge.
            expected_vars = {'varb': 'B', 'vard': 'D'}
            if overwrite or overwrite_vars:
                expected_vars.pop('vard')
            self.assertEqual(group.variables_dict, expected_vars)
            children = set(group.children.values_list('name', flat=True))
            expected_children = set(['dbservers', 'webservers', 'lbservers'])
            if overwrite:
                expected_children.remove('lbservers')
            self.assertEqual(children, expected_children)
            self.assertEqual(group.hosts.count(), 0)
        elif group.name == 'dbservers':
            self.assertEqual(group.variables_dict, {'dbvar': 'ugh'})
            self.assertEqual(group.children.count(), 0)
            hosts = set(group.hosts.values_list('name', flat=True))
            host_names = set(['db1.example.com','db2.example.com'])
            self.assertEqual(hosts, host_names)
        elif group.name == 'webservers':
            self.assertEqual(group.variables_dict, {'webvar': 'blah'})
            self.assertEqual(group.children.count(), 0)
            hosts = set(group.hosts.values_list('name', flat=True))
            host_names = set(['web1.example.com','web2.example.com',
                              'web3.example.com'])
            self.assertEqual(hosts, host_names)
        elif group.name == 'lbservers':
            self.assertEqual(group.variables_dict, {})
            self.assertEqual(group.children.count(), 0)
            hosts = set(group.hosts.values_list('name', flat=True))
            host_names = set(['lb.example.com'])
            self.assertEqual(hosts, host_names)
    # Manually-created host/group are not expected to be linked to the
    # ad hoc inventory source on a plain merge (and no longer exist
    # after an overwrite).
    if overwrite:
        except_host_pks = set()
        except_group_pks = set()
    else:
        except_host_pks = set([lb_host.pk])
        except_group_pks = set([lb_group.pk])
    self.check_adhoc_inventory_source(new_inv, except_host_pks,
                                      except_group_pks)
def test_overwrite_vars_from_ini_file(self):
    # Import replacing only variables; extra hosts/groups are kept.
    self.test_merge_from_ini_file(overwrite_vars=True)
def test_overwrite_from_ini_file(self):
    # Full overwrite import; hosts/groups not in the source are removed.
    self.test_merge_from_ini_file(overwrite=True)
def test_executable_file(self):
# New empty inventory.
old_inv = self.inventories[1]
@ -572,8 +702,6 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
rest_api_url = urlparse.urlunsplit([parts.scheme, netloc, parts.path,
parts.query, parts.fragment])
os.environ.setdefault('REST_API_URL', rest_api_url)
#os.environ.setdefault('REST_API_TOKEN',
# self.super_django_user.auth_token.key)
os.environ['INVENTORY_ID'] = str(old_inv.pk)
source = os.path.join(os.path.dirname(__file__), '..', '..', 'plugins',
'inventory', 'awx.py')
@ -602,6 +730,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
old_hosts = set(old_group.hosts.values_list('name', flat=True))
new_hosts = set(new_group.hosts.values_list('name', flat=True))
self.assertEqual(old_hosts, new_hosts)
self.check_adhoc_inventory_source(new_inv)
def test_executable_file_with_meta_hostvars(self):
os.environ['INVENTORY_HOSTVARS'] = '1'

View File

@ -992,6 +992,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
inventory_update.result_traceback)
else:
pass # If should_fail is None, we don't care.
return inventory_update
def check_inventory_source(self, inventory_source):
inventory_source = InventorySource.objects.get(pk=inventory_source.pk)
@ -999,26 +1000,35 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.assertTrue(inventory_source.can_update)
self.assertEqual(inventory.groups.count(), 1)
self.assertEqual(inventory.hosts.count(), 0)
self.check_inventory_update(inventory_source)
inventory_update = self.check_inventory_update(inventory_source)
inventory_source = InventorySource.objects.get(pk=inventory_source.pk)
self.assertNotEqual(inventory.groups.count(), 1)
self.assertNotEqual(inventory.hosts.count(), 0)
for host in inventory.hosts.all():
source_pks = host.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks)
for group in inventory.groups.all():
source_pks = group.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks)
def test_update_from_ec2(self):
source_username = getattr(settings, 'TEST_AWS_ACCESS_KEY_ID', '')
source_password = getattr(settings, 'TEST_AWS_SECRET_ACCESS_KEY', '')
source_regions = getattr(settings, 'TEST_AWS_REGIONS', 'all')
if not all([source_username, source_password]):
self.skipTest('no test ec2 credentials defined!')
inventory_source = self.update_inventory_source(self.group,
source='ec2', source_username=source_username,
source_password=source_password)
source_password=source_password, source_regions=source_regions)
self.check_inventory_source(inventory_source)
def test_update_from_rackspace(self):
source_username = getattr(settings, 'TEST_RACKSPACE_USERNAME', '')
source_password = getattr(settings, 'TEST_RACKSPACE_API_KEY', '')
source_regions = getattr(settings, 'TEST_RACKSPACE_REGIONS', '')
if not all([source_username, source_password]):
self.skipTest('no test rackspace credentials defined!')
inventory_source = self.update_inventory_source(self.group,
source='rackspace', source_username=source_username,
source_password=source_password)
source_password=source_password, source_regions=source_regions)
self.check_inventory_source(inventory_source)

View File

@ -622,7 +622,7 @@ class ProjectsTest(BaseTest):
@override_settings(CELERY_ALWAYS_EAGER=True,
CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
ANSIBLE_TRANSPORT='local',
PROJECT_UPDATE_IDLE_TIMEOUT=30,
PROJECT_UPDATE_IDLE_TIMEOUT=60,
PROJECT_UPDATE_VVV=True)
class ProjectUpdatesTest(BaseTransactionTest):

View File

@ -343,3 +343,61 @@ class InventoryScriptTest(BaseScriptTest):
rc, stdout, stderr = self.run_inventory_script(list=True, host='blah')
self.assertNotEqual(rc, 0, stderr)
self.assertEqual(json.loads(stdout), {})
def test_with_disabled_hosts(self):
    """Inventory script output: disabled hosts are hidden by default and
    returned again when the all=1 query parameter (--all) is used."""
    inventory = self.inventories[1]
    self.assertTrue(inventory.active)
    # Disable every currently-enabled host in the inventory.
    for host in inventory.hosts.filter(active=True, enabled=True):
        host.enabled = False
        host.save(update_fields=['enabled'])
    os.environ['INVENTORY_ID'] = str(inventory.pk)
    # Load inventory list as normal (only enabled hosts).
    rc, stdout, stderr = self.run_inventory_script(list=True)
    self.assertEqual(rc, 0, stderr)
    data = json.loads(stdout)
    # All active groups plus the implicit 'all' group must be present.
    groups = inventory.groups.filter(active=True)
    groupnames = list(groups.values_list('name', flat=True)) + ['all']
    self.assertEqual(set(data.keys()), set(groupnames))
    for k,v in data.items():
        self.assertTrue(isinstance(v, dict))
        if k == 'all':
            self.assertEqual(v.get('vars', {}), inventory.variables_dict)
            continue
        group = inventory.groups.get(active=True, name=k)
        hosts = group.hosts.filter(active=True, enabled=True)
        hostnames = hosts.values_list('name', flat=True)
        self.assertEqual(set(v.get('hosts', [])), set(hostnames))
        # Every host was disabled above, so each group must be empty.
        self.assertFalse(hostnames)
        if group.variables:
            self.assertEqual(v.get('vars', {}), group.variables_dict)
        if k == 'group-3':
            children = group.children.filter(active=True)
            childnames = children.values_list('name', flat=True)
            self.assertEqual(set(v.get('children', [])), set(childnames))
        else:
            self.assertTrue(len(v['children']) == 0)
    # Load inventory list with all hosts.
    rc, stdout, stderr = self.run_inventory_script(list=True, all=True)
    self.assertEqual(rc, 0, stderr)
    data = json.loads(stdout)
    groups = inventory.groups.filter(active=True)
    groupnames = list(groups.values_list('name', flat=True)) + ['all']
    self.assertEqual(set(data.keys()), set(groupnames))
    for k,v in data.items():
        self.assertTrue(isinstance(v, dict))
        if k == 'all':
            self.assertEqual(v.get('vars', {}), inventory.variables_dict)
            continue
        group = inventory.groups.get(active=True, name=k)
        hosts = group.hosts.filter(active=True)
        hostnames = hosts.values_list('name', flat=True)
        self.assertEqual(set(v.get('hosts', [])), set(hostnames))
        # With all=1 the disabled hosts are included again.
        self.assertTrue(hostnames)
        if group.variables:
            self.assertEqual(v.get('vars', {}), group.variables_dict)
        if k == 'group-3':
            children = group.children.filter(active=True)
            childnames = children.values_list('name', flat=True)
            self.assertEqual(set(v.get('children', [])), set(childnames))
        else:
            self.assertTrue(len(v['children']) == 0)

View File

@ -520,7 +520,7 @@ class RunJobTest(BaseCeleryTest):
host.name = '_'.join(host.name.split('_')[3:]) or 'undeleted host'
host.active = True
host.save()
host.update_has_active_failures()
host.update_computed_fields()
self.group = Group.objects.get(pk=self.group.pk)
self.assertTrue(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk)
@ -552,7 +552,7 @@ class RunJobTest(BaseCeleryTest):
host.last_job = job
host.last_job_host_summary = JobHostSummary.objects.get(job=job, host=host)
host.save()
job.inventory.update_has_active_failures()
job.inventory.update_computed_fields()
self.host = Host.objects.get(pk=self.host.pk)
self.assertTrue(self.host.has_active_failures)
self.group = Group.objects.get(pk=self.group.pk)

View File

@ -74,6 +74,7 @@ host_urls = patterns('awx.main.views',
url(r'^(?P<pk>[0-9]+)/all_groups/$', 'host_all_groups_list'),
url(r'^(?P<pk>[0-9]+)/job_events/', 'host_job_events_list'),
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'host_job_host_summaries_list'),
#url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'host_inventory_sources_list'),
)
group_urls = patterns('awx.main.views',
@ -86,12 +87,15 @@ group_urls = patterns('awx.main.views',
url(r'^(?P<pk>[0-9]+)/job_events/$', 'group_job_events_list'),
url(r'^(?P<pk>[0-9]+)/job_host_summaries/$', 'group_job_host_summaries_list'),
url(r'^(?P<pk>[0-9]+)/potential_children/$', 'group_potential_children_list'),
#url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'group_inventory_sources_list'),
)
inventory_source_urls = patterns('awx.main.views',
url(r'^(?P<pk>[0-9]+)/$', 'inventory_source_detail'),
url(r'^(?P<pk>[0-9]+)/update/$', 'inventory_source_update_view'),
url(r'^(?P<pk>[0-9]+)/inventory_updates/$', 'inventory_source_updates_list'),
#url(r'^(?P<pk>[0-9]+)/groups/$', 'inventory_source_groups_list'),
#url(r'^(?P<pk>[0-9]+)/hosts/$', 'inventory_source_hosts_list'),
)
inventory_update_urls = patterns('awx.main.views',

View File

@ -639,9 +639,13 @@ class InventoryScriptView(RetrieveAPIView):
self.object = self.get_object()
hostname = request.QUERY_PARAMS.get('host', '')
hostvars = bool(request.QUERY_PARAMS.get('hostvars', ''))
show_all = bool(request.QUERY_PARAMS.get('all', ''))
if show_all:
hosts_q = dict(active=True)
else:
hosts_q = dict(active=True, enabled=True)
if hostname:
host = get_object_or_404(self.object.hosts, active=True,
name=hostname)
host = get_object_or_404(self.object.hosts, name=hostname, **hosts_q)
data = host.variables_dict
else:
data = SortedDict()
@ -650,7 +654,7 @@ class InventoryScriptView(RetrieveAPIView):
data['all']['vars'] = self.object.variables_dict
for group in self.object.groups.filter(active=True):
hosts = group.hosts.filter(active=True)
hosts = group.hosts.filter(**hosts_q)
children = group.children.filter(active=True)
group_info = SortedDict()
group_info['hosts'] = list(hosts.values_list('name', flat=True))
@ -661,15 +665,15 @@ class InventoryScriptView(RetrieveAPIView):
if hostvars:
data.setdefault('_meta', SortedDict())
data['_meta'].setdefault('hostvars', SortedDict())
for host in self.object.hosts.filter(active=True):
for host in self.object.hosts.filter(**hosts_q):
data['_meta']['hostvars'][host.name] = host.variables_dict
# workaround for Ansible inventory bug (github #3687), localhost
# must be explicitly listed in the all group for dynamic inventory
# scripts to pick it up.
localhost_names = ('localhost', '127.0.0.1', '::1')
localhosts_qs = self.object.hosts.filter(active=True,
name__in=localhost_names)
localhosts_qs = self.object.hosts.filter(name__in=localhost_names,
**hosts_q)
localhosts = list(localhosts_qs.values_list('name', flat=True))
if localhosts:
data.setdefault('all', SortedDict())

View File

@ -77,10 +77,14 @@ class InventoryScript(object):
'%s:%d' % (parts.hostname, port),
parts.path, parts.query, parts.fragment])
url_path = '/api/v1/inventories/%d/script/' % self.inventory_id
q = {}
if self.show_all:
q['all'] = 1
if self.hostname:
url_path += '?%s' % urllib.urlencode({'host': self.hostname})
q['host'] = self.hostname
elif self.hostvars:
url_path += '?%s' % urllib.urlencode({'hostvars': 1})
q['hostvars'] = 1
url_path += '?%s' % urllib.urlencode(q)
url = urlparse.urljoin(url, url_path)
response = requests.get(url, auth=auth)
response.raise_for_status()
@ -112,6 +116,8 @@ class InventoryScript(object):
self.list_ = self.options.get('list', False)
self.hostvars = bool(self.options.get('hostvars', False) or
os.getenv('INVENTORY_HOSTVARS', ''))
self.show_all = bool(self.options.get('show_all', False) or
os.getenv('INVENTORY_ALL', ''))
self.indent = self.options.get('indent', None)
if self.list_ and self.hostname:
raise RuntimeError('Only --list or --host may be specified')
@ -154,6 +160,10 @@ def main():
default=False, help='Return hostvars inline with --list,'
' under ["_meta"]["hostvars"]. Can also be specified '
'using INVENTORY_HOSTVARS environment variable.')
parser.add_option('--all', action='store_true', dest='show_all',
default=False, help='Return all hosts, including those '
'marked as offline/disabled. Can also be specified '
'using INVENTORY_ALL environment variable.')
parser.add_option('--host', dest='hostname', default='',
help='Return JSON hash of host vars.')
parser.add_option('--indent', dest='indent', type='int', default=None,

224
awx/plugins/inventory/rax2.py Executable file
View File

@ -0,0 +1,224 @@
#!/usr/bin/env python
# (c) 2013, Jesse Keating <jesse.keating@rackspace.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
inventory: rax
short_description: Rackspace Public Cloud external inventory script
description:
- Generates inventory that Ansible can understand by making API request to Rackspace Public Cloud API
- |
When run against a specific host, this script returns the following variables:
rax_os-ext-sts_task_state
rax_addresses
rax_links
rax_image
rax_os-ext-sts_vm_state
rax_flavor
rax_id
rax_rax-bandwidth_bandwidth
rax_user_id
rax_os-dcf_diskconfig
rax_accessipv4
rax_accessipv6
rax_progress
rax_os-ext-sts_power_state
rax_metadata
rax_status
rax_updated
rax_hostid
rax_name
rax_created
rax_tenant_id
rax__loaded
where some items can have a nested structure.
- credentials are set in a credentials file
version_added: None
options:
creds_file:
description:
- File to find the Rackspace Public Cloud credentials in
required: true
default: null
authors:
- Jesse Keating <jesse.keating@rackspace.com>
- Paul Durivage <paul.durivage@rackspace.com>
notes:
- One environment variable needs to be set: RAX_CREDS_FILE.
- RAX_CREDS_FILE points to a credentials file appropriate for pyrax.
- Another optional environment variable may be set: RAX_REGION.
- RAX_REGION defines one or more Rackspace Public Cloud regions (DFW, ORD, LON, ...)
as a comma-separated list of region names. If set to "all" or omitted, all
regions in pyrax.regions will be used.
- See https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#authenticating
requirements: [ "pyrax" ]
examples:
- description: List server instances
code: RAX_CREDS_FILE=~/.raxpub rax.py --list
- description: List server instances only in DFW and ORD regions
code: RAX_CREDS_FILE=~/.raxpub RAX_REGION=DFW,ORD rax.py --list
'''
import sys
import re
import os
import argparse
import collections
try:
import json
except:
import simplejson as json
try:
import pyrax
except ImportError:
print('pyrax required for this module')
sys.exit(1)
def host(regions, hostname):
    """Print a JSON hash of variables for a single server.

    Searches every region for a server whose name matches *hostname* and
    collects its attributes as 'rax_'-prefixed variables, plus
    'ansible_ssh_host' set to the server's public IPv4 address.
    """
    hostvars = {}
    for region in regions:
        # Connect to the region and scan its server list.
        cs = pyrax.connect_to_cloudservers(region=region)
        for server in cs.servers.list():
            if server.name != hostname:
                continue
            for attr in vars(server):
                # Skip pyrax-internal attributes.
                if attr in ('manager', '_info'):
                    continue
                value = getattr(server, attr)
                # Prefix with 'rax_' and sanitize into a safe variable name.
                safe_key = 'rax_' + (re.sub(r"[^A-Za-z0-9\-]", "_", attr)
                                     .lower()
                                     .lstrip("_"))
                hostvars[safe_key] = value
            # And finally, add an IP address for Ansible to connect to.
            hostvars['ansible_ssh_host'] = server.accessIPv4
    print(json.dumps(hostvars, sort_keys=True, indent=4))
def _list(regions):
    """Print a JSON inventory of all servers across *regions*.

    Servers are grouped by region, by role keywords found in the leading
    hostname component ('web', 'db', 'sql', 'lb', 'app'), and by the
    optional 'group' key in the server's metadata. Per-host variables are
    emitted under the top-level '_meta' key so Ansible does not need a
    separate --host call per server.
    """
    groups = collections.defaultdict(list)
    hostvars = collections.defaultdict(dict)

    # Go through all the regions looking for servers
    for region in regions:
        # Connect to the region
        cs = pyrax.connect_to_cloudservers(region=region)
        for server in cs.servers.list():
            # Create a group on region
            groups[region].append(server.name)

            # Anything we can discern from the hostname?  str.split always
            # returns at least one element, so no IndexError guard is
            # needed here (the original try/except was unreachable).
            subdom = server.name.split('.')[0]
            for name in ('web', 'db', 'sql', 'lb', 'app'):
                if name in subdom:
                    groups[name].append(server.name)

            # Check if group metadata key in servers' metadata
            try:
                group = server.metadata['group']
            except KeyError:
                pass
            else:
                # Create group if not exist and add the server
                groups[group].append(server.name)

            # Add host metadata
            keys = [key for key in vars(server) if key not in ('manager', '_info')]
            for key in keys:
                # Extract value
                value = getattr(server, key)
                # Generate sanitized key
                key = 'rax_' + (re.sub(r"[^A-Za-z0-9\-]", "_", key)
                                .lower()
                                .lstrip('_'))
                hostvars[server.name][key] = value
            # And finally, add an IP address
            hostvars[server.name]['ansible_ssh_host'] = server.accessIPv4

    if hostvars:
        groups['_meta'] = {'hostvars': hostvars}
    print(json.dumps(groups, sort_keys=True, indent=4))
def parse_args():
    """Parse command-line options.

    Exactly one of --list or --host must be given (argparse enforces the
    mutually exclusive group).
    """
    parser = argparse.ArgumentParser(description='Ansible Rackspace Cloud '
                                                 'inventory module')
    mode = parser.add_mutually_exclusive_group(required=True)
    mode.add_argument('--list', action='store_true',
                      help='List active servers')
    mode.add_argument('--host', help='List details about the specific host')
    return parser.parse_args()
def setup():
try:
creds_file = os.environ['RAX_CREDS_FILE']
region = os.getenv('RAX_REGION') or 'all'
except KeyError, e:
sys.stderr.write('Unable to load environment '
'variable %s\n' % e.message)
sys.exit(1)
pyrax.set_setting('identity_type', 'rackspace')
try:
pyrax.set_credential_file(os.path.expanduser(creds_file))
except Exception, e:
sys.stderr.write("%s: %s\n" % (e, e.message))
sys.exit(1)
regions = []
for region in region.split(','):
region = region.strip().upper()
if region == 'ALL':
regions = pyrax.regions
break
elif region not in pyrax.regions:
sys.stderr.write('Unsupported region %s' % region)
sys.exit(1)
elif region not in regions:
regions.append(region)
return regions
def main():
    """Entry point: parse options, resolve regions, and dispatch to the
    --list or --host handler."""
    options = parse_args()
    regions = setup()
    if options.list:
        _list(regions)
    elif options.host:
        host(regions, options.host)
    sys.exit(0)


if __name__ == '__main__':
    main()

View File

@ -384,7 +384,9 @@ TEST_AUTH_LDAP_ORGANIZATION_MAP_2_RESULT = {
# EC2 credentials
TEST_AWS_ACCESS_KEY_ID = ''
TEST_AWS_SECRET_ACCESS_KEY = ''
TEST_AWS_REGIONS = 'all'
# Rackspace credentials
TEST_RACKSPACE_USERNAME = ''
TEST_RACKSPACE_API_KEY = ''
TEST_RACKSPACE_REGIONS = 'all'