Mirror of https://github.com/ansible/awx.git (synced 2026-01-19 05:31:22 -03:30)
AC-505. Initial working task/tests for cloud inventory import.
commit 17c350d7e1 (parent f7256ca343)
@@ -58,6 +58,11 @@ def prepare_env():
    sys.modules['django.utils.six'] = sys.modules['six']
    django.utils.six = sys.modules['django.utils.six']
    from django.utils import six
    # Use the AWX_TEST_DATABASE_NAME environment variable to specify the test
    # database name to use when management command is run as an external
    # program via unit tests.
    if os.environ.get('AWX_TEST_DATABASE_NAME', None):
        settings.DATABASES['default']['NAME'] = os.environ['AWX_TEST_DATABASE_NAME']

def manage():
    # Prepare the AWX environment.
@@ -14,14 +14,14 @@ from django.utils.dateparse import parse_datetime
from django.utils.timezone import now, is_aware, make_aware

# AWX
from awx.main.models import ProjectUpdate, Job
from awx.main.models import Job, ProjectUpdate, InventoryUpdate

class Command(NoArgsCommand):
    '''
    Management command to cleanup old jobs and project updates.
    '''

    help = 'Remove old jobs and project updates from the database.'
    help = 'Remove old jobs, project and inventory updates from the database.'

    option_list = NoArgsCommand.option_list + (
        make_option('--days', dest='days', type='int', default=90, metavar='N',
@@ -31,10 +31,13 @@ class Command(NoArgsCommand):
                    'be removed)'),
        make_option('--jobs', dest='only_jobs', action='store_true',
                    default=False,
                    help='Only remove jobs (leave project updates alone)'),
                    help='Only remove jobs'),
        make_option('--project-updates', dest='only_project_updates',
                    action='store_true', default=False,
                    help='Only remove project updates (leave jobs alone)'),
                    help='Only remove project updates'),
        make_option('--inventory-updates', dest='only_inventory_updates',
                    action='store_true', default=False,
                    help='Only remove inventory updates'),
    )

    def cleanup_jobs(self):
@@ -74,6 +77,24 @@ class Command(NoArgsCommand):
            if not self.dry_run:
                pu.delete()

    def cleanup_inventory_updates(self):
        for iu in InventoryUpdate.objects.all():
            iu_display = '"%s" (started %s)' % (unicode(iu), unicode(iu.created))
            if iu.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
            if iu in (iu.inventory_source.current_update, iu.inventory_source.last_update) and iu.inventory_source.source:
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s', action_text, iu_display)
            elif iu.created >= self.cutoff:
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s', action_text, iu_display)
            else:
                action_text = 'would delete' if self.dry_run else 'deleting'
                self.logger.info('%s %s', action_text, iu_display)
                if not self.dry_run:
                    iu.delete()

    def init_logging(self):
        log_levels = dict(enumerate([logging.ERROR, logging.INFO,
                                     logging.DEBUG, 0]))
@@ -93,7 +114,10 @@ class Command(NoArgsCommand):
        self.cutoff = now() - datetime.timedelta(days=self.days)
        self.only_jobs = bool(options.get('only_jobs', False))
        self.only_project_updates = bool(options.get('only_project_updates', False))
        if self.only_jobs or (not self.only_jobs and not self.only_project_updates):
        self.only_inventory_updates = bool(options.get('only_inventory_updates', False))
        if self.only_jobs or (not self.only_jobs and not self.only_project_updates and not self.only_inventory_updates):
            self.cleanup_jobs()
        if self.only_project_updates or (not self.only_jobs and not self.only_project_updates):
        if self.only_project_updates or (not self.only_jobs and not self.only_project_updates and not self.only_inventory_updates):
            self.cleanup_project_updates()
        if self.only_inventory_updates or (not self.only_jobs and not self.only_project_updates and not self.only_inventory_updates):
            self.cleanup_inventory_updates()
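With the new flag, inventory updates can be cleaned up on their own. A minimal sketch, assuming the command is registered as cleanup_jobs (the registered command name is not shown in this diff):

    from django.core.management import call_command

    # Remove only inventory updates older than 30 days; jobs and project
    # updates are left alone because only_inventory_updates is set.
    call_command('cleanup_jobs', days=30, only_inventory_updates=True)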
@@ -258,12 +258,13 @@ class ExecutableJsonLoader(BaseLoader):
        self.child_group_names = {}

    def command_to_json(self, cmd):
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        if proc.returncode != 0:
            raise ImportException("%s list failed %s with output: %s" % (cmd, stderr, proc.returncode))
        data = {}
        stdout, stderr = '', ''
        try:
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            if proc.returncode != 0:
                raise Exception("%s list failed %s with output: %s" % (cmd, stderr, proc.returncode))
            data = json.loads(stdout)
        except:
            traceback.print_exc()
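The reworked command_to_json catches script failures instead of letting them propagate. A hypothetical call, where the loader variable, script path, and --list flag are illustrative rather than taken from this diff:

    # `loader` is assumed to be an existing ExecutableJsonLoader instance.
    data = loader.command_to_json(['/path/to/ec2.py', '--list'])
    # On success, data is the JSON dict parsed from the script's stdout; on any
    # failure the traceback is printed and the initial empty dict presumably
    # remains the return value.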
@@ -335,9 +336,6 @@ class ExecutableJsonLoader(BaseLoader):


def load_generic(src):
    LOGGER.debug("preparing loaders")

    LOGGER.debug("analyzing type of source")
    if not os.path.exists(src):
        LOGGER.debug("source missing")
@@ -2,6 +2,7 @@
# All Rights Reserved.

# Python
import ConfigParser
import cStringIO
import datetime
import distutils.version
@@ -66,23 +67,21 @@ class BaseTask(Task):
        '''
        return os.path.abspath(os.path.join(os.path.dirname(__file__), *args))

    def build_ssh_key_path(self, instance, **kwargs):
    def build_private_data(self, instance, **kwargs):
        '''
        Create a temporary file containing the SSH private key.
        Return any private data that needs to be written to a temporary file
        for this task.
        '''
        ssh_key_data = ''
        if hasattr(instance, 'credential'):
            credential = instance.credential
            if hasattr(credential, 'ssh_key_data'):
                ssh_key_data = decrypt_field(credential, 'ssh_key_data')
        elif hasattr(instance, 'project'):
            project = instance.project
            if hasattr(project, 'scm_key_data'):
                ssh_key_data = decrypt_field(project, 'scm_key_data')
        if ssh_key_data:

    def build_private_data_file(self, instance, **kwargs):
        '''
        Create a temporary file containing the private data.
        '''
        private_data = self.build_private_data(instance, **kwargs)
        if private_data is not None:
            handle, path = tempfile.mkstemp()
            f = os.fdopen(handle, 'w')
            f.write(ssh_key_data)
            f.write(private_data)
            f.close()
            os.chmod(path, stat.S_IRUSR|stat.S_IWUSR)
            return path
@@ -116,6 +115,9 @@ class BaseTask(Task):
        env['ANSIBLE_NOCOLOR'] = '1' # Prevent output of escape sequences.
        return env

    def build_safe_env(self, instance, **kwargs):
        return self.build_env(instance, **kwargs)

    def build_args(self, instance, **kwargs):
        raise NotImplementedError
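build_safe_env currently just returns the result of build_env; the hook appears to exist so that a subclass can hand back a censored copy for storage, since the run() method below saves safe_env (not env) into job_env. A hypothetical override, with masking logic that is illustrative and not part of this commit:

    class SomeCloudTask(BaseTask):  # hypothetical subclass, for illustration only

        def build_safe_env(self, instance, **kwargs):
            # Copy the real environment, then mask values that should not be
            # stored on the model (the key name below is just an example).
            safe_env = dict(self.build_env(instance, **kwargs))
            if 'AWS_SECRET_ACCESS_KEY' in safe_env:
                safe_env['AWS_SECRET_ACCESS_KEY'] = '********'
            return safe_env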
@@ -237,23 +239,24 @@ class BaseTask(Task):
            if not self.pre_run_check(instance, **kwargs):
                return
            instance = self.update_model(pk, status='running')
            kwargs['ssh_key_path'] = self.build_ssh_key_path(instance, **kwargs)
            kwargs['private_data_file'] = self.build_private_data_file(instance, **kwargs)
            kwargs['passwords'] = self.build_passwords(instance, **kwargs)
            args = self.build_args(instance, **kwargs)
            safe_args = self.build_safe_args(instance, **kwargs)
            output_replacements = self.build_output_replacements(instance, **kwargs)
            cwd = self.build_cwd(instance, **kwargs)
            env = self.build_env(instance, **kwargs)
            safe_env = self.build_safe_env(instance, **kwargs)
            instance = self.update_model(pk, job_args=json.dumps(safe_args),
                                         job_cwd=cwd, job_env=env)
                                         job_cwd=cwd, job_env=safe_env)
            status, stdout = self.run_pexpect(instance, args, cwd, env,
                                              kwargs['passwords'])
        except Exception:
            tb = traceback.format_exc()
        finally:
            if kwargs.get('ssh_key_path', ''):
            if kwargs.get('private_data_file', ''):
                try:
                    os.remove(kwargs['ssh_key_path'])
                    os.remove(kwargs['private_data_file'])
                except IOError:
                    pass
        instance = self.update_model(pk, status=status, result_stdout=stdout,
@@ -269,6 +272,16 @@ class RunJob(BaseTask):
    name = 'run_job'
    model = Job

    def build_private_data(self, job, **kwargs):
        '''
        Return SSH private key data needed for this job.
        '''
        credential = getattr(job, 'credential', None)
        if credential:
            return decrypt_field(credential, 'ssh_key_data')
        else:
            return ''

    def build_passwords(self, job, **kwargs):
        '''
        Build a dictionary of passwords for SSH private key, SSH user and sudo.
@@ -348,7 +361,7 @@ class RunJob(BaseTask):
        if job.job_tags:
            args.extend(['-t', job.job_tags])
        args.append(job.playbook) # relative path to project.local_path
        ssh_key_path = kwargs.get('ssh_key_path', '')
        ssh_key_path = kwargs.get('private_data_file', '')
        if ssh_key_path:
            cmd = ' '.join([subprocess.list2cmdline(['ssh-add', ssh_key_path]),
                            '&&', subprocess.list2cmdline(args)])
@@ -435,6 +448,13 @@ class RunProjectUpdate(BaseTask):
    name = 'run_project_update'
    model = ProjectUpdate

    def build_private_data(self, project_update, **kwargs):
        '''
        Return SSH private key data needed for this project update.
        '''
        project = project_update.project
        return decrypt_field(project, 'scm_key_data')

    def build_passwords(self, project_update, **kwargs):
        '''
        Build a dictionary of passwords for SSH private key unlock and SCM
@@ -528,7 +548,7 @@ class RunProjectUpdate(BaseTask):
        args.extend(['-e', json.dumps(extra_vars)])
        args.append('project_update.yml')

        ssh_key_path = kwargs.get('ssh_key_path', '')
        ssh_key_path = kwargs.get('private_data_file', '')
        if ssh_key_path:
            subcmds = [('ssh-add', ssh_key_path), args]
            cmd = ' && '.join([subprocess.list2cmdline(x) for x in subcmds])
@@ -629,12 +649,77 @@ class RunInventoryUpdate(BaseTask):
    name = 'run_inventory_update'
    model = InventoryUpdate

    def build_private_data(self, inventory_update, **kwargs):
        '''
        Return private data needed for inventory update.
        '''
        inventory_source = inventory_update.inventory_source
        cp = ConfigParser.ConfigParser()
        # Build custom ec2.ini for ec2 inventory script to use.
        if inventory_source.source == 'ec2':
            section = 'ec2'
            cp.add_section(section)
            cp.set(section, 'regions', inventory_source.source_regions or 'all')
            cp.set(section, 'regions_exclude', '')
            # FIXME: Provide a way to override these defaults.. source_env?
            cp.set(section, 'destination_variable', 'public_dns_name')
            cp.set(section, 'vpc_destination_variable', 'ip_address')
            cp.set(section, 'route53', 'False')
            # FIXME: Separate temp path for each source so they don't clobber
            # each other.
            cp.set(section, 'cache_path', '/tmp')
            cp.set(section, 'cache_max_age', '300')
        # Build pyrax creds INI for rax inventory script.
        elif inventory_source.source == 'rackspace':
            section = 'rackspace_cloud'
            cp.add_section(section)
            cp.set(section, 'username', inventory_source.source_username)
            cp.set(section, 'api_key', decrypt_field(inventory_source,
                                                     'source_password'))
        # Return INI content.
        if cp.sections():
            f = cStringIO.StringIO()
            cp.write(f)
            return f.getvalue()
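For reference, a rough sketch of the file contents this method produces, derived from the cp.set() calls above (ordering and whitespace may differ; the Rackspace values stand in for the real username and decrypted password). For an EC2 source, the temporary ec2.ini looks roughly like:

    [ec2]
    regions = all
    regions_exclude =
    destination_variable = public_dns_name
    vpc_destination_variable = ip_address
    route53 = False
    cache_path = /tmp
    cache_max_age = 300

and for a Rackspace source, a pyrax-style credentials file:

    [rackspace_cloud]
    username = <source_username>
    api_key = <decrypted source_password>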
    def build_passwords(self, inventory_update, **kwargs):
        '''
        Build a dictionary of passwords for inventory sources.
        '''
        passwords = super(RunInventoryUpdate, self).build_passwords(inventory_update,
                                                                    **kwargs)
        inventory_source = inventory_update.inventory_source
        passwords['source_username'] = inventory_source.source_username
        passwords['source_password'] = kwargs.get('source_password', \
            decrypt_field(inventory_source, 'source_password'))
        return passwords

    def build_env(self, inventory_update, **kwargs):
        '''
        Build environment dictionary for inventory import.
        '''
        env = super(RunInventoryUpdate, self).build_env(inventory_update, **kwargs)
        # FIXME
        # Update PYTHONPATH to use local site-packages for inventory scripts.
        python_paths = env.get('PYTHONPATH', '').split(os.pathsep)
        local_site_packages = self.get_path_to('..', 'lib', 'site-packages')
        if local_site_packages not in python_paths:
            python_paths.insert(0, local_site_packages)
        env['PYTHONPATH'] = os.pathsep.join(python_paths)
        # Pass inventory source ID to inventory script.
        inventory_source = inventory_update.inventory_source
        env['INVENTORY_SOURCE_ID'] = str(inventory_source.pk)
        # Set environment variables specific to each source.
        if inventory_source.source == 'ec2':
            env['AWS_ACCESS_KEY_ID'] = kwargs.get('passwords', {}).get('source_username', '')
            env['AWS_SECRET_ACCESS_KEY'] = kwargs.get('passwords', {}).get('source_password', '')
            env['EC2_INI_PATH'] = kwargs.get('private_data_file', '')
        elif inventory_source.source == 'rackspace':
            env['RAX_CREDS_FILE'] = kwargs.get('private_data_file', '')
            env['RAX_REGION'] = inventory_source.source_regions
        elif inventory_source.source == 'file':
            # FIXME: Parse source_env to dict, update env.
            pass
        #print env
        return env
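Putting the pieces together, the environment handed to the inventory script ends up looking roughly like this for an EC2 source (illustrative values only, on top of whatever BaseTask.build_env already provides):

    # Illustrative environment for an EC2 inventory update; values are examples.
    env = {
        'ANSIBLE_NOCOLOR': '1',                       # from BaseTask.build_env
        'PYTHONPATH': '/path/to/awx/lib/site-packages:...',
        'INVENTORY_SOURCE_ID': '42',                  # pk of the inventory source
        'AWS_ACCESS_KEY_ID': '<source_username>',
        'AWS_SECRET_ACCESS_KEY': '<source_password>',
        'EC2_INI_PATH': '/tmp/tmpXXXXXX',             # file from build_private_data_file
    }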
    def build_args(self, inventory_update, **kwargs):
@@ -642,7 +727,29 @@ class RunInventoryUpdate(BaseTask):
        Build command line argument list for running inventory import.
        '''
        # FIXME
        return ['echo', 'FIXME']
        inventory_source = inventory_update.inventory_source
        inventory = inventory_source.group.inventory
        args = ['awx-manage', 'inventory_import']
        args.extend(['--inventory-id', str(inventory.pk)])
        if inventory_source.overwrite_hosts:
            args.append('--overwrite')
        if inventory_source.overwrite_vars:
            args.append('--overwrite-vars')
        if inventory_source.keep_vars:
            args.append('--keep-vars')
        args.append('--source')
        if inventory_source.source == 'ec2':
            ec2_path = self.get_path_to('..', 'plugins', 'inventory', 'ec2.py')
            args.append(ec2_path)
        elif inventory_source.source == 'rackspace':
            rax_path = self.get_path_to('..', 'plugins', 'inventory', 'rax.py')
            args.append(rax_path)
        elif inventory_source.source == 'file':
            args.append(inventory_source.source_path)
        args.append('-v2')
        if settings.DEBUG:
            args.append('--traceback')
        return args
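For an EC2 source with overwrite_hosts enabled, the resulting argument list comes out roughly as follows (the inventory pk and installation path are illustrative):

    # Illustrative build_args result for an EC2 inventory source.
    args = ['awx-manage', 'inventory_import',
            '--inventory-id', '1',
            '--overwrite',
            '--source', '/path/to/awx/plugins/inventory/ec2.py',
            '-v2']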
    def build_cwd(self, inventory_update, **kwargs):
        return self.get_path_to('..', 'plugins', 'inventory')

@@ -3,7 +3,7 @@

from awx.main.tests.organizations import OrganizationsTest
from awx.main.tests.users import *
from awx.main.tests.inventory import InventoryTest
from awx.main.tests.inventory import *
from awx.main.tests.projects import ProjectsTest, ProjectUpdatesTest
from awx.main.tests.commands import *
from awx.main.tests.scripts import *
@@ -1,13 +1,22 @@
# Copyright (c) 2013 AnsibleWorks, Inc.
# All Rights Reserved.

# Python
import datetime
import json
import os

# Django
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test.utils import override_settings

# AWX
from awx.main.models import *
from awx.main.tests.base import BaseTest
from awx.main.tests.base import BaseTest, BaseTransactionTest

__all__ = ['InventoryTest', 'InventoryUpdatesTest']

class InventoryTest(BaseTest):
@@ -931,3 +940,85 @@ class InventoryTest(BaseTest):
                         set([h_e.pk]))
        self.assertEqual(set(h_e.all_groups.values_list('pk', flat=True)),
                         set([g_e.pk]))

@override_settings(CELERY_ALWAYS_EAGER=True,
                   CELERY_EAGER_PROPAGATES_EXCEPTIONS=True)
class InventoryUpdatesTest(BaseTransactionTest):

    def setUp(self):
        super(InventoryUpdatesTest, self).setUp()
        self.setup_users()
        self.organization = self.make_organizations(self.super_django_user, 1)[0]
        self.organization.admins.add(self.normal_django_user)
        self.organization.users.add(self.other_django_user)
        self.organization.users.add(self.normal_django_user)
        self.inventory = self.organization.inventories.create(name='Cloud Inventory')
        self.group = self.inventory.groups.create(name='Cloud Group')
        # Pass test database name in environment for use by the inventory_import
        # management command.
        os.environ['AWX_TEST_DATABASE_NAME'] = settings.DATABASES['default']['NAME']

    def update_inventory_source(self, group, **kwargs):
        inventory_source = group.inventory_source
        update_fields = []
        for field, value in kwargs.items():
            if getattr(inventory_source, field) != value:
                setattr(inventory_source, field, value)
                update_fields.append(field)
        if update_fields:
            inventory_source.save(update_fields=update_fields)
        return inventory_source

    def check_inventory_update(self, inventory_source, should_fail=False,
                               **kwargs):
        inventory_update = kwargs.pop('inventory_update', None)
        should_error = kwargs.pop('should_error', False)
        if not inventory_update:
            inventory_update = inventory_source.update(**kwargs)
            self.assertTrue(inventory_update)
        inventory_update = InventoryUpdate.objects.get(pk=inventory_update.pk)
        #print inventory_update.result_stdout
        if should_error:
            self.assertEqual(inventory_update.status, 'error',
                             inventory_update.result_stdout + \
                             inventory_update.result_traceback)
        elif should_fail:
            self.assertEqual(inventory_update.status, 'failed',
                             inventory_update.result_stdout + \
                             inventory_update.result_traceback)
        elif should_fail is False:
            self.assertEqual(inventory_update.status, 'successful',
                             inventory_update.result_stdout + \
                             inventory_update.result_traceback)
        else:
            pass # If should_fail is None, we don't care.

    def check_inventory_source(self, inventory_source):
        inventory_source = InventorySource.objects.get(pk=inventory_source.pk)
        inventory = inventory_source.group.inventory
        self.assertTrue(inventory_source.can_update)
        self.assertEqual(inventory.groups.count(), 1)
        self.assertEqual(inventory.hosts.count(), 0)
        self.check_inventory_update(inventory_source)
        self.assertNotEqual(inventory.groups.count(), 1)
        self.assertNotEqual(inventory.hosts.count(), 0)

    def test_update_from_ec2(self):
        source_username = getattr(settings, 'TEST_AWS_ACCESS_KEY_ID', '')
        source_password = getattr(settings, 'TEST_AWS_SECRET_ACCESS_KEY', '')
        if not all([source_username, source_password]):
            self.skipTest('no test ec2 credentials defined!')
        inventory_source = self.update_inventory_source(self.group,
            source='ec2', source_username=source_username,
            source_password=source_password)
        self.check_inventory_source(inventory_source)

    def test_update_from_rackspace(self):
        source_username = getattr(settings, 'TEST_RACKSPACE_USERNAME', '')
        source_password = getattr(settings, 'TEST_RACKSPACE_API_KEY', '')
        if not all([source_username, source_password]):
            self.skipTest('no test rackspace credentials defined!')
        inventory_source = self.update_inventory_source(self.group,
            source='rackspace', source_username=source_username,
            source_password=source_password)
        self.check_inventory_source(inventory_source)
@@ -12,6 +12,11 @@ variables needed for Boto have already been set:
    export AWS_ACCESS_KEY_ID='AK123'
    export AWS_SECRET_ACCESS_KEY='abc123'

This script also assumes there is an ec2.ini file alongside it. To specify a
different path to ec2.ini, define the EC2_INI_PATH environment variable:

    export EC2_INI_PATH=/path/to/my_ec2.ini

If you're using eucalyptus you need to set the above variables and
you need to define:

@@ -177,7 +182,9 @@ class Ec2Inventory(object):
        ''' Reads the settings from the ec2.ini file '''

        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/ec2.ini')
        ec2_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ec2.ini')
        ec2_ini_path = os.environ.get('EC2_INI_PATH', ec2_default_ini_path)
        config.read(ec2_ini_path)

        # is eucalyptus?
        self.eucalyptus_host = None
@@ -64,8 +64,8 @@ options:
    default: DFW
author: Jesse Keating
notes:
  - Two environment variables need to be set, RAX_CREDS and RAX_REGION.
  - RAX_CREDS points to a credentials file appropriate for pyrax
  - Two environment variables need to be set, RAX_CREDS_FILE and RAX_REGION.
  - RAX_CREDS_FILE points to a credentials file appropriate for pyrax
  - RAX_REGION defines a Rackspace Public Cloud region (DFW, ORD, LON, ...)
requirements: [ "pyrax" ]
examples:
@@ -373,3 +373,18 @@ TEST_AUTH_LDAP_ORGANIZATION_MAP_2_RESULT = {
    'Test Org': {'admins': False, 'users': True},
    'Test Org 2': {'admins': True, 'users': False},
}

###############################################################################
# INVENTORY IMPORT TEST SETTINGS
###############################################################################

# Define these variables to enable more complete testing of inventory import
# from cloud providers.

# EC2 credentials
TEST_AWS_ACCESS_KEY_ID = ''
TEST_AWS_SECRET_ACCESS_KEY = ''

# Rackspace credentials
TEST_RACKSPACE_USERNAME = ''
TEST_RACKSPACE_API_KEY = ''
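The new test cases skip themselves when these values are left empty (see the skipTest calls in InventoryUpdatesTest above). To exercise them for real, fill the settings in with working credentials, for example in a local settings override; the values below are placeholders only, never commit real keys:

    # Placeholder credentials for local inventory import testing only.
    TEST_AWS_ACCESS_KEY_ID = 'AKIA...'
    TEST_AWS_SECRET_ACCESS_KEY = '...'
    TEST_RACKSPACE_USERNAME = 'my-rax-user'
    TEST_RACKSPACE_API_KEY = '...'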