Build in inventory plugin code structure with gce working

Supporting and related changes:
 - Fix inconsistency between can_update / can_start
 - Avoid creating inventory file twice unnecessarily
 - Non-functional consolidation in Azure injection logic
 - Inject GCE creds as indented JSON for readability
 - Create new injector class structure, add gce
 - Reduce management command overrides of runtime environment
AlanCoding 2018-12-07 11:08:25 -05:00
parent 90ea9a8cc4
commit 6c130fa6c3
6 changed files with 213 additions and 54 deletions


@@ -124,7 +124,13 @@ class AnsibleInventoryLoader(object):
def get_base_args(self):
# get ansible-inventory absolute path for running in bubblewrap/proot, in Popen
bargs = [self.get_path_to_ansible_inventory(), '-i', self.source]
# NOTE: why do we add "python" to the start of these args?
# the ansible-inventory script specifies a python interpreter that does not
# match our environment, since all of the dependencies live in /venv/ansible,
# so we override the specified interpreter
# https://github.com/ansible/ansible/issues/50714
bargs = ['python', self.get_path_to_ansible_inventory(), '-i', self.source]
logger.debug('Using base command: {}'.format(' '.join(bargs)))
return bargs
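For illustration, a minimal sketch (hypothetical paths, not the real runtime values) of the effect of the interpreter override on the base command:

# Hypothetical before/after of the base args; actual paths depend on the
# virtualenv in use and the generated inventory source file.
before = ['/var/lib/awx/venv/ansible/bin/ansible-inventory', '-i', '/tmp/awx_42_xyz/gcp_compute.yml']
after = ['python'] + before
# Whichever python the prepared environment provides runs the script, rather
# than the interpreter named in the script's shebang line.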


@@ -24,12 +24,15 @@ def gce(cred, env, private_data_dir):
'type': 'service_account',
'private_key': cred.get_input('ssh_key_data', default=''),
'client_email': username,
'project_id': project
'project_id': project,
# need token uri for inventory plugins
# TODO: revisit whether this should really be hard-coded
'token_uri': 'https://accounts.google.com/o/oauth2/token',
}
handle, path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')
json.dump(json_cred, f)
f.close()
path = os.path.join(private_data_dir, 'creds.json')
with open(path, 'w') as f:
json.dump(json_cred, f, indent=2)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
env['GCE_CREDENTIALS_FILE_PATH'] = path
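For reference, a small runnable sketch (placeholder values only, not a real key) of the shape of the indented creds.json written above:

import json

# Placeholder service-account values -- illustrates the file layout only.
creds = {
    'type': 'service_account',
    'private_key': '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n',
    'client_email': 'inventory-reader@example-project.iam.gserviceaccount.com',
    'project_id': 'example-project',
    'token_uri': 'https://accounts.google.com/o/oauth2/token',
}
print(json.dumps(creds, indent=2))  # indent=2 puts each key on its own line for readability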
@@ -38,13 +41,13 @@ def azure_rm(cred, env, private_data_dir):
client = cred.get_input('client', default='')
tenant = cred.get_input('tenant', default='')
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
if len(client) and len(tenant):
env['AZURE_CLIENT_ID'] = client
env['AZURE_TENANT'] = tenant
env['AZURE_SECRET'] = cred.get_input('secret', default='')
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
else:
env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')
env['AZURE_AD_USER'] = cred.get_input('username', default='')
env['AZURE_PASSWORD'] = cred.get_input('password', default='')


@@ -10,6 +10,11 @@ import re
import copy
import os.path
from urllib.parse import urljoin
import yaml
import configparser
import stat
import tempfile
from distutils.version import LooseVersion as Version
# Django
from django.conf import settings
@@ -1015,6 +1020,8 @@ class InventorySourceOptions(BaseModel):
Common fields for InventorySource and InventoryUpdate.
'''
injectors = dict()
SOURCE_CHOICES = [
('', _('Manual')),
('file', _('File, Directory or Script')),
@@ -1308,6 +1315,8 @@ class InventorySourceOptions(BaseModel):
return None
def get_inventory_plugin_name(self):
if self.source in InventorySourceOptions.injectors:
return InventorySourceOptions.injectors[self.source].plugin_name
if self.source in CLOUD_PROVIDERS or self.source == 'custom':
# TODO: today, all vendored sources are scripts
# in future release inventory plugins will replace these
@@ -1532,8 +1541,15 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix
return bool(self.source_script)
elif self.source == 'scm':
return bool(self.source_project)
else:
return bool(self.source in CLOUD_INVENTORY_SOURCES)
elif self.source == 'file':
return False
elif self.source == 'ec2':
# Permit credential-less ec2 updates to allow IAM roles
return True
elif self.source == 'gce':
# These updates will hang if the correct credential is not supplied
return bool(self.get_cloud_credential().kind == 'gce')
return True
def create_inventory_update(self, **kwargs):
return self.create_unified_job(**kwargs)
@@ -1695,6 +1711,14 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
def get_ui_url(self):
return urljoin(settings.TOWER_URL_BASE, "/#/jobs/inventory/{}".format(self.pk))
@property
def ansible_virtualenv_path(self):
if self.inventory and self.inventory.organization:
virtualenv = self.inventory.organization.custom_virtualenv
if virtualenv:
return virtualenv
return settings.ANSIBLE_VENV_PATH
def get_actual_source_path(self):
'''Alias to source_path that combines with project path for SCM file-based sources'''
if self.inventory_source_id is None or self.inventory_source.source_project_id is None:
@@ -1717,13 +1741,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,
def can_start(self):
if not super(InventoryUpdate, self).can_start:
return False
if (self.source not in ('custom', 'ec2', 'scm') and
not (self.get_cloud_credential())):
return False
elif self.source == 'scm' and not self.inventory_source.source_project:
return False
elif self.source == 'file':
elif not self.inventory_source or not self.inventory_source._can_update():
return False
return True
@@ -1801,3 +1819,88 @@ class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
def get_absolute_url(self, request=None):
return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request)
# TODO: move these to their own file somewhere?
class PluginFileInjector(object):
plugin_name = None
initial_version = None
def __init__(self, ansible_version):
# ansible_version is the version string the update will run under; the injector
# methods below take an InventorySourceOptions instance, which may be either an
# InventorySource or an InventoryUpdate
self.ansible_version = ansible_version
@property
def filename(self):
return '{0}.yml'.format(self.plugin_name)
def inventory_contents(self, inventory_source):
return yaml.safe_dump(self.inventory_as_dict(inventory_source), default_flow_style=False)
def should_use_plugin(self):
return bool(
self.initial_version and
Version(self.ansible_version) >= Version(self.initial_version)
)
def build_env(self, *args, **kwargs):
if self.should_use_plugin():
return self.build_plugin_env(*args, **kwargs)
else:
return self.build_script_env(*args, **kwargs)
def build_plugin_env(self, inventory_update, env, private_data_dir):
return env
def build_script_env(self, inventory_update, env, private_data_dir):
return env
def build_private_data(self, *args, **kwargs):
if self.should_use_plugin():
return self.build_plugin_private_data(*args, **kwargs)
else:
return self.build_script_private_data(*args, **kwargs)
def build_script_private_data(self, *args, **kwargs):
pass
def build_plugin_private_data(self, *args, **kwargs):
pass
class gce(PluginFileInjector):
plugin_name = 'gcp_compute'
initial_version = '2.6'
def build_script_env(self, inventory_update, env, private_data_dir):
env['GCE_ZONE'] = inventory_update.source_regions if inventory_update.source_regions != 'all' else '' # noqa
# by default, the GCE inventory source caches results on disk for
# 5 minutes; disable this behavior
cp = configparser.ConfigParser()
cp.add_section('cache')
cp.set('cache', 'cache_max_age', '0')
handle, path = tempfile.mkstemp(dir=private_data_dir)
cp.write(os.fdopen(handle, 'w'))
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
env['GCE_INI_PATH'] = path
return env
def inventory_as_dict(self, inventory_source):
# NOTE: it would be nice to generalize this to use templating like credential types,
# but since list parameters need to be injected into the YAML content,
# it is hard to see a clean way to do that
ret = dict(
plugin='gcp_compute',
projects=[inventory_source.get_cloud_credential().project],
filters=None, # necessary cruft, see: https://github.com/ansible/ansible/pull/50025
service_account_file="creds.json",
auth_kind="serviceaccount"
)
if inventory_source.source_regions:
ret['zones'] = inventory_source.source_regions.split(',')
return ret
for cls in PluginFileInjector.__subclasses__():
InventorySourceOptions.injectors[cls.__name__] = cls
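A brief usage sketch (version string and values are illustrative) of how the registry and the version gate above fit together:

# Hypothetical usage: look up the injector registered for a 'gce' source and let
# the detected Ansible version decide between the inventory plugin and the script.
injector_cls = InventorySourceOptions.injectors['gce']   # registered by the loop above
injector = injector_cls('2.7.5')                          # illustrative version string
assert injector.should_use_plugin()                       # 2.7.5 >= initial_version '2.6'
assert injector.filename == 'gcp_compute.yml'
# inventory_contents() safe-dumps inventory_as_dict() to YAML, giving roughly:
#   auth_kind: serviceaccount
#   filters: null
#   plugin: gcp_compute
#   projects:
#   - example-project
#   service_account_file: creds.json
#   zones:                 # only when source_regions is set
#   - us-east1-b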


@@ -52,7 +52,7 @@ from awx.main.access import access_registry
from awx.main.models import (
Schedule, TowerScheduleState, Instance, InstanceGroup,
UnifiedJob, Notification,
Inventory, SmartInventoryMembership,
Inventory, InventorySource, SmartInventoryMembership,
Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob,
JobEvent, ProjectUpdateEvent, InventoryUpdateEvent, AdHocCommandEvent, SystemJobEvent,
build_safe_env
@@ -67,6 +67,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
get_licenser,
ignore_inventory_computed_fields,
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager)
from awx.main.utils.common import _get_ansible_version
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services
from awx.main.utils.pglock import advisory_lock
@@ -713,12 +714,25 @@ class BaseTask(object):
logger.error('Failed to update %s after %d retries.',
self.model._meta.object_name, _attempt)
def get_ansible_version(self, instance):
if not hasattr(self, '_ansible_version'):
self._ansible_version = _get_ansible_version(
ansible_path=self.get_path_to_ansible(instance, executable='ansible'))
return self._ansible_version
def get_path_to(self, *args):
'''
Return absolute path relative to this file.
'''
return os.path.abspath(os.path.join(os.path.dirname(__file__), *args))
def get_path_to_ansible(self, instance, executable='ansible-playbook', **kwargs):
venv_path = getattr(instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH)
venv_exe = os.path.join(venv_path, 'bin', executable)
if os.path.exists(venv_exe):
return venv_exe
return shutil.which(executable)
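A short sketch (hypothetical paths) of the resolution order these helpers implement:

import os
import shutil

# Hypothetical walk-through of get_path_to_ansible() for an update whose organization
# sets a custom virtualenv; otherwise settings.ANSIBLE_VENV_PATH is used, and the
# final fallback is whatever executable is on PATH.
venv_path = '/var/lib/awx/venv/my-custom-env'          # instance.ansible_virtualenv_path
venv_exe = os.path.join(venv_path, 'bin', 'ansible')
executable = venv_exe if os.path.exists(venv_exe) else shutil.which('ansible')
# get_ansible_version() runs that executable once and caches the parsed version on
# the task object, so the injectors can gate plugin usage on it.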
def build_private_data(self, instance, private_data_dir):
'''
Return SSH private key data (only if stored in DB as ssh_key_data).
@@ -2134,9 +2148,13 @@ class RunInventoryUpdate(BaseTask):
def build_env(self, inventory_update, private_data_dir, isolated, private_data_files=None):
"""Build environment dictionary for inventory import.
This is the mechanism by which any data that needs to be passed
This used to be the mechanism by which any data that needs to be passed
to the inventory update script is set up. In particular, this is how
inventory update is aware of its proper credentials.
Most environment injection is now accomplished by the credential
injectors. The primary purpose this still serves is to point the
inventory update at its INI or config file.
"""
env = super(RunInventoryUpdate, self).build_env(inventory_update,
private_data_dir,
@@ -2145,6 +2163,7 @@ class RunInventoryUpdate(BaseTask):
if private_data_files is None:
private_data_files = {}
self.add_awx_venv(env)
self.add_ansible_venv(inventory_update.ansible_virtualenv_path, env)
# Pass inventory source ID to inventory script.
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)
@@ -2176,25 +2195,19 @@
inventory_update.get_cloud_credential(), ''
)
if inventory_update.source == 'gce':
env['GCE_ZONE'] = inventory_update.source_regions if inventory_update.source_regions != 'all' else '' # noqa
if inventory_update.source in InventorySource.injectors:
# TODO: mapping from credential.kind to inventory_source.source
injector = InventorySource.injectors[inventory_update.source](self.get_ansible_version(inventory_update))
env = injector.build_env(inventory_update, env, private_data_dir)
# by default, the GCE inventory source caches results on disk for
# 5 minutes; disable this behavior
cp = configparser.ConfigParser()
cp.add_section('cache')
cp.set('cache', 'cache_max_age', '0')
handle, path = tempfile.mkstemp(dir=private_data_dir)
cp.write(os.fdopen(handle, 'w'))
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
env['GCE_INI_PATH'] = path
elif inventory_update.source in ['scm', 'custom']:
if inventory_update.source == 'tower':
env['TOWER_INVENTORY'] = inventory_update.instance_filters
env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
if inventory_update.source in ['scm', 'custom']:
for env_k in inventory_update.source_vars_dict:
if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
elif inventory_update.source == 'tower':
env['TOWER_INVENTORY'] = inventory_update.instance_filters
env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
elif inventory_update.source == 'file':
raise NotImplementedError('Cannot update file sources through the task system.')
return env
@@ -2259,33 +2272,61 @@
getattr(settings, '%s_INSTANCE_ID_VAR' % src.upper()),])
# Add arguments for the source inventory script
args.append('--source')
if src in CLOUD_PROVIDERS:
# Get the path to the inventory plugin, and append it to our
# arguments.
plugin_path = self.get_path_to('..', 'plugins', 'inventory',
'%s.py' % src)
args.append(plugin_path)
elif src == 'scm':
args.append(inventory_update.get_actual_source_path())
elif src == 'custom':
handle, path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')
if inventory_update.source_script is None:
raise RuntimeError('Inventory Script does not exist')
f.write(inventory_update.source_script.script)
f.close()
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
args.append(path)
args.append(self.build_inventory(inventory_update, private_data_dir))
if src == 'custom':
args.append("--custom")
args.append('-v%d' % inventory_update.verbosity)
if settings.DEBUG:
args.append('--traceback')
return args
def build_inventory(self, inventory_update, private_data_dir):
src = inventory_update.source
if src in CLOUD_PROVIDERS:
if src in InventorySource.injectors:
cloud_cred = inventory_update.get_cloud_credential()
injector = InventorySource.injectors[cloud_cred.kind](self.get_ansible_version(inventory_update))
content = injector.inventory_contents(inventory_update)
content = content.encode('utf-8')
# must be a statically named file
inventory_path = os.path.join(private_data_dir, injector.filename)
with open(inventory_path, 'wb') as f:  # content was encoded to bytes above
f.write(content)
os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
else:
# Get the path to the inventory plugin, and append it to our
# arguments.
inventory_path = self.get_path_to('..', 'plugins', 'inventory', '%s.py' % src)
elif src == 'scm':
inventory_path = inventory_update.get_actual_source_path()
elif src == 'custom':
handle, inventory_path = tempfile.mkstemp(dir=private_data_dir)
f = os.fdopen(handle, 'w')
if inventory_update.source_script is None:
raise RuntimeError('Inventory Script does not exist')
f.write(inventory_update.source_script.script)
f.close()
os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
return inventory_path
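To summarize the dispatch above, an illustrative mapping (hypothetical paths) of what build_inventory() returns for each source type:

# Hypothetical examples of the inventory path appended after --source per source type:
example_inventory_paths = {
    'gce, plugin-capable Ansible': '/tmp/awx_42_xyz/gcp_compute.yml',       # rendered plugin config
    'other cloud sources':         'awx/plugins/inventory/ec2.py',          # vendored script
    'scm':                         '<project checkout>/inventories/hosts',  # file inside the project
    'custom':                      '/tmp/awx_42_xyz/tmpab12cd',             # saved script in a temp file
}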
def build_cwd(self, inventory_update, private_data_dir):
if inventory_update.source == 'scm' and inventory_update.source_project_update:
'''
There are two cases where the inventory "source" is in a different
location from the private data:
- deprecated vendored inventory scripts in awx/plugins/inventory
- SCM, where source needs to live in the project folder
in these cases, the inventory does not exist in the standard tempdir
'''
src = inventory_update.source
if src == 'scm' and inventory_update.source_project_update:
return inventory_update.source_project_update.get_project_path(check_if_exists=False)
return self.get_path_to('..', 'plugins', 'inventory')
if src in CLOUD_PROVIDERS:
injector = None
if src in InventorySource.injectors:
injector = InventorySource.injectors[src](self.get_ansible_version(inventory_update))
if (not injector) or (not injector.should_use_plugin()):
return self.get_path_to('..', 'plugins', 'inventory')
return private_data_dir
def build_playbook_path_relative_to_cwd(self, inventory_update, private_data_dir):
return None


@@ -30,6 +30,7 @@ azure-graphrbac==0.40.0
# AWS
boto==2.47.0 # last which does not break ec2 scripts
boto3==1.6.2
google-auth==1.6.2 # needed for gce inventory imports
# netconf for network modules
ncclient==0.6.3
# netaddr filter


@@ -38,6 +38,7 @@ bcrypt==3.1.4 # via paramiko
boto3==1.6.2
boto==2.47.0
botocore==1.9.3 # via boto3, s3transfer
cachetools==3.0.0 # via google-auth
certifi==2018.1.18 # via msrest, requests
cffi==1.11.5 # via bcrypt, cryptography, pynacl
chardet==3.0.4 # via requests
@@ -50,6 +51,8 @@ docutils==0.14 # via botocore
dogpile.cache==0.6.5 # via openstacksdk
entrypoints==0.2.3 # via keyring
enum34==1.1.6; python_version < '3' # via cryptography, knack, msrest, ovirt-engine-sdk-python
futures==3.2.0 # via openstacksdk, s3transfer
google-auth==1.6.2
humanfriendly==4.8 # via azure-cli-core
idna==2.6 # via cryptography, requests
ipaddress==1.0.19 # via cryptography, openstacksdk
@@ -81,6 +84,7 @@ pbr==3.1.1 # via keystoneauth1, openstacksdk, os-service-types, s
pexpect==4.6.0
psutil==5.4.3
ptyprocess==0.5.2 # via pexpect
pyasn1-modules==0.2.3 # via google-auth
pyasn1==0.4.2 # via paramiko
pycparser==2.18 # via cffi
pycurl==7.43.0.1 # via ovirt-engine-sdk-python
@@ -100,11 +104,12 @@ requests-ntlm==1.1.0 # via pywinrm
requests-oauthlib==0.8.0 # via msrest
requests==2.20.0
requestsexceptions==1.4.0 # via openstacksdk, os-client-config
rsa==4.0 # via google-auth
s3transfer==0.1.13 # via boto3
secretstorage==2.3.1 # via keyring
selectors2==2.0.1 # via ncclient
shade==1.27.0
six==1.11.0 # via azure-cli-core, bcrypt, cryptography, isodate, keystoneauth1, knack, munch, ncclient, ntlm-auth, openstacksdk, ovirt-engine-sdk-python, packaging, pynacl, pyopenssl, python-dateutil, pyvmomi, pywinrm, stevedore
six==1.11.0 # via azure-cli-core, bcrypt, cryptography, google-auth, isodate, keystoneauth1, knack, munch, ncclient, ntlm-auth, openstacksdk, ovirt-engine-sdk-python, packaging, pynacl, pyopenssl, python-dateutil, pyvmomi, pywinrm, stevedore
stevedore==1.28.0 # via keystoneauth1
tabulate==0.7.7 # via azure-cli-core, knack
urllib3==1.24 # via requests