AC-1040 Give unified jobs a copy of the config from their unified job templates, capturing the settings as they were when the job was run.

Chris Church 2014-03-21 00:05:36 -04:00
parent 52b8792058
commit 2cb3f31b01
6 changed files with 189 additions and 131 deletions
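
In rough terms, the change works like this: each unified job template type (job template, project, inventory source) now declares which of its fields should be snapshotted onto the jobs or updates it launches, and a shared helper copies them at creation time, so later edits to the template no longer change what an already-created job recorded. A minimal illustration of the intended behavior, using a hypothetical project (names and values are illustrative, not code from this commit):

# Hypothetical illustration; assumes a saved Project with SCM configured.
project = Project.objects.get(name='example-project')
project.scm_branch = 'release-1.4'
project.save()

update = project.create_project_update()  # helper added in this commit
project.scm_branch = 'devel'
project.save()

# The update keeps the settings captured when it was created.
assert update.scm_branch == 'release-1.4'
assert update.unified_job_template_id == project.pk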

View File

@@ -79,10 +79,12 @@ class Migration(DataMigration):
new_ctype = self._get_content_type_for_model(orm, orm.ProjectUpdate)
for project_update in orm.ProjectUpdate.objects.order_by('pk'):
project = project_update.project
new_project = orm.ProjectNew.objects.get(old_pk=project_update.project_id)
d = self._get_dict_from_common_task_model(project_update)
d.update({
'polymorphic_ctype_id': new_ctype.pk,
'project_id': orm.ProjectNew.objects.get(old_pk=project_update.project_id).pk,
'project_id': new_project.pk,
'unified_job_template_id': new_project.pk,
'local_path': project.local_path,
'scm_type': project.scm_type,
'scm_url': project.scm_url,
@@ -148,6 +150,7 @@ class Migration(DataMigration):
new_ctype = self._get_content_type_for_model(orm, orm.InventoryUpdate)
for inventory_update in orm.InventoryUpdate.objects.order_by('pk'):
inventory_source = inventory_update.inventory_source
new_inventory_source = orm.InventorySourceNew.objects.get(old_pk=inventory_update.inventory_source_id)
d = self._get_dict_from_common_task_model(inventory_update)
d.update({
'polymorphic_ctype_id': new_ctype.pk,
@@ -158,7 +161,8 @@ class Migration(DataMigration):
'source_regions': inventory_source.source_regions,
'overwrite': inventory_source.overwrite,
'overwrite_vars': inventory_source.overwrite_vars,
'inventory_source_id': orm.InventorySourceNew.objects.get(old_pk=inventory_update.inventory_source_id).pk,
'inventory_source_id': new_inventory_source.pk,
'unified_job_template_id': new_inventory_source.pk,
'license_error': inventory_update.license_error,
})
new_inventory_update, created = orm.InventoryUpdateNew.objects.get_or_create(old_pk=inventory_update.pk, defaults=d)
@@ -227,7 +231,9 @@ class Migration(DataMigration):
if job.project:
d['project_id'] = orm.ProjectNew.objects.get(old_pk=job.project_id).pk
if job.job_template:
d['job_template_id'] = orm.JobTemplateNew.objects.get(old_pk=job.job_template_id).pk
new_job_template = orm.JobTemplateNew.objects.get(old_pk=job.job_template_id)
d['job_template_id'] = new_job_template.pk
d['unified_job_template_id'] = new_job_template.pk
new_job, created = orm.JobNew.objects.get_or_create(old_pk=job.pk, defaults=d)
# Update JobTemplate last run.
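
A note on the defaults set above: unified_job_template_id is filled with the same pk as project_id, inventory_source_id or job_template_id because the new template models hang off UnifiedJobTemplate (hence the polymorphic_ctype_id bookkeeping) and appear to share its primary key, so both columns end up pointing at the same underlying row. A hypothetical sanity check against the migrated data, using the *New model names from this migration:

for pu in orm.ProjectUpdateNew.objects.all():
    assert pu.project_id == pu.unified_job_template_id
for iu in orm.InventoryUpdateNew.objects.all():
    assert iu.inventory_source_id == iu.unified_job_template_id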

View File

@@ -741,6 +741,8 @@ class InventorySourceOptions(BaseModel):
', '.join(invalid_regions)))
return ','.join(regions)
source_vars_dict = VarsDictProperty('source_vars')
class InventorySourceBase(InventorySourceOptions):
@@ -758,6 +760,15 @@ class InventorySourceBase(InventorySourceOptions):
class InventorySourceBaseMethods(object):
@classmethod
def _get_unified_job_class(cls):
return InventoryUpdate
@classmethod
def _get_unified_job_field_names(cls):
return ['source', 'source_path', 'source_vars', 'credential',
'source_regions', 'overwrite', 'overwrite_vars']
def save(self, *args, **kwargs):
# If update_fields has been specified, add our field names to it,
# if it hasn't been specified, then we're just doing a normal save.
@@ -775,8 +786,6 @@ class InventorySourceBaseMethods(object):
# Do the actual save.
super(InventorySourceBaseMethods, self).save(*args, **kwargs)
source_vars_dict = VarsDictProperty('source_vars')
def get_absolute_url(self):
return reverse('api:inventory_source_detail', args=(self.pk,))
@@ -784,15 +793,18 @@ class InventorySourceBaseMethods(object):
# FIXME: Prevent update when another one is active!
return bool(self.source)
def create_inventory_update(self, **kwargs):
return self._create_unified_job_instance(**kwargs)
def update_signature(self, **kwargs):
if self.can_update:
inventory_update = self.inventory_updates.create() # FIXME: Copy inventory source fields to update
inventory_update = self.create_inventory_update()
inventory_update_sig = inventory_update.start_signature()
return (inventory_update, inventory_update_sig)
def update(self, **kwargs):
if self.can_update:
inventory_update = self.inventory_updates.create() # FIXME: Copy inventory source fields to update
inventory_update = self.create_inventory_update()
inventory_update.start()
return inventory_update
@@ -928,6 +940,15 @@ class InventoryUpdateBase(InventorySourceOptions):
class InventoryUpdateBaseMethods(object):
@classmethod
def _get_parent_field_name(cls):
return 'inventory_source'
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunInventoryUpdate
return RunInventoryUpdate
def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', [])
if bool('license' in self.result_stdout and
@@ -937,16 +958,9 @@ class InventoryUpdateBaseMethods(object):
update_fields.append('license_error')
super(InventoryUpdateBaseMethods, self).save(*args, **kwargs)
def _get_parent_instance(self):
return self.inventory_source
def get_absolute_url(self):
return reverse('api:inventory_update_detail', args=(self.pk,))
def _get_task_class(self):
from awx.main.tasks import RunInventoryUpdate
return RunInventoryUpdate
if getattr(settings, 'UNIFIED_JOBS_STEP') == 0:
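
The save() overrides in this file lean on Django's update_fields contract: when a caller passes update_fields, only the listed columns are written, so any value the override computes itself (license_error here; elapsed and unified_job_template later in unified_jobs.py) has to be appended to that list or a partial save silently drops it. A minimal sketch of the idiom with an invented DemoModel, not one of the AWX models:

from django.db import models

class DemoModel(models.Model):
    name = models.CharField(max_length=100)
    name_length = models.IntegerField(default=0)

    def save(self, *args, **kwargs):
        # Same pattern as the overrides above: grab the caller's list (or a
        # throwaway empty one) and keep derived fields in it.
        update_fields = kwargs.get('update_fields', [])
        self.name_length = len(self.name)
        if 'name_length' not in update_fields:
            update_fields.append('name_length')
        super(DemoModel, self).save(*args, **kwargs)

With that override, demo.save(update_fields=['name']) also persists name_length, because the derived field was appended to the caller's list before the parent save ran.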

View File

@@ -143,27 +143,21 @@ class JobTemplateBase(JobOptions):
class JobTemplateBaseMethods(object):
@classmethod
def _get_unified_job_class(cls):
return Job
@classmethod
def _get_unified_job_field_names(cls):
return ['job_type', 'inventory', 'project', 'playbook', 'credential',
'cloud_credential', 'forks', 'limit', 'verbosity',
'extra_vars', 'job_tags']
def create_job(self, **kwargs):
'''
Create a new job based on this template.
'''
save_job = kwargs.pop('save', True)
kwargs['job_template'] = self
kwargs.setdefault('job_type', self.job_type)
kwargs.setdefault('inventory', self.inventory)
kwargs.setdefault('project', self.project)
kwargs.setdefault('playbook', self.playbook)
kwargs.setdefault('credential', self.credential)
kwargs.setdefault('cloud_credential', self.cloud_credential)
kwargs.setdefault('forks', self.forks)
kwargs.setdefault('limit', self.limit)
kwargs.setdefault('verbosity', self.verbosity)
kwargs.setdefault('extra_vars', self.extra_vars)
kwargs.setdefault('job_tags', self.job_tags)
job = Job(**kwargs)
if save_job:
job.save()
return job
return self._create_unified_job_instance(**kwargs)
def get_absolute_url(self):
return reverse('api:job_template_detail', args=(self.pk,))
@@ -256,6 +250,15 @@ class JobBase(JobOptions):
class JobBaseMethods(object):
@classmethod
def _get_parent_field_name(cls):
return 'job_template'
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunJob
return RunJob
def get_absolute_url(self):
return reverse('api:job_detail', args=(self.pk,))
@@ -278,10 +281,6 @@ class JobBaseMethods(object):
needed.append(pw)
return needed
def _get_task_class(self):
from awx.main.tasks import RunJob
return RunJob
def _get_passwords_needed_to_start(self):
return self.passwords_needed_to_start

View File

@@ -147,6 +147,46 @@ class ProjectOptions(models.Model):
pass
return cred
def get_project_path(self, check_if_exists=True):
local_path = os.path.basename(self.local_path)
if local_path and not local_path.startswith('.'):
proj_path = os.path.join(settings.PROJECTS_ROOT, local_path)
if not check_if_exists or os.path.exists(proj_path):
return proj_path
@property
def playbooks(self):
valid_re = re.compile(r'^\s*?-?\s*?(?:hosts|include):\s*?.*?$')
results = []
project_path = self.get_project_path()
if project_path:
for dirpath, dirnames, filenames in os.walk(project_path):
for filename in filenames:
if os.path.splitext(filename)[-1] not in ['.yml', '.yaml']:
continue
playbook = os.path.join(dirpath, filename)
# Filter files that do not have either hosts or top-level
# includes. Use regex to allow files with invalid YAML to
# show up.
matched = False
try:
for line in file(playbook):
if valid_re.match(line):
matched = True
except IOError:
continue
if not matched:
continue
playbook = os.path.relpath(playbook, project_path)
# Filter files in a roles subdirectory.
if 'roles' in playbook.split(os.sep):
continue
# Filter files in a tasks subdirectory.
if 'tasks' in playbook.split(os.sep):
continue
results.append(playbook)
return results
class ProjectBase(ProjectOptions):
'''
@@ -174,6 +214,15 @@ class ProjectBase(ProjectOptions):
class ProjectBaseMethods(object):
@classmethod
def _get_unified_job_class(cls):
return ProjectUpdate
@classmethod
def _get_unified_job_field_names(cls):
return ['local_path', 'scm_type', 'scm_url', 'scm_branch',
'scm_clean', 'scm_delete_on_update', 'credential']
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
# If update_fields has been specified, add our field names to it,
@@ -239,61 +288,26 @@ class ProjectBaseMethods(object):
# FIXME: Prevent update when another one is active!
return bool(self.scm_type)# and not self.current_update)
def create_project_update(self, **kwargs):
if self.scm_delete_on_next_update:
kwargs['scm_delete_on_update'] = True
return self._create_unified_job_instance(**kwargs)
def update_signature(self, **kwargs):
if self.can_update:
project_update = self.project_updates.create() # FIXME: Copy options to ProjectUpdate
project_update = self.create_project_update()
project_update_sig = project_update.start_signature()
return (project_update, project_update_sig)
def update(self, **kwargs):
if self.can_update:
project_update = self.project_updates.create() # FIXME: Copy options to ProjectUpdate
project_update = self.create_project_update()
project_update.start()
return project_update
def get_absolute_url(self):
return reverse('api:project_detail', args=(self.pk,))
def get_project_path(self, check_if_exists=True):
local_path = os.path.basename(self.local_path)
if local_path and not local_path.startswith('.'):
proj_path = os.path.join(settings.PROJECTS_ROOT, local_path)
if not check_if_exists or os.path.exists(proj_path):
return proj_path
@property
def playbooks(self):
valid_re = re.compile(r'^\s*?-?\s*?(?:hosts|include):\s*?.*?$')
results = []
project_path = self.get_project_path()
if project_path:
for dirpath, dirnames, filenames in os.walk(project_path):
for filename in filenames:
if os.path.splitext(filename)[-1] not in ['.yml', '.yaml']:
continue
playbook = os.path.join(dirpath, filename)
# Filter files that do not have either hosts or top-level
# includes. Use regex to allow files with invalid YAML to
# show up.
matched = False
try:
for line in file(playbook):
if valid_re.match(line):
matched = True
except IOError:
continue
if not matched:
continue
playbook = os.path.relpath(playbook, project_path)
# Filter files in a roles subdirectory.
if 'roles' in playbook.split(os.sep):
continue
# Filter files in a tasks subdirectory.
if 'tasks' in playbook.split(os.sep):
continue
results.append(playbook)
return results
if getattr(settings, 'UNIFIED_JOBS_STEP') == 0:
@@ -376,16 +390,18 @@ class ProjectUpdateBase(ProjectOptions):
class ProjectUpdateBaseMethods(object):
def _get_parent_instance(self):
return self.project
@classmethod
def _get_parent_field_name(cls):
return 'project'
@classmethod
def _get_task_class(cls):
from awx.main.tasks import RunProjectUpdate
return RunProjectUpdate
def get_absolute_url(self):
return reverse('api:project_update_detail', args=(self.pk,))
def _get_task_class(self):
from awx.main.tasks import RunProjectUpdate
return RunProjectUpdate
def _update_parent_instance(self):
parent_instance = self._get_parent_instance()
if parent_instance:
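
One detail worth calling out in create_project_update() above: the project's one-shot scm_delete_on_next_update flag is turned into a concrete scm_delete_on_update value on the update at creation time, and because explicitly passed kwargs win over copied template fields, the update carries the final answer and the tasks.py changes below only ever consult the update's own copy. Roughly, with a hypothetical project:

project.scm_delete_on_update = False
project.scm_delete_on_next_update = True
update = project.create_project_update()
# The kwarg set in create_project_update() overrides the value that would
# otherwise be copied from the project.
assert update.scm_delete_on_update is True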

View File

@@ -182,11 +182,34 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
def update(self, **kwargs):
raise NotImplementedError # Implement in subclass.
def _get_child_queryset(self):
pass
@classmethod
def _get_unified_job_class(cls):
raise NotImplementedError # Implement in subclass.
def _create_child_instance(self, **kwargs):
pass
@classmethod
def _get_unified_job_field_names(cls):
raise NotImplementedError # Implement in subclass.
def _create_unified_job_instance(self, **kwargs):
'''
Create a new unified job based on this unified job template.
'''
save_unified_job = kwargs.pop('save', True)
unified_job_class = self._get_unified_job_class()
parent_field_name = unified_job_class._get_parent_field_name()
kwargs.pop('%s_id' % parent_field_name, None)
kwargs[parent_field_name] = self
for field_name in self._get_unified_job_field_names():
if field_name in kwargs:
continue
# Foreign keys can be specified as field_name or field_name_id.
if hasattr(self, '%s_id' % field_name) and ('%s_id' % field_name) in kwargs:
continue
kwargs[field_name] = getattr(self, field_name)
unified_job = unified_job_class(**kwargs)
if save_unified_job:
unified_job.save()
return unified_job
class UnifiedJob(PolymorphicModel, PrimordialModel):
@@ -306,11 +329,19 @@ class UnifiedJob(PolymorphicModel, PrimordialModel):
editable=False,
)
@classmethod
def _get_task_class(cls):
raise NotImplementedError # Implement in subclasses.
@classmethod
def _get_parent_field_name(cls):
return 'unified_job_template' # Override in subclasses.
def __unicode__(self):
return u'%s-%s-%s' % (self.created, self.id, self.status)
def _get_parent_instance(self):
return self.job_template
return getattr(self, self._get_parent_field_name())
def _update_parent_instance(self):
parent_instance = self._get_parent_instance()
@@ -360,6 +391,10 @@ class UnifiedJob(PolymorphicModel, PrimordialModel):
self.elapsed = elapsed
if 'elapsed' not in update_fields:
update_fields.append('elapsed')
if self.unified_job_template != self._get_parent_instance():
self.unified_job_template = self._get_parent_instance()
if 'unified_job_template' not in update_fields:
update_fields.append('unified_job_template')
super(UnifiedJob, self).save(*args, **kwargs)
# If status changed, update parent instance....
if self.status != status_before:
@@ -396,9 +431,6 @@ class UnifiedJob(PolymorphicModel, PrimordialModel):
def can_start(self):
return bool(self.status == 'new')
def _get_task_class(self):
raise NotImplementedError
def _get_passwords_needed_to_start(self):
return []
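
To summarize how _create_unified_job_instance() above behaves for callers: keyword arguments passed explicitly (including foreign keys given as either field or field_id) are left untouched, every other name returned by _get_unified_job_field_names() is copied from the template, the parent foreign key is pointed back at the template, and UnifiedJob.save() keeps unified_job_template in sync with that parent. A rough usage sketch with a hypothetical job template (not code from this commit):

job = job_template.create_job(limit='webservers')

assert job.limit == 'webservers'               # explicit kwarg wins
assert job.job_type == job_template.job_type   # copied from the template
assert job.job_template == job_template        # parent FK set by the helper
assert job.unified_job_template_id == job_template.pk  # synced on save()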

View File

@@ -516,9 +516,8 @@ class RunProjectUpdate(BaseTask):
'''
Return SSH private key data needed for this project update.
'''
project = project_update.project
if project.credential:
return decrypt_field(project.credential, 'ssh_key_data') or None
if project_update.credential:
return decrypt_field(project_update.credential, 'ssh_key_data') or None
def build_passwords(self, project_update, **kwargs):
'''
@@ -527,12 +526,11 @@ class RunProjectUpdate(BaseTask):
'''
passwords = super(RunProjectUpdate, self).build_passwords(project_update,
**kwargs)
project = project_update.project
if project.credential:
passwords['scm_key_unlock'] = decrypt_field(project.credential,
if project_update.credential:
passwords['scm_key_unlock'] = decrypt_field(project_update.credential,
'ssh_key_unlock')
passwords['scm_username'] = project.credential.username
passwords['scm_password'] = decrypt_field(project.credential,
passwords['scm_username'] = project_update.credential.username
passwords['scm_password'] = decrypt_field(project_update.credential,
'password')
return passwords
@@ -552,9 +550,8 @@ class RunProjectUpdate(BaseTask):
for authentication.
'''
extra_vars = {}
project = project_update.project
scm_type = project.scm_type
scm_url = update_scm_url(scm_type, project.scm_url,
scm_type = project_update.scm_type
scm_url = update_scm_url(scm_type, project_update.scm_url,
check_special_cases=False)
scm_url_parts = urlparse.urlsplit(scm_url)
scm_username = kwargs.get('passwords', {}).get('scm_username', '')
@@ -597,18 +594,16 @@ class RunProjectUpdate(BaseTask):
args.append('-vvv')
else:
args.append('-v')
project = project_update.project
scm_url, extra_vars = self._build_scm_url_extra_vars(project_update,
**kwargs)
scm_branch = project.scm_branch or {'hg': 'tip'}.get(project.scm_type, 'HEAD')
scm_delete_on_update = project.scm_delete_on_update or project.scm_delete_on_next_update
scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
extra_vars.update({
'project_path': project.get_project_path(check_if_exists=False),
'scm_type': project.scm_type,
'project_path': project_update.get_project_path(check_if_exists=False),
'scm_type': project_update.scm_type,
'scm_url': scm_url,
'scm_branch': scm_branch,
'scm_clean': project.scm_clean,
'scm_delete_on_update': scm_delete_on_update,
'scm_clean': project_update.scm_clean,
'scm_delete_on_update': project_update.scm_delete_on_update,
})
args.extend(['-e', json.dumps(extra_vars)])
args.append('project_update.yml')
@@ -652,8 +647,7 @@ class RunProjectUpdate(BaseTask):
**kwargs)[0]
if after_url != before_url:
output_replacements.append((before_url, after_url))
project = project_update.project
if project.scm_type == 'svn' and scm_username and scm_password:
if project_update.scm_type == 'svn' and scm_username and scm_password:
d_before = {
'username': scm_username,
'password': scm_password,
@@ -719,14 +713,13 @@ class RunInventoryUpdate(BaseTask):
'''
Return private data needed for inventory update.
'''
inventory_source = inventory_update.inventory_source
cp = ConfigParser.ConfigParser()
# Build custom ec2.ini for ec2 inventory script to use.
if inventory_source.source == 'ec2':
if inventory_update.source == 'ec2':
section = 'ec2'
cp.add_section(section)
ec2_opts = dict(inventory_source.source_vars_dict.items())
regions = inventory_source.source_regions or 'all'
ec2_opts = dict(inventory_update.source_vars_dict.items())
regions = inventory_update.source_regions or 'all'
regions = ','.join([x.strip() for x in regions.split(',')])
regions_blacklist = ','.join(settings.EC2_REGIONS_BLACKLIST)
ec2_opts['regions'] = regions
@@ -739,10 +732,10 @@ class RunInventoryUpdate(BaseTask):
for k,v in ec2_opts.items():
cp.set(section, k, str(v))
# Build pyrax creds INI for rax inventory script.
elif inventory_source.source == 'rax':
elif inventory_update.source == 'rax':
section = 'rackspace_cloud'
cp.add_section(section)
credential = inventory_source.credential
credential = inventory_update.credential
if credential:
cp.set(section, 'username', credential.username)
cp.set(section, 'api_key', decrypt_field(credential,
@@ -759,8 +752,7 @@ class RunInventoryUpdate(BaseTask):
'''
passwords = super(RunInventoryUpdate, self).build_passwords(inventory_update,
**kwargs)
inventory_source = inventory_update.inventory_source
credential = inventory_source.credential
credential = inventory_update.credential
if credential:
passwords['source_username'] = credential.username
passwords['source_password'] = decrypt_field(credential, 'password')
@@ -772,20 +764,19 @@ class RunInventoryUpdate(BaseTask):
'''
env = super(RunInventoryUpdate, self).build_env(inventory_update, **kwargs)
# Pass inventory source ID to inventory script.
inventory_source = inventory_update.inventory_source
env['INVENTORY_SOURCE_ID'] = str(inventory_source.pk)
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
# Set environment variables specific to each source.
if inventory_source.source == 'ec2':
if inventory_update.source == 'ec2':
env['AWS_ACCESS_KEY_ID'] = kwargs.get('passwords', {}).get('source_username', '')
env['AWS_SECRET_ACCESS_KEY'] = kwargs.get('passwords', {}).get('source_password', '')
env['EC2_INI_PATH'] = kwargs.get('private_data_file', '')
elif inventory_source.source == 'rax':
elif inventory_update.source == 'rax':
env['RAX_CREDS_FILE'] = kwargs.get('private_data_file', '')
env['RAX_REGION'] = inventory_source.source_regions or 'all'
env['RAX_REGION'] = inventory_update.source_regions or 'all'
# Set this environment variable so the vendored package won't
# complain about not being able to determine its version number.
env['PBR_VERSION'] = '0.5.21'
elif inventory_source.source == 'file':
elif inventory_update.source == 'file':
# FIXME: Parse source_env to dict, update env.
pass
#print env
@@ -799,25 +790,25 @@ class RunInventoryUpdate(BaseTask):
inventory = inventory_source.group.inventory
args = ['awx-manage', 'inventory_import']
args.extend(['--inventory-id', str(inventory.pk)])
if inventory_source.overwrite:
if inventory_update.overwrite:
args.append('--overwrite')
if inventory_source.overwrite_vars:
if inventory_update.overwrite_vars:
args.append('--overwrite-vars')
args.append('--source')
if inventory_source.source == 'ec2':
if inventory_update.source == 'ec2':
ec2_path = self.get_path_to('..', 'plugins', 'inventory', 'ec2.py')
args.append(ec2_path)
args.extend(['--enabled-var', 'ec2_state'])
args.extend(['--enabled-value', 'running'])
#args.extend(['--instance-id', 'ec2_id'])
elif inventory_source.source == 'rax':
elif inventory_update.source == 'rax':
rax_path = self.get_path_to('..', 'plugins', 'inventory', 'rax.py')
args.append(rax_path)
args.extend(['--enabled-var', 'rax_status'])
args.extend(['--enabled-value', 'ACTIVE'])
#args.extend(['--instance-id', 'rax_id'])
elif inventory_source.source == 'file':
args.append(inventory_source.source_path)
elif inventory_update.source == 'file':
args.append(inventory_update.source_path)
verbosity = getattr(settings, 'INVENTORY_UPDATE_VERBOSITY', 1)
args.append('-v%d' % verbosity)
if settings.DEBUG: