Rename remaining job "split" terminology to "slice", clean up the related tests, and fix a sliced-job bug

This commit is contained in:
AlanCoding 2018-10-16 10:46:20 -04:00
parent bbd3edba47
commit 6dc58af8e1
No known key found for this signature in database
GPG Key ID: FD2C3C012A72926B
9 changed files with 32 additions and 20 deletions

View File

@ -3587,7 +3587,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
class Meta:
model = WorkflowJob
fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template',
fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous',
'job_template', 'is_sliced_job',
'-execution_node', '-event_processing_finished', '-controller_node',)
def get_related(self, obj):
@ -3596,6 +3597,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail',
kwargs={'pk': obj.workflow_job_template.pk})
res['notifications'] = self.reverse('api:workflow_job_notifications_list', kwargs={'pk': obj.pk})
if obj.job_template_id:
res['job_template'] = self.reverse('api:job_template_detail', kwargs={'pk': obj.job_template_id})
res['workflow_nodes'] = self.reverse('api:workflow_job_workflow_nodes_list', kwargs={'pk': obj.pk})
res['labels'] = self.reverse('api:workflow_job_label_list', kwargs={'pk': obj.pk})
res['activity_stream'] = self.reverse('api:workflow_job_activity_stream_list', kwargs={'pk': obj.pk})

View File

@ -329,17 +329,17 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
return self.create_unified_job(**kwargs)
def create_unified_job(self, **kwargs):
prevent_splitting = kwargs.pop('_prevent_slicing', False)
split_event = bool(self.job_slice_count > 1 and (not prevent_splitting))
if split_event:
# A Split Job Template will generate a WorkflowJob rather than a Job
prevent_slicing = kwargs.pop('_prevent_slicing', False)
slice_event = bool(self.job_slice_count > 1 and (not prevent_slicing))
if slice_event:
# A Slice Job Template will generate a WorkflowJob rather than a Job
from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
kwargs['_parent_field_name'] = "job_template"
kwargs.setdefault('_eager_fields', {})
kwargs['_eager_fields']['is_sliced_job'] = True
job = super(JobTemplate, self).create_unified_job(**kwargs)
if split_event:
if slice_event:
try:
wj_config = job.launch_config
except JobLaunchConfig.DoesNotExist:
@ -349,7 +349,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
actual_inventory.hosts.count())):
create_kwargs = dict(workflow_job=job,
unified_job_template=self,
ancestor_artifacts=dict(job_split=idx + 1))
ancestor_artifacts=dict(job_slice=idx + 1))
WorkflowJobNode.objects.create(**create_kwargs)
return job

View File

@ -334,7 +334,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
parent_field_name = None
if "_unified_job_class" in kwargs:
# Special case where spawned job is different type than usual
# Only used for split jobs
# Only used for slice jobs
unified_job_class = kwargs.pop("_unified_job_class")
fields = unified_job_class._get_unified_job_field_names() & fields
parent_field_name = kwargs.pop('_parent_field_name')
@ -354,7 +354,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
for fd, val in eager_fields.items():
setattr(unified_job, fd, val)
# NOTE: split workflow jobs _get_parent_field_name method
# NOTE: slice workflow jobs _get_parent_field_name method
# is not correct until this is set
if not parent_field_name:
parent_field_name = unified_job._get_parent_field_name()

View File

@ -4,7 +4,6 @@
# Python
#import urlparse
import logging
import six
# Django
from django.db import models
@ -253,11 +252,10 @@ class WorkflowJobNode(WorkflowNodeBase):
data['extra_vars'] = extra_vars
# ensure that unified jobs created by WorkflowJobs are marked
data['_eager_fields'] = {'launch_type': 'workflow'}
# Extra processing in the case that this is a split job
if 'job_split' in self.ancestor_artifacts and is_root_node:
split_str = six.text_type(self.ancestor_artifacts['job_split'] + 1)
# Extra processing in the case that this is a slice job
if 'job_slice' in self.ancestor_artifacts and is_root_node:
data['_eager_fields']['allow_simultaneous'] = True
data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_split']
data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_slice']
data['_eager_fields']['job_slice_count'] = self.workflow_job.workflow_job_nodes.count()
data['_prevent_slicing'] = True
return data
@ -473,7 +471,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
def _get_parent_field_name(self):
if self.job_template_id:
# This is a workflow job which is a container for split jobs
# This is a workflow job which is a container for slice jobs
return 'job_template'
return 'workflow_job_template'

View File

@ -825,10 +825,11 @@ class BaseTask(object):
return False
def build_inventory(self, instance, **kwargs):
script_data = instance.inventory.get_script_data(
hostvars=True,
slice_number=instance.job_slice_number, slice_count=instance.job_slice_count
)
script_params = dict(hostvars=True)
if hasattr(instance, 'job_slice_number'):
script_params['slice_number'] = instance.job_slice_number
script_params['slice_count'] = instance.job_slice_count
script_data = instance.inventory.get_script_data(**script_params)
json_data = json.dumps(script_data)
handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None))
f = os.fdopen(handle, 'w')

View File

@ -246,6 +246,8 @@ class TestJobExecution(object):
# If `Job.update_model` is called, we're not actually persisting
# to the database; just update the status, which is usually
# the update we care about for testing purposes
if kwargs.get('result_traceback'):
raise Exception('Task encountered error:\n{}'.format(kwargs['result_traceback']))
if 'status' in kwargs:
self.instance.status = kwargs['status']
if 'job_env' in kwargs:

View File

@ -77,6 +77,10 @@ function ListJobsController (
});
vm.getSliceJobDetails = (job) => {
if (!job.job_slice_count) {
return null;
}
if (job.job_slice_count === 1) {
return null;
}

View File

@ -129,6 +129,10 @@ function getSourceWorkflowJobDetails () {
function getSliceJobDetails () {
const count = resource.model.get('job_slice_count');
if (!count) {
return null;
}
if (count === 1) {
return null;
}

View File

@ -113,7 +113,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions',
if(workflowData.summary_fields && workflowData.summary_fields.job_template &&
workflowData.summary_fields.job_template.id){
$scope.split_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`;
$scope.slice_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`;
}
// turn related api browser routes into front end routes