Merge pull request #3794 from AlanCoding/workflow_status_8

Reimplement select workflow status items
This commit is contained in:
Alan Rominger
2016-10-31 13:36:59 -04:00
committed by GitHub
7 changed files with 96 additions and 19 deletions

View File

@@ -2242,10 +2242,10 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer)
pass pass
class WorkflowNodeBaseSerializer(BaseSerializer): class WorkflowNodeBaseSerializer(BaseSerializer):
job_type = serializers.SerializerMethodField() job_type = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
job_tags = serializers.SerializerMethodField() job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
limit = serializers.SerializerMethodField() limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
skip_tags = serializers.SerializerMethodField() skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
@@ -2261,17 +2261,12 @@ class WorkflowNodeBaseSerializer(BaseSerializer):
res['unified_job_template'] = obj.unified_job_template.get_absolute_url() res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
return res return res
def get_job_type(self, obj): def validate(self, attrs):
return obj.char_prompts.get('job_type', None) # char_prompts go through different validation, so remove them here
for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']:
def get_job_tags(self, obj): if fd in attrs:
return obj.char_prompts.get('job_tags', None) attrs.pop(fd)
return super(WorkflowNodeBaseSerializer, self).validate(attrs)
def get_skip_tags(self, obj):
return obj.char_prompts.get('skip_tags', None)
def get_limit(self, obj):
return obj.char_prompts.get('limit', None)
class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):

View File

@@ -0,0 +1,15 @@
# Workflow Job Template Workflow Node List
Workflow nodes reference templates to execute and define the ordering
in which to execute them. After a job in this workflow finishes,
the subsequent actions are to:
- run nodes contained in "failure_nodes" or "always_nodes" if job failed
- run nodes contained in "success_nodes" or "always_nodes" if job succeeded
The workflow job is marked as `successful` if all of the jobs running as
a part of the workflow job have completed, and the workflow job has not
been canceled. Even if a job within the workflow has failed, the workflow
job will not be marked as failed.
{% include "api/sub_list_create_api_view.md" %}

View File

@@ -2404,6 +2404,7 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta
serializer_class = LabelSerializer serializer_class = LabelSerializer
parent_model = JobTemplate parent_model = JobTemplate
relationship = 'labels' relationship = 'labels'
new_in_300 = True
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):
# If a label already exists in the database, attach it instead of erroring out # If a label already exists in the database, attach it instead of erroring out
@@ -2697,6 +2698,7 @@ class WorkflowJobTemplateList(ListCreateAPIView):
model = WorkflowJobTemplate model = WorkflowJobTemplate
serializer_class = WorkflowJobTemplateListSerializer serializer_class = WorkflowJobTemplateListSerializer
always_allow_superuser = False always_allow_superuser = False
new_in_310 = True
# TODO: RBAC # TODO: RBAC
''' '''
@@ -2714,10 +2716,12 @@ class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
model = WorkflowJobTemplate model = WorkflowJobTemplate
serializer_class = WorkflowJobTemplateSerializer serializer_class = WorkflowJobTemplateSerializer
always_allow_superuser = False always_allow_superuser = False
new_in_310 = True
class WorkflowJobTemplateLabelList(JobTemplateLabelList): class WorkflowJobTemplateLabelList(JobTemplateLabelList):
parent_model = WorkflowJobTemplate parent_model = WorkflowJobTemplate
new_in_310 = True
# TODO: # TODO:
@@ -2725,6 +2729,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView):
model = WorkflowJobTemplate model = WorkflowJobTemplate
serializer_class = EmptySerializer serializer_class = EmptySerializer
new_in_310 = True
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
data = {} data = {}
@@ -2750,6 +2755,12 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
parent_model = WorkflowJobTemplate parent_model = WorkflowJobTemplate
relationship = 'workflow_job_template_nodes' relationship = 'workflow_job_template_nodes'
parent_key = 'workflow_job_template' parent_key = 'workflow_job_template'
new_in_310 = True
def update_raw_data(self, data):
    for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']:
data[fd] = None
return super(WorkflowJobTemplateWorkflowNodesList, self).update_raw_data(data)
# TODO: # TODO:
class WorkflowJobTemplateJobsList(SubListAPIView): class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -2765,12 +2776,14 @@ class WorkflowJobList(ListCreateAPIView):
model = WorkflowJob model = WorkflowJob
serializer_class = WorkflowJobListSerializer serializer_class = WorkflowJobListSerializer
new_in_310 = True
# TODO: # TODO:
class WorkflowJobDetail(RetrieveDestroyAPIView): class WorkflowJobDetail(RetrieveDestroyAPIView):
model = WorkflowJob model = WorkflowJob
serializer_class = WorkflowJobSerializer serializer_class = WorkflowJobSerializer
new_in_310 = True
class WorkflowJobWorkflowNodesList(SubListAPIView): class WorkflowJobWorkflowNodesList(SubListAPIView):
@@ -2780,6 +2793,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
parent_model = WorkflowJob parent_model = WorkflowJob
relationship = 'workflow_job_nodes' relationship = 'workflow_job_nodes'
parent_key = 'workflow_job' parent_key = 'workflow_job'
new_in_310 = True
class SystemJobTemplateList(ListAPIView): class SystemJobTemplateList(ListAPIView):

View File

@@ -93,6 +93,22 @@ class WorkflowNodeBase(CreatedModifiedModel):
data[fd] = self.char_prompts[fd] data[fd] = self.char_prompts[fd]
return data return data
@property
def job_type(self):
return self.char_prompts.get('job_type', None)
@property
def job_tags(self):
return self.char_prompts.get('job_tags', None)
@property
def skip_tags(self):
return self.char_prompts.get('skip_tags', None)
@property
def limit(self):
return self.char_prompts.get('limit', None)
def get_prompts_warnings(self): def get_prompts_warnings(self):
ujt_obj = self.unified_job_template ujt_obj = self.unified_job_template
if ujt_obj is None: if ujt_obj is None:
@@ -382,6 +398,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow
from awx.main.tasks import RunWorkflowJob from awx.main.tasks import RunWorkflowJob
return RunWorkflowJob return RunWorkflowJob
def _has_failed(self):
return False
def socketio_emit_data(self): def socketio_emit_data(self):
return {} return {}

View File

@@ -73,10 +73,12 @@ def process_finished_workflow_jobs(workflow_jobs):
dag = WorkflowDAG(workflow_job) dag = WorkflowDAG(workflow_job)
if dag.is_workflow_done(): if dag.is_workflow_done():
with transaction.atomic(): with transaction.atomic():
# TODO: detect if wfj failed if workflow_job._has_failed():
workflow_job.status = 'completed' workflow_job.status = 'failed'
else:
workflow_job.status = 'successful'
workflow_job.save() workflow_job.save()
workflow_job.websocket_emit_status('completed') workflow_job.websocket_emit_status(workflow_job.status)
def rebuild_graph(): def rebuild_graph():
"""Regenerate the task graph by refreshing known tasks from Tower, purging """Regenerate the task graph by refreshing known tasks from Tower, purging

View File

@@ -90,3 +90,35 @@ class TestWorkflowJobTemplate:
assert len(parent_qs) == 1 assert len(parent_qs) == 1
assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[1] assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[1]
@pytest.mark.django_db
class TestWorkflowJobFailure:
"""
Tests to re-implement if workflow failure status is introduced in
a future Tower version.
"""
@pytest.fixture
def wfj(self):
return WorkflowJob.objects.create(name='test-wf-job')
def test_workflow_not_failed_unran_job(self, wfj):
"""
Test that an un-ran node will not mark workflow job as failed
"""
WorkflowJobNode.objects.create(workflow_job=wfj)
assert not wfj._has_failed()
def test_workflow_not_failed_successful_job(self, wfj):
"""
        Test that a successful node will not mark workflow job as failed
"""
job = Job.objects.create(name='test-job', status='successful')
WorkflowJobNode.objects.create(workflow_job=wfj, job=job)
assert not wfj._has_failed()
def test_workflow_not_failed_failed_job_but_okay(self, wfj):
"""
Test that a failed node will not mark workflow job as failed
"""
job = Job.objects.create(name='test-job', status='failed')
WorkflowJobNode.objects.create(workflow_job=wfj, job=job)
assert not wfj._has_failed()

View File

@@ -215,7 +215,7 @@ class TestWorkflowWarnings:
def test_warn_scan_errors_node_prompts(self, job_node_with_prompts): def test_warn_scan_errors_node_prompts(self, job_node_with_prompts):
job_node_with_prompts.unified_job_template.job_type = 'scan' job_node_with_prompts.unified_job_template.job_type = 'scan'
job_node_with_prompts.job_type = 'run' job_node_with_prompts.char_prompts['job_type'] = 'run'
job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23) job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23)
assert 'ignored' in job_node_with_prompts.get_prompts_warnings() assert 'ignored' in job_node_with_prompts.get_prompts_warnings()
assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored'] assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored']