diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 3677f1a028..c44758ead1 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2242,10 +2242,10 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer) pass class WorkflowNodeBaseSerializer(BaseSerializer): - job_type = serializers.SerializerMethodField() - job_tags = serializers.SerializerMethodField() - limit = serializers.SerializerMethodField() - skip_tags = serializers.SerializerMethodField() + job_type = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) + job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) + limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) + skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True) @@ -2261,17 +2261,12 @@ class WorkflowNodeBaseSerializer(BaseSerializer): res['unified_job_template'] = obj.unified_job_template.get_absolute_url() return res - def get_job_type(self, obj): - return obj.char_prompts.get('job_type', None) - - def get_job_tags(self, obj): - return obj.char_prompts.get('job_tags', None) - - def get_skip_tags(self, obj): - return obj.char_prompts.get('skip_tags', None) - - def get_limit(self, obj): - return obj.char_prompts.get('limit', None) + def validate(self, attrs): + # char_prompts go through different validation, so remove them here + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']: + if fd in attrs: + attrs.pop(fd) + return super(WorkflowNodeBaseSerializer, self).validate(attrs) class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): diff --git 
a/awx/api/templates/api/workflow_job_template_workflow_nodes_list.md b/awx/api/templates/api/workflow_job_template_workflow_nodes_list.md new file mode 100644 index 0000000000..9e5d0f688f --- /dev/null +++ b/awx/api/templates/api/workflow_job_template_workflow_nodes_list.md @@ -0,0 +1,15 @@ +# Workflow Job Template Workflow Node List + +Workflow nodes reference templates to execute and define the ordering +in which to execute them. After a job in this workflow finishes, +the subsequent actions are to: + + - run nodes contained in "failure_nodes" or "always_nodes" if job failed + - run nodes contained in "success_nodes" or "always_nodes" if job succeeded + +The workflow job is marked as `successful` if all of the jobs running as +a part of the workflow job have completed, and the workflow job has not +been canceled. Even if a job within the workflow has failed, the workflow +job will not be marked as failed. + +{% include "api/sub_list_create_api_view.md" %} \ No newline at end of file diff --git a/awx/api/views.py b/awx/api/views.py index 3fb9826519..699d5c9e81 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2404,6 +2404,7 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta serializer_class = LabelSerializer parent_model = JobTemplate relationship = 'labels' + new_in_300 = True def post(self, request, *args, **kwargs): # If a label already exists in the database, attach it instead of erroring out @@ -2697,6 +2698,7 @@ class WorkflowJobTemplateList(ListCreateAPIView): model = WorkflowJobTemplate serializer_class = WorkflowJobTemplateListSerializer always_allow_superuser = False + new_in_310 = True # TODO: RBAC ''' @@ -2714,10 +2716,12 @@ class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView): model = WorkflowJobTemplate serializer_class = WorkflowJobTemplateSerializer always_allow_superuser = False + new_in_310 = True class WorkflowJobTemplateLabelList(JobTemplateLabelList): parent_model = WorkflowJobTemplate + 
new_in_310 = True # TODO: @@ -2725,6 +2729,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView): model = WorkflowJobTemplate serializer_class = EmptySerializer + new_in_310 = True def get(self, request, *args, **kwargs): data = {} @@ -2750,6 +2755,12 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): parent_model = WorkflowJobTemplate relationship = 'workflow_job_template_nodes' parent_key = 'workflow_job_template' + new_in_310 = True + + def update_raw_data(self, data): + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']: + data[fd] = None + return super(WorkflowJobTemplateWorkflowNodesList, self).update_raw_data(data) # TODO: class WorkflowJobTemplateJobsList(SubListAPIView): @@ -2765,12 +2776,14 @@ class WorkflowJobList(ListCreateAPIView): model = WorkflowJob serializer_class = WorkflowJobListSerializer + new_in_310 = True # TODO: class WorkflowJobDetail(RetrieveDestroyAPIView): model = WorkflowJob serializer_class = WorkflowJobSerializer + new_in_310 = True class WorkflowJobWorkflowNodesList(SubListAPIView): @@ -2780,6 +2793,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView): parent_model = WorkflowJob relationship = 'workflow_job_nodes' parent_key = 'workflow_job' + new_in_310 = True class SystemJobTemplateList(ListAPIView): diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index d109b43519..a4f02deef2 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -93,6 +93,22 @@ class WorkflowNodeBase(CreatedModifiedModel): data[fd] = self.char_prompts[fd] return data + @property + def job_type(self): + return self.char_prompts.get('job_type', None) + + @property + def job_tags(self): + return self.char_prompts.get('job_tags', None) + + @property + def skip_tags(self): + return self.char_prompts.get('skip_tags', None) + + @property + def limit(self): + return self.char_prompts.get('limit', None) + def get_prompts_warnings(self): ujt_obj = self.unified_job_template if ujt_obj is 
None: @@ -382,6 +398,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow from awx.main.tasks import RunWorkflowJob return RunWorkflowJob + def _has_failed(self): + return False + def socketio_emit_data(self): return {} diff --git a/awx/main/scheduler/__init__.py b/awx/main/scheduler/__init__.py index b93ce6956e..6ecdc09b37 100644 --- a/awx/main/scheduler/__init__.py +++ b/awx/main/scheduler/__init__.py @@ -73,10 +73,12 @@ def process_finished_workflow_jobs(workflow_jobs): dag = WorkflowDAG(workflow_job) if dag.is_workflow_done(): with transaction.atomic(): - # TODO: detect if wfj failed - workflow_job.status = 'completed' + if workflow_job._has_failed(): + workflow_job.status = 'failed' + else: + workflow_job.status = 'successful' workflow_job.save() - workflow_job.websocket_emit_status('completed') + workflow_job.websocket_emit_status(workflow_job.status) def rebuild_graph(): """Regenerate the task graph by refreshing known tasks from Tower, purging diff --git a/awx/main/tests/functional/models/test_workflow.py b/awx/main/tests/functional/models/test_workflow.py index cc61c34ed9..0c3d5d88be 100644 --- a/awx/main/tests/functional/models/test_workflow.py +++ b/awx/main/tests/functional/models/test_workflow.py @@ -90,3 +90,35 @@ class TestWorkflowJobTemplate: assert len(parent_qs) == 1 assert parent_qs[0] == wfjt.workflow_job_template_nodes.all()[1] +@pytest.mark.django_db +class TestWorkflowJobFailure: + """ + Tests to re-implement if workflow failure status is introduced in + a future Tower version. 
+ """ + @pytest.fixture + def wfj(self): + return WorkflowJob.objects.create(name='test-wf-job') + + def test_workflow_not_failed_unran_job(self, wfj): + """ + Test that an un-ran node will not mark workflow job as failed + """ + WorkflowJobNode.objects.create(workflow_job=wfj) + assert not wfj._has_failed() + + def test_workflow_not_failed_successful_job(self, wfj): + """ + Test that a successful node will not mark workflow job as failed + """ + job = Job.objects.create(name='test-job', status='successful') + WorkflowJobNode.objects.create(workflow_job=wfj, job=job) + assert not wfj._has_failed() + + def test_workflow_not_failed_failed_job_but_okay(self, wfj): + """ + Test that a failed node will not mark workflow job as failed + """ + job = Job.objects.create(name='test-job', status='failed') + WorkflowJobNode.objects.create(workflow_job=wfj, job=job) + assert not wfj._has_failed() diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index 9df61ffe97..add8379727 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -215,7 +215,7 @@ class TestWorkflowWarnings: def test_warn_scan_errors_node_prompts(self, job_node_with_prompts): job_node_with_prompts.unified_job_template.job_type = 'scan' - job_node_with_prompts.job_type = 'run' + job_node_with_prompts.char_prompts['job_type'] = 'run' job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23) assert 'ignored' in job_node_with_prompts.get_prompts_warnings() assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored']