diff --git a/awx/main/migrations/0034_v310_add_workflows.py b/awx/main/migrations/0034_v310_add_workflows.py
index c80a4ecc88..4dfb84177a 100644
--- a/awx/main/migrations/0034_v310_add_workflows.py
+++ b/awx/main/migrations/0034_v310_add_workflows.py
@@ -15,6 +15,11 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
+        migrations.AlterField(
+            model_name='unifiedjob',
+            name='launch_type',
+            field=models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency'), (b'workflow', 'Workflow')]),
+        ),
         migrations.CreateModel(
             name='WorkflowJob',
             fields=[
@@ -34,7 +39,7 @@ class Migration(migrations.Migration):
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('always_nodes', models.ManyToManyField(related_name='workflowjobnodes_always', to='main.WorkflowJobNode', blank=True)),
                 ('failure_nodes', models.ManyToManyField(related_name='workflowjobnodes_failure', to='main.WorkflowJobNode', blank=True)),
-                ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)),
+                ('job', models.OneToOneField(related_name='unified_job_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)),
                 ('success_nodes', models.ManyToManyField(related_name='workflowjobnodes_success', to='main.WorkflowJobNode', blank=True)),
             ],
             options={
diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py
index ef536fea6f..7e95e5abd7 100644
--- a/awx/main/models/unified_jobs.py
+++ b/awx/main/models/unified_jobs.py
@@ -394,6 +394,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         ('callback', _('Callback')),       # Job was started via host callback.
         ('scheduled', _('Scheduled')),     # Job was started from a schedule.
         ('dependency', _('Dependency')),   # Job was started as a dependency of another job.
+        ('workflow', _('Workflow')),       # Job was started from a workflow job.
     ]
 
     PASSWORD_FIELDS = ('start_args',)
@@ -757,6 +758,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
     def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False):
         return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True)
 
+    @property
+    def spawned_by_workflow(self):
+        return self.launch_type == 'workflow'
+
+    @property
+    def workflow_job_id(self):
+        if self.spawned_by_workflow:
+            return self.unified_job_node.workflow_job.pk
+        return None
+
     @property
     def celery_task(self):
         try:
@@ -781,7 +792,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
 
     def websocket_emit_data(self):
         ''' Return extra data that should be included when submitting data to the browser over the websocket connection '''
-        return {}
+        return {'workflow_job_id': self.workflow_job_id}
 
     def websocket_emit_status(self, status):
         status_data = dict(unified_job_id=self.id, status=status)
@@ -789,6 +800,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
         status_data['group_name'] = 'jobs'
         emit_channel_notification('jobs-status_changed', status_data)
 
+        if self.spawned_by_workflow:
+            status_data['group_name'] = "workflow_events"
+            emit_channel_notification('workflow_events-' + str(self.workflow_job_id), status_data)
+
+
     def notification_data(self):
         return dict(id=self.id,
                     name=self.name,
diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py
index c43fef3863..3b368aaedf 100644
--- a/awx/main/models/workflow.py
+++ b/awx/main/models/workflow.py
@@ -187,9 +187,9 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
 
 
 class WorkflowJobNode(WorkflowNodeBase):
-    job = models.ForeignKey(
+    job = models.OneToOneField(
         'UnifiedJob',
-        related_name='unified_job_nodes',
+        related_name='unified_job_node',
         blank=True,
         null=True,
         default=None,
@@ -258,6 +258,8 @@ class WorkflowJobNode(WorkflowNodeBase):
             extra_vars.update(functional_aa_dict)
         if extra_vars:
             data['extra_vars'] = extra_vars
+        # ensure that unified jobs created by WorkflowJobs are marked
+        data['launch_type'] = 'workflow'
         return data
 
 
diff --git a/awx/main/tests/unit/models/test_unified_job.py b/awx/main/tests/unit/models/test_unified_job.py
new file mode 100644
index 0000000000..af8833482a
--- /dev/null
+++ b/awx/main/tests/unit/models/test_unified_job.py
@@ -0,0 +1,16 @@
+import mock
+
+from awx.main.models import (
+    UnifiedJob,
+    WorkflowJob,
+    WorkflowJobNode,
+)
+
+
+def test_unified_job_workflow_attributes():
+    with mock.patch('django.db.ConnectionRouter.db_for_write'):
+        job = UnifiedJob(id=1, name="job-1", launch_type="workflow")
+        job.unified_job_node = WorkflowJobNode(workflow_job=WorkflowJob(pk=1))
+
+        assert job.spawned_by_workflow is True
+        assert job.workflow_job_id == 1
diff --git a/awx/ui/client/src/shared/socket/socket.service.js b/awx/ui/client/src/shared/socket/socket.service.js
index 64933f8fbd..b636cc1db8 100644
--- a/awx/ui/client/src/shared/socket/socket.service.js
+++ b/awx/ui/client/src/shared/socket/socket.service.js
@@ -215,6 +215,9 @@ export default
                 if(state.data && state.data.socket && state.data.socket.groups.hasOwnProperty("ad_hoc_command_events")){
                     state.data.socket.groups.ad_hoc_command_events = [id];
                 }
+                if(state.data && state.data.socket && state.data.socket.groups.hasOwnProperty("workflow_events")){
+                    state.data.socket.groups.workflow_events = [id];
+                }
                 self.subscribe(state);
             }
             return true;
diff --git a/awx/ui/client/src/workflow-results/workflow-results.route.js b/awx/ui/client/src/workflow-results/workflow-results.route.js
index 4d15d50777..8e70daf17a 100644
--- a/awx/ui/client/src/workflow-results/workflow-results.route.js
+++ b/awx/ui/client/src/workflow-results/workflow-results.route.js
@@ -18,11 +18,7 @@ export default {
     data: {
         socket: {
             "groups":{
-                "jobs": ["status_changed", "summary"],
-                // not sure if you're gonna need to use job_events
-                // or if y'all will come up w/ a new socket group specifically
-                // for workflows
-                // "job_events": []
+                "workflow_events": []
             }
         }
     },
diff --git a/docs/websockets.md b/docs/websockets.md
index 960926e2d7..43c903f7b1 100644
--- a/docs/websockets.md
+++ b/docs/websockets.md
@@ -21,8 +21,9 @@ Once you've connected you are not subscribed to any event groups. You subscribe
     'groups': {
         'jobs': ['status_changed', 'summary'],
         'schedules': ['changed'],
-        'ad_hoc_command_events': [ids,],
-        'job_events': [ids,],
+        'ad_hoc_command_events': [ids...],
+        'job_events': [ids...],
+        'workflow_events': [ids...],
         'control': ['limit_reached'],
     }
 
@@ -39,12 +40,13 @@ production and development deployments that I will point out, but the actual ser
 between the two environments.
 
 ### Services
-| Name      | Details                                                                                                      |
-|:---------:|:------------------------------------------------------------------------------------------------------------:|
-| nginx     | listens on ports 80/443, handles HTTPS proxying, serves static assets, routes requests for daphne and uwsgi   |
-| uwsgi     | listens on port 8050, handles API requests                                                                    |
-| daphne    | listens on port 8051, handles Websocket requests                                                              |
-| runworker | no listening port, watches and processes the message queue                                                    |
+| Name        | Details                                                                                                      |
+|:-----------:|:------------------------------------------------------------------------------------------------------------:|
+| nginx       | listens on ports 80/443, handles HTTPS proxying, serves static assets, routes requests for daphne and uwsgi   |
+| uwsgi       | listens on port 8050, handles API requests                                                                    |
+| daphne      | listens on port 8051, handles Websocket requests                                                              |
+| runworker   | no listening port, watches and processes the message queue                                                    |
+| supervisord | (production-only) handles the process management of all the services except nginx                             |
 
 When a request comes in to *nginx* and has the `Upgrade` header and is for the path `/websocket`,
 then *nginx* knows that it should route that request to our *daphne* service.
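
To illustrate the protocol change documented in `docs/websockets.md` above: a client that subscribes to the new `workflow_events` group with a workflow job id will receive the per-job status notifications that `websocket_emit_status` now emits on the `workflow_events-<id>` channel. The sketch below is a minimal Python client, not part of the diff; the host, the workflow job id (42), and the use of the third-party `websocket-client` package are illustrative assumptions, and session authentication is omitted.

```python
# Minimal sketch of subscribing to the new workflow_events group.
# Assumptions: placeholder host, workflow job id 42, and the third-party
# websocket-client package (pip install websocket-client); a real client
# would also need an authenticated session, which is omitted here.
import json

from websocket import create_connection

ws = create_connection('wss://tower.example.com/websocket')

# Subscribe to status changes for jobs spawned by workflow job 42,
# mirroring the payload shape documented in docs/websockets.md.
ws.send(json.dumps({
    'groups': {
        'workflow_events': [42],
    }
}))

# Per the diff, each status event for a spawned job arrives with
# group_name set to "workflow_events" and carries unified_job_id,
# status, and (via websocket_emit_data) the parent workflow_job_id.
print(ws.recv())
ws.close()
```

This matches how the UI consumes the change: `workflow-results.route.js` declares `"workflow_events": []` in its socket groups, and `socket.service.js` fills in the id of the workflow job being viewed before subscribing.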