From a458bb3c89ad21c9796fe406aa37195f396462dc Mon Sep 17 00:00:00 2001 From: Aaron Tan Date: Tue, 29 Nov 2016 11:51:42 -0500 Subject: [PATCH] Remove source_workflow_job and use workflow_job_id instead. --- awx/api/generics.py | 5 +++- awx/api/serializers.py | 6 ++--- .../0053_v310_linkage_back_to_workflow_job.py | 20 ---------------- awx/main/models/unified_jobs.py | 8 ------- awx/main/scheduler/__init__.py | 24 +++++++------------ .../api/serializers/test_job_serializers.py | 3 +-- 6 files changed, 17 insertions(+), 49 deletions(-) delete mode 100644 awx/main/migrations/0053_v310_linkage_back_to_workflow_job.py diff --git a/awx/api/generics.py b/awx/api/generics.py index 0c593925c0..8f8d76e3e8 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -241,7 +241,10 @@ class ListAPIView(generics.ListAPIView, GenericAPIView): # Base class for a read-only list view. def get_queryset(self): - return self.request.user.get_queryset(self.model) + qs = self.request.user.get_queryset(self.model) + if getattr(self.model, 'spawned_by_workflow', False): + qs = qs.select_related('unified_job_node__workflow_job') + return qs def paginate_queryset(self, queryset): page = super(ListAPIView, self).paginate_queryset(queryset) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index a68b413a13..fde4bdcecb 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -566,7 +566,7 @@ class UnifiedJobSerializer(BaseSerializer): fields = ('*', 'unified_job_template', 'launch_type', 'status', 'failed', 'started', 'finished', 'elapsed', 'job_args', 'job_cwd', 'job_env', 'job_explanation', 'result_stdout', - 'execution_node', 'result_traceback', 'source_workflow_job') + 'execution_node', 'result_traceback', 'workflow_job_id') extra_kwargs = { 'unified_job_template': { 'source': 'unified_job_template_id', @@ -598,8 +598,8 @@ class UnifiedJobSerializer(BaseSerializer): res['stdout'] = reverse('api:job_stdout', args=(obj.pk,)) elif isinstance(obj, AdHocCommand): 
res['stdout'] = reverse('api:ad_hoc_command_stdout', args=(obj.pk,)) - if obj.source_workflow_job: - res['source_workflow_job'] = reverse('api:workflow_job_detail', args=(obj.source_workflow_job.pk,)) + if obj.workflow_job_id: + res['source_workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job_id,)) return res def to_representation(self, obj): diff --git a/awx/main/migrations/0053_v310_linkage_back_to_workflow_job.py b/awx/main/migrations/0053_v310_linkage_back_to_workflow_job.py deleted file mode 100644 index f98840c3e4..0000000000 --- a/awx/main/migrations/0053_v310_linkage_back_to_workflow_job.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0052_v310_inventory_name_non_unique'), - ] - - operations = [ - migrations.AddField( - model_name='unifiedjob', - name='source_workflow_job', - field=models.ForeignKey(related_name='spawned_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.WorkflowJob', null=True), - ), - ] diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 92903a376e..43cdfd07f0 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -528,14 +528,6 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique blank=True, related_name='%(class)s_labels' ) - source_workflow_job = models.ForeignKey( - 'WorkflowJob', - null=True, - default=None, - editable=False, - related_name='spawned_jobs', - on_delete=models.SET_NULL, - ) def get_absolute_url(self): real_instance = self.get_real_instance() diff --git a/awx/main/scheduler/__init__.py b/awx/main/scheduler/__init__.py index ba4aa5eed5..8569fb5cfc 100644 --- a/awx/main/scheduler/__init__.py +++ b/awx/main/scheduler/__init__.py @@ -18,8 +18,8 @@ from 
awx.main.scheduler.dag_workflow import WorkflowDAG from awx.main.scheduler.dependency_graph import DependencyGraph from awx.main.scheduler.partial import ( - JobDict, - ProjectUpdateDict, + JobDict, + ProjectUpdateDict, ProjectUpdateLatestDict, InventoryUpdateDict, InventoryUpdateLatestDict, @@ -103,7 +103,7 @@ class TaskManager(): for task in all_sorted_tasks: if type(task) is JobDict: inventory_ids.add(task['inventory_id']) - + for inventory_id in inventory_ids: results.append((inventory_id, InventorySourceDict.filter_partial(inventory_id))) @@ -116,11 +116,6 @@ class TaskManager(): for spawn_node in spawn_nodes: kv = spawn_node.get_job_kwargs() job = spawn_node.unified_job_template.create_unified_job(**kv) - # source_workflow_job is a job-specific field rather than a field copied from job - # template, therefore does not fit into the copy routine and should be put outside - # of create_unified_job. - job.source_workflow_job = workflow_job - job.save() spawn_node.job = job spawn_node.save() can_start = job.signal_start(**kv) @@ -179,10 +174,10 @@ class TaskManager(): 'id': task['id'], } dependencies = [{'type': t.get_job_type_str(), 'id': t['id']} for t in dependent_tasks] - + error_handler = handle_work_error.s(subtasks=[task_actual] + dependencies) success_handler = handle_work_success.s(task_actual=task_actual) - + job_obj = task.get_full() job_obj.status = 'waiting' @@ -206,7 +201,7 @@ class TaskManager(): job_obj.websocket_emit_status(job_obj.status) if job_obj.status != 'failed': job_obj.start_celery_task(opts, error_callback=error_handler, success_callback=success_handler) - + connection.on_commit(post_commit) def process_runnable_tasks(self, runnable_tasks): @@ -284,7 +279,7 @@ class TaskManager(): if not self.graph.is_job_blocked(task): dependencies = self.generate_dependencies(task) self.process_dependencies(task, dependencies) - + # Spawning deps might have blocked us if not self.graph.is_job_blocked(task): self.graph.add_job(task) @@ -300,7 +295,7 @@ 
class TaskManager(): for task in all_running_sorted_tasks: if (task['celery_task_id'] not in active_tasks and not hasattr(settings, 'IGNORE_CELERY_INSPECTOR')): - # NOTE: Pull status again and make sure it didn't finish in + # NOTE: Pull status again and make sure it didn't finish in # the meantime? # TODO: try catch the getting of the job. The job COULD have been deleted task_obj = task.get_full() @@ -351,7 +346,7 @@ class TaskManager(): latest_inventory_updates = self.get_latest_inventory_update_tasks(all_sorted_tasks) self.process_latest_inventory_updates(latest_inventory_updates) - + inventory_id_sources = self.get_inventory_source_tasks(all_sorted_tasks) self.process_inventory_sources(inventory_id_sources) @@ -376,4 +371,3 @@ class TaskManager(): # Operations whose queries rely on modifications made during the atomic scheduling session for wfj in WorkflowJob.objects.filter(id__in=finished_wfjs): _send_notification_templates(wfj, 'succeeded' if wfj.status == 'successful' else 'failed') - diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py index d76bf4cfcb..2ab393f698 100644 --- a/awx/main/tests/unit/api/serializers/test_job_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -34,8 +34,7 @@ def project_update(mocker): @pytest.fixture def job(mocker, job_template, project_update): - return mocker.MagicMock(pk=5, job_template=job_template, project_update=project_update, - source_workflow_job=None) + return mocker.MagicMock(pk=5, job_template=job_template, project_update=project_update) @pytest.fixture