Remove source_workflow_job and use workflow_job_id instead.

This commit is contained in:
Aaron Tan
2016-11-29 11:51:42 -05:00
parent 3e8e9480d1
commit a458bb3c89
6 changed files with 17 additions and 49 deletions

View File

@@ -241,7 +241,10 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
# Base class for a read-only list view.
def get_queryset(self):
    """Return the queryset for this read-only list view, scoped to the
    permissions of the requesting user.

    For models spawned by a workflow (``spawned_by_workflow`` attribute is
    truthy on the model class), the spawning workflow job is pulled in via
    ``select_related`` so serializers can build the ``workflow_job_id``
    related link without one extra query per row.
    """
    qs = self.request.user.get_queryset(self.model)
    if getattr(self.model, 'spawned_by_workflow', False):
        # Join the workflow-job node relation up front (N+1 avoidance).
        qs = qs.select_related('unified_job_node__workflow_job')
    return qs
def paginate_queryset(self, queryset):
page = super(ListAPIView, self).paginate_queryset(queryset)

View File

@@ -566,7 +566,7 @@ class UnifiedJobSerializer(BaseSerializer):
fields = ('*', 'unified_job_template', 'launch_type', 'status',
'failed', 'started', 'finished', 'elapsed', 'job_args',
'job_cwd', 'job_env', 'job_explanation', 'result_stdout',
'execution_node', 'result_traceback', 'source_workflow_job')
'execution_node', 'result_traceback', 'workflow_job_id')
extra_kwargs = {
'unified_job_template': {
'source': 'unified_job_template_id',
@@ -598,8 +598,8 @@ class UnifiedJobSerializer(BaseSerializer):
res['stdout'] = reverse('api:job_stdout', args=(obj.pk,))
elif isinstance(obj, AdHocCommand):
res['stdout'] = reverse('api:ad_hoc_command_stdout', args=(obj.pk,))
if obj.source_workflow_job:
res['source_workflow_job'] = reverse('api:workflow_job_detail', args=(obj.source_workflow_job.pk,))
if obj.workflow_job_id:
res['source_workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job_id,))
return res
def to_representation(self, obj):

View File

@@ -1,20 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Schema migration (removed by this commit) that added the
    ``source_workflow_job`` foreign key to UnifiedJob; the field is
    superseded by ``workflow_job_id``."""

    # Must apply after the inventory-name uniqueness migration.
    dependencies = [
        ('main', '0052_v310_inventory_name_non_unique'),
    ]

    operations = [
        migrations.AddField(
            model_name='unifiedjob',
            name='source_workflow_job',
            # Nullable, non-editable back-reference; SET_NULL keeps spawned
            # jobs alive if the parent workflow job is deleted.
            field=models.ForeignKey(related_name='spawned_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.WorkflowJob', null=True),
        ),
    ]

View File

@@ -528,14 +528,6 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
blank=True,
related_name='%(class)s_labels'
)
source_workflow_job = models.ForeignKey(
'WorkflowJob',
null=True,
default=None,
editable=False,
related_name='spawned_jobs',
on_delete=models.SET_NULL,
)
def get_absolute_url(self):
real_instance = self.get_real_instance()

View File

@@ -18,8 +18,8 @@ from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.main.scheduler.dependency_graph import DependencyGraph
from awx.main.scheduler.partial import (
JobDict,
ProjectUpdateDict,
JobDict,
ProjectUpdateDict,
ProjectUpdateLatestDict,
InventoryUpdateDict,
InventoryUpdateLatestDict,
@@ -103,7 +103,7 @@ class TaskManager():
for task in all_sorted_tasks:
if type(task) is JobDict:
inventory_ids.add(task['inventory_id'])
for inventory_id in inventory_ids:
results.append((inventory_id, InventorySourceDict.filter_partial(inventory_id)))
@@ -116,11 +116,6 @@ class TaskManager():
for spawn_node in spawn_nodes:
kv = spawn_node.get_job_kwargs()
job = spawn_node.unified_job_template.create_unified_job(**kv)
# source_workflow_job is a job-specific field rather than a field copied from job
# template, therefore does not fit into the copy routine and should be put outside
# of create_unified_job.
job.source_workflow_job = workflow_job
job.save()
spawn_node.job = job
spawn_node.save()
can_start = job.signal_start(**kv)
@@ -179,10 +174,10 @@ class TaskManager():
'id': task['id'],
}
dependencies = [{'type': t.get_job_type_str(), 'id': t['id']} for t in dependent_tasks]
error_handler = handle_work_error.s(subtasks=[task_actual] + dependencies)
success_handler = handle_work_success.s(task_actual=task_actual)
job_obj = task.get_full()
job_obj.status = 'waiting'
@@ -206,7 +201,7 @@ class TaskManager():
job_obj.websocket_emit_status(job_obj.status)
if job_obj.status != 'failed':
job_obj.start_celery_task(opts, error_callback=error_handler, success_callback=success_handler)
connection.on_commit(post_commit)
def process_runnable_tasks(self, runnable_tasks):
@@ -284,7 +279,7 @@ class TaskManager():
if not self.graph.is_job_blocked(task):
dependencies = self.generate_dependencies(task)
self.process_dependencies(task, dependencies)
# Spawning deps might have blocked us
if not self.graph.is_job_blocked(task):
self.graph.add_job(task)
@@ -300,7 +295,7 @@ class TaskManager():
for task in all_running_sorted_tasks:
if (task['celery_task_id'] not in active_tasks and not hasattr(settings, 'IGNORE_CELERY_INSPECTOR')):
# NOTE: Pull status again and make sure it didn't finish in
# NOTE: Pull status again and make sure it didn't finish in
# the meantime?
# TODO: try catch the getting of the job. The job COULD have been deleted
task_obj = task.get_full()
@@ -351,7 +346,7 @@ class TaskManager():
latest_inventory_updates = self.get_latest_inventory_update_tasks(all_sorted_tasks)
self.process_latest_inventory_updates(latest_inventory_updates)
inventory_id_sources = self.get_inventory_source_tasks(all_sorted_tasks)
self.process_inventory_sources(inventory_id_sources)
@@ -376,4 +371,3 @@ class TaskManager():
# Operations whose queries rely on modifications made during the atomic scheduling session
for wfj in WorkflowJob.objects.filter(id__in=finished_wfjs):
_send_notification_templates(wfj, 'succeeded' if wfj.status == 'successful' else 'failed')

View File

@@ -34,8 +34,7 @@ def project_update(mocker):
@pytest.fixture
def job(mocker, job_template, project_update):
    """Mocked Job carrying its related job_template and project_update.

    NOTE(review): the former ``source_workflow_job=None`` attribute is
    intentionally gone — the model field was replaced by ``workflow_job_id``.
    """
    return mocker.MagicMock(pk=5, job_template=job_template,
                            project_update=project_update)
@pytest.fixture