mirror of https://github.com/ansible/awx.git
synced 2026-01-11 10:00:01 -03:30

turn job templates into jobs via launch

This commit is contained in:
parent 4c876b40e4
commit 2cfdee3b21
@@ -2172,7 +2172,6 @@ class SystemJobCancelSerializer(SystemJobSerializer):
 
-# TODO:
 class WorkflowJobSerializer(UnifiedJobSerializer):
 
@@ -2182,13 +2181,17 @@ class WorkflowJobSerializer(UnifiedJobSerializer):
 
     def get_related(self, obj):
         res = super(WorkflowJobSerializer, self).get_related(obj)
-        if obj.system_job_template:
+        if obj.workflow_job_template:
+            res['workflow_job_template'] = reverse('api:workflow_job_template_detail',
+                                                   args=(obj.workflow_job_template.pk,))
         # TODO:
         #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
+        res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,))
         # TODO: Cancel job
         '''
         if obj.can_cancel or True:
             res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,))
         '''
         return res
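For orientation, the related links this serializer now emits for a launched workflow job would look roughly as follows. This is a sketch only: the pks and the /api/v1/ prefix are illustrative, not taken from the commit.

# Sketch of get_related() output for a hypothetical WorkflowJob (pk=42)
# spawned from a WorkflowJobTemplate (pk=7); URL prefix assumed.
related = {
    'workflow_job_template': '/api/v1/workflow_job_templates/7/',
    'workflow_nodes': '/api/v1/workflow_jobs/42/workflow_nodes/',
    # 'cancel' and 'notifications' remain commented out / TODO above
}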
@@ -267,6 +267,7 @@ workflow_job_template_urls = patterns('awx.api.views',
 
 workflow_job_urls = patterns('awx.api.views',
     url(r'^$', 'workflow_job_list'),
     url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
+    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
     # url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
     #url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
 )
@@ -1775,7 +1775,7 @@ class EnforceParentRelationshipMixin(object):
 
         # HACK: Make request data mutable.
         if getattr(data, '_mutable', None) is False:
             data._mutable = True
-        data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % relationship)
+        data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % self.enforce_parent_relationship)
         return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs)
 
 
 class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
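The one-line fix above replaces a reference to the undefined name `relationship` with the `enforce_parent_relationship` attribute the mixin actually declares. A minimal sketch of the intended behavior; the 'inventory' value is an assumed example, not taken from this diff:

# A subclass declares which parent field to pin; inside create(), the
# mixin then overwrites the submitted data so that, e.g.,
#   data['inventory'] = parent_group.inventory_id
# regardless of what the client sent.
class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
    enforce_parent_relationship = 'inventory'  # assumed example value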
@@ -2702,7 +2702,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView):
 
         new_job = obj.create_unified_job(**request.data)
         new_job.signal_start(**request.data)
-        data = dict(system_job=new_job.id)
+        data = dict(workflow_job=new_job.id)
         return Response(data, status=status.HTTP_201_CREATED)
 
 # TODO:
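With the response key corrected from system_job to workflow_job, launching a workflow over HTTP would look roughly like this. A sketch only: the host, credentials, and /api/v1/ path are assumptions, not part of the commit.

import requests

# POST to the launch endpoint of a hypothetical template (pk=7).
resp = requests.post(
    'https://tower.example.com/api/v1/workflow_job_templates/7/launch/',
    auth=('admin', 'password'),
    json={},
)
assert resp.status_code == 201
workflow_job_id = resp.json()['workflow_job']  # key fixed by this commit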
@@ -2723,6 +2723,7 @@ class WorkflowJobTemplateJobsList(SubListAPIView):
 
     parent_model = WorkflowJobTemplate
     relationship = 'jobs'
+    parent_key = 'workflow_job_template'
 
 # TODO:
 class WorkflowJobList(ListCreateAPIView):
@@ -2740,6 +2741,16 @@ class WorkflowJobDetail(RetrieveDestroyAPIView):
 
     model = WorkflowJob
     serializer_class = WorkflowJobSerializer
 
+class WorkflowJobWorkflowNodesList(SubListAPIView):
+
+    model = WorkflowNode
+    serializer_class = WorkflowNodeListSerializer
+    always_allow_superuser = True # TODO: RBAC
+    parent_model = WorkflowJob
+    relationship = 'workflow_job_nodes'
+    parent_key = 'job'
+
 
 class SystemJobTemplateList(ListAPIView):
 
     model = SystemJobTemplate
@@ -137,6 +137,13 @@ class SimpleDAG(object):
 
                 leafs.append(n)
         return leafs
 
+    def get_root_nodes(self):
+        roots = []
+        for n in self.nodes:
+            if len(self.get_dependents(n['node_object'])) < 1:
+                roots.append(n)
+        return roots
+
 def get_tasks():
     """Fetch all Tower tasks that are relevant to the task management
     system.
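The new get_root_nodes() mirrors the existing get_leaf_nodes(): a root is a node with no edges pointing at it. A self-contained toy of the same idea, with plain strings standing in for SimpleDAG's {'node_object': ...} wrapper dicts:

# Roots are nodes that never appear as an edge target.
nodes = ['jt1', 'jt2', 'jt3']
edges = [('jt1', 'jt2'), ('jt1', 'jt3')]  # (parent, child)

def get_root_nodes(nodes, edges):
    children = set(child for _, child in edges)
    return [n for n in nodes if n not in children]

print(get_root_nodes(nodes, edges))  # ['jt1']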
@@ -151,9 +158,11 @@ def get_tasks():
 
                               ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)]
     graph_system_jobs = [sj for sj in
                          SystemJob.objects.filter(status__in=RELEVANT_JOBS)]
+    graph_workflow_jobs = [wf for wf in
+                           WorkflowJob.objects.filter(status__in=RELEVANT_JOBS)]
     all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates +
-                         graph_project_updates + graph_system_jobs,
+                         graph_project_updates + graph_system_jobs +
+                         graph_workflow_jobs,
                          key=lambda task: task.created)
     return all_actions
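This change folds workflow jobs into the single created-time ordering the task manager consumes. A toy of the merge-and-sort, with namedtuples standing in for the real model instances (all names illustrative):

from collections import namedtuple
from datetime import datetime

Task = namedtuple('Task', ['name', 'created'])
system_jobs = [Task('sj1', datetime(2016, 8, 30, 17, 0))]
workflow_jobs = [Task('wf1', datetime(2016, 8, 30, 16, 0))]

# Older tasks sort first, regardless of which list they came from.
all_actions = sorted(system_jobs + workflow_jobs, key=lambda t: t.created)
print([t.name for t in all_actions])  # ['wf1', 'sj1']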
awx/main/migrations/0034_auto_20160830_1716.py (new file, +20)
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0033_v301_workflow_create'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='workflownode',
+            name='job',
+            field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True),
+        ),
+    ]
@@ -8,6 +8,7 @@ from django.core.urlresolvers import reverse
 
 # AWX
 from awx.main.models import UnifiedJobTemplate, UnifiedJob
+from awx.main.models.notifications import JobNotificationMixin
 from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty
 from awx.main.models.rbac import (
     ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
@@ -61,7 +62,7 @@ class WorkflowNode(CreatedModifiedModel):
 
     )
     job = models.ForeignKey(
         'UnifiedJob',
-        related_name='workflow_node',
+        related_name='workflow_job_nodes',
         blank=True,
         null=True,
         default=None,
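Renaming related_name also renames the reverse accessor on UnifiedJob, which is what the migration above records. A sketch of the difference; the pk is hypothetical and this assumes a configured Django environment:

from awx.main.models import UnifiedJob

job = UnifiedJob.objects.get(pk=42)    # hypothetical pk
nodes = job.workflow_job_nodes.all()   # accessor after this change
# job.workflow_node.all()              # old accessor; gone after the rename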
@@ -96,7 +97,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions):
 
     @classmethod
     def _get_unified_job_field_names(cls):
         # TODO: ADD LABELS
-        return ['name', 'description', 'extra_vars', 'workflow_nodes']
+        return ['name', 'description', 'extra_vars',]
 
     def get_absolute_url(self):
         return reverse('api:workflow_job_template_detail', args=(self.pk,))
@@ -109,14 +110,53 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions):
 
     # TODO: Notifications
     # TODO: Surveys
 
-    def create_job(self, **kwargs):
-        '''
-        Create a new job based on this template.
-        '''
-        return self.create_unified_job(**kwargs)
+    #def create_job(self, **kwargs):
+    #    '''
+    #    Create a new job based on this template.
+    #    '''
+    #    return self.create_unified_job(**kwargs)
+
+    # TODO: Delete create_unified_job here and explicitly call create_workflow_job() .. figure out where the call is
+    def create_unified_job(self, **kwargs):
+        #def create_workflow_job(self, **kwargs):
+        #workflow_job = self.create_unified_job(**kwargs)
+        workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
+        workflow_job.inherit_jt_workflow_nodes()
+        return workflow_job
+
+
+class WorkflowJobInheritNodesMixin(object):
+    def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type):
+        old_related_nodes = getattr(old_node, node_type).all()
+        new_node_type_mgr = getattr(new_node, node_type)
+
+        for old_related_node in old_related_nodes:
+            new_related_node_id = node_ids_map[old_related_node.id]
+            new_related_node = WorkflowNode.objects.get(id=new_related_node_id)
+            new_node_type_mgr.add(new_related_node)
+
+    def inherit_jt_workflow_nodes(self):
+        new_nodes = []
+        old_nodes = self.workflow_job_template.workflow_nodes.all()
+
+        node_ids_map = {}
+
+        for old_node in old_nodes:
+            new_node = WorkflowNode.objects.get(id=old_node.pk)
+            new_node.job = self
+            new_node.pk = None
+            new_node.save()
+            new_nodes.append(new_node)
+
+            node_ids_map[old_node.id] = new_node.id
+
+        for index, old_node in enumerate(old_nodes):
+            new_node = new_nodes[index]
+            for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
+                self._inherit_relationship(old_node, new_node, node_ids_map, node_type)
+
+
-class WorkflowJob(UnifiedJob, WorkflowJobOptions):
+class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, WorkflowJobInheritNodesMixin):
 
     class Meta:
         app_label = 'main'
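inherit_jt_workflow_nodes() is a two-pass copy: the first pass clones every template node onto the new job while recording an old-id to new-id map, and the second pass walks the success/failure/always relations and re-links them through that map so the copied graph keeps its shape. A self-contained toy of the same pattern (all names illustrative, not the AWX models):

# node_id -> list of 'kind:target' edges
template_nodes = {1: ['success:2'], 2: []}
id_map, job_nodes = {}, {}

for old_id in template_nodes:          # pass 1: clone nodes, build id map
    new_id = old_id + 100              # stand-in for a freshly saved pk
    id_map[old_id] = new_id
    job_nodes[new_id] = []

for old_id, edges in template_nodes.items():   # pass 2: re-link edges
    for edge in edges:
        kind, target = edge.split(':')
        job_nodes[id_map[old_id]].append('%s:%s' % (kind, id_map[int(target)]))

print(job_nodes)  # {101: ['success:102'], 102: []}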
@@ -158,3 +198,11 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions):
 
     def task_impact(self):
         return 0
+
+    # TODO: workflow job notifications
+    def get_notification_templates(self):
+        return []
+
+    # TODO: workflow job notifications
+    def get_notification_friendly_name(self):
+        return "Workflow Job"
@@ -191,7 +191,6 @@ def notify_task_runner(metadata_dict):
 
 def _send_notification_templates(instance, status_str):
     if status_str not in ['succeeded', 'failed']:
         raise ValueError("status_str must be either succeeded or failed")
-    print("Instance has some shit in it %s" % instance)
     notification_templates = instance.get_notification_templates()
     if notification_templates:
         all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', []))
@@ -239,8 +238,6 @@ def handle_work_error(self, task_id, subtasks=None):
 
             instance.socketio_emit_status("failed")
 
     if first_instance:
-        print("Instance type is %s" % first_instance_type)
-        print("Instance passing along %s" % first_instance.name)
         _send_notification_templates(first_instance, 'failed')
 
 @task()
@@ -1675,7 +1672,6 @@ class RunWorkflowJob(BaseTask):
 
         status, rc, tb = 'error', None, ''
         output_replacements = []
         try:
-            self.pre_run_hook(instance, **kwargs)
             if instance.cancel_flag:
                 instance = self.update_model(instance.pk, status='canceled')
             if instance.status != 'running':
@@ -1692,8 +1688,8 @@ class RunWorkflowJob(BaseTask):
 
         except Exception:
             if status != 'canceled':
                 tb = traceback.format_exc()
+        status = 'successful'
         instance = self.update_model(pk, status=status, result_traceback=tb)
-        self.post_run_hook(instance, **kwargs)
         instance.socketio_emit_status(status)
         if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
             # Raising an exception will mark the job as 'failed' in celery
@@ -15,6 +15,7 @@ services:
 
     # - sync
     volumes:
       - "../:/tower_devel"
+    privileged: true
 
   # Postgres Database Container
   postgres: