initial models and endpoints added for workflows

This commit is contained in:
Chris Meyers 2016-08-16 17:45:18 -04:00
parent 0007df0976
commit 4c876b40e4
16 changed files with 766 additions and 19 deletions

View File

@ -528,6 +528,8 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
serializer_class = JobTemplateSerializer
elif isinstance(obj, SystemJobTemplate):
serializer_class = SystemJobTemplateSerializer
elif isinstance(obj, WorkflowJobTemplateSerializer):
serializer_class = WorkflowJobTemplateSerializer
if serializer_class:
serializer = serializer_class(instance=obj, context=self.context)
return serializer.to_representation(obj)
@ -2168,6 +2170,95 @@ class SystemJobCancelSerializer(SystemJobSerializer):
class Meta:
fields = ('can_cancel',)
# TODO:
class WorkflowJobSerializer(UnifiedJobSerializer):
    '''
    Serialize a WorkflowJob, adding its template link and cancel endpoint
    to the related URLs.
    '''

    class Meta:
        model = WorkflowJob
        fields = ('*', 'workflow_job_template', 'extra_vars')

    def get_related(self, obj):
        res = super(WorkflowJobSerializer, self).get_related(obj)
        # BUG FIX: the original tested obj.system_job_template (copy/paste
        # from SystemJobSerializer) and would AttributeError before ever
        # guarding the workflow_job_template dereference below.
        if obj.workflow_job_template:
            res['workflow_job_template'] = reverse('api:workflow_job_template_detail',
                                                   args=(obj.workflow_job_template.pk,))
        # TODO:
        #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
        # NOTE(review): `or True` forces the cancel link to always render while
        # cancel support is under development — remove once can_cancel works.
        if obj.can_cancel or True:
            res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,))
        return res
# TODO:
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
    # List variant: combines workflow-job fields with the unified-job list
    # behavior; no additional fields of its own.
    pass
# TODO:
class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer):
    '''
    Serialize a WorkflowJobTemplate for list views, adding links to its
    jobs, launch endpoint and workflow node graph.
    '''

    class Meta:
        model = WorkflowJobTemplate
        fields = ('*',)

    def get_related(self, obj):
        related = super(WorkflowJobTemplateListSerializer, self).get_related(obj)
        related['jobs'] = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,))
        #related['schedules'] = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,))
        related['launch'] = reverse('api:workflow_job_template_launch', args=(obj.pk,))
        related['workflow_nodes'] = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,))
        # TODO: Implement notifications
        #related['notification_templates_any'] = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,))
        #related['notification_templates_success'] = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,))
        #related['notification_templates_error'] = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,))
        return related
class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer):
    # Detail variant; currently identical to the list serializer.
    pass
class WorkflowNodeSerializer(BaseSerializer):
    '''
    Serialize a WorkflowNode together with links to its owning workflow
    template, the template it runs, and its outcome-edge node lists.
    '''
    #workflow_job_template = UnifiedJobTemplateSerializer()

    class Meta:
        model = WorkflowNode
        fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template',
                  'success_nodes', 'failure_nodes', 'always_nodes',)

    def get_related(self, obj):
        related = super(WorkflowNodeSerializer, self).get_related(obj)
        related['workflow_job_template'] = reverse('api:workflow_job_template_detail',
                                                   args=(obj.workflow_job_template.pk,))
        if obj.unified_job_template:
            related['unified_job_template'] = obj.unified_job_template.get_absolute_url()
        # One child-node list per outcome edge.
        for edge in ('success_nodes', 'failure_nodes', 'always_nodes'):
            related[edge] = reverse('api:workflow_node_%s_list' % edge, args=(obj.pk,))
        return related
class WorkflowNodeDetailSerializer(WorkflowNodeSerializer):
    '''
    Influence the api browser sample data to not include workflow_job_template
    when editing a WorkflowNode.

    Note: I was not able to accomplish this through the use of extra_kwargs.
    Maybe something to do with workflow_job_template being a relational field?
    '''
    def build_relational_field(self, field_name, relation_info):
        field_class, field_kwargs = super(WorkflowNodeDetailSerializer, self).build_relational_field(field_name, relation_info)
        # Only on an existing instance: make the owning template read-only so
        # it cannot be re-parented from the detail view.
        if self.instance and field_name == 'workflow_job_template':
            field_kwargs['read_only'] = True
            # A read-only field may not carry a queryset.
            field_kwargs.pop('queryset', None)
        return field_class, field_kwargs
class WorkflowNodeListSerializer(WorkflowNodeSerializer):
    # List variant; currently identical to the base node serializer.
    pass
class JobListSerializer(JobSerializer, UnifiedJobListSerializer):
    # Combines job fields with unified-job list behavior.
    pass

View File

@ -255,6 +255,23 @@ system_job_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'),
)
workflow_job_template_urls = patterns('awx.api.views',
url(r'^$', 'workflow_job_template_list'),
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_detail'),
url(r'^(?P<pk>[0-9]+)/jobs/$', 'workflow_job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
# url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_template_cancel'),
#url(r'^(?P<pk>[0-9]+)/nodes/$', 'workflow_job_template_node_list'),
)
workflow_job_urls = patterns('awx.api.views',
url(r'^$', 'workflow_job_list'),
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
# url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
#url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
)
notification_template_urls = patterns('awx.api.views',
url(r'^$', 'notification_template_list'),
url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'),
@ -272,6 +289,14 @@ label_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/$', 'label_detail'),
)
workflow_node_urls = patterns('awx.api.views',
url(r'^$', 'workflow_node_list'),
url(r'^(?P<pk>[0-9]+)/$', 'workflow_node_detail'),
url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_node_success_nodes_list'),
url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_node_failure_nodes_list'),
url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_node_always_nodes_list'),
)
schedule_urls = patterns('awx.api.views',
url(r'^$', 'schedule_list'),
url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
@ -321,7 +346,10 @@ v1_urls = patterns('awx.api.views',
url(r'^system_jobs/', include(system_job_urls)),
url(r'^notification_templates/', include(notification_template_urls)),
url(r'^notifications/', include(notification_urls)),
url(r'^workflow_job_templates/',include(workflow_job_template_urls)),
url(r'^workflow_jobs/' ,include(workflow_job_urls)),
url(r'^labels/', include(label_urls)),
url(r'^workflow_nodes/', include(workflow_node_urls)),
url(r'^unified_job_templates/$','unified_job_template_list'),
url(r'^unified_jobs/$', 'unified_job_list'),
url(r'^activity_stream/', include(activity_stream_urls)),

View File

@ -11,6 +11,7 @@ import socket
import sys
import errno
import logging
import copy
from base64 import b64encode
from collections import OrderedDict
@ -145,6 +146,8 @@ class ApiV1RootView(APIView):
data['unified_job_templates'] = reverse('api:unified_job_template_list')
data['unified_jobs'] = reverse('api:unified_job_list')
data['activity_stream'] = reverse('api:activity_stream_list')
data['workflow_job_templates'] = reverse('api:workflow_job_template_list')
data['workflow_jobs'] = reverse('api:workflow_job_list')
return Response(data)
@ -1747,16 +1750,24 @@ class GroupList(ListCreateAPIView):
model = Group
serializer_class = GroupSerializer
class GroupChildrenList(SubListCreateAttachDetachAPIView):
'''
Useful when you have a self-refering ManyToManyRelationship.
* Tower uses a shallow (2-deep only) url pattern. For example:
model = Group
serializer_class = GroupSerializer
parent_model = Group
relationship = 'children'
When an object hangs off of a parent object you would have the url of the
form /api/v1/parent_model/34/child_model. If you then wanted a child of the
child model you would NOT do /api/v1/parent_model/34/child_model/87/child_child_model
Instead, you would access the child_child_model via /api/v1/child_child_model/87/
and you would create child_child_model's off of /api/v1/child_model/87/child_child_model_set
Now, when creating child_child_model related to child_model you still want to
link child_child_model to parent_model. That's what this class is for
'''
class EnforceParentRelationshipMixin(object):
    '''
    Mixin for sub-list views over a self-referring ManyToMany relationship:
    force newly created children to share the parent object's related field
    (e.g. a new child Group inherits its parent's inventory).

    Set ``enforce_parent_relationship`` to the name of the FK field to copy
    from the parent onto new children.
    '''
    enforce_parent_relationship = ''

    def update_raw_data(self, data):
        # Hide the enforced field from the browsable API's sample payload —
        # it is always overwritten server-side in create().
        data.pop(self.enforce_parent_relationship, None)
        return super(EnforceParentRelationshipMixin, self).update_raw_data(data)

    def create(self, request, *args, **kwargs):
        # Inject the parent object's enforced relationship ID into new object data.
        data = request.data
        # HACK: Make request data mutable.
        if getattr(data, '_mutable', None) is False:
            data._mutable = True
        # BUG FIX: the original read `'%s_id' % relationship`, but
        # `relationship` is undefined in this scope (NameError); the
        # configured enforce_parent_relationship field is what's meant.
        data[self.enforce_parent_relationship] = getattr(
            self.get_parent_object(), '%s_id' % self.enforce_parent_relationship)
        return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs)
def unattach(self, request, *args, **kwargs):
sub_id = request.data.get('id', None)
if sub_id is not None:
return super(GroupChildrenList, self).unattach(request, *args, **kwargs)
parent = self.get_parent_object()
parent.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
    # Children of a Group; new child groups inherit the parent group's
    # inventory via EnforceParentRelationshipMixin.
    model = Group
    serializer_class = GroupSerializer
    parent_model = Group
    relationship = 'children'
    enforce_parent_relationship = 'inventory'
class GroupPotentialChildrenList(SubListAPIView):
@ -2604,6 +2615,131 @@ class JobTemplateObjectRolesList(SubListAPIView):
content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk)
# TODO:
class WorkflowNodeList(ListCreateAPIView):
    # Flat list/create endpoint for workflow nodes.
    model = WorkflowNode
    serializer_class = WorkflowNodeSerializer
    new_in_310 = True
# TODO:
class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView):
    # Detail endpoint for a single workflow node.
    model = WorkflowNode
    serializer_class = WorkflowNodeDetailSerializer
    # NOTE(review): parent_model/relationship are sub-list view attributes;
    # they look unused on a RetrieveUpdateDestroyAPIView — verify intent.
    parent_model = WorkflowJobTemplate
    relationship = 'workflow_job_template'
    new_in_310 = True
class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
    '''
    Limit the set of WorkflowNodes to the related nodes specified by
    `relationship`.
    '''
    # FIX: this docstring was originally placed after the class attributes,
    # where it was a no-op string statement instead of the class __doc__.

    model = WorkflowNode
    serializer_class = WorkflowNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    parent_model = WorkflowNode
    relationship = ''
    enforce_parent_relationship = 'workflow_job_template'
    new_in_310 = True

    def get_queryset(self):
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        return getattr(parent, self.relationship).all()


class WorkflowNodeSuccessNodesList(WorkflowNodeChildrenBaseList):
    relationship = 'success_nodes'


class WorkflowNodeFailureNodesList(WorkflowNodeChildrenBaseList):
    relationship = 'failure_nodes'


class WorkflowNodeAlwaysNodesList(WorkflowNodeChildrenBaseList):
    relationship = 'always_nodes'
# TODO:
class WorkflowJobTemplateList(ListCreateAPIView):
    # List/create endpoint for workflow job templates.
    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateListSerializer
    always_allow_superuser = False
    # TODO: RBAC — grant the creating user admin_role once roles exist.
    '''
    def post(self, request, *args, **kwargs):
        ret = super(WorkflowJobTemplateList, self).post(request, *args, **kwargs)
        if ret.status_code == 201:
            workflow_job_template = WorkflowJobTemplate.objects.get(id=ret.data['id'])
            workflow_job_template.admin_role.members.add(request.user)
        return ret
    '''
# TODO:
class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
    # Detail endpoint for a single workflow job template.
    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateSerializer
    always_allow_superuser = False
# TODO:
class WorkflowJobTemplateLaunch(GenericAPIView):
    '''
    Launch a new WorkflowJob from a WorkflowJobTemplate.
    '''

    model = WorkflowJobTemplate
    serializer_class = EmptySerializer

    def get(self, request, *args, **kwargs):
        # No launch-time configuration is exposed yet.
        return Response({})

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'start', obj):
            raise PermissionDenied()
        new_job = obj.create_unified_job(**request.data)
        new_job.signal_start(**request.data)
        # BUG FIX: the response key was `system_job` — a copy/paste from the
        # system-job launch view; this endpoint launches a workflow job.
        data = dict(workflow_job=new_job.id)
        return Response(data, status=status.HTTP_201_CREATED)
# TODO:
class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
    # Nodes belonging to a workflow job template; new nodes are keyed to the
    # parent via parent_key.
    model = WorkflowNode
    serializer_class = WorkflowNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    parent_model = WorkflowJobTemplate
    relationship = 'workflow_nodes'
    parent_key = 'workflow_job_template'
# TODO:
class WorkflowJobTemplateJobsList(SubListAPIView):
    # Workflow jobs spawned from a given workflow job template.
    model = WorkflowJob
    serializer_class = WorkflowJobListSerializer
    parent_model = WorkflowJobTemplate
    relationship = 'jobs'
    parent_key = 'workflow_job_template'
# TODO:
class WorkflowJobList(ListCreateAPIView):
    '''List workflow jobs; restricted to superusers and system auditors.'''

    model = WorkflowJob
    serializer_class = WorkflowJobListSerializer

    def get(self, request, *args, **kwargs):
        user = request.user
        if user.is_superuser or user.is_system_auditor:
            return super(WorkflowJobList, self).get(request, *args, **kwargs)
        raise PermissionDenied("Superuser privileges needed.")
# TODO:
class WorkflowJobDetail(RetrieveDestroyAPIView):
    # Detail endpoint for a single workflow job (no update).
    model = WorkflowJob
    serializer_class = WorkflowJobSerializer
class SystemJobTemplateList(ListAPIView):
model = SystemJobTemplate

View File

@ -1132,6 +1132,142 @@ class SystemJobAccess(BaseAccess):
'''
model = SystemJob
# TODO:
class WorkflowNodeAccess(BaseAccess):
    '''
    I can see/use a WorkflowNode if I have permission to the associated
    Workflow Job Template.

    NOTE(review): RBAC is still TODO throughout — the per-object checks
    below are permissive placeholders.
    '''

    model = WorkflowNode

    def get_queryset(self):
        if self.user.is_superuser or self.user.is_system_auditor:
            return self.model.objects.all()
        # BUG FIX: the original fell off the end and returned None for
        # everyone else, which breaks queryset consumers. Return an empty
        # queryset until real RBAC filtering is implemented.
        return self.model.objects.none()

    @check_superuser
    def can_read(self, obj):
        return True

    @check_superuser
    def can_add(self, data):
        # `not data` must be allowed so the browseable API works; the
        # non-empty case is also permitted for now (TODO: RBAC).
        return True

    @check_superuser
    def can_change(self, obj, data):
        return self.can_add(data) is not False

    def can_delete(self, obj):
        return self.can_change(obj, None)
# TODO:
class WorkflowJobTemplateAccess(BaseAccess):
    '''
    I can see/manage Workflow Job Templates if I'm a super user, or per the
    object's read/execute/admin roles for the individual checks below.
    '''

    model = WorkflowJobTemplate

    # FIX: the original defined can_start twice; the short
    # `return self.can_read(obj)` version was shadowed by the full
    # definition further down, so it has been removed.

    def get_queryset(self):
        if self.user.is_superuser or self.user.is_system_auditor:
            qs = self.model.objects.all()
        else:
            qs = self.model.accessible_objects(self.user, 'read_role')
        return qs.select_related('created_by', 'modified_by', 'next_schedule').all()

    @check_superuser
    def can_read(self, obj):
        return self.user in obj.read_role

    def can_add(self, data):
        '''
        A user can create a workflow job template if they are a superuser.
        Role/permission-based creation for other users is still TODO —
        non-superusers currently get False.
        '''
        if not data:  # So the browseable API will work
            return True
        # if reference_obj is provided, determine if it can be copied
        reference_obj = data.pop('reference_obj', None)
        if 'survey_enabled' in data and data['survey_enabled']:
            self.check_license(feature='surveys')
        if self.user.is_superuser:
            return True

        def get_value(Class, field):
            # Resolve a related object either from the copy source or from a
            # pk in the posted data. (Currently unused — kept for the
            # pending RBAC implementation.)
            if reference_obj:
                return getattr(reference_obj, field, None)
            else:
                pk = get_pk_from_dict(data, field)
                if pk:
                    return get_object_or_400(Class, pk=pk)
                else:
                    return None
        return False

    def can_start(self, obj, validate_license=True):
        # TODO: Are workflows allowed for all licenses ??
        # Check license.
        '''
        if validate_license:
            self.check_license()
            if obj.job_type == PERM_INVENTORY_SCAN:
                self.check_license(feature='system_tracking')
            if obj.survey_enabled:
                self.check_license(feature='surveys')
        '''
        # Super users can start any job
        if self.user.is_superuser:
            return True
        return self.user in obj.execute_role

    def can_change(self, obj, data):
        data_for_change = data
        if self.user not in obj.admin_role and not self.user.is_superuser:
            return False
        # NOTE(review): statement nesting reconstructed from a
        # whitespace-mangled diff — verify the `return True` placement
        # against the original file.
        if data is not None:
            data = dict(data)
            if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
                self.check_license(feature='surveys')
            return True
        return self.can_read(obj) and self.can_add(data_for_change)

    def can_delete(self, obj):
        is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
        if not is_delete_allowed:
            return False
        active_jobs = [dict(type="job", id=o.id)
                       for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
        if len(active_jobs) > 0:
            raise StateConflict({"conflict": "Resource is being used by running jobs",
                                 "active_jobs": active_jobs})
        return True
class WorkflowJobAccess(BaseAccess):
    '''
    I can only see Workflow Jobs if I'm a super user
    '''
    # NOTE(review): no get_queryset/can_* overrides yet — behavior falls
    # through to BaseAccess defaults until RBAC is implemented.
    model = WorkflowJob
class AdHocCommandAccess(BaseAccess):
'''
I can only see/run ad hoc commands when:
@ -1724,3 +1860,6 @@ register_access(Role, RoleAccess)
register_access(NotificationTemplate, NotificationTemplateAccess)
register_access(Notification, NotificationAccess)
register_access(Label, LabelAccess)
register_access(WorkflowNode, WorkflowNodeAccess)
register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess)
register_access(WorkflowJob, WorkflowJobAccess)

View File

@ -110,6 +110,8 @@ class SimpleDAG(object):
return "project_update"
elif type(obj) == SystemJob:
return "system_job"
elif type(obj) == WorkflowJob:
return "workflow_job"
return "unknown"
def get_dependencies(self, obj):
@ -149,6 +151,7 @@ def get_tasks():
ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)]
graph_system_jobs = [sj for sj in
SystemJob.objects.filter(status__in=RELEVANT_JOBS)]
all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates +
graph_project_updates + graph_system_jobs,
key=lambda task: task.created)

View File

@ -0,0 +1,70 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import awx.main.fields
class Migration(migrations.Migration):
    # Adds the initial workflow models — WorkflowJob, WorkflowJobTemplate and
    # WorkflowNode — plus ActivityStream M2M hooks for each.

    dependencies = [
        ('main', '0032_v302_credential_permissions_update'),
    ]

    operations = [
        migrations.CreateModel(
            name='WorkflowJob',
            fields=[
                ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
                ('extra_vars', models.TextField(default=b'', blank=True)),
            ],
            options={
                'ordering': ('id',),
            },
            bases=('main.unifiedjob', models.Model),
        ),
        migrations.CreateModel(
            name='WorkflowJobTemplate',
            fields=[
                ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
                ('extra_vars', models.TextField(default=b'', blank=True)),
                # NOTE(review): null=b'True' is a truthy bytes string, not the
                # boolean True — it happens to behave as null=True but should
                # be the actual boolean; verify before shipping.
                ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')),
            ],
            bases=('main.unifiedjobtemplate', models.Model),
        ),
        migrations.CreateModel(
            name='WorkflowNode',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                # Outcome edges between nodes (directed, non-symmetrical).
                ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)),
                ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)),
                ('job', models.ForeignKey(related_name='workflow_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)),
                ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)),
                ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)),
                ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', to='main.WorkflowJobTemplate')),
            ],
        ),
        migrations.AddField(
            model_name='workflowjob',
            name='workflow_job_template',
            field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job',
            field=models.ManyToManyField(to='main.WorkflowJob', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job_template',
            field=models.ManyToManyField(to='main.WorkflowJobTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_node',
            field=models.ManyToManyField(to='main.WorkflowNode', blank=True),
        ),
    ]

View File

@ -22,6 +22,7 @@ from awx.main.models.mixins import * # noqa
from awx.main.models.notifications import * # noqa
from awx.main.models.fact import * # noqa
from awx.main.models.label import * # noqa
from awx.main.models.workflow import * # noqa
# Monkeypatch Django serializer to ignore django-taggit fields (which break
# the dumpdata command; see https://github.com/alex/django-taggit/issues/155).

View File

@ -49,6 +49,9 @@ class ActivityStream(models.Model):
permission = models.ManyToManyField("Permission", blank=True)
job_template = models.ManyToManyField("JobTemplate", blank=True)
job = models.ManyToManyField("Job", blank=True)
workflow_node = models.ManyToManyField("WorkflowNode", blank=True)
workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True)
workflow_job = models.ManyToManyField("WorkflowJob", blank=True)
unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+')
unified_job = models.ManyToManyField("UnifiedJob", blank=True, related_name='activity_stream_as_unified_job+')
ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)

160
awx/main/models/workflow.py Normal file
View File

@ -0,0 +1,160 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
# Django
from django.db import models
from django.core.urlresolvers import reverse
#from django import settings as tower_settings
# AWX
from awx.main.models import UnifiedJobTemplate, UnifiedJob
from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty
from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
)
from awx.main.fields import ImplicitRoleField
__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowNode']
class WorkflowNode(CreatedModifiedModel):
    '''
    A node in a workflow graph: belongs to one WorkflowJobTemplate, points at
    one UnifiedJobTemplate to run, and carries directed edges to the nodes to
    run next depending on outcome (success / failure / always).
    '''

    class Meta:
        app_label = 'main'

    # TODO: RBAC
    '''
    admin_role = ImplicitRoleField(
        parent_role='workflow_job_template.admin_role',
    )
    '''

    # Owning workflow; deleting the workflow cascades to its nodes.
    workflow_job_template = models.ForeignKey(
        'WorkflowJobTemplate',
        related_name='workflow_nodes',
        on_delete=models.CASCADE,
    )
    # The template this node runs; nullable so a graph can be built first.
    unified_job_template = models.ForeignKey(
        'UnifiedJobTemplate',
        related_name='unified_jt_workflow_nodes',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # Directed, non-symmetrical edges keyed by the node's job outcome.
    success_nodes = models.ManyToManyField(
        'self',
        related_name='parent_success_nodes',
        blank=True,
        symmetrical=False,
    )
    failure_nodes = models.ManyToManyField(
        'self',
        related_name='parent_failure_nodes',
        blank=True,
        symmetrical=False,
    )
    always_nodes = models.ManyToManyField(
        'self',
        related_name='parent_always_nodes',
        blank=True,
        symmetrical=False,
    )
    # Presumably the UnifiedJob spawned for this node once it runs — verify
    # against the (still-TODO) workflow execution logic.
    job = models.ForeignKey(
        'UnifiedJob',
        related_name='workflow_node',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )

    def get_absolute_url(self):
        return reverse('api:workflow_node_detail', args=(self.pk,))
class WorkflowJobOptions(BaseModel):
    '''
    Abstract base holding the fields shared by WorkflowJobTemplate and
    WorkflowJob.
    '''

    class Meta:
        abstract = True

    # Free-form variables text (parsed to a dict via VarsDictProperty on
    # WorkflowJob).
    extra_vars = models.TextField(
        blank=True,
        default='',
    )
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions):
    '''
    Template describing a workflow: a named graph of WorkflowNodes whose
    launches produce WorkflowJob records.
    '''

    class Meta:
        app_label = 'main'

    # Only the system administrator singleton parents admin_role for now.
    admin_role = ImplicitRoleField(
        parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    )

    @classmethod
    def _get_unified_job_class(cls):
        # Launching this template produces a WorkflowJob.
        return WorkflowJob

    @classmethod
    def _get_unified_job_field_names(cls):
        # Fields copied from the template onto a spawned job.
        # TODO: ADD LABELS
        return ['name', 'description', 'extra_vars', 'workflow_nodes']

    def get_absolute_url(self):
        return reverse('api:workflow_job_template_detail', args=(self.pk,))

    @property
    def cache_timeout_blocked(self):
        # TODO: don't allow running of job template if same workflow template running
        return False

    # TODO: Notifications
    # TODO: Surveys

    def create_job(self, **kwargs):
        '''
        Create a new job based on this template.
        '''
        return self.create_unified_job(**kwargs)
class WorkflowJob(UnifiedJob, WorkflowJobOptions):
    '''
    A single launch of a WorkflowJobTemplate.
    '''

    class Meta:
        app_label = 'main'
        ordering = ('id',)

    # Source template; SET_NULL keeps job history if the template is deleted.
    workflow_job_template = models.ForeignKey(
        'WorkflowJobTemplate',
        related_name='jobs',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    @classmethod
    def _get_parent_field_name(cls):
        return 'workflow_job_template'

    @classmethod
    def _get_task_class(cls):
        from awx.main.tasks import RunWorkflowJob
        return RunWorkflowJob

    def socketio_emit_data(self):
        return {}

    def get_absolute_url(self):
        return reverse('api:workflow_job_detail', args=(self.pk,))

    def get_ui_url(self):
        # BUG FIX: `urljoin` and `tower_settings` were not in scope (the
        # settings import at the top of this module is commented out);
        # import locally so this no longer raises NameError.
        from django.conf import settings as tower_settings
        try:
            from urllib.parse import urljoin
        except ImportError:  # Python 2
            from urlparse import urljoin
        return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk))

    def is_blocked_by(self, obj):
        # TODO: placeholder — a WorkflowJob currently reports itself blocked
        # by everything.
        return True

    @property
    def task_impact(self):
        # Workflows themselves consume no capacity; their spawned jobs do.
        return 0

View File

@ -55,8 +55,10 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'handle_work_error', 'handle_work_success',
'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks']
'RunAdHocCommand', 'RunWorkflowJob', 'handle_work_error',
'handle_work_success', 'update_inventory_computed_fields',
'send_notifications', 'run_administrative_checks',
'run_workflow_job']
HIDDEN_PASSWORD = '**********'
@ -1658,3 +1660,48 @@ class RunSystemJob(BaseTask):
def build_cwd(self, instance, **kwargs):
return settings.BASE_DIR
class RunWorkflowJob(BaseTask):
    # Celery task that drives a WorkflowJob. The actual graph-execution logic
    # is still TODO; currently this only transitions job status and emits
    # socket.io events.

    name = 'awx.main.tasks.run_workflow_job'
    model = WorkflowJob

    def run(self, pk, **kwargs):
        '''
        Run the job/task and capture its output.
        '''
        instance = self.update_model(pk, status='running', celery_task_id=self.request.id)
        instance.socketio_emit_status("running")
        # NOTE(review): status is never set to 'successful' anywhere below,
        # so every run currently ends in the error/raise path — confirm this
        # is intentional WIP behavior.
        status, rc, tb = 'error', None, ''
        output_replacements = []
        try:
            self.pre_run_hook(instance, **kwargs)
            if instance.cancel_flag:
                instance = self.update_model(instance.pk, status='canceled')
            if instance.status != 'running':
                if hasattr(settings, 'CELERY_UNIT_TEST'):
                    return
                else:
                    # Stop the task chain and prevent starting the job if it has
                    # already been canceled.
                    instance = self.update_model(pk)
                    status = instance.status
                    raise RuntimeError('not starting %s task' % instance.status)
            #status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle)
            # TODO: Do the workflow logic here
        except Exception:
            if status != 'canceled':
                tb = traceback.format_exc()
        instance = self.update_model(pk, status=status, result_traceback=tb)
        self.post_run_hook(instance, **kwargs)
        instance.socketio_emit_status(status)
        if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
            # Raising an exception will mark the job as 'failed' in celery
            # and will stop a task chain from continuing to execute
            if status == 'canceled':
                raise Exception("Task %s(pk:%s) was canceled (rc=%s)" % (str(self.model.__class__), str(pk), str(rc)))
            else:
                raise Exception("Task %s(pk:%s) encountered an error (rc=%s)" % (str(self.model.__class__), str(pk), str(rc)))
        if not hasattr(settings, 'CELERY_UNIT_TEST'):
            self.signal_finished(pk)

View File

@ -7,6 +7,7 @@ from awx.main.tests.factories import (
create_job_template,
create_notification_template,
create_survey_spec,
create_workflow_job_template,
)
@pytest.fixture
@ -40,6 +41,10 @@ def job_template_with_survey_passwords_factory(job_template_factory):
def job_with_secret_key_unit(job_with_secret_key_factory):
return job_with_secret_key_factory(persisted=False)
@pytest.fixture
def workflow_job_template_factory():
return create_workflow_job_template
@pytest.fixture
def get_ssh_version(mocker):
return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8')

View File

@ -3,6 +3,7 @@ from .tower import (
create_job_template,
create_notification_template,
create_survey_spec,
create_workflow_job_template,
)
from .exc import (
@ -14,5 +15,6 @@ __all__ = [
'create_job_template',
'create_notification_template',
'create_survey_spec',
'create_workflow_job_template',
'NotUnique',
]

View File

@ -13,6 +13,7 @@ from awx.main.models import (
Credential,
Inventory,
Label,
WorkflowJobTemplate,
)
# mk methods should create only a single object of a single type.
@ -152,3 +153,28 @@ def mk_job_template(name, job_type='run',
if persisted:
jt.save()
return jt
def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True):
    """Build (and optionally save) a WorkflowJobTemplate with an optional survey spec."""
    template = WorkflowJobTemplate(name=name, extra_vars=extra_vars)
    template.survey_spec = spec
    if spec is not None:
        template.survey_enabled = True
    if persisted:
        template.save()
    return template
def mk_workflow_node(workflow_job_template=None, unified_job_template=None,
                     success_nodes=None, failure_nodes=None, always_nodes=None,
                     job=None, persisted=True):
    """Build (and optionally save) a WorkflowNode, wiring up its edges.

    BUG FIXES vs. original: the constructor referenced the undefined name
    `job_template`, and passed many-to-many fields (success/failure/always
    nodes) to the model constructor, which raises TypeError — M2M relations
    can only be set after the instance has a primary key.
    """
    workflow_node = WorkflowNode(workflow_job_template=workflow_job_template,
                                 unified_job_template=unified_job_template,
                                 job=job)
    if persisted:
        workflow_node.save()
        if success_nodes:
            workflow_node.success_nodes.add(*success_nodes)
        if failure_nodes:
            workflow_node.failure_nodes.add(*failure_nodes)
        if always_nodes:
            workflow_node.always_nodes.add(*always_nodes)
    return workflow_node

View File

@ -9,6 +9,7 @@ from awx.main.models import (
Inventory,
Job,
Label,
WorkflowJobTemplate,
)
from .objects import (
@ -28,6 +29,7 @@ from .fixtures import (
mk_project,
mk_label,
mk_notification_template,
mk_workflow_job_template,
)
@ -343,3 +345,35 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
users=_Mapped(users),
superusers=_Mapped(superusers),
teams=teams)
def create_workflow_job_template(name, persisted=True, **kwargs):
    """Factory for a WorkflowJobTemplate plus optional survey and jobs.

    Accepts `survey` (spec description), `extra_vars`, and `jobs` (a list of
    Job instances or ids) via kwargs.
    """
    Objects = generate_objects(["workflow_job_template",
                                "survey", ], kwargs)

    spec = None
    # BUG FIX: was `jobs = None`, which made the item assignments below a
    # TypeError as soon as 'jobs' was passed.
    jobs = {}
    extra_vars = kwargs.get('extra_vars', '')

    if 'survey' in kwargs:
        spec = create_survey_spec(kwargs['survey'])

    wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars,
                                    persisted=persisted)

    if 'jobs' in kwargs:
        for i in kwargs['jobs']:
            if type(i) is Job:
                jobs[i.pk] = i
            else:
                # Fill in default survey answers
                job_extra_vars = {}
                for question in spec['spec']:
                    job_extra_vars[question['variable']] = question['default']
                # NOTE(review): mk_job is not among the visible imports of
                # this module — verify it is imported before this branch runs.
                jobs[i] = mk_job(job_template=wfjt, extra_vars=job_extra_vars,
                                 persisted=persisted)

    return Objects(workflow_job_template=wfjt,
                   #jobs=jobs,
                   survey=spec,)

View File

@ -43,6 +43,8 @@ class TestApiV1RootView:
'unified_job_templates',
'unified_jobs',
'activity_stream',
'workflow_job_templates',
'workflow_jobs',
]
view = ApiV1RootView()
ret = view.get(mocker.MagicMock())

View File

@ -1,2 +1,2 @@
#!/bin/bash
ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml
#ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml