Merge pull request #3474 from chrismeyersfsu/feature-workflows

workflows
This commit is contained in:
Chris Meyers
2016-09-15 10:12:58 -04:00
committed by GitHub
30 changed files with 1980 additions and 275 deletions

View File

@@ -514,7 +514,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
def get_types(self): def get_types(self):
if type(self) is UnifiedJobTemplateSerializer: if type(self) is UnifiedJobTemplateSerializer:
return ['project', 'inventory_source', 'job_template', 'system_job_template'] return ['project', 'inventory_source', 'job_template', 'system_job_template', 'workflow_job_template',]
else: else:
return super(UnifiedJobTemplateSerializer, self).get_types() return super(UnifiedJobTemplateSerializer, self).get_types()
@@ -529,6 +529,8 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
serializer_class = JobTemplateSerializer serializer_class = JobTemplateSerializer
elif isinstance(obj, SystemJobTemplate): elif isinstance(obj, SystemJobTemplate):
serializer_class = SystemJobTemplateSerializer serializer_class = SystemJobTemplateSerializer
elif isinstance(obj, WorkflowJobTemplate):
serializer_class = WorkflowJobTemplateSerializer
if serializer_class: if serializer_class:
serializer = serializer_class(instance=obj, context=self.context) serializer = serializer_class(instance=obj, context=self.context)
return serializer.to_representation(obj) return serializer.to_representation(obj)
@@ -559,7 +561,7 @@ class UnifiedJobSerializer(BaseSerializer):
def get_types(self): def get_types(self):
if type(self) is UnifiedJobSerializer: if type(self) is UnifiedJobSerializer:
return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job'] return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job', 'workflow_job',]
else: else:
return super(UnifiedJobSerializer, self).get_types() return super(UnifiedJobSerializer, self).get_types()
@@ -592,6 +594,8 @@ class UnifiedJobSerializer(BaseSerializer):
serializer_class = AdHocCommandSerializer serializer_class = AdHocCommandSerializer
elif isinstance(obj, SystemJob): elif isinstance(obj, SystemJob):
serializer_class = SystemJobSerializer serializer_class = SystemJobSerializer
elif isinstance(obj, WorkflowJob):
serializer_class = WorkflowJobSerializer
if serializer_class: if serializer_class:
serializer = serializer_class(instance=obj, context=self.context) serializer = serializer_class(instance=obj, context=self.context)
ret = serializer.to_representation(obj) ret = serializer.to_representation(obj)
@@ -639,6 +643,8 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):
serializer_class = AdHocCommandListSerializer serializer_class = AdHocCommandListSerializer
elif isinstance(obj, SystemJob): elif isinstance(obj, SystemJob):
serializer_class = SystemJobListSerializer serializer_class = SystemJobListSerializer
elif isinstance(obj, WorkflowJob):
serializer_class = WorkflowJobSerializer
if serializer_class: if serializer_class:
serializer = serializer_class(instance=obj, context=self.context) serializer = serializer_class(instance=obj, context=self.context)
ret = serializer.to_representation(obj) ret = serializer.to_representation(obj)
@@ -2169,6 +2175,123 @@ class SystemJobCancelSerializer(SystemJobSerializer):
class Meta: class Meta:
fields = ('can_cancel',) fields = ('can_cancel',)
class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer):
    """Serializer for WorkflowJobTemplate resources, extending the unified template fields."""

    class Meta:
        model = WorkflowJobTemplate
        fields = ('*',)

    def get_related(self, obj):
        """Add related links: spawned jobs, the launch endpoint, and the node graph."""
        res = super(WorkflowJobTemplateSerializer, self).get_related(obj)
        res.update(dict(
            jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)),
            #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)),
            launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)),
            workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)),
            # TODO: Implement notifications
            #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)),
            #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)),
            #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)),
        ))
        return res


# TODO:
class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer):
    # List view is currently identical to the detail serializer.
    pass
# TODO:
class WorkflowJobSerializer(UnifiedJobSerializer):
    """Serializer for WorkflowJob runs; exposes the owning template and extra_vars."""

    class Meta:
        model = WorkflowJob
        fields = ('*', 'workflow_job_template', 'extra_vars')

    def get_related(self, obj):
        """Add links to the parent workflow job template and this run's node graph."""
        res = super(WorkflowJobSerializer, self).get_related(obj)
        if obj.workflow_job_template:
            res['workflow_job_template'] = reverse('api:workflow_job_template_detail',
                                                   args=(obj.workflow_job_template.pk,))
        # TODO:
        #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
        res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,))
        # TODO: Cancel job
        '''
        if obj.can_cancel or True:
            res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,))
        '''
        return res


# TODO:
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
    # Combines workflow job fields with the unified list-view behavior.
    pass
class WorkflowNodeBaseSerializer(BaseSerializer):
    """Shared field set and related-link logic for workflow node serializers."""

    class Meta:
        # TODO: workflow_job and job read-only
        fields = ('id', 'url', 'related', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',)

    def get_related(self, obj):
        """Add a link to the node's unified job template, when one is set."""
        related = super(WorkflowNodeBaseSerializer, self).get_related(obj)
        template = obj.unified_job_template
        if template:
            related['unified_job_template'] = template.get_absolute_url()
        return related
class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
    """Serializer for nodes belonging to a WorkflowJobTemplate's graph."""

    class Meta:
        model = WorkflowJobTemplateNode
        fields = ('*', 'workflow_job_template',)

    def get_related(self, obj):
        """Add edge-list links and a link back to the owning template."""
        related = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj)
        related['success_nodes'] = reverse('api:workflow_job_template_node_success_nodes_list', args=(obj.pk,))
        related['failure_nodes'] = reverse('api:workflow_job_template_node_failure_nodes_list', args=(obj.pk,))
        related['always_nodes'] = reverse('api:workflow_job_template_node_always_nodes_list', args=(obj.pk,))
        owner = obj.workflow_job_template
        if owner:
            related['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(owner.pk,))
        return related
class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):
    """Serializer for nodes of a running/finished WorkflowJob."""

    class Meta:
        # Fix: was WorkflowJobTemplateNode, but this serializer handles
        # WorkflowJobNode — its fields ('job', 'workflow_job') and related
        # links all belong to the job-side node model.
        model = WorkflowJobNode
        fields = ('*', 'job', 'workflow_job',)

    def get_related(self, obj):
        """Add edge-list links plus links to the spawned job and owning workflow job."""
        res = super(WorkflowJobNodeSerializer, self).get_related(obj)
        res['success_nodes'] = reverse('api:workflow_job_node_success_nodes_list', args=(obj.pk,))
        res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,))
        res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,))
        if obj.job:
            res['job'] = reverse('api:job_detail', args=(obj.job.pk,))
        if obj.workflow_job:
            res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,))
        return res
class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer):
    # List view currently shares the base representation.
    pass


class WorkflowJobNodeDetailSerializer(WorkflowJobNodeSerializer):
    # Detail view currently shares the base representation.
    pass
class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer):
    '''
    Influence the api browser sample data to not include workflow_job_template
    when editing a WorkflowNode.

    Note: I was not able to accomplish this through the use of extra_kwargs.
    Maybe something to do with workflow_job_template being a relational field?
    '''
    def build_relational_field(self, field_name, relation_info):
        # Only force read-only when editing an existing instance; creation
        # still needs a writable workflow_job_template.
        field_class, field_kwargs = super(WorkflowJobTemplateNodeDetailSerializer, self).build_relational_field(field_name, relation_info)
        if self.instance and field_name == 'workflow_job_template':
            field_kwargs['read_only'] = True
            field_kwargs.pop('queryset', None)
        return field_class, field_kwargs


class WorkflowJobTemplateNodeListSerializer(WorkflowJobTemplateNodeSerializer):
    # List view currently shares the base representation.
    pass
class JobListSerializer(JobSerializer, UnifiedJobListSerializer): class JobListSerializer(JobSerializer, UnifiedJobListSerializer):
pass pass

View File

@@ -257,6 +257,24 @@ system_job_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'), url(r'^(?P<pk>[0-9]+)/notifications/$', 'system_job_notifications_list'),
) )
# Routes for workflow job templates: list/detail plus nested jobs, launch,
# and the workflow node graph.
workflow_job_template_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_template_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', 'workflow_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
    # url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_template_cancel'),
)

# Routes for workflow job runs: list/detail plus the run's node graph.
workflow_job_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
    # url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
    #url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
)
notification_template_urls = patterns('awx.api.views', notification_template_urls = patterns('awx.api.views',
url(r'^$', 'notification_template_list'), url(r'^$', 'notification_template_list'),
url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'), url(r'^(?P<pk>[0-9]+)/$', 'notification_template_detail'),
@@ -274,6 +292,22 @@ label_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/$', 'label_detail'), url(r'^(?P<pk>[0-9]+)/$', 'label_detail'),
) )
# Routes for template-side workflow nodes and their success/failure/always edges.
workflow_job_template_node_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_template_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_template_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_template_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_template_node_always_nodes_list'),
)

# Routes for job-side workflow nodes and their success/failure/always edges.
workflow_job_node_urls = patterns('awx.api.views',
    url(r'^$', 'workflow_job_node_list'),
    url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_node_detail'),
    url(r'^(?P<pk>[0-9]+)/success_nodes/$', 'workflow_job_node_success_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/failure_nodes/$', 'workflow_job_node_failure_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/always_nodes/$', 'workflow_job_node_always_nodes_list'),
)
schedule_urls = patterns('awx.api.views', schedule_urls = patterns('awx.api.views',
url(r'^$', 'schedule_list'), url(r'^$', 'schedule_list'),
url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'), url(r'^(?P<pk>[0-9]+)/$', 'schedule_detail'),
@@ -323,7 +357,11 @@ v1_urls = patterns('awx.api.views',
url(r'^system_jobs/', include(system_job_urls)), url(r'^system_jobs/', include(system_job_urls)),
url(r'^notification_templates/', include(notification_template_urls)), url(r'^notification_templates/', include(notification_template_urls)),
url(r'^notifications/', include(notification_urls)), url(r'^notifications/', include(notification_urls)),
url(r'^workflow_job_templates/',include(workflow_job_template_urls)),
url(r'^workflow_jobs/' ,include(workflow_job_urls)),
url(r'^labels/', include(label_urls)), url(r'^labels/', include(label_urls)),
url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)),
url(r'^workflow_job_nodes/', include(workflow_job_node_urls)),
url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_job_templates/$','unified_job_template_list'),
url(r'^unified_jobs/$', 'unified_job_list'), url(r'^unified_jobs/$', 'unified_job_list'),
url(r'^activity_stream/', include(activity_stream_urls)), url(r'^activity_stream/', include(activity_stream_urls)),

View File

@@ -148,6 +148,8 @@ class ApiV1RootView(APIView):
data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_job_templates'] = reverse('api:unified_job_template_list')
data['unified_jobs'] = reverse('api:unified_job_list') data['unified_jobs'] = reverse('api:unified_job_list')
data['activity_stream'] = reverse('api:activity_stream_list') data['activity_stream'] = reverse('api:activity_stream_list')
data['workflow_job_templates'] = reverse('api:workflow_job_template_list')
data['workflow_jobs'] = reverse('api:workflow_job_list')
return Response(data) return Response(data)
@@ -1756,16 +1758,24 @@ class GroupList(ListCreateAPIView):
model = Group model = Group
serializer_class = GroupSerializer serializer_class = GroupSerializer
class GroupChildrenList(SubListCreateAttachDetachAPIView): '''
Useful when you have a self-refering ManyToManyRelationship.
* Tower uses a shallow (2-deep only) url pattern. For example:
model = Group When an object hangs off of a parent object you would have the url of the
serializer_class = GroupSerializer form /api/v1/parent_model/34/child_model. If you then wanted a child of the
parent_model = Group child model you would NOT do /api/v1/parent_model/34/child_model/87/child_child_model
relationship = 'children' Instead, you would access the child_child_model via /api/v1/child_child_model/87/
and you would create child_child_model's off of /api/v1/child_model/87/child_child_model_set
Now, when creating child_child_model related to child_model you still want to
link child_child_model to parent_model. That's what this class is for
'''
class EnforceParentRelationshipMixin(object):
enforce_parent_relationship = ''
def update_raw_data(self, data): def update_raw_data(self, data):
data.pop('inventory', None) data.pop(self.enforce_parent_relationship, None)
return super(GroupChildrenList, self).update_raw_data(data) return super(EnforceParentRelationshipMixin, self).update_raw_data(data)
def create(self, request, *args, **kwargs): def create(self, request, *args, **kwargs):
# Inject parent group inventory ID into new group data. # Inject parent group inventory ID into new group data.
@@ -1773,16 +1783,16 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
# HACK: Make request data mutable. # HACK: Make request data mutable.
if getattr(data, '_mutable', None) is False: if getattr(data, '_mutable', None) is False:
data._mutable = True data._mutable = True
data['inventory'] = self.get_parent_object().inventory_id data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % self.enforce_parent_relationship)
return super(GroupChildrenList, self).create(request, *args, **kwargs) return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs)
def unattach(self, request, *args, **kwargs): class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
sub_id = request.data.get('id', None)
if sub_id is not None: model = Group
return super(GroupChildrenList, self).unattach(request, *args, **kwargs) serializer_class = GroupSerializer
parent = self.get_parent_object() parent_model = Group
parent.delete() relationship = 'children'
return Response(status=status.HTTP_204_NO_CONTENT) enforce_parent_relationship = 'inventory'
class GroupPotentialChildrenList(SubListAPIView): class GroupPotentialChildrenList(SubListAPIView):
@@ -2618,6 +2628,182 @@ class JobTemplateObjectRolesList(SubListAPIView):
content_type = ContentType.objects.get_for_model(self.parent_model) content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk) return Role.objects.filter(content_type=content_type, object_id=po.pk)
# TODO:
class WorkflowJobNodeList(ListCreateAPIView):
    # Global list of job-side workflow nodes.
    model = WorkflowJobNode
    serializer_class = WorkflowJobNodeListSerializer
    new_in_310 = True


# TODO:
class WorkflowJobNodeDetail(RetrieveUpdateDestroyAPIView):
    # Detail/update/delete for a single job-side workflow node.
    model = WorkflowJobNode
    serializer_class = WorkflowJobNodeDetailSerializer
    new_in_310 = True


# TODO:
class WorkflowJobTemplateNodeList(ListCreateAPIView):
    # Global list of template-side workflow nodes.
    model = WorkflowJobTemplateNode
    serializer_class = WorkflowJobTemplateNodeListSerializer
    new_in_310 = True


# TODO:
class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView):
    # Detail/update/delete for a single template-side workflow node.
    model = WorkflowJobTemplateNode
    serializer_class = WorkflowJobTemplateNodeDetailSerializer
    new_in_310 = True
class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
    """
    Limit the set of WorkflowJobTemplateNodes to the related nodes specified
    by 'relationship' (success_nodes / failure_nodes / always_nodes).
    """
    # Fix: the explanatory text above was a bare string expression stranded in
    # the middle of the class body (a no-op); it is now the class docstring.
    model = WorkflowJobTemplateNode
    serializer_class = WorkflowJobTemplateNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    parent_model = WorkflowJobTemplateNode
    relationship = ''
    enforce_parent_relationship = 'workflow_job_template'
    new_in_310 = True

    def get_queryset(self):
        """Return the parent node's related nodes for the configured edge type."""
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        return getattr(parent, self.relationship).all()


class WorkflowJobTemplateNodeSuccessNodesList(WorkflowJobTemplateNodeChildrenBaseList):
    relationship = 'success_nodes'


class WorkflowJobTemplateNodeFailureNodesList(WorkflowJobTemplateNodeChildrenBaseList):
    relationship = 'failure_nodes'


class WorkflowJobTemplateNodeAlwaysNodesList(WorkflowJobTemplateNodeChildrenBaseList):
    relationship = 'always_nodes'
class WorkflowJobNodeChildrenBaseList(SubListAPIView):
    """
    Limit the set of WorkflowJobNodes to the related nodes specified by
    'relationship' (success_nodes / failure_nodes / always_nodes).
    """
    model = WorkflowJobNode
    serializer_class = WorkflowJobNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    # Fix: parent_model was Job, but these sub-lists are routed under
    # /workflow_job_nodes/<pk>/ and get_queryset reads parent.success_nodes
    # etc., which are WorkflowJobNode relationships.
    parent_model = WorkflowJobNode
    relationship = ''
    # Fix: new_in_310 was trapped inside a dead triple-quoted string and
    # therefore never set on the class.
    new_in_310 = True

    def get_queryset(self):
        """Return the parent node's related nodes for the configured edge type."""
        parent = self.get_parent_object()
        self.check_parent_access(parent)
        return getattr(parent, self.relationship).all()


class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
    relationship = 'success_nodes'


class WorkflowJobNodeFailureNodesList(WorkflowJobNodeChildrenBaseList):
    relationship = 'failure_nodes'


class WorkflowJobNodeAlwaysNodesList(WorkflowJobNodeChildrenBaseList):
    relationship = 'always_nodes'
# TODO:
class WorkflowJobTemplateList(ListCreateAPIView):
    # List and create workflow job templates.
    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateListSerializer
    always_allow_superuser = False
    # TODO: RBAC
    '''
    def post(self, request, *args, **kwargs):
        ret = super(WorkflowJobTemplateList, self).post(request, *args, **kwargs)
        if ret.status_code == 201:
            workflow_job_template = WorkflowJobTemplate.objects.get(id=ret.data['id'])
            workflow_job_template.admin_role.members.add(request.user)
        return ret
    '''


# TODO:
class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
    # Detail/update/delete for a single workflow job template.
    model = WorkflowJobTemplate
    serializer_class = WorkflowJobTemplateSerializer
    always_allow_superuser = False
# TODO:
class WorkflowJobTemplateLaunch(GenericAPIView):
    # Launch endpoint: GET describes the (currently empty) launch form,
    # POST spawns and starts a WorkflowJob from this template.
    model = WorkflowJobTemplate
    serializer_class = EmptySerializer

    def get(self, request, *args, **kwargs):
        # No launch-time configuration is exposed yet.
        return Response({})

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'start', obj):
            raise PermissionDenied()
        # NOTE(review): request.data is forwarded unvalidated to job creation
        # and signal_start — confirm upstream validation of these kwargs.
        new_job = obj.create_unified_job(**request.data)
        new_job.signal_start(**request.data)
        data = dict(workflow_job=new_job.id)
        return Response(data, status=status.HTTP_201_CREATED)
# TODO:
class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
    # Nodes making up this template's workflow graph; creation links the new
    # node to the parent template via parent_key.
    model = WorkflowJobTemplateNode
    serializer_class = WorkflowJobTemplateNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    parent_model = WorkflowJobTemplate
    relationship = 'workflow_job_template_nodes'
    parent_key = 'workflow_job_template'


# TODO:
class WorkflowJobTemplateJobsList(SubListAPIView):
    # WorkflowJobs spawned from this template.
    model = WorkflowJob
    serializer_class = WorkflowJobListSerializer
    parent_model = WorkflowJobTemplate
    relationship = 'jobs'
    parent_key = 'workflow_job_template'
# TODO:
class WorkflowJobList(ListCreateAPIView):
    # Global list of workflow job runs; restricted to superusers/auditors.
    model = WorkflowJob
    serializer_class = WorkflowJobListSerializer

    def get(self, request, *args, **kwargs):
        if not request.user.is_superuser and not request.user.is_system_auditor:
            raise PermissionDenied("Superuser privileges needed.")
        return super(WorkflowJobList, self).get(request, *args, **kwargs)


# TODO:
class WorkflowJobDetail(RetrieveDestroyAPIView):
    # Detail/delete for a single workflow job run.
    model = WorkflowJob
    serializer_class = WorkflowJobSerializer


class WorkflowJobWorkflowNodesList(SubListAPIView):
    # Nodes making up this run's workflow graph.
    model = WorkflowJobNode
    serializer_class = WorkflowJobNodeListSerializer
    always_allow_superuser = True # TODO: RBAC
    parent_model = WorkflowJob
    relationship = 'workflow_job_nodes'
    parent_key = 'workflow_job'
class SystemJobTemplateList(ListAPIView): class SystemJobTemplateList(ListAPIView):
model = SystemJobTemplate model = SystemJobTemplate

View File

@@ -1132,6 +1132,173 @@ class SystemJobAccess(BaseAccess):
''' '''
model = SystemJob model = SystemJob
# TODO:
class WorkflowJobTemplateNodeAccess(BaseAccess):
    '''
    I can see/use a WorkflowJobTemplateNode if I have permission to the
    associated Workflow Job Template.
    '''
    model = WorkflowJobTemplateNode

    def get_queryset(self):
        """Superusers/auditors see everything; everyone else sees nothing (yet)."""
        if self.user.is_superuser or self.user.is_system_auditor:
            return self.model.objects.all()
        # Fix: previously fell through and implicitly returned None; callers
        # expect a queryset, so return an empty one instead.
        return self.model.objects.none()

    @check_superuser
    def can_read(self, obj):
        # TODO: restrict by access to the owning workflow job template.
        return True

    @check_superuser
    def can_add(self, data):
        if not data: # So the browseable API will work
            return True
        # TODO: real RBAC checks.
        return True

    @check_superuser
    def can_change(self, obj, data):
        if self.can_add(data) is False:
            return False
        return True

    def can_delete(self, obj):
        return self.can_change(obj, None)
# TODO:
class WorkflowJobNodeAccess(BaseAccess):
    '''
    I can see/use a WorkflowJobNode if I have permission to the associated
    Workflow Job.
    '''
    model = WorkflowJobNode

    def get_queryset(self):
        """Superusers/auditors see everything; everyone else sees nothing (yet)."""
        if self.user.is_superuser or self.user.is_system_auditor:
            return self.model.objects.all()
        # Fix: previously fell through and implicitly returned None; callers
        # expect a queryset, so return an empty one instead.
        return self.model.objects.none()

    @check_superuser
    def can_read(self, obj):
        # TODO: restrict by access to the owning workflow job.
        return True

    @check_superuser
    def can_add(self, data):
        if not data: # So the browseable API will work
            return True
        # TODO: real RBAC checks.
        return True

    @check_superuser
    def can_change(self, obj, data):
        if self.can_add(data) is False:
            return False
        return True

    def can_delete(self, obj):
        return self.can_change(obj, None)
# TODO:
class WorkflowJobTemplateAccess(BaseAccess):
    '''
    I can only see/manage Workflow Job Templates if I'm a super user
    '''
    model = WorkflowJobTemplate

    def get_queryset(self):
        # Superusers/auditors see all templates; others see those granted via read_role.
        if self.user.is_superuser or self.user.is_system_auditor:
            qs = self.model.objects.all()
        else:
            qs = self.model.accessible_objects(self.user, 'read_role')
        return qs.select_related('created_by', 'modified_by', 'next_schedule').all()

    @check_superuser
    def can_read(self, obj):
        return self.user in obj.read_role

    def can_add(self, data):
        '''
        a user can create a job template if they are a superuser, an org admin
        of any org that the project is a member, or if they have user or team
        based permissions tying the project to the inventory source for the
        given action as well as the 'create' deploy permission.
        Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
        '''
        if not data: # So the browseable API will work
            return True

        # if reference_obj is provided, determine if it can be copied
        reference_obj = data.pop('reference_obj', None)

        if 'survey_enabled' in data and data['survey_enabled']:
            self.check_license(feature='surveys')

        if self.user.is_superuser:
            return True

        # NOTE(review): get_value is defined but never called below — the
        # non-superuser path always returns False. Confirm whether this is
        # scaffolding for future RBAC checks or dead code.
        def get_value(Class, field):
            if reference_obj:
                return getattr(reference_obj, field, None)
            else:
                pk = get_pk_from_dict(data, field)
                if pk:
                    return get_object_or_400(Class, pk=pk)
                else:
                    return None

        return False

    def can_start(self, obj, validate_license=True):
        # TODO: Are workflows allowed for all licenses ??
        # Check license.
        '''
        if validate_license:
            self.check_license()
            if obj.job_type == PERM_INVENTORY_SCAN:
                self.check_license(feature='system_tracking')
            if obj.survey_enabled:
                self.check_license(feature='surveys')
        '''

        # Super users can start any job
        if self.user.is_superuser:
            return True

        return self.can_read(obj)
        # TODO: We should use execute role rather than read role
        #return self.user in obj.execute_role

    def can_change(self, obj, data):
        data_for_change = data
        if self.user not in obj.admin_role and not self.user.is_superuser:
            return False
        if data is not None:
            data = dict(data)

            # Enabling a survey (or changing the flag) requires a surveys license.
            if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
                self.check_license(feature='surveys')
            return True

        return self.can_read(obj) and self.can_add(data_for_change)

    def can_delete(self, obj):
        # Deletion requires admin access and no active jobs spawned from this template.
        is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
        if not is_delete_allowed:
            return False
        active_jobs = [dict(type="job", id=o.id)
                       for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
        if len(active_jobs) > 0:
            raise StateConflict({"conflict": "Resource is being used by running jobs",
                                 "active_jobs": active_jobs})
        return True
class WorkflowJobAccess(BaseAccess):
    '''
    I can only see Workflow Jobs if I'm a super user
    '''
    # No overrides yet: all checks fall through to BaseAccess defaults.
    model = WorkflowJob
class AdHocCommandAccess(BaseAccess): class AdHocCommandAccess(BaseAccess):
''' '''
I can only see/run ad hoc commands when: I can only see/run ad hoc commands when:
@@ -1295,10 +1462,12 @@ class UnifiedJobTemplateAccess(BaseAccess):
inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES) inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES)
job_template_qs = self.user.get_queryset(JobTemplate) job_template_qs = self.user.get_queryset(JobTemplate)
system_job_template_qs = self.user.get_queryset(SystemJobTemplate) system_job_template_qs = self.user.get_queryset(SystemJobTemplate)
workflow_job_template_qs = self.user.get_queryset(WorkflowJobTemplate)
qs = qs.filter(Q(Project___in=project_qs) | qs = qs.filter(Q(Project___in=project_qs) |
Q(InventorySource___in=inventory_source_qs) | Q(InventorySource___in=inventory_source_qs) |
Q(JobTemplate___in=job_template_qs) | Q(JobTemplate___in=job_template_qs) |
Q(systemjobtemplate__in=system_job_template_qs)) Q(systemjobtemplate__in=system_job_template_qs) |
Q(workflowjobtemplate__in=workflow_job_template_qs))
qs = qs.select_related( qs = qs.select_related(
'created_by', 'created_by',
'modified_by', 'modified_by',
@@ -1334,11 +1503,13 @@ class UnifiedJobAccess(BaseAccess):
job_qs = self.user.get_queryset(Job) job_qs = self.user.get_queryset(Job)
ad_hoc_command_qs = self.user.get_queryset(AdHocCommand) ad_hoc_command_qs = self.user.get_queryset(AdHocCommand)
system_job_qs = self.user.get_queryset(SystemJob) system_job_qs = self.user.get_queryset(SystemJob)
workflow_job_qs = self.user.get_queryset(WorkflowJob)
qs = qs.filter(Q(ProjectUpdate___in=project_update_qs) | qs = qs.filter(Q(ProjectUpdate___in=project_update_qs) |
Q(InventoryUpdate___in=inventory_update_qs) | Q(InventoryUpdate___in=inventory_update_qs) |
Q(Job___in=job_qs) | Q(Job___in=job_qs) |
Q(AdHocCommand___in=ad_hoc_command_qs) | Q(AdHocCommand___in=ad_hoc_command_qs) |
Q(SystemJob___in=system_job_qs)) Q(SystemJob___in=system_job_qs) |
Q(WorkflowJob___in=workflow_job_qs))
qs = qs.select_related( qs = qs.select_related(
'created_by', 'created_by',
'modified_by', 'modified_by',
@@ -1724,3 +1895,7 @@ register_access(Role, RoleAccess)
register_access(NotificationTemplate, NotificationTemplateAccess) register_access(NotificationTemplate, NotificationTemplateAccess)
register_access(Notification, NotificationAccess) register_access(Notification, NotificationAccess)
register_access(Label, LabelAccess) register_access(Label, LabelAccess)
register_access(WorkflowJobTemplateNode, WorkflowJobTemplateNodeAccess)
register_access(WorkflowJobNode, WorkflowJobNodeAccess)
register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess)
register_access(WorkflowJob, WorkflowJobAccess)

View File

@@ -54,6 +54,8 @@ class SimpleDAG(object):
type_str = "Inventory" type_str = "Inventory"
elif type(obj) == ProjectUpdate: elif type(obj) == ProjectUpdate:
type_str = "Project" type_str = "Project"
elif type(obj) == WorkflowJob:
type_str = "Workflow"
else: else:
type_str = "Unknown" type_str = "Unknown"
type_str += "%s" % str(obj.id) type_str += "%s" % str(obj.id)
@@ -68,10 +70,11 @@ class SimpleDAG(object):
short_string_obj(n['node_object']), short_string_obj(n['node_object']),
"red" if n['node_object'].status == 'running' else "black", "red" if n['node_object'].status == 'running' else "black",
) )
for from_node, to_node in self.edges: for from_node, to_node, label in self.edges:
doc += "%s -> %s;\n" % ( doc += "%s -> %s [ label=\"%s\" ];\n" % (
short_string_obj(self.nodes[from_node]['node_object']), short_string_obj(self.nodes[from_node]['node_object']),
short_string_obj(self.nodes[to_node]['node_object']), short_string_obj(self.nodes[to_node]['node_object']),
label,
) )
doc += "}\n" doc += "}\n"
gv_file = open('/tmp/graph.gv', 'w') gv_file = open('/tmp/graph.gv', 'w')
@@ -82,16 +85,16 @@ class SimpleDAG(object):
if self.find_ord(obj) is None: if self.find_ord(obj) is None:
self.nodes.append(dict(node_object=obj, metadata=metadata)) self.nodes.append(dict(node_object=obj, metadata=metadata))
def add_edge(self, from_obj, to_obj): def add_edge(self, from_obj, to_obj, label=None):
from_obj_ord = self.find_ord(from_obj) from_obj_ord = self.find_ord(from_obj)
to_obj_ord = self.find_ord(to_obj) to_obj_ord = self.find_ord(to_obj)
if from_obj_ord is None or to_obj_ord is None: if from_obj_ord is None or to_obj_ord is None:
raise LookupError("Object not found") raise LookupError("Object not found")
self.edges.append((from_obj_ord, to_obj_ord)) self.edges.append((from_obj_ord, to_obj_ord, label))
def add_edges(self, edgelist): def add_edges(self, edgelist):
for edge_pair in edgelist: for edge_pair in edgelist:
self.add_edge(edge_pair[0], edge_pair[1]) self.add_edge(edge_pair[0], edge_pair[1], edge_pair[2])
def find_ord(self, obj): def find_ord(self, obj):
for idx in range(len(self.nodes)): for idx in range(len(self.nodes)):
@@ -110,22 +113,32 @@ class SimpleDAG(object):
return "project_update" return "project_update"
elif type(obj) == SystemJob: elif type(obj) == SystemJob:
return "system_job" return "system_job"
elif type(obj) == WorkflowJob:
return "workflow_job"
return "unknown" return "unknown"
def get_dependencies(self, obj): def get_dependencies(self, obj, label=None):
antecedents = [] antecedents = []
this_ord = self.find_ord(obj) this_ord = self.find_ord(obj)
for node, dep in self.edges: for node, dep, lbl in self.edges:
if node == this_ord: if label:
antecedents.append(self.nodes[dep]) if node == this_ord and lbl == label:
antecedents.append(self.nodes[dep])
else:
if node == this_ord:
antecedents.append(self.nodes[dep])
return antecedents return antecedents
def get_dependents(self, obj): def get_dependents(self, obj, label=None):
decendents = [] decendents = []
this_ord = self.find_ord(obj) this_ord = self.find_ord(obj)
for node, dep in self.edges: for node, dep, lbl in self.edges:
if dep == this_ord: if label:
decendents.append(self.nodes[node]) if dep == this_ord and lbl == label:
decendents.append(self.nodes[node])
else:
if dep == this_ord:
decendents.append(self.nodes[node])
return decendents return decendents
def get_leaf_nodes(self): def get_leaf_nodes(self):
@@ -135,6 +148,85 @@ class SimpleDAG(object):
leafs.append(n) leafs.append(n)
return leafs return leafs
def get_root_nodes(self):
roots = []
for n in self.nodes:
if len(self.get_dependents(n['node_object'])) < 1:
roots.append(n)
return roots
class WorkflowDAG(SimpleDAG):
    """
    DAG of a WorkflowJob's nodes, with edges labeled by relationship type
    ('success_nodes', 'failure_nodes', 'always_nodes').
    """

    def __init__(self, workflow_job=None):
        super(WorkflowDAG, self).__init__()
        if workflow_job:
            self._init_graph(workflow_job)

    def _init_graph(self, workflow_job):
        """Populate nodes from the workflow job, then add one labeled edge per relation."""
        workflow_nodes = workflow_job.workflow_job_nodes.all()
        for workflow_node in workflow_nodes:
            self.add_node(workflow_node)

        for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
            for workflow_node in workflow_nodes:
                related_nodes = getattr(workflow_node, node_type).all()
                for related_node in related_nodes:
                    self.add_edge(workflow_node, related_node, node_type)

    def bfs_nodes_to_run(self):
        """
        Walk the graph from the roots and return the node objects whose jobs
        should be spawned next (nodes reached along satisfied edges that have
        no job yet).
        """
        root_nodes = self.get_root_nodes()
        nodes = root_nodes
        nodes_found = []

        # 'nodes' grows while we iterate; enumerate keeps walking the appended
        # children, giving a breadth-first traversal.
        for index, n in enumerate(nodes):
            obj = n['node_object']
            job = obj.job

            if not job:
                nodes_found.append(n)
            # Job is about to run or is running. Hold our horses and wait for
            # the job to finish. We can't proceed down the graph path until we
            # have the job result.
            elif job.status not in ['failed', 'error', 'successful']:
                continue
            elif job.status in ['failed', 'error']:
                children_failed = self.get_dependencies(obj, 'failure_nodes')
                children_always = self.get_dependencies(obj, 'always_nodes')
                children_all = children_failed + children_always
                nodes.extend(children_all)
            elif job.status in ['successful']:
                children_success = self.get_dependencies(obj, 'success_nodes')
                nodes.extend(children_success)
            else:
                # Defensive; unreachable given the status checks above.
                # Fix: logger.warn is a deprecated alias of logger.warning.
                logger.warning("Incorrect graph structure")
        return [n['node_object'] for n in nodes_found]

    def is_workflow_done(self):
        """
        Return True when every reachable node's job has finished; False if any
        reachable node has no job yet or a job still running.
        """
        root_nodes = self.get_root_nodes()
        nodes = root_nodes

        for index, n in enumerate(nodes):
            obj = n['node_object']
            job = obj.job

            if not job:
                return False
            # Job is about to run or is running. Hold our horses and wait for
            # the job to finish. We can't proceed down the graph path until we
            # have the job result.
            elif job.status not in ['failed', 'error', 'successful']:
                return False
            elif job.status in ['failed', 'error']:
                children_failed = self.get_dependencies(obj, 'failure_nodes')
                children_always = self.get_dependencies(obj, 'always_nodes')
                children_all = children_failed + children_always
                nodes.extend(children_all)
            elif job.status in ['successful']:
                children_success = self.get_dependencies(obj, 'success_nodes')
                nodes.extend(children_success)
            else:
                # Defensive; unreachable given the status checks above.
                # Fix: logger.warn is a deprecated alias of logger.warning.
                logger.warning("Incorrect graph structure")
        return True
def get_tasks(): def get_tasks():
"""Fetch all Tower tasks that are relevant to the task management """Fetch all Tower tasks that are relevant to the task management
system. system.
@@ -149,11 +241,42 @@ def get_tasks():
ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)] ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)]
graph_system_jobs = [sj for sj in graph_system_jobs = [sj for sj in
SystemJob.objects.filter(status__in=RELEVANT_JOBS)] SystemJob.objects.filter(status__in=RELEVANT_JOBS)]
graph_workflow_jobs = [wf for wf in
WorkflowJob.objects.filter(status__in=RELEVANT_JOBS)]
all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates + all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates +
graph_project_updates + graph_system_jobs, graph_project_updates + graph_system_jobs +
graph_workflow_jobs,
key=lambda task: task.created) key=lambda task: task.created)
return all_actions return all_actions
def get_running_workflow_jobs():
    """Return a list of all workflow jobs currently in the 'running' state."""
    running = WorkflowJob.objects.filter(status='running')
    return [workflow_job for workflow_job in running]
def do_spawn_workflow_jobs():
    """For every running workflow job, spawn jobs for the graph nodes that
    are ready to run (as determined by WorkflowDAG.bfs_nodes_to_run)."""
    workflow_jobs = get_running_workflow_jobs()
    for workflow_job in workflow_jobs:
        dag = WorkflowDAG(workflow_job)
        spawn_nodes = dag.bfs_nodes_to_run()
        for spawn_node in spawn_nodes:
            # TODO: Inject job template template params as kwargs.
            # Make sure to take into account extra_vars merge logic
            kv = {}
            # Create the concrete job from the node's template and remember
            # it on the node so later traversals can read its status.
            job = spawn_node.unified_job_template.create_unified_job(**kv)
            spawn_node.job = job
            spawn_node.save()
            can_start = job.signal_start(**kv)
            if not can_start:
                # Mark the spawned job failed so the workflow can proceed
                # down its failure path instead of hanging forever.
                job.status = 'failed'
                job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials"
                job.save(update_fields=['status', 'job_explanation'])
                job.socketio_emit_status("failed")

            # TODO: should we emit a status on the socket here similar to tasks.py tower_periodic_scheduler() ?
            #emit_websocket_notification('/socket.io/jobs', '', dict(id=))
def rebuild_graph(message): def rebuild_graph(message):
"""Regenerate the task graph by refreshing known tasks from Tower, purging """Regenerate the task graph by refreshing known tasks from Tower, purging
orphaned running tasks, and creating dependencies for new tasks before orphaned running tasks, and creating dependencies for new tasks before
@@ -170,6 +293,8 @@ def rebuild_graph(message):
logger.warn("Ignoring celery task inspector") logger.warn("Ignoring celery task inspector")
active_task_queues = None active_task_queues = None
do_spawn_workflow_jobs()
all_sorted_tasks = get_tasks() all_sorted_tasks = get_tasks()
if not len(all_sorted_tasks): if not len(all_sorted_tasks):
return None return None
@@ -184,6 +309,7 @@ def rebuild_graph(message):
# as a whole that celery appears to be down. # as a whole that celery appears to be down.
if not hasattr(settings, 'CELERY_UNIT_TEST'): if not hasattr(settings, 'CELERY_UNIT_TEST'):
return None return None
running_tasks = filter(lambda t: t.status == 'running', all_sorted_tasks) running_tasks = filter(lambda t: t.status == 'running', all_sorted_tasks)
waiting_tasks = filter(lambda t: t.status != 'running', all_sorted_tasks) waiting_tasks = filter(lambda t: t.status != 'running', all_sorted_tasks)
new_tasks = filter(lambda t: t.status == 'pending', all_sorted_tasks) new_tasks = filter(lambda t: t.status == 'pending', all_sorted_tasks)

View File

@@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import awx.main.models.notifications
import django.db.models.deletion
import awx.main.models.workflow
import awx.main.fields
class Migration(migrations.Migration):
    """Introduce the workflow models.

    Creates WorkflowJobTemplate/WorkflowJob (subclasses of the unified
    job template/job tables) and their graph node models, wires the node
    relations (success/failure/always plus template/job foreign keys), and
    registers the four new types on ActivityStream.
    """

    dependencies = [
        ('main', '0032_v302_credential_permissions_update'),
    ]

    operations = [
        migrations.CreateModel(
            name='WorkflowJob',
            fields=[
                ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
                ('extra_vars', models.TextField(default=b'', blank=True)),
            ],
            options={
                'ordering': ('id',),
            },
            bases=('main.unifiedjob', models.Model, awx.main.models.notifications.JobNotificationMixin, awx.main.models.workflow.WorkflowJobInheritNodesMixin),
        ),
        migrations.CreateModel(
            name='WorkflowJobNode',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('always_nodes', models.ManyToManyField(related_name='workflowjobnodes_always', to='main.WorkflowJobNode', blank=True)),
                ('failure_nodes', models.ManyToManyField(related_name='workflowjobnodes_failure', to='main.WorkflowJobNode', blank=True)),
                ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)),
                ('success_nodes', models.ManyToManyField(related_name='workflowjobnodes_success', to='main.WorkflowJobNode', blank=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='WorkflowJobTemplate',
            fields=[
                ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
                ('extra_vars', models.TextField(default=b'', blank=True)),
                # NOTE(review): null=b'True' is a (truthy) byte string, not the
                # boolean True — presumably a typo for null=True; confirm.
                ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')),
            ],
            bases=('main.unifiedjobtemplate', models.Model),
        ),
        migrations.CreateModel(
            name='WorkflowJobTemplateNode',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(default=None, editable=False)),
                ('modified', models.DateTimeField(default=None, editable=False)),
                ('always_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_always', to='main.WorkflowJobTemplateNode', blank=True)),
                ('failure_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_failure', to='main.WorkflowJobTemplateNode', blank=True)),
                ('success_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_success', to='main.WorkflowJobTemplateNode', blank=True)),
                ('unified_job_template', models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)),
                ('workflow_job_template', models.ForeignKey(related_name='workflow_job_template_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='workflowjobnode',
            name='unified_job_template',
            field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True),
        ),
        migrations.AddField(
            model_name='workflowjobnode',
            name='workflow_job',
            field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True),
        ),
        migrations.AddField(
            model_name='workflowjob',
            name='workflow_job_template',
            field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True),
        ),
        # Activity-stream registration for the four new workflow types.
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job',
            field=models.ManyToManyField(to='main.WorkflowJob', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job_node',
            field=models.ManyToManyField(to='main.WorkflowJobNode', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job_template',
            field=models.ManyToManyField(to='main.WorkflowJobTemplate', blank=True),
        ),
        migrations.AddField(
            model_name='activitystream',
            name='workflow_job_template_node',
            field=models.ManyToManyField(to='main.WorkflowJobTemplateNode', blank=True),
        ),
    ]

View File

@@ -22,6 +22,7 @@ from awx.main.models.mixins import * # noqa
from awx.main.models.notifications import * # noqa from awx.main.models.notifications import * # noqa
from awx.main.models.fact import * # noqa from awx.main.models.fact import * # noqa
from awx.main.models.label import * # noqa from awx.main.models.label import * # noqa
from awx.main.models.workflow import * # noqa
# Monkeypatch Django serializer to ignore django-taggit fields (which break # Monkeypatch Django serializer to ignore django-taggit fields (which break
# the dumpdata command; see https://github.com/alex/django-taggit/issues/155). # the dumpdata command; see https://github.com/alex/django-taggit/issues/155).

View File

@@ -49,6 +49,10 @@ class ActivityStream(models.Model):
permission = models.ManyToManyField("Permission", blank=True) permission = models.ManyToManyField("Permission", blank=True)
job_template = models.ManyToManyField("JobTemplate", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True)
job = models.ManyToManyField("Job", blank=True) job = models.ManyToManyField("Job", blank=True)
workflow_job_template_node = models.ManyToManyField("WorkflowJobTemplateNode", blank=True)
workflow_job_node = models.ManyToManyField("WorkflowJobNode", blank=True)
workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True)
workflow_job = models.ManyToManyField("WorkflowJob", blank=True)
unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+') unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+')
unified_job = models.ManyToManyField("UnifiedJob", blank=True, related_name='activity_stream_as_unified_job+') unified_job = models.ManyToManyField("UnifiedJob", blank=True, related_name='activity_stream_as_unified_job+')
ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)

242
awx/main/models/workflow.py Normal file
View File

@@ -0,0 +1,242 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
# Python
#import urlparse
# Django
from django.db import models
from django.core.urlresolvers import reverse
#from django import settings as tower_settings
# AWX
from awx.main.models import UnifiedJobTemplate, UnifiedJob
from awx.main.models.notifications import JobNotificationMixin
from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty
from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
)
from awx.main.fields import ImplicitRoleField
__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',]
class WorkflowNodeBase(CreatedModifiedModel):
    """Common fields for workflow graph nodes.

    Shared by WorkflowJobTemplateNode (the design-time graph) and
    WorkflowJobNode (the run-time copy). Each node points at a
    UnifiedJobTemplate to run and at the neighbor sets to visit on
    success, failure, or unconditionally.
    """

    class Meta:
        abstract = True
        app_label = 'main'

    # TODO: RBAC
    '''
    admin_role = ImplicitRoleField(
        parent_role='workflow_job_template.admin_role',
    )
    '''
    # Outgoing edges, one relation per traversal label. symmetrical=False
    # because the workflow graph is directed.
    success_nodes = models.ManyToManyField(
        'self',
        blank=True,
        symmetrical=False,
        related_name='%(class)ss_success',
    )
    failure_nodes = models.ManyToManyField(
        'self',
        blank=True,
        symmetrical=False,
        related_name='%(class)ss_failure',
    )
    always_nodes = models.ManyToManyField(
        'self',
        blank=True,
        symmetrical=False,
        related_name='%(class)ss_always',
    )
    # The template whose job this node will spawn; SET_NULL so deleting a
    # template does not tear down the workflow graph.
    unified_job_template = models.ForeignKey(
        'UnifiedJobTemplate',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
class WorkflowJobTemplateNode(WorkflowNodeBase):
    """A node in a WorkflowJobTemplate's design-time graph."""

    # TODO: Ensure the API forces workflow_job_template being set
    workflow_job_template = models.ForeignKey(
        'WorkflowJobTemplate',
        related_name='workflow_job_template_nodes',
        blank=True,
        null=True,
        default=None,
        # Deleting the template deletes its graph nodes with it.
        on_delete=models.CASCADE,
    )

    def get_absolute_url(self):
        """Return this node's REST API detail URL."""
        return reverse('api:workflow_job_template_node_detail', args=(self.pk,))
class WorkflowJobNode(WorkflowNodeBase):
    """A node in a running WorkflowJob's graph (copied from the template
    graph by WorkflowJobInheritNodesMixin)."""

    # The concrete job spawned for this node; null until it is spawned.
    job = models.ForeignKey(
        'UnifiedJob',
        related_name='unified_job_nodes',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # The workflow job this node belongs to.
    workflow_job = models.ForeignKey(
        'WorkflowJob',
        related_name='workflow_job_nodes',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )

    def get_absolute_url(self):
        """Return this node's REST API detail URL."""
        return reverse('api:workflow_job_node_detail', args=(self.pk,))
class WorkflowJobOptions(BaseModel):
    """Options shared by WorkflowJobTemplate and WorkflowJob."""

    class Meta:
        abstract = True

    # Extra variables applied to the workflow, stored as serialized text.
    extra_vars = models.TextField(
        blank=True,
        default='',
    )
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions):
    """Template describing a workflow: a graph of nodes, each pointing at a
    unified job template to run."""

    class Meta:
        app_label = 'main'

    admin_role = ImplicitRoleField(
        parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    )

    @classmethod
    def _get_unified_job_class(cls):
        """Jobs launched from this template are WorkflowJob instances."""
        return WorkflowJob

    @classmethod
    def _get_unified_job_field_names(cls):
        """Fields copied from the template onto the spawned workflow job."""
        # TODO: ADD LABELS
        return ['name', 'description', 'extra_vars',]

    def get_absolute_url(self):
        """Return this template's REST API detail URL."""
        return reverse('api:workflow_job_template_detail', args=(self.pk,))

    @property
    def cache_timeout_blocked(self):
        # TODO: don't allow running of job template if same workflow template running
        return False

    # TODO: Notifications
    # TODO: Surveys

    #def create_job(self, **kwargs):
    #    '''
    #    Create a new job based on this template.
    #    '''
    #    return self.create_unified_job(**kwargs)

    # TODO: Delete create_unified_job here and explicitly call create_workflow_job() .. figure out where the call is
    def create_unified_job(self, **kwargs):
        """Create a WorkflowJob and copy this template's node graph onto it."""
    #def create_workflow_job(self, **kwargs):
        #workflow_job = self.create_unified_job(**kwargs)
        workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
        workflow_job.inherit_job_template_workflow_nodes()
        return workflow_job
class WorkflowJobInheritNodesMixin(object):
    """Copies a WorkflowJobTemplate's node graph onto a WorkflowJob.

    The mixin expects `self` to be a WorkflowJob (it reads
    self.workflow_job_template and creates WorkflowJobNode rows pointing
    back at self). Entry point: inherit_job_template_workflow_nodes().
    """

    def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type):
        """Recreate one labeled relation (success/failure/always) of
        `old_node` on `new_node`, translating ids through `node_ids_map`."""
        old_related_nodes = self._get_all_by_type(old_node, node_type)
        new_node_type_mgr = getattr(new_node, node_type)

        for old_related_node in old_related_nodes:
            new_related_node = self._get_workflow_job_node_by_id(node_ids_map[old_related_node.id])
            new_node_type_mgr.add(new_related_node)

    def _create_workflow_job_nodes(self, old_nodes):
        """Create a WorkflowJobNode for each WorkflowJobTemplateNode."""
        return [WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) for old_node in old_nodes]

    def _map_workflow_job_nodes(self, old_nodes, new_nodes):
        """Return {template-node id: job-node id}; inputs are parallel lists."""
        node_ids_map = {}
        for i, old_node in enumerate(old_nodes):
            node_ids_map[old_node.id] = new_nodes[i].id
        return node_ids_map

    def _get_workflow_job_template_nodes(self):
        return self.workflow_job_template.workflow_job_template_nodes.all()

    def _get_workflow_job_node_by_id(self, id):
        return WorkflowJobNode.objects.get(id=id)

    def _get_all_by_type(self, node, node_type):
        # BUG FIX: this method was declared without `self`, but every call
        # site invokes it as self._get_all_by_type(node, node_type), which
        # raised TypeError (self bound to `node`, one argument too many).
        return getattr(node, node_type).all()

    def inherit_job_template_workflow_nodes(self):
        """Clone the template's nodes, then replay each labeled relation."""
        old_nodes = self._get_workflow_job_template_nodes()
        new_nodes = self._create_workflow_job_nodes(old_nodes)
        node_ids_map = self._map_workflow_job_nodes(old_nodes, new_nodes)

        for index, old_node in enumerate(old_nodes):
            new_node = new_nodes[index]
            for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
                self._inherit_relationship(old_node, new_node, node_ids_map, node_type)
class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, WorkflowJobInheritNodesMixin):
    """A single run of a WorkflowJobTemplate: owns a copied node graph and
    is driven by the RunWorkflowJob task."""

    class Meta:
        app_label = 'main'
        ordering = ('id',)

    workflow_job_template = models.ForeignKey(
        'WorkflowJobTemplate',
        related_name='jobs',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )

    # Parsed view of extra_vars (second arg True per VarsDictProperty usage).
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    @classmethod
    def _get_parent_field_name(cls):
        return 'workflow_job_template'

    @classmethod
    def _get_task_class(cls):
        # Imported lazily to avoid a circular import with awx.main.tasks.
        from awx.main.tasks import RunWorkflowJob
        return RunWorkflowJob

    def socketio_emit_data(self):
        """No extra payload is attached to workflow job status events."""
        return {}

    def get_absolute_url(self):
        """Return this workflow job's REST API detail URL."""
        return reverse('api:workflow_job_detail', args=(self.pk,))

    # TODO: Ask UI if this is needed ?
    #def get_ui_url(self):
    #    return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk))

    def is_blocked_by(self, obj):
        # NOTE(review): unconditionally True — presumably a placeholder until
        # real workflow blocking rules exist; confirm scheduler semantics.
        return True

    @property
    def task_impact(self):
        # A workflow job itself consumes no capacity; its spawned jobs do.
        return 0

    # TODO: workflow job notifications
    def get_notification_templates(self):
        return []

    # TODO: workflow job notifications
    def get_notification_friendly_name(self):
        return "Workflow Job"

View File

@@ -55,8 +55,10 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'RunAdHocCommand', 'RunWorkflowJob', 'handle_work_error',
'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks'] 'handle_work_success', 'update_inventory_computed_fields',
'send_notifications', 'run_administrative_checks',
'run_workflow_job']
HIDDEN_PASSWORD = '**********' HIDDEN_PASSWORD = '**********'
@@ -189,7 +191,6 @@ def notify_task_runner(metadata_dict):
def _send_notification_templates(instance, status_str): def _send_notification_templates(instance, status_str):
if status_str not in ['succeeded', 'failed']: if status_str not in ['succeeded', 'failed']:
raise ValueError("status_str must be either succeeded or failed") raise ValueError("status_str must be either succeeded or failed")
print("Instance has some shit in it %s" % instance)
notification_templates = instance.get_notification_templates() notification_templates = instance.get_notification_templates()
if notification_templates: if notification_templates:
all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', [])) all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', []))
@@ -237,8 +238,6 @@ def handle_work_error(self, task_id, subtasks=None):
instance.socketio_emit_status("failed") instance.socketio_emit_status("failed")
if first_instance: if first_instance:
print("Instance type is %s" % first_instance_type)
print("Instance passing along %s" % first_instance.name)
_send_notification_templates(first_instance, 'failed') _send_notification_templates(first_instance, 'failed')
@task() @task()
@@ -1663,3 +1662,28 @@ class RunSystemJob(BaseTask):
def build_cwd(self, instance, **kwargs): def build_cwd(self, instance, **kwargs):
return settings.BASE_DIR return settings.BASE_DIR
class RunWorkflowJob(BaseTask):
    """Celery task that drives a WorkflowJob by polling its DAG until every
    reachable node's job has finished."""

    name = 'awx.main.tasks.run_workflow_job'
    model = WorkflowJob

    def run(self, pk, **kwargs):
        '''
        Run the job/task and capture its output.
        '''
        # FIX: the docstring above was previously placed after the import
        # (making it a no-op expression), followed by a dead `pass`.
        from awx.main.management.commands.run_task_system import WorkflowDAG
        instance = self.update_model(pk, status='running', celery_task_id=self.request.id)
        instance.socketio_emit_status("running")

        # FIXME: Detect workflow run completion
        # Poll once per second until the DAG reports completion.
        while True:
            dag = WorkflowDAG(instance)
            if dag.is_workflow_done():
                # TODO: update with accurate finish status (i.e. canceled, error, etc.)
                instance = self.update_model(instance.pk, status='successful')
                break
            time.sleep(1)
        instance.socketio_emit_status(instance.status)
        # TODO: Handle cancel

View File

@@ -7,6 +7,7 @@ from awx.main.tests.factories import (
create_job_template, create_job_template,
create_notification_template, create_notification_template,
create_survey_spec, create_survey_spec,
create_workflow_job_template,
) )
@pytest.fixture @pytest.fixture
@@ -40,6 +41,10 @@ def job_template_with_survey_passwords_factory(job_template_factory):
def job_with_secret_key_unit(job_with_secret_key_factory): def job_with_secret_key_unit(job_with_secret_key_factory):
return job_with_secret_key_factory(persisted=False) return job_with_secret_key_factory(persisted=False)
@pytest.fixture
def workflow_job_template_factory():
return create_workflow_job_template
@pytest.fixture @pytest.fixture
def get_ssh_version(mocker): def get_ssh_version(mocker):
return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8') return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8')

View File

@@ -3,6 +3,7 @@ from .tower import (
create_job_template, create_job_template,
create_notification_template, create_notification_template,
create_survey_spec, create_survey_spec,
create_workflow_job_template,
) )
from .exc import ( from .exc import (
@@ -14,5 +15,6 @@ __all__ = [
'create_job_template', 'create_job_template',
'create_notification_template', 'create_notification_template',
'create_survey_spec', 'create_survey_spec',
'create_workflow_job_template',
'NotUnique', 'NotUnique',
] ]

View File

@@ -13,6 +13,10 @@ from awx.main.models import (
Credential, Credential,
Inventory, Inventory,
Label, Label,
WorkflowJobTemplate,
WorkflowJob,
WorkflowJobNode,
WorkflowJobTemplateNode,
) )
# mk methods should create only a single object of a single type. # mk methods should create only a single object of a single type.
@@ -152,3 +156,60 @@ def mk_job_template(name, job_type='run',
if persisted: if persisted:
jt.save() jt.save()
return jt return jt
def mk_workflow_job(status='new', workflow_job_template=None, extra_vars=None,
                    persisted=True):
    """Build (and optionally save) a WorkflowJob fixture.

    extra_vars is serialized to JSON; None means "no extra vars" and
    serializes as '{}' exactly like the old mutable default did.
    """
    # FIX: the default was a mutable dict literal (extra_vars={}), a classic
    # shared-state hazard; use None as the sentinel instead.
    if extra_vars is None:
        extra_vars = {}
    job = WorkflowJob(status=status, extra_vars=json.dumps(extra_vars))
    job.workflow_job_template = workflow_job_template
    if persisted:
        job.save()
    return job
def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True):
    """Build (and optionally save) a WorkflowJobTemplate fixture.

    A truthy extra_vars value is JSON-serialized; a survey spec, if given,
    also enables the survey flag.
    """
    serialized_vars = json.dumps(extra_vars) if extra_vars else extra_vars
    wfjt = WorkflowJobTemplate(name=name, extra_vars=serialized_vars)
    wfjt.survey_spec = spec
    if wfjt.survey_spec is not None:
        wfjt.survey_enabled = True
    if persisted:
        wfjt.save()
    return wfjt
def mk_workflow_job_template_node(workflow_job_template=None,
                                  unified_job_template=None,
                                  success_nodes=None,
                                  failure_nodes=None,
                                  always_nodes=None,
                                  persisted=True):
    """Build (and optionally save) a WorkflowJobTemplateNode fixture.

    NOTE(review): success/failure/always_nodes are many-to-many relations;
    passing them to the model constructor presumably only works when they
    are None — confirm against Django's handling of M2M init kwargs.
    """
    workflow_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template,
                                            unified_job_template=unified_job_template,
                                            success_nodes=success_nodes,
                                            failure_nodes=failure_nodes,
                                            always_nodes=always_nodes)
    if persisted:
        workflow_node.save()
    return workflow_node
def mk_workflow_job_node(unified_job_template=None,
                         success_nodes=None,
                         failure_nodes=None,
                         always_nodes=None,
                         workflow_job=None,
                         job=None,
                         persisted=True):
    """Build (and optionally save) a WorkflowJobNode fixture.

    NOTE(review): success/failure/always_nodes are many-to-many relations;
    passing them to the model constructor presumably only works when they
    are None — confirm against Django's handling of M2M init kwargs.
    """
    workflow_node = WorkflowJobNode(unified_job_template=unified_job_template,
                                    success_nodes=success_nodes,
                                    failure_nodes=failure_nodes,
                                    always_nodes=always_nodes,
                                    workflow_job=workflow_job,
                                    job=job)
    if persisted:
        workflow_node.save()
    return workflow_node

View File

@@ -9,6 +9,7 @@ from awx.main.models import (
Inventory, Inventory,
Job, Job,
Label, Label,
WorkflowJobTemplateNode,
) )
from .objects import ( from .objects import (
@@ -28,6 +29,7 @@ from .fixtures import (
mk_project, mk_project,
mk_label, mk_label,
mk_notification_template, mk_notification_template,
mk_workflow_job_template,
) )
@@ -343,3 +345,66 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
users=_Mapped(users), users=_Mapped(users),
superusers=_Mapped(superusers), superusers=_Mapped(superusers),
teams=teams) teams=teams)
def generate_workflow_job_template_nodes(workflow_job_template,
                                         persisted,
                                         **kwargs):
    """Create WorkflowJobTemplateNode objects for a workflow fixture.

    kwargs['workflow_job_template_nodes'] is a list of dicts, each with a
    'unified_job_template' and lists of indices under 'success_nodes',
    'failure_nodes', and 'always_nodes' referring to sibling entries.
    Returns the list of created nodes (parallel to the input list).

    NOTE(review): nodes are given id=i but never save()d here, yet the
    relation .add() calls presumably require persisted rows — confirm.
    """
    workflow_job_template_nodes = kwargs.get('workflow_job_template_nodes', [])
    if len(workflow_job_template_nodes) > 0 and not persisted:
        raise RuntimeError('workflow job template nodes can not be used when persisted=False')

    new_nodes = []
    for i, node in enumerate(workflow_job_template_nodes):
        new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template,
                                           unified_job_template=node['unified_job_template'],
                                           id=i)
        new_nodes.append(new_node)

    node_types = ['success_nodes', 'failure_nodes', 'always_nodes']
    for node_type in node_types:
        for i, new_node in enumerate(new_nodes):
            for related_index in workflow_job_template_nodes[i][node_type]:
                getattr(new_node, node_type).add(new_nodes[related_index])

    # BUG FIX: the caller assigns this function's result and stores it on the
    # returned Objects tuple; previously the function fell off the end and
    # implicitly returned None, losing the created nodes.
    return new_nodes
# TODO: Implement survey and jobs
def create_workflow_job_template(name, persisted=True, **kwargs):
    """Factory entry point: build a workflow job template fixture plus its
    nodes and optional survey, returned as a named Objects tuple."""
    Objects = generate_objects(["workflow_job_template",
                                "workflow_job_template_nodes",
                                "survey",], kwargs)

    spec = None
    #jobs = None

    extra_vars = kwargs.get('extra_vars', '')

    if 'survey' in kwargs:
        spec = create_survey_spec(kwargs['survey'])

    wfjt = mk_workflow_job_template(name,
                                    spec=spec,
                                    extra_vars=extra_vars,
                                    persisted=persisted)

    workflow_jt_nodes = generate_workflow_job_template_nodes(wfjt,
                                                             persisted,
                                                             workflow_job_template_nodes=kwargs.get('workflow_job_template_nodes', []))
    '''
    if 'jobs' in kwargs:
        for i in kwargs['jobs']:
            if type(i) is Job:
                jobs[i.pk] = i
            else:
                # TODO: Create the job
                raise RuntimeError("Currently, only already created jobs are supported")
    '''
    return Objects(workflow_job_template=wfjt,
                   #jobs=jobs,
                   workflow_job_template_nodes=workflow_jt_nodes,
                   survey=spec,)

View File

@@ -0,0 +1,40 @@
# AWX
from awx.main.models import (
WorkflowJobTemplateNode,
WorkflowJobTemplate,
)
from awx.main.models.jobs import JobTemplate
def do_init_workflow(job_template_success, job_template_fail, job_template_never):
    """Dev/demo helper: (re)create a 'linear workflow' template wired from
    the three given job templates.

    Deletes any previous template of the same name (and ALL existing
    WorkflowJobTemplateNode rows — destructive, dev use only).
    """
    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow")
    wfjt.delete()
    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow")
    print(wfjt.id)
    WorkflowJobTemplateNode.objects.all().delete()
    if created:
        nodes_success = []
        nodes_fail = []
        nodes_never = []
        for i in range(0, 2):
            nodes_success.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success))
            nodes_fail.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail))
            nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
        nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))

        # One extra fail node was created above; drop it before wiring edges.
        nodes_fail[1].delete()

        nodes_success[0].success_nodes.add(nodes_fail[0])
        nodes_success[0].failure_nodes.add(nodes_never[0])
        nodes_fail[0].failure_nodes.add(nodes_success[1])
        nodes_fail[0].success_nodes.add(nodes_never[1])
        nodes_success[1].failure_nodes.add(nodes_never[2])
def do_init():
    """Look up the demo job templates by their hard-coded ids and build the
    linear workflow from them."""
    jt_success = JobTemplate.objects.get(id=5)
    jt_fail = JobTemplate.objects.get(id=6)
    jt_never = JobTemplate.objects.get(id=7)
    do_init_workflow(jt_success, jt_fail, jt_never)
if __name__ == "__main__":
do_init()

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 8.2 KiB

View File

@@ -0,0 +1,45 @@
# AWX
from awx.main.models import (
WorkflowJobTemplateNode,
WorkflowJobTemplate,
)
from awx.main.models.jobs import JobTemplate
def do_init_workflow(job_template_success, job_template_fail, job_template_never, jts_parallel):
    """Dev/demo helper: (re)create a 'parallel workflow' template — one
    success node fanning out to three parallel nodes, each with its own
    failure node.

    Deletes any previous template of the same name (and ALL existing
    WorkflowJobTemplateNode rows — destructive, dev use only).
    """
    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow")
    wfjt.delete()
    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow")
    print(wfjt.id)
    WorkflowJobTemplateNode.objects.all().delete()
    if created:
        node_success = WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)
        nodes_never = []
        for x in range(0, 3):
            nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
        nodes_parallel = []
        for jt in jts_parallel:
            nodes_parallel.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt))

        node_success.success_nodes.add(nodes_parallel[0])
        node_success.success_nodes.add(nodes_parallel[1])
        node_success.success_nodes.add(nodes_parallel[2])
        # Add a failure node for each paralell node
        for i, n in enumerate(nodes_parallel):
            n.failure_nodes.add(nodes_never[i])
def do_init():
    """Look up the demo job templates by their hard-coded ids and build the
    parallel workflow from them."""
    jt_success = JobTemplate.objects.get(id=5)
    jt_fail = JobTemplate.objects.get(id=6)
    jt_never = JobTemplate.objects.get(id=7)
    jt_parallel = [JobTemplate.objects.get(id=pk) for pk in (16, 17, 18)]
    do_init_workflow(jt_success, jt_fail, jt_never, jt_parallel)
if __name__ == "__main__":
do_init()

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 8.6 KiB

View File

@@ -0,0 +1,46 @@
import pytest
@pytest.fixture
def get_related_assert():
    # Assertion helper: the serializer's related dict must expose the
    # expected '/api/v1/<resource>/<pk>/<related>/' URL for the object.
    def fn(model_obj, related, resource_name, related_resource_name):
        assert related_resource_name in related
        assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name)
    return fn
@pytest.fixture
def get_related_mock_and_run():
    # Runs serializer_class().get_related(model_obj) and returns the result.
    def fn(serializer_class, model_obj):
        serializer = serializer_class()
        related = serializer.get_related(model_obj)
        return related
    return fn
@pytest.fixture
def test_get_related(get_related_assert, get_related_mock_and_run):
    # Composite helper: run get_related and assert the URL in one call;
    # returns the related dict for any further checks by the caller.
    def fn(serializer_class, model_obj, resource_name, related_resource_name):
        related = get_related_mock_and_run(serializer_class, model_obj)
        get_related_assert(model_obj, related, resource_name, related_resource_name)
        return related
    return fn
@pytest.fixture
def get_summary_fields_assert():
    # Assertion helper: the summary dict must contain the named field.
    def fn(summary, summary_field_name):
        assert summary_field_name in summary
    return fn
@pytest.fixture
def get_summary_fields_mock_and_run():
    # Runs serializer_class().get_summary_fields(model_obj) and returns it.
    def fn(serializer_class, model_obj):
        serializer = serializer_class()
        return serializer.get_summary_fields(model_obj)
    return fn
@pytest.fixture
def test_get_summary_fields(get_summary_fields_mock_and_run, get_summary_fields_assert):
    # Composite helper: run get_summary_fields and assert the field is
    # present; returns the summary dict for further checks by the caller.
    def fn(serializer_class, model_obj, summary_field_name):
        summary = get_summary_fields_mock_and_run(serializer_class, model_obj)
        get_summary_fields_assert(summary, summary_field_name)
        return summary
    return fn

View File

@@ -0,0 +1,47 @@
# Python
import pytest
import mock
from mock import PropertyMock
# AWX
from awx.api.serializers import (
CustomInventoryScriptSerializer,
)
from awx.main.models import (
CustomInventoryScript,
User,
)
#DRF
from rest_framework.request import Request
from rest_framework.test import (
APIRequestFactory,
force_authenticate,
)
class TestCustomInventoryScriptSerializer(object):
    """Serialization of CustomInventoryScript without summary-field context:
    the script body is exposed only to privileged users (per the parametrize
    table: superuser, system auditor, or admin_role member)."""

    @pytest.mark.parametrize("superuser,sysaudit,admin_role,value",
                             ((True, False, False, '#!/python'),
                              (False, True, False, '#!/python'),
                              (False, False, True, '#!/python'),
                              (False, False, False, None)))
    def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value):
        with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}):
            # NOTE(review): add_to_class mutates the shared User class for the
            # whole process — leaks across tests; confirm acceptable here.
            User.add_to_class('is_system_auditor', sysaudit)
            user = User(username="root", is_superuser=superuser)
            # admin_role membership is simulated by patching the model's
            # admin_role property to a list that may contain the user.
            roles = [user] if admin_role else []

            with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles):
                cis = CustomInventoryScript(pk=1, script='#!/python')
                serializer = CustomInventoryScriptSerializer()

                factory = APIRequestFactory()
                wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json")
                force_authenticate(wsgi_request, user)
                request = Request(wsgi_request)

                serializer.context['request'] = request
                representation = serializer.to_representation(cis)
                assert representation['script'] == value

View File

@@ -0,0 +1,91 @@
# Python
import pytest
import mock
import json
# AWX
from awx.api.serializers import (
JobSerializer,
JobOptionsSerializer,
)
from awx.main.models import (
Label,
Job,
)
def mock_JT_resource_data():
    """Stand-in for resource_validation_data: no errors and no fields."""
    return ({}, [])
@pytest.fixture
def job_template(mocker):
    """A MagicMock job template (pk=5) with stubbed resource validation."""
    jt = mocker.MagicMock(pk=5)
    jt.resource_validation_data = mock_JT_resource_data
    return jt
@pytest.fixture
def job(mocker, job_template):
    """A MagicMock job (pk=5) bound to the job_template fixture."""
    mock_job = mocker.MagicMock(pk=5, job_template=job_template)
    return mock_job
@pytest.fixture
def labels(mocker):
    """Twenty-five Label instances named label-0 .. label-24."""
    return [Label(id=i, name='label-%d' % i) for i in xrange(25)]
@pytest.fixture
def jobs(mocker):
    """Twenty-five Job instances named job-0 .. job-24."""
    return [Job(id=i, name='job-%d' % i) for i in xrange(25)]
@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
class TestJobSerializerGetRelated():
    # Parent serializers' get_related are stubbed out above so only the
    # links added by JobSerializer itself are under test.
    @pytest.mark.parametrize("related_resource_name", [
        'job_events',
        'job_plays',
        'job_tasks',
        'relaunch',
        'labels',
    ])
    def test_get_related(self, test_get_related, job, related_resource_name):
        # Each related resource should be linked as /api/v1/jobs/<pk>/<name>/.
        test_get_related(JobSerializer, job, 'jobs', related_resource_name)

    def test_job_template_absent(self, job):
        # No parent job template -> no 'job_template' link.
        job.job_template = None
        serializer = JobSerializer()
        related = serializer.get_related(job)
        assert 'job_template' not in related

    def test_job_template_present(self, get_related_mock_and_run, job):
        related = get_related_mock_and_run(JobSerializer, job)
        assert 'job_template' in related
        assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk)
@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: {
    'extra_vars': obj.extra_vars})
class TestJobSerializerSubstitution():
    # BaseSerializer.to_representation is stubbed to pass extra_vars through,
    # isolating the survey-password masking done by JobSerializer.
    def test_survey_password_hide(self, mocker):
        job = mocker.MagicMock(**{
            'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
            'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'})
        serializer = JobSerializer(job)
        rep = serializer.to_representation(job)
        extra_vars = json.loads(rep['extra_vars'])
        # The masked value from display_extra_vars() must be what is shown...
        assert extra_vars['secret_key'] == '$encrypted$'
        job.display_extra_vars.assert_called_once_with()
        # ...and the real password must never leak into the representation.
        assert 'my_password' not in extra_vars
@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {})
class TestJobOptionsSerializerGetSummaryFields():
    def test__summary_field_labels_10_max(self, mocker, job_template, labels):
        # 25 labels exist, but the summary must be capped at the first 10,
        # ordered by name.
        job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels})
        job_template.labels.all.return_value = job_template.labels.all
        serializer = JobOptionsSerializer()
        summary_labels = serializer._summary_field_labels(job_template)
        job_template.labels.all.order_by.assert_called_with('name')
        assert len(summary_labels['results']) == 10
        assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]]

    def test_labels_exists(self, test_get_summary_fields, job_template):
        # 'labels' should always appear in the summary fields.
        test_get_summary_fields(JobOptionsSerializer, job_template, 'labels')

View File

@@ -0,0 +1,108 @@
# Python
import pytest
import mock
# AWX
from awx.api.serializers import (
JobTemplateSerializer,
)
from awx.main.models import (
Job,
)
#DRF
from rest_framework import serializers
def mock_JT_resource_data():
    """Stand-in for resource_validation_data: no errors and no fields."""
    return ({}, [])
@pytest.fixture
def job_template(mocker):
    """A MagicMock job template (pk=5) with stubbed resource validation."""
    jt = mocker.MagicMock(pk=5)
    jt.resource_validation_data = mock_JT_resource_data
    return jt
@pytest.fixture
def job(mocker, job_template):
    """A MagicMock job (pk=5) bound to the job_template fixture."""
    mock_job = mocker.MagicMock(pk=5, job_template=job_template)
    return mock_job
@pytest.fixture
def jobs(mocker):
    """Twenty-five Job instances named job-0 .. job-24."""
    return [Job(id=i, name='job-%d' % i) for i in xrange(25)]
@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
class TestJobTemplateSerializerGetRelated():
    # Parent serializers are stubbed so only the links added by
    # JobTemplateSerializer itself are exercised.
    @pytest.mark.parametrize("related_resource_name", [
        'jobs',
        'schedules',
        'activity_stream',
        'launch',
        'notification_templates_any',
        'notification_templates_success',
        'notification_templates_error',
        'survey_spec',
        'labels',
        'callback',
    ])
    def test_get_related(self, test_get_related, job_template, related_resource_name):
        # Each related resource -> /api/v1/job_templates/<pk>/<name>/.
        test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name)

    def test_callback_absent(self, get_related_mock_and_run, job_template):
        # Without a host_config_key there is no provisioning callback link.
        job_template.host_config_key = None
        related = get_related_mock_and_run(JobTemplateSerializer, job_template)
        assert 'callback' not in related
class TestJobTemplateSerializerGetSummaryFields():
    def test__recent_jobs(self, mocker, job_template, jobs):
        """_recent_jobs returns the 10 newest jobs, ordered by '-created'."""
        job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs})
        job_template.jobs.all.return_value = job_template.jobs.all
        serializer = JobTemplateSerializer()
        recent_jobs = serializer._recent_jobs(job_template)
        job_template.jobs.all.assert_called_once_with()
        job_template.jobs.all.order_by.assert_called_once_with('-created')
        assert len(recent_jobs) == 10
        # Fixed: the original wrapped this assert in a pointless
        # `for x in jobs[:10]` loop that shadowed the comprehension's `x`
        # and merely repeated the identical comparison ten times.
        assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]]

    def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
        # A populated survey_spec yields a 'survey' summary entry.
        job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
        test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')

    def test_survey_spec_absent(self, get_summary_fields_mock_and_run, job_template):
        # No survey_spec -> no 'survey' summary entry.
        job_template.survey_spec = None
        summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
        assert 'survey' not in summary

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_copy_true(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_copy_false(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_edit_true(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_edit_false(self, mocker, job_template):
        pass
class TestJobTemplateSerializerValidation(object):
    """validate_extra_vars accepts well-formed JSON/YAML and rejects malformed input."""
    good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"]
    bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"]

    def test_validate_extra_vars(self):
        serializer = JobTemplateSerializer()
        for valid in self.good_extra_vars:
            serializer.validate_extra_vars(valid)
        for invalid in self.bad_extra_vars:
            with pytest.raises(serializers.ValidationError):
                serializer.validate_extra_vars(invalid)

View File

@@ -0,0 +1,154 @@
# Python
import pytest
import mock
# AWX
from awx.api.serializers import (
WorkflowJobTemplateSerializer,
WorkflowNodeBaseSerializer,
WorkflowJobTemplateNodeSerializer,
WorkflowJobNodeSerializer,
)
from awx.main.models import (
Job,
WorkflowJobTemplateNode,
WorkflowJob,
WorkflowJobNode,
)
@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
class TestWorkflowJobTemplateSerializerGetRelated():
    @pytest.fixture
    def workflow_job_template(self, workflow_job_template_factory):
        # Unpersisted workflow job template; pk is set by hand so URLs can
        # be built without touching the database.
        wfjt = workflow_job_template_factory('hello world', persisted=False).workflow_job_template
        wfjt.pk = 3
        return wfjt

    @pytest.mark.parametrize("related_resource_name", [
        'jobs',
        'launch',
        'workflow_nodes',
    ])
    def test_get_related(self, mocker, test_get_related, workflow_job_template, related_resource_name):
        # Each link -> /api/v1/workflow_job_templates/<pk>/<name>/.
        test_get_related(WorkflowJobTemplateSerializer,
                         workflow_job_template,
                         'workflow_job_templates',
                         related_resource_name)
@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x,y: {})
class TestWorkflowNodeBaseSerializerGetRelated():
    @pytest.fixture
    def job_template(self, job_template_factory):
        jt = job_template_factory(name="blah", persisted=False).job_template
        jt.pk = 1
        return jt

    @pytest.fixture
    def workflow_job_template_node_related(self, job_template):
        # Node with a unified_job_template attached -> link should appear.
        return WorkflowJobTemplateNode(pk=1, unified_job_template=job_template)

    @pytest.fixture
    def workflow_job_template_node(self):
        # Bare node with no unified_job_template.
        return WorkflowJobTemplateNode(pk=1)

    def test_workflow_unified_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related):
        related = get_related_mock_and_run(WorkflowNodeBaseSerializer, workflow_job_template_node_related)
        assert 'unified_job_template' in related
        assert related['unified_job_template'] == '/api/v1/%s/%d/' % ('job_templates', workflow_job_template_node_related.unified_job_template.pk)

    def test_workflow_unified_job_template_absent(self, workflow_job_template_node):
        # NOTE(review): this uses WorkflowJobTemplateNodeSerializer rather than
        # the base serializer this class nominally tests — presumably to cover
        # the subclass path too; confirm that is intentional.
        related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node)
        assert 'unified_job_template' not in related
@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {})
class TestWorkflowJobTemplateNodeSerializerGetRelated():
    @pytest.fixture
    def workflow_job_template_node(self):
        # Bare node, not attached to a workflow job template.
        return WorkflowJobTemplateNode(pk=1)

    @pytest.fixture
    def workflow_job_template(self, workflow_job_template_factory):
        wfjt = workflow_job_template_factory("bliggity", persisted=False).workflow_job_template
        wfjt.pk = 1
        return wfjt

    @pytest.fixture
    def job_template(self, job_template_factory):
        jt = job_template_factory(name="blah", persisted=False).job_template
        jt.pk = 1
        return jt

    @pytest.fixture
    def workflow_job_template_node_related(self, workflow_job_template_node, workflow_job_template):
        # Node attached to a workflow job template -> link should appear.
        workflow_job_template_node.workflow_job_template = workflow_job_template
        return workflow_job_template_node

    @pytest.mark.parametrize("related_resource_name", [
        'success_nodes',
        'failure_nodes',
        'always_nodes',
    ])
    def test_get_related(self, test_get_related, workflow_job_template_node, related_resource_name):
        # Each edge collection -> /api/v1/workflow_job_template_nodes/<pk>/<name>/.
        test_get_related(WorkflowJobTemplateNodeSerializer,
                         workflow_job_template_node,
                         'workflow_job_template_nodes',
                         related_resource_name)

    def test_workflow_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related):
        related = get_related_mock_and_run(WorkflowJobTemplateNodeSerializer, workflow_job_template_node_related)
        assert 'workflow_job_template' in related
        assert related['workflow_job_template'] == '/api/v1/%s/%d/' % ('workflow_job_templates', workflow_job_template_node_related.workflow_job_template.pk)

    def test_workflow_job_template_absent(self, workflow_job_template_node):
        related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node)
        assert 'workflow_job_template' not in related
@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {})
class TestWorkflowJobNodeSerializerGetRelated():
    @pytest.fixture
    def workflow_job_node(self):
        # Bare node with neither parent workflow job nor spawned job.
        return WorkflowJobNode(pk=1)

    @pytest.fixture
    def workflow_job(self):
        return WorkflowJob(pk=1)

    @pytest.fixture
    def job(self):
        return Job(name="blah", pk=1)

    @pytest.fixture
    def workflow_job_node_related(self, workflow_job_node, workflow_job, job):
        # Node with both its parent workflow job and its spawned job set.
        workflow_job_node.workflow_job = workflow_job
        workflow_job_node.job = job
        return workflow_job_node

    @pytest.mark.parametrize("related_resource_name", [
        'success_nodes',
        'failure_nodes',
        'always_nodes',
    ])
    def test_get_related(self, test_get_related, workflow_job_node, related_resource_name):
        # Each edge collection -> /api/v1/workflow_job_nodes/<pk>/<name>/.
        test_get_related(WorkflowJobNodeSerializer,
                         workflow_job_node,
                         'workflow_job_nodes',
                         related_resource_name)

    def test_workflow_job_present(self, get_related_mock_and_run, workflow_job_node_related):
        related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related)
        assert 'workflow_job' in related
        assert related['workflow_job'] == '/api/v1/%s/%d/' % ('workflow_jobs', workflow_job_node_related.workflow_job.pk)

    def test_workflow_job_absent(self, workflow_job_node):
        related = WorkflowJobNodeSerializer().get_related(workflow_job_node)
        assert 'workflow_job' not in related

    def test_job_present(self, get_related_mock_and_run, workflow_job_node_related):
        related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related)
        assert 'job' in related
        assert related['job'] == '/api/v1/%s/%d/' % ('jobs', workflow_job_node_related.job.pk)

    def test_job_absent(self, workflow_job_node):
        related = WorkflowJobNodeSerializer().get_related(workflow_job_node)
        assert 'job' not in related

View File

@@ -1,235 +0,0 @@
# Python
import pytest
import mock
from mock import PropertyMock
import json
# AWX
from awx.api.serializers import (
JobTemplateSerializer,
JobSerializer,
JobOptionsSerializer,
CustomInventoryScriptSerializer,
)
from awx.main.models import (
Label,
Job,
CustomInventoryScript,
User,
)
#DRF
from rest_framework.request import Request
from rest_framework import serializers
from rest_framework.test import (
APIRequestFactory,
force_authenticate,
)
def mock_JT_resource_data():
    """Stand-in for resource_validation_data: no errors and no fields."""
    return ({}, [])
@pytest.fixture
def job_template(mocker):
    """A MagicMock job template (pk=5) with stubbed resource validation."""
    jt = mocker.MagicMock(pk=5)
    jt.resource_validation_data = mock_JT_resource_data
    return jt
@pytest.fixture
def job(mocker, job_template):
    """A MagicMock job (pk=5) bound to the job_template fixture."""
    mock_job = mocker.MagicMock(pk=5, job_template=job_template)
    return mock_job
@pytest.fixture
def labels(mocker):
    """Twenty-five Label instances named label-0 .. label-24."""
    return [Label(id=i, name='label-%d' % i) for i in xrange(25)]
@pytest.fixture
def jobs(mocker):
    """Twenty-five Job instances named job-0 .. job-24."""
    return [Job(id=i, name='job-%d' % i) for i in xrange(25)]
class GetRelatedMixin:
    # Shared helpers for serializer get_related() tests; helper names are
    # part of the interface used by the subclasses below.
    def _assert(self, model_obj, related, resource_name, related_resource_name):
        # The link must exist and follow /api/v1/<resource>/<pk>/<related>/.
        assert related_resource_name in related
        assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name)

    def _mock_and_run(self, serializer_class, model_obj):
        # Instantiate the serializer and collect its related links.
        serializer = serializer_class()
        related = serializer.get_related(model_obj)
        return related

    def _test_get_related(self, serializer_class, model_obj, resource_name, related_resource_name):
        related = self._mock_and_run(serializer_class, model_obj)
        self._assert(model_obj, related, resource_name, related_resource_name)
        return related
class GetSummaryFieldsMixin:
    # Shared helpers for serializer get_summary_fields() tests.
    def _assert(self, summary, summary_field_name):
        assert summary_field_name in summary

    def _mock_and_run(self, serializer_class, model_obj):
        serializer = serializer_class()
        return serializer.get_summary_fields(model_obj)

    def _test_get_summary_fields(self, serializer_class, model_obj, summary_field_name):
        summary = self._mock_and_run(serializer_class, model_obj)
        self._assert(summary, summary_field_name)
        return summary
@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
class TestJobTemplateSerializerGetRelated(GetRelatedMixin):
    # Parent serializers are stubbed so only JobTemplateSerializer's own
    # links are exercised.
    @pytest.mark.parametrize("related_resource_name", [
        'jobs',
        'schedules',
        'activity_stream',
        'launch',
        'notification_templates_any',
        'notification_templates_success',
        'notification_templates_error',
        'survey_spec',
        'labels',
        'callback',
    ])
    def test_get_related(self, job_template, related_resource_name):
        # Each related resource -> /api/v1/job_templates/<pk>/<name>/.
        self._test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name)

    def test_callback_absent(self, job_template):
        # Without a host_config_key there is no provisioning callback link.
        job_template.host_config_key = None
        related = self._mock_and_run(JobTemplateSerializer, job_template)
        assert 'callback' not in related
class TestJobTemplateSerializerGetSummaryFields(GetSummaryFieldsMixin):
    def test__recent_jobs(self, mocker, job_template, jobs):
        """_recent_jobs returns the 10 newest jobs, ordered by '-created'."""
        job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs})
        job_template.jobs.all.return_value = job_template.jobs.all
        serializer = JobTemplateSerializer()
        recent_jobs = serializer._recent_jobs(job_template)
        job_template.jobs.all.assert_called_once_with()
        job_template.jobs.all.order_by.assert_called_once_with('-created')
        assert len(recent_jobs) == 10
        # Fixed: the original wrapped this assert in a pointless
        # `for x in jobs[:10]` loop that shadowed the comprehension's `x`
        # and merely repeated the identical comparison ten times.
        assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]]

    def test_survey_spec_exists(self, mocker, job_template):
        # A populated survey_spec yields a 'survey' summary entry.
        job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
        self._test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')

    def test_survey_spec_absent(self, mocker, job_template):
        # No survey_spec -> no 'survey' summary entry.
        job_template.survey_spec = None
        summary = self._mock_and_run(JobTemplateSerializer, job_template)
        assert 'survey' not in summary

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_copy_true(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_copy_false(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_edit_true(self, mocker, job_template):
        pass

    @pytest.mark.skip(reason="RBAC needs to land")
    def test_can_edit_false(self, mocker, job_template):
        pass
@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {})
@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {})
class TestJobSerializerGetRelated(GetRelatedMixin):
    # Parent serializers stubbed; only JobSerializer's own links tested.
    @pytest.mark.parametrize("related_resource_name", [
        'job_events',
        'job_plays',
        'job_tasks',
        'relaunch',
        'labels',
    ])
    def test_get_related(self, mocker, job, related_resource_name):
        # Each related resource -> /api/v1/jobs/<pk>/<name>/.
        self._test_get_related(JobSerializer, job, 'jobs', related_resource_name)

    def test_job_template_absent(self, mocker, job):
        # No parent job template -> no 'job_template' link.
        job.job_template = None
        serializer = JobSerializer()
        related = serializer.get_related(job)
        assert 'job_template' not in related

    def test_job_template_present(self, job):
        related = self._mock_and_run(JobSerializer, job)
        assert 'job_template' in related
        assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk)
@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: {
    'extra_vars': obj.extra_vars})
class TestJobSerializerSubstitution():
    # BaseSerializer.to_representation is stubbed to pass extra_vars through,
    # isolating the survey-password masking done by JobSerializer.
    def test_survey_password_hide(self, mocker):
        job = mocker.MagicMock(**{
            'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
            'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'})
        serializer = JobSerializer(job)
        rep = serializer.to_representation(job)
        extra_vars = json.loads(rep['extra_vars'])
        # The masked value from display_extra_vars() must be what is shown...
        assert extra_vars['secret_key'] == '$encrypted$'
        job.display_extra_vars.assert_called_once_with()
        # ...and the real password must never leak into the representation.
        assert 'my_password' not in extra_vars
@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {})
class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin):
    def test__summary_field_labels_10_max(self, mocker, job_template, labels):
        # 25 labels exist, but the summary must be capped at the first 10,
        # ordered by name.
        job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels})
        job_template.labels.all.return_value = job_template.labels.all
        serializer = JobOptionsSerializer()
        summary_labels = serializer._summary_field_labels(job_template)
        job_template.labels.all.order_by.assert_called_with('name')
        assert len(summary_labels['results']) == 10
        assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]]

    def test_labels_exists(self, mocker, job_template):
        # 'labels' should always appear in the summary fields.
        self._test_get_summary_fields(JobOptionsSerializer, job_template, 'labels')
class TestJobTemplateSerializerValidation(object):
    """validate_extra_vars accepts well-formed JSON/YAML and rejects malformed input."""
    good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"]
    bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"]

    def test_validate_extra_vars(self):
        serializer = JobTemplateSerializer()
        for valid in self.good_extra_vars:
            serializer.validate_extra_vars(valid)
        for invalid in self.bad_extra_vars:
            with pytest.raises(serializers.ValidationError):
                serializer.validate_extra_vars(invalid)
class TestCustomInventoryScriptSerializer(object):
    # Privilege matrix: a superuser, a system auditor, or a member of the
    # script's admin_role may read the script body; an unprivileged user
    # gets None for the 'script' field.
    @pytest.mark.parametrize("superuser,sysaudit,admin_role,value",
                             ((True, False, False, '#!/python'),
                              (False, True, False, '#!/python'),
                              (False, False, True, '#!/python'),
                              (False, False, False, None)))
    def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value):
        with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}):
            # NOTE(review): add_to_class mutates the shared User class for the
            # whole process — confirm this does not leak between parametrized runs.
            User.add_to_class('is_system_auditor', sysaudit)
            user = User(username="root", is_superuser=superuser)
            roles = [user] if admin_role else []
            # Patch admin_role so membership is controlled without a database.
            with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles):
                cis = CustomInventoryScript(pk=1, script='#!/python')
                serializer = CustomInventoryScriptSerializer()
                factory = APIRequestFactory()
                wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json")
                force_authenticate(wsgi_request, user)
                request = Request(wsgi_request)
                serializer.context['request'] = request
                representation = serializer.to_representation(cis)
                assert representation['script'] == value

View File

@@ -43,6 +43,8 @@ class TestApiV1RootView:
'unified_job_templates', 'unified_job_templates',
'unified_jobs', 'unified_jobs',
'activity_stream', 'activity_stream',
'workflow_job_templates',
'workflow_jobs',
] ]
view = ApiV1RootView() view = ApiV1RootView()
ret = view.get(mocker.MagicMock()) ret = view.get(mocker.MagicMock())

View File

@@ -0,0 +1,167 @@
from awx.main.management.commands.run_task_system import (
SimpleDAG,
WorkflowDAG,
)
from awx.main.models import Job
from awx.main.models.workflow import WorkflowJobNode
import pytest
@pytest.fixture
def dag_root():
    """SimpleDAG of six dict nodes wired into three disjoint parent->child edges."""
    dag = SimpleDAG()
    nodes = [{k: k} for k in range(1, 7)]
    for node in nodes:
        dag.add_node(node)
    dag.add_edge(nodes[0], nodes[1])
    dag.add_edge(nodes[2], nodes[3])
    dag.add_edge(nodes[4], nodes[5])
    return dag
@pytest.fixture
def dag_simple_edge_labels():
    """Same shape as dag_root, but each edge carries a distinct label."""
    dag = SimpleDAG()
    nodes = [{k: k} for k in range(1, 7)]
    for node in nodes:
        dag.add_node(node)
    dag.add_edge(nodes[0], nodes[1], 'one')
    dag.add_edge(nodes[2], nodes[3], 'two')
    dag.add_edge(nodes[4], nodes[5], 'three')
    return dag
# NOTE(review): the block below is commented-out TestSimpleDAG scaffolding
# (it makes calls but asserts nothing); kept verbatim pending a decision to
# either finish these tests or delete the dead code.
'''
class TestSimpleDAG(object):
    def test_get_root_nodes(self, dag_root):
        leafs = dag_root.get_leaf_nodes()
        roots = dag_root.get_root_nodes()
    def test_get_labeled_edges(self, dag_simple_edge_labels):
        dag = dag_simple_edge_labels
        nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one')
        nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two')
'''
@pytest.fixture
def factory_node():
    """Factory: WorkflowJobNode with an attached Job in the given status (or no job)."""
    def _make(id, status):
        node = WorkflowJobNode(id=id)
        if status:
            node.job = Job(status=status)
        return node
    return _make
@pytest.fixture
def workflow_dag_level_2(factory_node):
    """Three successful roots, each with one pending success-child.

    Returns (dag, nodes expected from bfs_nodes_to_run, is_workflow_done).
    """
    dag = WorkflowDAG()
    finished = [factory_node(i, 'successful') for i in range(0, 3)]
    pending = [factory_node(i, None) for i in range(3, 6)]
    for node in finished + pending:
        dag.add_node(node)
    for parent, child in zip(finished, pending):
        dag.add_edge(parent, child, 'success_nodes')
    return (dag, pending, False)
@pytest.fixture
def workflow_dag_multiple_roots(factory_node):
    """Three unstarted roots, each with one child; the roots should run first.

    Returns (dag, nodes expected from bfs_nodes_to_run, is_workflow_done).
    """
    dag = WorkflowDAG()
    nodes = [factory_node(i, None) for i in range(1, 7)]
    for node in nodes:
        dag.add_node(node)
    dag.add_edge(nodes[0], nodes[3], 'success_nodes')
    dag.add_edge(nodes[1], nodes[4], 'success_nodes')
    dag.add_edge(nodes[2], nodes[5], 'success_nodes')
    return (dag, nodes[0:3], False)
@pytest.fixture
def workflow_dag_multiple_edges_labeled(factory_node):
    # Failure chain: 0 failed -> failure edge to 2, 2 failed -> failure edge
    # to 4, 4 failed -> failure edge to 5 (unstarted). Only node 5 should be
    # runnable; the success branches (nodes 1 and 3) are dead paths.
    # Returns (dag, expected bfs_nodes_to_run, is_workflow_done).
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'failed'),
        factory_node(1, None),
        factory_node(2, 'failed'),
        factory_node(3, None),
        factory_node(4, 'failed'),
        factory_node(5, None),
    ]
    [dag.add_node(d) for d in data]
    dag.add_edge(data[0], data[1], 'success_nodes')
    dag.add_edge(data[0], data[2], 'failure_nodes')
    dag.add_edge(data[2], data[3], 'success_nodes')
    dag.add_edge(data[2], data[4], 'failure_nodes')
    dag.add_edge(data[4], data[5], 'failure_nodes')
    expected = data[5:6]
    return (dag, expected, False)
@pytest.fixture
def workflow_dag_finished(factory_node):
    # Same failure-chain shape as workflow_dag_multiple_edges_labeled, but the
    # terminal node (5) has already succeeded: nothing is left to run and the
    # workflow counts as done.
    # Returns (dag, expected bfs_nodes_to_run, is_workflow_done).
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'failed'),
        factory_node(1, None),
        factory_node(2, 'failed'),
        factory_node(3, None),
        factory_node(4, 'failed'),
        factory_node(5, 'successful'),
    ]
    [dag.add_node(d) for d in data]
    dag.add_edge(data[0], data[1], 'success_nodes')
    dag.add_edge(data[0], data[2], 'failure_nodes')
    dag.add_edge(data[2], data[3], 'success_nodes')
    dag.add_edge(data[2], data[4], 'failure_nodes')
    dag.add_edge(data[4], data[5], 'failure_nodes')
    expected = []
    return (dag, expected, True)
@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2', 'workflow_dag_multiple_edges_labeled', 'workflow_dag_finished'])
def workflow_dag(request):
    # Indirection fixture: each test using it runs once per DAG scenario.
    # NOTE(review): request.getfuncargvalue() is the pre-pytest-3.0 spelling
    # of getfixturevalue(); rename once the pytest version floor allows.
    return request.getfuncargvalue(request.param)
class TestWorkflowDAG():
    """Exercise WorkflowDAG traversal across all parametrized DAG scenarios."""

    def test_bfs_nodes_to_run(self, workflow_dag):
        dag, expected_nodes, _ = workflow_dag
        assert dag.bfs_nodes_to_run() == expected_nodes

    def test_is_workflow_done(self, workflow_dag):
        dag, _, done = workflow_dag
        assert dag.is_workflow_done() == done

View File

@@ -0,0 +1,81 @@
import pytest
from awx.main.models.jobs import JobTemplate
from awx.main.models.workflow import WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJobNode
class TestWorkflowJobInheritNodesMixin():
    class TestCreateWorkflowJobNodes():
        @pytest.fixture
        def job_templates(self):
            return [JobTemplate() for i in range(0, 10)]

        @pytest.fixture
        def job_template_nodes(self, job_templates):
            return [WorkflowJobTemplateNode(unified_job_template=job_templates[i]) for i in range(0, 10)]

        def test__create_workflow_job_nodes(self, mocker, job_template_nodes):
            # Each template node should spawn a WorkflowJobNode bound to the
            # workflow job (the mixin itself here) and the same
            # unified_job_template.
            workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobNode.objects.create')
            mixin = WorkflowJobInheritNodesMixin()
            mixin._create_workflow_job_nodes(job_template_nodes)
            for job_template_node in job_template_nodes:
                workflow_job_node_create.assert_any_call(workflow_job=mixin,
                                                         unified_job_template=job_template_node.unified_job_template)

    class TestMapWorkflowJobNodes():
        @pytest.fixture
        def job_template_nodes(self):
            return [WorkflowJobTemplateNode(id=i) for i in range(0, 20)]

        @pytest.fixture
        def job_nodes(self):
            return [WorkflowJobNode(id=i) for i in range(100, 120)]

        def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes):
            # The map pairs template-node ids to job-node ids positionally.
            mixin = WorkflowJobInheritNodesMixin()
            node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes)
            assert len(node_ids_map) == len(job_template_nodes)
            for i, job_template_node in enumerate(job_template_nodes):
                assert node_ids_map[job_template_node.id] == job_nodes[i].id

    class TestInheritRelationship():
        @pytest.fixture
        def job_template_nodes(self, mocker):
            # Linear chain: template node i's success_nodes point at node i+1.
            nodes = [mocker.MagicMock(id=i) for i in range(0, 10)]
            for i in range(0, 9):
                nodes[i].success_nodes = [mocker.MagicMock(id=i + 1)]
            return nodes

        @pytest.fixture
        def job_nodes(self, mocker):
            nodes = [mocker.MagicMock(id=i) for i in range(100, 110)]
            return nodes

        @pytest.fixture
        def job_nodes_dict(self, job_nodes):
            # Lookup table from job-node id to the node mock.
            _map = {}
            for n in job_nodes:
                _map[n.id] = n
            return _map

        def test__inherit_relationship(self, mocker, job_template_nodes, job_nodes, job_nodes_dict):
            # The success_nodes chain on the template nodes must be recreated
            # between the corresponding job nodes.
            mixin = WorkflowJobInheritNodesMixin()
            mixin._get_workflow_job_node_by_id = lambda x: job_nodes_dict[x]
            mixin._get_all_by_type = lambda x,node_type: x.success_nodes
            node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes)
            for i, job_template_node in enumerate(job_template_nodes):
                mixin._inherit_relationship(job_template_node, job_nodes[i], node_ids_map, 'success_nodes')
            for i in range(0, 9):
                job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1])
View File

@@ -15,6 +15,7 @@ services:
# - sync # - sync
volumes: volumes:
- "../:/tower_devel" - "../:/tower_devel"
privileged: true
# Postgres Database Container # Postgres Database Container
postgres: postgres:

View File

@@ -1,2 +1,2 @@
#!/bin/bash #!/bin/bash
ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml #ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml