From 4c876b40e46107013ad56096024e905fc210e150 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 16 Aug 2016 17:45:18 -0400 Subject: [PATCH 01/12] initial models and endpoints added for workflows --- awx/api/serializers.py | 91 ++++++++++ awx/api/urls.py | 28 +++ awx/api/views.py | 168 ++++++++++++++++-- awx/main/access.py | 139 +++++++++++++++ .../management/commands/run_task_system.py | 3 + .../migrations/0033_v301_workflow_create.py | 70 ++++++++ awx/main/models/__init__.py | 1 + awx/main/models/activity_stream.py | 3 + awx/main/models/workflow.py | 160 +++++++++++++++++ awx/main/tasks.py | 51 +++++- awx/main/tests/conftest.py | 5 + awx/main/tests/factories/__init__.py | 2 + awx/main/tests/factories/fixtures.py | 26 +++ awx/main/tests/factories/tower.py | 34 ++++ awx/main/tests/unit/api/test_views.py | 2 + tools/git_hooks/pre-commit | 2 +- 16 files changed, 766 insertions(+), 19 deletions(-) create mode 100644 awx/main/migrations/0033_v301_workflow_create.py create mode 100644 awx/main/models/workflow.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2d2e38a8f5..2195f544cc 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -528,6 +528,8 @@ class UnifiedJobTemplateSerializer(BaseSerializer): serializer_class = JobTemplateSerializer elif isinstance(obj, SystemJobTemplate): serializer_class = SystemJobTemplateSerializer + elif isinstance(obj, WorkflowJobTemplateSerializer): + serializer_class = WorkflowJobTemplateSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) return serializer.to_representation(obj) @@ -2168,6 +2170,95 @@ class SystemJobCancelSerializer(SystemJobSerializer): class Meta: fields = ('can_cancel',) + + + +# TODO: +class WorkflowJobSerializer(UnifiedJobSerializer): + + class Meta: + model = WorkflowJob + fields = ('*', 'workflow_job_template', 'extra_vars') + + def get_related(self, obj): + res = super(WorkflowJobSerializer, self).get_related(obj) + if obj.system_job_template: + res['workflow_job_template'] = reverse('api:workflow_job_template_detail', + args=(obj.workflow_job_template.pk,)) + # TODO: + #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,)) + if obj.can_cancel or True: + res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,)) + return res + + +# TODO: +class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer): + pass + +# TODO: +class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer): + + class Meta: + model = WorkflowJobTemplate + fields = ('*',) + + def get_related(self, obj): + res = super(WorkflowJobTemplateListSerializer, self).get_related(obj) + res.update(dict( + jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)), + #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)), + launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)), + workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)), + # TODO: Implement notifications + #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)), + #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)), + #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)), + + )) + return res + +class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): + pass + +class 
WorkflowNodeSerializer(BaseSerializer): + #workflow_job_template = UnifiedJobTemplateSerializer() + + class Meta: + model = WorkflowNode + fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + + def get_related(self, obj): + res = super(WorkflowNodeSerializer, self).get_related(obj) + res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) + if obj.unified_job_template: + res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,)) + res['failure_nodes'] = reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,)) + res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,)) + + return res + +class WorkflowNodeDetailSerializer(WorkflowNodeSerializer): + + ''' + Influence the api browser sample data to not include workflow_job_template + when editing a WorkflowNode. + + Note: I was not able to accomplish this trough the use of extra_kwargs. + Maybe something to do with workflow_job_template being a relational field? + ''' + def build_relational_field(self, field_name, relation_info): + field_class, field_kwargs = super(WorkflowNodeDetailSerializer, self).build_relational_field(field_name, relation_info) + if self.instance and field_name == 'workflow_job_template': + field_kwargs['read_only'] = True + field_kwargs.pop('queryset', None) + return field_class, field_kwargs + + +class WorkflowNodeListSerializer(WorkflowNodeSerializer): + pass + class JobListSerializer(JobSerializer, UnifiedJobListSerializer): pass diff --git a/awx/api/urls.py b/awx/api/urls.py index 97acc317bd..ec08ec7706 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -255,6 +255,23 @@ system_job_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/notifications/$', 'system_job_notifications_list'), ) +workflow_job_template_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_template_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_template_detail'), + url(r'^(?P[0-9]+)/jobs/$', 'workflow_job_template_jobs_list'), + url(r'^(?P[0-9]+)/launch/$', 'workflow_job_template_launch'), + url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'), +# url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_template_cancel'), + #url(r'^(?P[0-9]+)/nodes/$', 'workflow_job_template_node_list'), +) +workflow_job_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), +# url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_cancel'), + #url(r'^(?P[0-9]+)/notifications/$', 'workflow_job_notifications_list'), +) + + notification_template_urls = patterns('awx.api.views', url(r'^$', 'notification_template_list'), url(r'^(?P[0-9]+)/$', 'notification_template_detail'), @@ -272,6 +289,14 @@ label_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'label_detail'), ) +workflow_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_node_always_nodes_list'), +) + schedule_urls = patterns('awx.api.views', url(r'^$', 'schedule_list'), url(r'^(?P[0-9]+)/$', 'schedule_detail'), @@ -321,7 +346,10 @@ v1_urls = patterns('awx.api.views', url(r'^system_jobs/', 
include(system_job_urls)), url(r'^notification_templates/', include(notification_template_urls)), url(r'^notifications/', include(notification_urls)), + url(r'^workflow_job_templates/',include(workflow_job_template_urls)), + url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), + url(r'^workflow_nodes/', include(workflow_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 9686387f0c..2fc19e168a 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -11,6 +11,7 @@ import socket import sys import errno import logging +import copy from base64 import b64encode from collections import OrderedDict @@ -145,6 +146,8 @@ class ApiV1RootView(APIView): data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_jobs'] = reverse('api:unified_job_list') data['activity_stream'] = reverse('api:activity_stream_list') + data['workflow_job_templates'] = reverse('api:workflow_job_template_list') + data['workflow_jobs'] = reverse('api:workflow_job_list') return Response(data) @@ -1747,16 +1750,24 @@ class GroupList(ListCreateAPIView): model = Group serializer_class = GroupSerializer -class GroupChildrenList(SubListCreateAttachDetachAPIView): +''' +Useful when you have a self-refering ManyToManyRelationship. +* Tower uses a shallow (2-deep only) url pattern. For example: - model = Group - serializer_class = GroupSerializer - parent_model = Group - relationship = 'children' +When an object hangs off of a parent object you would have the url of the +form /api/v1/parent_model/34/child_model. If you then wanted a child of the +child model you would NOT do /api/v1/parent_model/34/child_model/87/child_child_model +Instead, you would access the child_child_model via /api/v1/child_child_model/87/ +and you would create child_child_model's off of /api/v1/child_model/87/child_child_model_set +Now, when creating child_child_model related to child_model you still want to +link child_child_model to parent_model. That's what this class is for +''' +class EnforceParentRelationshipMixin(object): + enforce_parent_relationship = '' def update_raw_data(self, data): - data.pop('inventory', None) - return super(GroupChildrenList, self).update_raw_data(data) + data.pop(self.enforce_parent_relationship, None) + return super(EnforceParentRelationshipMixin, self).update_raw_data(data) def create(self, request, *args, **kwargs): # Inject parent group inventory ID into new group data. @@ -1764,16 +1775,16 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView): # HACK: Make request data mutable. 
if getattr(data, '_mutable', None) is False: data._mutable = True - data['inventory'] = self.get_parent_object().inventory_id - return super(GroupChildrenList, self).create(request, *args, **kwargs) + data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % relationship) + return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs) - def unattach(self, request, *args, **kwargs): - sub_id = request.data.get('id', None) - if sub_id is not None: - return super(GroupChildrenList, self).unattach(request, *args, **kwargs) - parent = self.get_parent_object() - parent.delete() - return Response(status=status.HTTP_204_NO_CONTENT) +class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = Group + serializer_class = GroupSerializer + parent_model = Group + relationship = 'children' + enforce_parent_relationship = 'inventory' class GroupPotentialChildrenList(SubListAPIView): @@ -2604,6 +2615,131 @@ class JobTemplateObjectRolesList(SubListAPIView): content_type = ContentType.objects.get_for_model(self.parent_model) return Role.objects.filter(content_type=content_type, object_id=po.pk) +# TODO: +class WorkflowNodeList(ListCreateAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeSerializer + new_in_310 = True + +# TODO: +class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeDetailSerializer + parent_model = WorkflowJobTemplate + relationship = 'workflow_job_template' + new_in_310 = True + +class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowNode + relationship = '' + enforce_parent_relationship = 'workflow_job_template' + new_in_310 = True + + ''' + Limit the set of WorkflowNodes to the related nodes of specified by + 'relationship' + ''' + def get_queryset(self): + parent = self.get_parent_object() + self.check_parent_access(parent) + return getattr(parent, self.relationship).all() + +class WorkflowNodeSuccessNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'success_nodes' + +class WorkflowNodeFailureNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'failure_nodes' + +class WorkflowNodeAlwaysNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'always_nodes' + +# TODO: +class WorkflowJobTemplateList(ListCreateAPIView): + + model = WorkflowJobTemplate + serializer_class = WorkflowJobTemplateListSerializer + always_allow_superuser = False + + # TODO: RBAC + ''' + def post(self, request, *args, **kwargs): + ret = super(WorkflowJobTemplateList, self).post(request, *args, **kwargs) + if ret.status_code == 201: + workflow_job_template = WorkflowJobTemplate.objects.get(id=ret.data['id']) + workflow_job_template.admin_role.members.add(request.user) + return ret + ''' + +# TODO: +class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowJobTemplate + serializer_class = WorkflowJobTemplateSerializer + always_allow_superuser = False + +# TODO: +class WorkflowJobTemplateLaunch(GenericAPIView): + + model = WorkflowJobTemplate + serializer_class = EmptySerializer + + def get(self, request, *args, **kwargs): + return Response({}) + + def post(self, request, *args, **kwargs): + obj = self.get_object() + if not request.user.can_access(self.model, 'start', obj): + raise PermissionDenied() + + new_job = 
obj.create_unified_job(**request.data) + new_job.signal_start(**request.data) + data = dict(system_job=new_job.id) + return Response(data, status=status.HTTP_201_CREATED) + +# TODO: +class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJobTemplate + relationship = 'workflow_nodes' + parent_key = 'workflow_job_template' + +# TODO: +class WorkflowJobTemplateJobsList(SubListAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobListSerializer + parent_model = WorkflowJobTemplate + relationship = 'jobs' + parent_key = 'workflow_job_template' +# TODO: +class WorkflowJobList(ListCreateAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobListSerializer + + def get(self, request, *args, **kwargs): + if not request.user.is_superuser and not request.user.is_system_auditor: + raise PermissionDenied("Superuser privileges needed.") + return super(WorkflowJobList, self).get(request, *args, **kwargs) + +# TODO: +class WorkflowJobDetail(RetrieveDestroyAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobSerializer + class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/access.py b/awx/main/access.py index 5fa3b76274..e6597797da 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1132,6 +1132,142 @@ class SystemJobAccess(BaseAccess): ''' model = SystemJob +# TODO: +class WorkflowNodeAccess(BaseAccess): + ''' + I can see/use a WorkflowNode if I have permission to associated Workflow Job Template + ''' + model = WorkflowNode + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + return self.model.objects.all() + + @check_superuser + def can_read(self, obj): + return True + + @check_superuser + def can_add(self, data): + if not data: # So the browseable API will work + return True + + return True + + @check_superuser + def can_change(self, obj, data): + if self.can_add(data) is False: + return False + + return True + + def can_delete(self, obj): + return self.can_change(obj, None) + +# TODO: +class WorkflowJobTemplateAccess(BaseAccess): + ''' + I can only see/manage Workflow Job Templates if I'm a super user + ''' + + model = WorkflowJobTemplate + + def can_start(self, obj): + return self.can_read(obj) + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + qs = self.model.objects.all() + else: + qs = self.model.accessible_objects(self.user, 'read_role') + return qs.select_related('created_by', 'modified_by', 'next_schedule').all() + + @check_superuser + def can_read(self, obj): + return self.user in obj.read_role + + def can_add(self, data): + ''' + a user can create a job template if they are a superuser, an org admin + of any org that the project is a member, or if they have user or team + based permissions tying the project to the inventory source for the + given action as well as the 'create' deploy permission. + Users who are able to create deploy jobs can also run normal and check (dry run) jobs. 
+ ''' + if not data: # So the browseable API will work + return True + + # if reference_obj is provided, determine if it can be coppied + reference_obj = data.pop('reference_obj', None) + + if 'survey_enabled' in data and data['survey_enabled']: + self.check_license(feature='surveys') + + if self.user.is_superuser: + return True + + def get_value(Class, field): + if reference_obj: + return getattr(reference_obj, field, None) + else: + pk = get_pk_from_dict(data, field) + if pk: + return get_object_or_400(Class, pk=pk) + else: + return None + + return False + + def can_start(self, obj, validate_license=True): + # TODO: Are workflows allowed for all licenses ?? + # Check license. + ''' + if validate_license: + self.check_license() + if obj.job_type == PERM_INVENTORY_SCAN: + self.check_license(feature='system_tracking') + if obj.survey_enabled: + self.check_license(feature='surveys') + ''' + + # Super users can start any job + if self.user.is_superuser: + return True + + return self.user in obj.execute_role + + def can_change(self, obj, data): + data_for_change = data + if self.user not in obj.admin_role and not self.user.is_superuser: + return False + if data is not None: + data = dict(data) + + if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']: + self.check_license(feature='surveys') + return True + + return self.can_read(obj) and self.can_add(data_for_change) + + def can_delete(self, obj): + is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role + if not is_delete_allowed: + return False + active_jobs = [dict(type="job", id=o.id) + for o in obj.jobs.filter(status__in=ACTIVE_STATES)] + if len(active_jobs) > 0: + raise StateConflict({"conflict": "Resource is being used by running jobs", + "active_jobs": active_jobs}) + return True + + + +class WorkflowJobAccess(BaseAccess): + ''' + I can only see Workflow Jobs if I'm a super user + ''' + model = WorkflowJob + class AdHocCommandAccess(BaseAccess): ''' I can only see/run ad hoc commands when: @@ -1724,3 +1860,6 @@ register_access(Role, RoleAccess) register_access(NotificationTemplate, NotificationTemplateAccess) register_access(Notification, NotificationAccess) register_access(Label, LabelAccess) +register_access(WorkflowNode, WorkflowNodeAccess) +register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess) +register_access(WorkflowJob, WorkflowJobAccess) diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index f91309030c..437f0cdf6e 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -110,6 +110,8 @@ class SimpleDAG(object): return "project_update" elif type(obj) == SystemJob: return "system_job" + elif type(obj) == WorkflowJob: + return "workflow_job" return "unknown" def get_dependencies(self, obj): @@ -149,6 +151,7 @@ def get_tasks(): ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)] graph_system_jobs = [sj for sj in SystemJob.objects.filter(status__in=RELEVANT_JOBS)] + all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates + graph_project_updates + graph_system_jobs, key=lambda task: task.created) diff --git a/awx/main/migrations/0033_v301_workflow_create.py b/awx/main/migrations/0033_v301_workflow_create.py new file mode 100644 index 0000000000..258bdc797d --- /dev/null +++ b/awx/main/migrations/0033_v301_workflow_create.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + 
+from django.db import migrations, models +import django.db.models.deletion +import awx.main.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0032_v302_credential_permissions_update'), + ] + + operations = [ + migrations.CreateModel( + name='WorkflowJob', + fields=[ + ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), + ('extra_vars', models.TextField(default=b'', blank=True)), + ], + options={ + 'ordering': ('id',), + }, + bases=('main.unifiedjob', models.Model), + ), + migrations.CreateModel( + name='WorkflowJobTemplate', + fields=[ + ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), + ('extra_vars', models.TextField(default=b'', blank=True)), + ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')), + ], + bases=('main.unifiedjobtemplate', models.Model), + ), + migrations.CreateModel( + name='WorkflowNode', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), + ('job', models.ForeignKey(related_name='workflow_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), + ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', to='main.WorkflowJobTemplate')), + ], + ), + migrations.AddField( + model_name='workflowjob', + name='workflow_job_template', + field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_job', + field=models.ManyToManyField(to='main.WorkflowJob', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_job_template', + field=models.ManyToManyField(to='main.WorkflowJobTemplate', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_node', + field=models.ManyToManyField(to='main.WorkflowNode', blank=True), + ), + ] diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 1e320e6238..1c019ce01a 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -22,6 +22,7 @@ from awx.main.models.mixins import * # noqa from awx.main.models.notifications import * # noqa from awx.main.models.fact import * # noqa from awx.main.models.label import * # noqa +from awx.main.models.workflow import * # noqa # Monkeypatch Django serializer to ignore django-taggit fields (which break # the dumpdata command; see https://github.com/alex/django-taggit/issues/155). 
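
The WorkflowNode table created by this migration encodes a self-referential DAG: workflow_job_template anchors a node to its owning template, unified_job_template is what the node runs, job records the spawned run, and the three non-symmetrical ManyToMany fields (success_nodes, failure_nodes, always_nodes) are outgoing edges keyed by the outcome that activates them. A plain-Python sketch of the graph shape this schema encodes — Node is a simplified stand-in for the Django model, and the deploy/smoke/rollback names are invented for illustration, not taken from the patch:

    # Simplified stand-in for main.WorkflowNode: plain lists replace the
    # outcome-keyed ManyToMany edge fields from the migration above.
    class Node(object):
        def __init__(self, name):
            self.name = name
            self.job = None              # set once a run has been spawned
            self.success_nodes = []      # followed when the job succeeds
            self.failure_nodes = []      # followed on failure or error
            self.always_nodes = []       # followed regardless of outcome

    # A three-node branch: run deploy; on success run smoke, on failure
    # run rollback.
    deploy, smoke, rollback = Node('deploy'), Node('smoke'), Node('rollback')
    deploy.success_nodes.append(smoke)
    deploy.failure_nodes.append(rollback)

Because the edge fields point back at WorkflowNode itself with symmetrical=False, arbitrary branching and fan-out fall out of the schema without any extra tables.
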
diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 8ff285cb45..bcc5cef0c7 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -49,6 +49,9 @@ class ActivityStream(models.Model): permission = models.ManyToManyField("Permission", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True) job = models.ManyToManyField("Job", blank=True) + workflow_node = models.ManyToManyField("WorkflowNode", blank=True) + workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True) + workflow_job = models.ManyToManyField("WorkflowJob", blank=True) unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+') unified_job = models.ManyToManyField("UnifiedJob", blank=True, related_name='activity_stream_as_unified_job+') ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py new file mode 100644 index 0000000000..91f710a733 --- /dev/null +++ b/awx/main/models/workflow.py @@ -0,0 +1,160 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +# Django +from django.db import models +from django.core.urlresolvers import reverse +#from django import settings as tower_settings + +# AWX +from awx.main.models import UnifiedJobTemplate, UnifiedJob +from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty +from awx.main.models.rbac import ( + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, +) +from awx.main.fields import ImplicitRoleField + +__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowNode'] + +class WorkflowNode(CreatedModifiedModel): + + class Meta: + app_label = 'main' + + # TODO: RBAC + ''' + admin_role = ImplicitRoleField( + parent_role='workflow_job_template.admin_role', + ) + ''' + + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='workflow_nodes', + on_delete=models.CASCADE, + ) + unified_job_template = models.ForeignKey( + 'UnifiedJobTemplate', + related_name='unified_jt_workflow_nodes', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + success_nodes = models.ManyToManyField( + 'self', + related_name='parent_success_nodes', + blank=True, + symmetrical=False, + ) + failure_nodes = models.ManyToManyField( + 'self', + related_name='parent_failure_nodes', + blank=True, + symmetrical=False, + ) + always_nodes = models.ManyToManyField( + 'self', + related_name='parent_always_nodes', + blank=True, + symmetrical=False, + ) + job = models.ForeignKey( + 'UnifiedJob', + related_name='workflow_node', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + + def get_absolute_url(self): + return reverse('api:workflow_node_detail', args=(self.pk,)) + +class WorkflowJobOptions(BaseModel): + class Meta: + abstract = True + + extra_vars = models.TextField( + blank=True, + default='', + ) + +class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): + + class Meta: + app_label = 'main' + + admin_role = ImplicitRoleField( + parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + ) + + @classmethod + def _get_unified_job_class(cls): + return WorkflowJob + + @classmethod + def _get_unified_job_field_names(cls): + # TODO: ADD LABELS + return ['name', 'description', 'extra_vars', 'workflow_nodes'] + + def get_absolute_url(self): + return reverse('api:workflow_job_template_detail', args=(self.pk,)) + + 
@property + def cache_timeout_blocked(self): + # TODO: don't allow running of job template if same workflow template running + return False + + # TODO: Notifications + # TODO: Surveys + + def create_job(self, **kwargs): + ''' + Create a new job based on this template. + ''' + return self.create_unified_job(**kwargs) + + +class WorkflowJob(UnifiedJob, WorkflowJobOptions): + + class Meta: + app_label = 'main' + ordering = ('id',) + + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='jobs', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + + extra_vars_dict = VarsDictProperty('extra_vars', True) + + @classmethod + def _get_parent_field_name(cls): + return 'workflow_job_template' + + @classmethod + def _get_task_class(cls): + from awx.main.tasks import RunWorkflowJob + return RunWorkflowJob + + def socketio_emit_data(self): + return {} + + def get_absolute_url(self): + return reverse('api:workflow_job_detail', args=(self.pk,)) + + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) + + def is_blocked_by(self, obj): + return True + + @property + def task_impact(self): + return 0 + diff --git a/awx/main/tasks.py b/awx/main/tasks.py index b77275c0fd..9f05d68209 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -55,8 +55,10 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', - 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', - 'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks'] + 'RunAdHocCommand', 'RunWorkflowJob', 'handle_work_error', + 'handle_work_success', 'update_inventory_computed_fields', + 'send_notifications', 'run_administrative_checks', + 'run_workflow_job'] HIDDEN_PASSWORD = '**********' @@ -1658,3 +1660,48 @@ class RunSystemJob(BaseTask): def build_cwd(self, instance, **kwargs): return settings.BASE_DIR +class RunWorkflowJob(BaseTask): + + name = 'awx.main.tasks.run_workflow_job' + model = WorkflowJob + + def run(self, pk, **kwargs): + ''' + Run the job/task and capture its output. + ''' + instance = self.update_model(pk, status='running', celery_task_id=self.request.id) + + instance.socketio_emit_status("running") + status, rc, tb = 'error', None, '' + output_replacements = [] + try: + self.pre_run_hook(instance, **kwargs) + if instance.cancel_flag: + instance = self.update_model(instance.pk, status='canceled') + if instance.status != 'running': + if hasattr(settings, 'CELERY_UNIT_TEST'): + return + else: + # Stop the task chain and prevent starting the job if it has + # already been canceled. 
+ instance = self.update_model(pk) + status = instance.status + raise RuntimeError('not starting %s task' % instance.status) + #status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle) + # TODO: Do the workflow logic here + except Exception: + if status != 'canceled': + tb = traceback.format_exc() + instance = self.update_model(pk, status=status, result_traceback=tb) + self.post_run_hook(instance, **kwargs) + instance.socketio_emit_status(status) + if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'): + # Raising an exception will mark the job as 'failed' in celery + # and will stop a task chain from continuing to execute + if status == 'canceled': + raise Exception("Task %s(pk:%s) was canceled (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) + else: + raise Exception("Task %s(pk:%s) encountered an error (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) + if not hasattr(settings, 'CELERY_UNIT_TEST'): + self.signal_finished(pk) + diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py index 035b627922..9b2b00455c 100644 --- a/awx/main/tests/conftest.py +++ b/awx/main/tests/conftest.py @@ -7,6 +7,7 @@ from awx.main.tests.factories import ( create_job_template, create_notification_template, create_survey_spec, + create_workflow_job_template, ) @pytest.fixture @@ -40,6 +41,10 @@ def job_template_with_survey_passwords_factory(job_template_factory): def job_with_secret_key_unit(job_with_secret_key_factory): return job_with_secret_key_factory(persisted=False) +@pytest.fixture +def workflow_job_template_factory(): + return create_workflow_job_template + @pytest.fixture def get_ssh_version(mocker): return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8') diff --git a/awx/main/tests/factories/__init__.py b/awx/main/tests/factories/__init__.py index 81a1144a52..4c039c63b9 100644 --- a/awx/main/tests/factories/__init__.py +++ b/awx/main/tests/factories/__init__.py @@ -3,6 +3,7 @@ from .tower import ( create_job_template, create_notification_template, create_survey_spec, + create_workflow_job_template, ) from .exc import ( @@ -14,5 +15,6 @@ __all__ = [ 'create_job_template', 'create_notification_template', 'create_survey_spec', + 'create_workflow_job_template', 'NotUnique', ] diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index feca114410..cdbfac6531 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -13,6 +13,7 @@ from awx.main.models import ( Credential, Inventory, Label, + WorkflowJobTemplate, ) # mk methods should create only a single object of a single type. 
@@ -152,3 +153,28 @@ def mk_job_template(name, job_type='run', if persisted: jt.save() return jt + +def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): + wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars) + + wfjt.survey_spec = spec + if wfjt.survey_spec is not None: + wfjt.survey_enabled = True + + if persisted: + wfjt.save() + return wfjt + +def mk_workflow_node(workflow_job_template=None, unified_job_template=None, + success_nodes=None, failure_nodes=None, always_nodes=None, + job=None, persisted=True): + workflow_node = WorkflowNode(workflow_job_template=workflow_job_template, + unified_job_template=job_template, + success_nodes=success_nodes, + failure_nodes=failure_nodes, + always_nodes=always_nodes, + job=job) + if persisted: + workflow_node.save() + return workflow_node + diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 8116ec83bf..3813bf2faa 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -9,6 +9,7 @@ from awx.main.models import ( Inventory, Job, Label, + WorkflowJobTemplate, ) from .objects import ( @@ -28,6 +29,7 @@ from .fixtures import ( mk_project, mk_label, mk_notification_template, + mk_workflow_job_template, ) @@ -343,3 +345,35 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs): users=_Mapped(users), superusers=_Mapped(superusers), teams=teams) + +def create_workflow_job_template(name, persisted=True, **kwargs): + Objects = generate_objects(["workflow_job_template", + "survey",], kwargs) + + spec = None + jobs = None + + extra_vars = kwargs.get('extra_vars', '') + + if 'survey' in kwargs: + spec = create_survey_spec(kwargs['survey']) + + wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars, + persisted=persisted) + + if 'jobs' in kwargs: + for i in kwargs['jobs']: + if type(i) is Job: + jobs[i.pk] = i + else: + # Fill in default survey answers + job_extra_vars = {} + for question in spec['spec']: + job_extra_vars[question['variable']] = question['default'] + jobs[i] = mk_job(job_template=wfjt, extra_vars=job_extra_vars, + persisted=persisted) + + return Objects(workflow_job_template=wfjt, + #jobs=jobs, + survey=spec,) + diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py index 6a97831f02..798c30bf6f 100644 --- a/awx/main/tests/unit/api/test_views.py +++ b/awx/main/tests/unit/api/test_views.py @@ -43,6 +43,8 @@ class TestApiV1RootView: 'unified_job_templates', 'unified_jobs', 'activity_stream', + 'workflow_job_templates', + 'workflow_jobs', ] view = ApiV1RootView() ret = view.get(mocker.MagicMock()) diff --git a/tools/git_hooks/pre-commit b/tools/git_hooks/pre-commit index a4f4045b4d..3f4ece929d 100755 --- a/tools/git_hooks/pre-commit +++ b/tools/git_hooks/pre-commit @@ -1,2 +1,2 @@ #!/bin/bash -ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml +#ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml From 2cfdee3b215937bfc67d8fb935317f2b33d58ee9 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 30 Aug 2016 15:04:21 -0400 Subject: [PATCH 02/12] turn job templates in jobs via launch --- awx/api/serializers.py | 7 +- awx/api/urls.py | 1 + awx/api/views.py | 15 ++++- .../management/commands/run_task_system.py | 13 +++- .../migrations/0034_auto_20160830_1716.py | 20 ++++++ awx/main/models/workflow.py | 64 ++++++++++++++++--- awx/main/tasks.py | 6 +- tools/docker-compose.yml | 1 + 8 files changed, 108 insertions(+), 19 deletions(-) create mode 100644 
awx/main/migrations/0034_auto_20160830_1716.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2195f544cc..42489bc1b0 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2172,7 +2172,6 @@ class SystemJobCancelSerializer(SystemJobSerializer): - # TODO: class WorkflowJobSerializer(UnifiedJobSerializer): @@ -2182,13 +2181,17 @@ class WorkflowJobSerializer(UnifiedJobSerializer): def get_related(self, obj): res = super(WorkflowJobSerializer, self).get_related(obj) - if obj.system_job_template: + if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) # TODO: #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,)) + res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,)) + # TODO: Cancel job + ''' if obj.can_cancel or True: res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,)) + ''' return res diff --git a/awx/api/urls.py b/awx/api/urls.py index ec08ec7706..e7240e39e2 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -267,6 +267,7 @@ workflow_job_template_urls = patterns('awx.api.views', workflow_job_urls = patterns('awx.api.views', url(r'^$', 'workflow_job_list'), url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), + url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'), # url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_cancel'), #url(r'^(?P[0-9]+)/notifications/$', 'workflow_job_notifications_list'), ) diff --git a/awx/api/views.py b/awx/api/views.py index 2fc19e168a..69a38fde2f 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -1775,7 +1775,7 @@ class EnforceParentRelationshipMixin(object): # HACK: Make request data mutable. if getattr(data, '_mutable', None) is False: data._mutable = True - data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % relationship) + data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % self.enforce_parent_relationship) return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs) class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -2702,7 +2702,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView): new_job = obj.create_unified_job(**request.data) new_job.signal_start(**request.data) - data = dict(system_job=new_job.id) + data = dict(workflow_job=new_job.id) return Response(data, status=status.HTTP_201_CREATED) # TODO: @@ -2723,6 +2723,7 @@ class WorkflowJobTemplateJobsList(SubListAPIView): parent_model = WorkflowJobTemplate relationship = 'jobs' parent_key = 'workflow_job_template' + # TODO: class WorkflowJobList(ListCreateAPIView): @@ -2740,6 +2741,16 @@ class WorkflowJobDetail(RetrieveDestroyAPIView): model = WorkflowJob serializer_class = WorkflowJobSerializer +class WorkflowJobWorkflowNodesList(SubListAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJob + relationship = 'workflow_job_nodes' + parent_key = 'job' + + class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 437f0cdf6e..0cdc3c1556 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -137,6 +137,13 @@ class SimpleDAG(object): leafs.append(n) return leafs + def 
get_root_nodes(self): + roots = [] + for n in self.nodes: + if len(self.get_dependents(n['node_object'])) < 1: + roots.append(n) + return roots + def get_tasks(): """Fetch all Tower tasks that are relevant to the task management system. @@ -151,9 +158,11 @@ def get_tasks(): ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)] graph_system_jobs = [sj for sj in SystemJob.objects.filter(status__in=RELEVANT_JOBS)] - + graph_workflow_jobs = [wf for wf in + WorkflowJob.objects.filter(status__in=RELEVANT_JOBS)] all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates + - graph_project_updates + graph_system_jobs, + graph_project_updates + graph_system_jobs + + graph_workflow_jobs, key=lambda task: task.created) return all_actions diff --git a/awx/main/migrations/0034_auto_20160830_1716.py b/awx/main/migrations/0034_auto_20160830_1716.py new file mode 100644 index 0000000000..a285a0d9dd --- /dev/null +++ b/awx/main/migrations/0034_auto_20160830_1716.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0033_v301_workflow_create'), + ] + + operations = [ + migrations.AlterField( + model_name='workflownode', + name='job', + field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), + ), + ] diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 91f710a733..c77ed0c43d 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -8,6 +8,7 @@ from django.core.urlresolvers import reverse # AWX from awx.main.models import UnifiedJobTemplate, UnifiedJob +from awx.main.models.notifications import JobNotificationMixin from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty from awx.main.models.rbac import ( ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, @@ -61,7 +62,7 @@ class WorkflowNode(CreatedModifiedModel): ) job = models.ForeignKey( 'UnifiedJob', - related_name='workflow_node', + related_name='workflow_job_nodes', blank=True, null=True, default=None, @@ -96,7 +97,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): @classmethod def _get_unified_job_field_names(cls): # TODO: ADD LABELS - return ['name', 'description', 'extra_vars', 'workflow_nodes'] + return ['name', 'description', 'extra_vars',] def get_absolute_url(self): return reverse('api:workflow_job_template_detail', args=(self.pk,)) @@ -109,14 +110,53 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): # TODO: Notifications # TODO: Surveys - def create_job(self, **kwargs): - ''' - Create a new job based on this template. - ''' - return self.create_unified_job(**kwargs) + #def create_job(self, **kwargs): + # ''' + # Create a new job based on this template. + # ''' + # return self.create_unified_job(**kwargs) + # TODO: Delete create_unified_job here and explicitly call create_workflow_job() .. 
figure out where the call is + def create_unified_job(self, **kwargs): -class WorkflowJob(UnifiedJob, WorkflowJobOptions): + #def create_workflow_job(self, **kwargs): + #workflow_job = self.create_unified_job(**kwargs) + workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs) + workflow_job.inherit_jt_workflow_nodes() + return workflow_job + +class WorkflowJobInheritNodesMixin(object): + def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type): + old_related_nodes = getattr(old_node, node_type).all() + new_node_type_mgr = getattr(new_node, node_type) + + for old_related_node in old_related_nodes: + new_related_node_id = node_ids_map[old_related_node.id] + new_related_node = WorkflowNode.objects.get(id=new_related_node_id) + new_node_type_mgr.add(new_related_node) + + def inherit_jt_workflow_nodes(self): + new_nodes = [] + old_nodes = self.workflow_job_template.workflow_nodes.all() + + node_ids_map = {} + + for old_node in old_nodes: + new_node = WorkflowNode.objects.get(id=old_node.pk) + new_node.job = self + new_node.pk = None + new_node.save() + new_nodes.append(new_node) + + node_ids_map[old_node.id] = new_node.id + + for index, old_node in enumerate(old_nodes): + new_node = new_nodes[index] + for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']: + self._inherit_relationship(old_node, new_node, node_ids_map, node_type) + + +class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, WorkflowJobInheritNodesMixin): class Meta: app_label = 'main' @@ -158,3 +198,11 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions): def task_impact(self): return 0 + # TODO: workflow job notifications + def get_notification_templates(self): + return [] + + # TODO: workflow job notifications + def get_notification_friendly_name(self): + return "Workflow Job" + diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 9f05d68209..25479ae5ca 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -191,7 +191,6 @@ def notify_task_runner(metadata_dict): def _send_notification_templates(instance, status_str): if status_str not in ['succeeded', 'failed']: raise ValueError("status_str must be either succeeded or failed") - print("Instance has some shit in it %s" % instance) notification_templates = instance.get_notification_templates() if notification_templates: all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', [])) @@ -239,8 +238,6 @@ def handle_work_error(self, task_id, subtasks=None): instance.socketio_emit_status("failed") if first_instance: - print("Instance type is %s" % first_instance_type) - print("Instance passing along %s" % first_instance.name) _send_notification_templates(first_instance, 'failed') @task() @@ -1675,7 +1672,6 @@ class RunWorkflowJob(BaseTask): status, rc, tb = 'error', None, '' output_replacements = [] try: - self.pre_run_hook(instance, **kwargs) if instance.cancel_flag: instance = self.update_model(instance.pk, status='canceled') if instance.status != 'running': @@ -1692,8 +1688,8 @@ class RunWorkflowJob(BaseTask): except Exception: if status != 'canceled': tb = traceback.format_exc() + status = 'successful' instance = self.update_model(pk, status=status, result_traceback=tb) - self.post_run_hook(instance, **kwargs) instance.socketio_emit_status(status) if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'): # Raising an exception will mark the job as 'failed' in celery diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 
638926b727..4db4d2eb50 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -15,6 +15,7 @@ services: # - sync volumes: - "../:/tower_devel" + privileged: true # Postgres Database Container postgres: From 39ac2c047b21da97c69f20f0eb36028d92fd93ad Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 1 Sep 2016 14:11:20 -0400 Subject: [PATCH 03/12] launch workflows --- awx/api/serializers.py | 10 +- awx/api/views.py | 4 +- .../management/commands/run_task_system.py | 144 ++++++++++++++++-- .../migrations/0035_auto_20160831_2008.py | 19 +++ .../migrations/0036_auto_20160831_2052.py | 25 +++ awx/main/models/workflow.py | 18 ++- awx/main/tasks.py | 46 ++---- awx/main/tests/factories/fixtures.py | 1 - awx/main/tests/factories/tower.py | 10 +- 9 files changed, 217 insertions(+), 60 deletions(-) create mode 100644 awx/main/migrations/0035_auto_20160831_2008.py create mode 100644 awx/main/migrations/0036_auto_20160831_2052.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 42489bc1b0..7ef6efb74a 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2229,13 +2229,19 @@ class WorkflowNodeSerializer(BaseSerializer): class Meta: model = WorkflowNode - fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + # TODO: workflow_job and job read-only + fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'job',) def get_related(self, obj): res = super(WorkflowNodeSerializer, self).get_related(obj) - res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) + if obj.workflow_job_template: + res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.unified_job_template: res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + if obj.job: + res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) + if obj.workflow_job: + res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,)) res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,)) res['failure_nodes'] = reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,)) res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,)) diff --git a/awx/api/views.py b/awx/api/views.py index 69a38fde2f..ce63713707 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2627,8 +2627,6 @@ class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): model = WorkflowNode serializer_class = WorkflowNodeDetailSerializer - parent_model = WorkflowJobTemplate - relationship = 'workflow_job_template' new_in_310 = True class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -2748,7 +2746,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView): always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJob relationship = 'workflow_job_nodes' - parent_key = 'job' + parent_key = 'workflow_job' class SystemJobTemplateList(ListAPIView): diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 0cdc3c1556..3e11b3511d 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -54,6 +54,8 @@ class SimpleDAG(object): type_str = "Inventory" elif type(obj) == ProjectUpdate: type_str = "Project" + 
elif type(obj) == WorkflowJob: + type_str = "Workflow" else: type_str = "Unknown" type_str += "%s" % str(obj.id) @@ -68,10 +70,11 @@ class SimpleDAG(object): short_string_obj(n['node_object']), "red" if n['node_object'].status == 'running' else "black", ) - for from_node, to_node in self.edges: - doc += "%s -> %s;\n" % ( + for from_node, to_node, label in self.edges: + doc += "%s -> %s [ label=\"%s\" ];\n" % ( short_string_obj(self.nodes[from_node]['node_object']), short_string_obj(self.nodes[to_node]['node_object']), + label, ) doc += "}\n" gv_file = open('/tmp/graph.gv', 'w') @@ -82,16 +85,16 @@ class SimpleDAG(object): if self.find_ord(obj) is None: self.nodes.append(dict(node_object=obj, metadata=metadata)) - def add_edge(self, from_obj, to_obj): + def add_edge(self, from_obj, to_obj, label=None): from_obj_ord = self.find_ord(from_obj) to_obj_ord = self.find_ord(to_obj) if from_obj_ord is None or to_obj_ord is None: raise LookupError("Object not found") - self.edges.append((from_obj_ord, to_obj_ord)) + self.edges.append((from_obj_ord, to_obj_ord, label)) def add_edges(self, edgelist): for edge_pair in edgelist: - self.add_edge(edge_pair[0], edge_pair[1]) + self.add_edge(edge_pair[0], edge_pair[1], edge_pair[2]) def find_ord(self, obj): for idx in range(len(self.nodes)): @@ -114,20 +117,28 @@ class SimpleDAG(object): return "workflow_job" return "unknown" - def get_dependencies(self, obj): + def get_dependencies(self, obj, label=None): antecedents = [] this_ord = self.find_ord(obj) - for node, dep in self.edges: - if node == this_ord: - antecedents.append(self.nodes[dep]) + for node, dep, lbl in self.edges: + if label: + if node == this_ord and lbl == label: + antecedents.append(self.nodes[dep]) + else: + if node == this_ord: + antecedents.append(self.nodes[dep]) return antecedents - def get_dependents(self, obj): + def get_dependents(self, obj, label=None): decendents = [] this_ord = self.find_ord(obj) - for node, dep in self.edges: - if dep == this_ord: - decendents.append(self.nodes[node]) + for node, dep, lbl in self.edges: + if label: + if dep == this_ord and lbl == label: + decendents.append(self.nodes[node]) + else: + if dep == this_ord: + decendents.append(self.nodes[node]) return decendents def get_leaf_nodes(self): @@ -144,6 +155,83 @@ class SimpleDAG(object): roots.append(n) return roots +class WorkflowDAG(SimpleDAG): + def __init__(self, workflow_job=None): + super(WorkflowDAG, self).__init__() + if workflow_job: + self._init_graph(workflow_job) + + def _init_graph(self, workflow_job): + workflow_nodes = workflow_job.workflow_job_nodes.all() + for workflow_node in workflow_nodes: + self.add_node(workflow_node) + + for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']: + for workflow_node in workflow_nodes: + related_nodes = getattr(workflow_node, node_type).all() + for related_node in related_nodes: + self.add_edge(workflow_node, related_node, node_type) + + def bfs_nodes_to_run(self): + root_nodes = self.get_root_nodes() + nodes = root_nodes + nodes_found = [] + + for index, n in enumerate(nodes): + obj = n['node_object'] + job = obj.job + print("\t\tExamining node %s job %s" % (obj, job)) + + if not job: + print("\t\tNo job for node %s" % obj) + nodes_found.append(n) + # Job is about to run or is running. Hold our horses and wait for + # the job to finish. We can't proceed down the graph path until we + # have the job result. 
+ elif job.status not in ['failed', 'error', 'successful']: + print("\t\tJob status not 'failed' 'error' nor 'successful' %s" % job.status) + continue + elif job.status in ['failed', 'error']: + print("\t\tJob status is failed or error %s" % job.status) + children_failed = self.get_dependencies(obj, 'failure_nodes') + children_always = self.get_dependencies(obj, 'always_nodes') + children_all = children_failed + children_always + nodes.extend(children_all) + elif job.status in ['successful']: + print("\t\tJob status is successful %s" % job.status) + children_success = self.get_dependencies(obj, 'success_nodes') + nodes.extend(children_success) + else: + logger.warn("Incorrect graph structure") + return [n['node_object'] for n in nodes_found] + + def is_workflow_done(self): + root_nodes = self.get_root_nodes() + nodes = root_nodes + + for index, n in enumerate(nodes): + obj = n['node_object'] + job = obj.job + + if not job: + return False + # Job is about to run or is running. Hold our horses and wait for + # the job to finish. We can't proceed down the graph path until we + # have the job result. + elif job.status not in ['failed', 'error', 'successful']: + return False + elif job.status in ['failed', 'error']: + children_failed = self.get_dependencies(obj, 'failure_nodes') + children_always = self.get_dependencies(obj, 'always_nodes') + children_all = children_failed + children_always + nodes.extend(children_all) + elif job.status in ['successful']: + children_success = self.get_dependencies(obj, 'success_nodes') + nodes.extend(children_success) + else: + logger.warn("Incorrect graph structure") + return True + def get_tasks(): """Fetch all Tower tasks that are relevant to the task management system. @@ -166,6 +254,33 @@ def get_tasks(): key=lambda task: task.created) return all_actions +def get_running_workflow_jobs(): + graph_workflow_jobs = [wf for wf in + WorkflowJob.objects.filter(status='running')] + return graph_workflow_jobs + +def do_spawn_workflow_jobs(): + workflow_jobs = get_running_workflow_jobs() + print("Set of workflow jobs to process %s" % workflow_jobs) + for workflow_job in workflow_jobs: + print("Building the dag") + dag = WorkflowDAG(workflow_job) + print("Imported the workflow job dag") + for n in dag.nodes: + print("\tWorkflow dag node %s" % n) + for f, to, label in dag.edges: + print("\tWorkflow dag edge <%s,%s,%s>" % (f, to, label)) + spawn_nodes = dag.bfs_nodes_to_run() + for spawn_node in spawn_nodes: + print("Spawning job %s" % spawn_node) + # TODO: Inject job template template params as kwargs + kv = {} + job = spawn_node.unified_job_template.create_unified_job(**kv) + print("Started new job %s" % job.id) + spawn_node.job = job + spawn_node.save() + result = job.signal_start(**kv) + def rebuild_graph(message): """Regenerate the task graph by refreshing known tasks from Tower, purging orphaned running tasks, and creating dependencies for new tasks before @@ -182,6 +297,8 @@ def rebuild_graph(message): logger.warn("Ignoring celery task inspector") active_task_queues = None + do_spawn_workflow_jobs() + all_sorted_tasks = get_tasks() if not len(all_sorted_tasks): return None @@ -196,6 +313,7 @@ def rebuild_graph(message): # as a whole that celery appears to be down. 
if not hasattr(settings, 'CELERY_UNIT_TEST'): return None + running_tasks = filter(lambda t: t.status == 'running', all_sorted_tasks) waiting_tasks = filter(lambda t: t.status != 'running', all_sorted_tasks) new_tasks = filter(lambda t: t.status == 'pending', all_sorted_tasks) diff --git a/awx/main/migrations/0035_auto_20160831_2008.py b/awx/main/migrations/0035_auto_20160831_2008.py new file mode 100644 index 0000000000..6297a29824 --- /dev/null +++ b/awx/main/migrations/0035_auto_20160831_2008.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0034_auto_20160830_1716'), + ] + + operations = [ + migrations.AlterField( + model_name='workflownode', + name='workflow_job_template', + field=models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True), + ), + ] diff --git a/awx/main/migrations/0036_auto_20160831_2052.py b/awx/main/migrations/0036_auto_20160831_2052.py new file mode 100644 index 0000000000..ad16af0e4a --- /dev/null +++ b/awx/main/migrations/0036_auto_20160831_2052.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0035_auto_20160831_2008'), + ] + + operations = [ + migrations.AddField( + model_name='workflownode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True), + ), + migrations.AlterField( + model_name='workflownode', + name='job', + field=models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), + ), + ] diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index c77ed0c43d..cc764e48af 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -29,9 +29,13 @@ class WorkflowNode(CreatedModifiedModel): ) ''' + # TODO: Ensure the API forces workflow_job_template being set workflow_job_template = models.ForeignKey( 'WorkflowJobTemplate', related_name='workflow_nodes', + blank=True, + null=True, + default=None, on_delete=models.CASCADE, ) unified_job_template = models.ForeignKey( @@ -60,9 +64,17 @@ class WorkflowNode(CreatedModifiedModel): blank=True, symmetrical=False, ) + workflow_job = models.ForeignKey( + 'WorkflowJob', + related_name='workflow_job_nodes', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) job = models.ForeignKey( 'UnifiedJob', - related_name='workflow_job_nodes', + related_name='unified_job_nodes', blank=True, null=True, default=None, @@ -143,7 +155,9 @@ class WorkflowJobInheritNodesMixin(object): for old_node in old_nodes: new_node = WorkflowNode.objects.get(id=old_node.pk) - new_node.job = self + new_node.workflow_job = self + new_node.job = None + new_node.workflow_job_template = None new_node.pk = None new_node.save() new_nodes.append(new_node) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 25479ae5ca..713874ba3a 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1663,41 +1663,23 @@ class RunWorkflowJob(BaseTask): model = WorkflowJob def run(self, pk, **kwargs): + from awx.main.management.commands.run_task_system import WorkflowDAG ''' Run the 
job/task and capture its output. ''' + pass instance = self.update_model(pk, status='running', celery_task_id=self.request.id) - instance.socketio_emit_status("running") - status, rc, tb = 'error', None, '' - output_replacements = [] - try: - if instance.cancel_flag: - instance = self.update_model(instance.pk, status='canceled') - if instance.status != 'running': - if hasattr(settings, 'CELERY_UNIT_TEST'): - return - else: - # Stop the task chain and prevent starting the job if it has - # already been canceled. - instance = self.update_model(pk) - status = instance.status - raise RuntimeError('not starting %s task' % instance.status) - #status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle) - # TODO: Do the workflow logic here - except Exception: - if status != 'canceled': - tb = traceback.format_exc() - status = 'successful' - instance = self.update_model(pk, status=status, result_traceback=tb) - instance.socketio_emit_status(status) - if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'): - # Raising an exception will mark the job as 'failed' in celery - # and will stop a task chain from continuing to execute - if status == 'canceled': - raise Exception("Task %s(pk:%s) was canceled (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) - else: - raise Exception("Task %s(pk:%s) encountered an error (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) - if not hasattr(settings, 'CELERY_UNIT_TEST'): - self.signal_finished(pk) + + # FIXME: Detect workflow run completion + while True: + dag = WorkflowDAG(instance) + print("Deciding if workflow is done") + if dag.is_workflow_done(): + # TODO: update with accurate finish status (i.e. canceled, error, etc.) + instance = self.update_model(instance.pk, status='success') + print("Workflow IS done") + return + time.sleep(1) + # TODO: Handle cancel diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index cdbfac6531..1f32d76739 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -177,4 +177,3 @@ def mk_workflow_node(workflow_job_template=None, unified_job_template=None, if persisted: workflow_node.save() return workflow_node - diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 3813bf2faa..953cb2d26e 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -366,14 +366,10 @@ def create_workflow_job_template(name, persisted=True, **kwargs): if type(i) is Job: jobs[i.pk] = i else: - # Fill in default survey answers - job_extra_vars = {} - for question in spec['spec']: - job_extra_vars[question['variable']] = question['default'] - jobs[i] = mk_job(job_template=wfjt, extra_vars=job_extra_vars, - persisted=persisted) + # TODO: Create the job + raise RuntimeError("Currently, only already created jobs are supported") return Objects(workflow_job_template=wfjt, - #jobs=jobs, + jobs=jobs, survey=spec,) From 83c5b3323f3c66d8e3ddd664abde1c6e5728dfbd Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Fri, 2 Sep 2016 09:04:49 -0400 Subject: [PATCH 04/12] add workflow to unified endpoints --- awx/api/serializers.py | 10 +++++++--- awx/main/access.py | 8 ++++++-- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 7ef6efb74a..59912c3468 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -513,7 +513,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer): def get_types(self): 
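        # (Not in the diff: get_types() and the isinstance chain just below
        # implement a polymorphic fan-out -- the unified serializer renders
        # each object with the concrete serializer for its model, roughly:
        #
        #     SUBTYPES = {JobTemplate: JobTemplateSerializer,
        #                 SystemJobTemplate: SystemJobTemplateSerializer,
        #                 WorkflowJobTemplate: WorkflowJobTemplateSerializer}
        #     serializer_class = SUBTYPES.get(type(obj), None)
        # )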
if type(self) is UnifiedJobTemplateSerializer: - return ['project', 'inventory_source', 'job_template', 'system_job_template'] + return ['project', 'inventory_source', 'job_template', 'system_job_template', 'workflow_job_template',] else: return super(UnifiedJobTemplateSerializer, self).get_types() @@ -528,7 +528,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer): serializer_class = JobTemplateSerializer elif isinstance(obj, SystemJobTemplate): serializer_class = SystemJobTemplateSerializer - elif isinstance(obj, WorkflowJobTemplateSerializer): + elif isinstance(obj, WorkflowJobTemplate): serializer_class = WorkflowJobTemplateSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) @@ -560,7 +560,7 @@ class UnifiedJobSerializer(BaseSerializer): def get_types(self): if type(self) is UnifiedJobSerializer: - return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job'] + return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job', 'workflow_job',] else: return super(UnifiedJobSerializer, self).get_types() @@ -593,6 +593,8 @@ class UnifiedJobSerializer(BaseSerializer): serializer_class = AdHocCommandSerializer elif isinstance(obj, SystemJob): serializer_class = SystemJobSerializer + elif isinstance(obj, WorkflowJob): + serializer_class = WorkflowJobSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) ret = serializer.to_representation(obj) @@ -640,6 +642,8 @@ class UnifiedJobListSerializer(UnifiedJobSerializer): serializer_class = AdHocCommandListSerializer elif isinstance(obj, SystemJob): serializer_class = SystemJobListSerializer + elif isinstance(obj, WorkflowJob): + serializer_class = WorkflowJobSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) ret = serializer.to_representation(obj) diff --git a/awx/main/access.py b/awx/main/access.py index e6597797da..8ac2fe8377 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1431,10 +1431,12 @@ class UnifiedJobTemplateAccess(BaseAccess): inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES) job_template_qs = self.user.get_queryset(JobTemplate) system_job_template_qs = self.user.get_queryset(SystemJobTemplate) + workflow_job_template_qs = self.user.get_queryset(WorkflowJobTemplate) qs = qs.filter(Q(Project___in=project_qs) | Q(InventorySource___in=inventory_source_qs) | Q(JobTemplate___in=job_template_qs) | - Q(systemjobtemplate__in=system_job_template_qs)) + Q(systemjobtemplate__in=system_job_template_qs) | + Q(workflowjobtemplate__in=workflow_job_template_qs)) qs = qs.select_related( 'created_by', 'modified_by', @@ -1470,11 +1472,13 @@ class UnifiedJobAccess(BaseAccess): job_qs = self.user.get_queryset(Job) ad_hoc_command_qs = self.user.get_queryset(AdHocCommand) system_job_qs = self.user.get_queryset(SystemJob) + workflow_job_qs = self.user.get_queryset(WorkflowJob) qs = qs.filter(Q(ProjectUpdate___in=project_update_qs) | Q(InventoryUpdate___in=inventory_update_qs) | Q(Job___in=job_qs) | Q(AdHocCommand___in=ad_hoc_command_qs) | - Q(SystemJob___in=system_job_qs)) + Q(SystemJob___in=system_job_qs) | + Q(WorkflowJob___in=workflow_job_qs)) qs = qs.select_related( 'created_by', 'modified_by', From 0e68481a85540f273086bb4e6b79a280305f1d91 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 11:48:11 -0400 Subject: [PATCH 05/12] consolidate workflow migration --- 
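(Not part of the commit message: after this patch, RunWorkflowJob.run()
reduces to the polling loop sketched below. The cancel branch is still a
TODO in the code; the sketch fills it in using the cancel_flag field the
other unified jobs already check.)

    import time

    def wait_for_workflow_completion(instance, update_model):
        while True:
            # Refresh from the database so cancel_flag changes are seen.
            instance = update_model(instance.pk)
            dag = WorkflowDAG(instance)
            if dag.is_workflow_done():
                # TODO (from the patch): derive canceled/error/failed
                # instead of assuming success.
                instance = update_model(instance.pk, status='successful')
                break
            if instance.cancel_flag:
                instance = update_model(instance.pk, status='canceled')
                break
            time.sleep(1)
        instance.socketio_emit_status(instance.status)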
.../management/commands/run_task_system.py | 16 +----------- ...w_create.py => 0033_v310_add_workflows.py} | 9 ++++--- .../migrations/0034_auto_20160830_1716.py | 20 --------------- .../migrations/0035_auto_20160831_2008.py | 19 -------------- .../migrations/0036_auto_20160831_2052.py | 25 ------------------- awx/main/tasks.py | 7 +++--- 6 files changed, 10 insertions(+), 86 deletions(-) rename awx/main/migrations/{0033_v301_workflow_create.py => 0033_v310_add_workflows.py} (82%) delete mode 100644 awx/main/migrations/0034_auto_20160830_1716.py delete mode 100644 awx/main/migrations/0035_auto_20160831_2008.py delete mode 100644 awx/main/migrations/0036_auto_20160831_2052.py diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 3e11b3511d..a03c38ffea 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -180,25 +180,20 @@ class WorkflowDAG(SimpleDAG): for index, n in enumerate(nodes): obj = n['node_object'] job = obj.job - print("\t\tExamining node %s job %s" % (obj, job)) if not job: - print("\t\tNo job for node %s" % obj) nodes_found.append(n) # Job is about to run or is running. Hold our horses and wait for # the job to finish. We can't proceed down the graph path until we # have the job result. elif job.status not in ['failed', 'error', 'successful']: - print("\t\tJob status not 'failed' 'error' nor 'successful' %s" % job.status) continue elif job.status in ['failed', 'error']: - print("\t\tJob status is failed or error %s" % job.status) children_failed = self.get_dependencies(obj, 'failure_nodes') children_always = self.get_dependencies(obj, 'always_nodes') children_all = children_failed + children_always nodes.extend(children_all) elif job.status in ['successful']: - print("\t\tJob status is successful %s" % job.status) children_success = self.get_dependencies(obj, 'success_nodes') nodes.extend(children_success) else: @@ -225,7 +220,7 @@ class WorkflowDAG(SimpleDAG): children_always = self.get_dependencies(obj, 'always_nodes') children_all = children_failed + children_always nodes.extend(children_all) - elif job.status in ['successful']: + elif job.status in ['successfult']: children_success = self.get_dependencies(obj, 'success_nodes') nodes.extend(children_success) else: @@ -261,22 +256,13 @@ def get_running_workflow_jobs(): def do_spawn_workflow_jobs(): workflow_jobs = get_running_workflow_jobs() - print("Set of workflow jobs to process %s" % workflow_jobs) for workflow_job in workflow_jobs: - print("Building the dag") dag = WorkflowDAG(workflow_job) - print("Imported the workflow job dag") - for n in dag.nodes: - print("\tWorkflow dag node %s" % n) - for f, to, label in dag.edges: - print("\tWorkflow dag edge <%s,%s,%s>" % (f, to, label)) spawn_nodes = dag.bfs_nodes_to_run() for spawn_node in spawn_nodes: - print("Spawning job %s" % spawn_node) # TODO: Inject job template template params as kwargs kv = {} job = spawn_node.unified_job_template.create_unified_job(**kv) - print("Started new job %s" % job.id) spawn_node.job = job spawn_node.save() result = job.signal_start(**kv) diff --git a/awx/main/migrations/0033_v301_workflow_create.py b/awx/main/migrations/0033_v310_add_workflows.py similarity index 82% rename from awx/main/migrations/0033_v301_workflow_create.py rename to awx/main/migrations/0033_v310_add_workflows.py index 258bdc797d..10f4879537 100644 --- a/awx/main/migrations/0033_v301_workflow_create.py +++ 
b/awx/main/migrations/0033_v310_add_workflows.py @@ -2,7 +2,9 @@ from __future__ import unicode_literals from django.db import migrations, models +import awx.main.models.notifications import django.db.models.deletion +import awx.main.models.workflow import awx.main.fields @@ -22,7 +24,7 @@ class Migration(migrations.Migration): options={ 'ordering': ('id',), }, - bases=('main.unifiedjob', models.Model), + bases=('main.unifiedjob', models.Model, awx.main.models.notifications.JobNotificationMixin, awx.main.models.workflow.WorkflowJobInheritNodesMixin), ), migrations.CreateModel( name='WorkflowJobTemplate', @@ -41,10 +43,11 @@ class Migration(migrations.Migration): ('modified', models.DateTimeField(default=None, editable=False)), ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), - ('job', models.ForeignKey(related_name='workflow_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), - ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', to='main.WorkflowJobTemplate')), + ('workflow_job', models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), ], ), migrations.AddField( diff --git a/awx/main/migrations/0034_auto_20160830_1716.py b/awx/main/migrations/0034_auto_20160830_1716.py deleted file mode 100644 index a285a0d9dd..0000000000 --- a/awx/main/migrations/0034_auto_20160830_1716.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0033_v301_workflow_create'), - ] - - operations = [ - migrations.AlterField( - model_name='workflownode', - name='job', - field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), - ), - ] diff --git a/awx/main/migrations/0035_auto_20160831_2008.py b/awx/main/migrations/0035_auto_20160831_2008.py deleted file mode 100644 index 6297a29824..0000000000 --- a/awx/main/migrations/0035_auto_20160831_2008.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0034_auto_20160830_1716'), - ] - - operations = [ - migrations.AlterField( - model_name='workflownode', - name='workflow_job_template', - field=models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True), - ), - ] diff 
--git a/awx/main/migrations/0036_auto_20160831_2052.py b/awx/main/migrations/0036_auto_20160831_2052.py deleted file mode 100644 index ad16af0e4a..0000000000 --- a/awx/main/migrations/0036_auto_20160831_2052.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0035_auto_20160831_2008'), - ] - - operations = [ - migrations.AddField( - model_name='workflownode', - name='workflow_job', - field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True), - ), - migrations.AlterField( - model_name='workflownode', - name='job', - field=models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), - ), - ] diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 713874ba3a..6c99350af5 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1674,12 +1674,11 @@ class RunWorkflowJob(BaseTask): # FIXME: Detect workflow run completion while True: dag = WorkflowDAG(instance) - print("Deciding if workflow is done") if dag.is_workflow_done(): # TODO: update with accurate finish status (i.e. canceled, error, etc.) - instance = self.update_model(instance.pk, status='success') - print("Workflow IS done") - return + instance = self.update_model(instance.pk, status='successful') + break time.sleep(1) + instance.socketio_emit_status(instance.status) # TODO: Handle cancel From cae48cfb779ef44dd1125f5043cb203ff095cd0e Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 13:04:56 -0400 Subject: [PATCH 06/12] instantiate db with sample workflows --- awx/main/tests/manual/workflows/linear.py | 41 +++++++++++++++++ awx/main/tests/manual/workflows/linear.svg | 1 + awx/main/tests/manual/workflows/parallel.py | 46 ++++++++++++++++++++ awx/main/tests/manual/workflows/parallel.svg | 1 + 4 files changed, 89 insertions(+) create mode 100644 awx/main/tests/manual/workflows/linear.py create mode 100644 awx/main/tests/manual/workflows/linear.svg create mode 100644 awx/main/tests/manual/workflows/parallel.py create mode 100644 awx/main/tests/manual/workflows/parallel.svg diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py new file mode 100644 index 0000000000..dd9918a933 --- /dev/null +++ b/awx/main/tests/manual/workflows/linear.py @@ -0,0 +1,41 @@ +# AWX +from awx.main.models import ( + WorkflowNode, + WorkflowJobTemplate, + WorkflowJob, +) +from awx.main.models.jobs import JobTemplate + +def do_init_workflow(job_template_success, job_template_fail, job_template_never): + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") + wfjt.delete() + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") + print(wfjt.id) + WorkflowNode.objects.all().delete() + if created: + nodes_success = [] + nodes_fail = [] + nodes_never = [] + for i in range(0, 2): + nodes_success.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) + nodes_fail.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) + nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + 
nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
+        nodes_fail[1].delete()
+
+        nodes_success[0].success_nodes.add(nodes_fail[0])
+        nodes_success[0].failure_nodes.add(nodes_never[0])
+
+        nodes_fail[0].failure_nodes.add(nodes_success[1])
+        nodes_fail[0].success_nodes.add(nodes_never[1])
+
+        nodes_success[1].failure_nodes.add(nodes_never[2])
+
+def do_init():
+    jt_success = JobTemplate.objects.get(id=5)
+    jt_fail= JobTemplate.objects.get(id=9)
+    jt_never= JobTemplate.objects.get(id=11)
+    do_init_workflow(jt_success, jt_fail, jt_never)
+
+if __name__ == "__main__":
+    do_init()
diff --git a/awx/main/tests/manual/workflows/linear.svg b/awx/main/tests/manual/workflows/linear.svg
new file mode 100644
index 0000000000..521cc0a9f2
--- /dev/null
+++ b/awx/main/tests/manual/workflows/linear.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py
new file mode 100644
index 0000000000..bd33f350ba
--- /dev/null
+++ b/awx/main/tests/manual/workflows/parallel.py
@@ -0,0 +1,46 @@
+# AWX
+from awx.main.models import (
+    WorkflowNode,
+    WorkflowJobTemplate,
+    WorkflowJob,
+)
+from awx.main.models.jobs import JobTemplate
+
+def do_init_workflow(job_template_success, job_template_fail, job_template_never, jts_parallel):
+    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow")
+    wfjt.delete()
+    wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow")
+    print(wfjt.id)
+    WorkflowNode.objects.all().delete()
+    if created:
+        node_success = WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)
+
+        nodes_never = []
+        for x in range(0, 3):
+            nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
+
+        nodes_parallel = []
+        for jt in jts_parallel:
+            nodes_parallel.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt))
+
+        node_success.success_nodes.add(nodes_parallel[0])
+        node_success.success_nodes.add(nodes_parallel[1])
+        node_success.success_nodes.add(nodes_parallel[2])
+
+        # Add a failure node for each parallel node
+        for i, n in enumerate(nodes_parallel):
+            n.failure_nodes.add(nodes_never[i])
+
+def do_init():
+    jt_success = JobTemplate.objects.get(id=5)
+    jt_fail= JobTemplate.objects.get(id=9)
+    jt_never= JobTemplate.objects.get(id=11)
+
+    jt_parallel = []
+    jt_parallel.append(JobTemplate.objects.get(id=16))
+    jt_parallel.append(JobTemplate.objects.get(id=17))
+    jt_parallel.append(JobTemplate.objects.get(id=18))
+    do_init_workflow(jt_success, jt_fail, jt_never, jt_parallel)
+
+if __name__ == "__main__":
+    do_init()
diff --git a/awx/main/tests/manual/workflows/parallel.svg b/awx/main/tests/manual/workflows/parallel.svg
new file mode 100644
index 0000000000..7d480f7308
--- /dev/null
+++ b/awx/main/tests/manual/workflows/parallel.svg
@@ -0,0 +1 @@
+
\ No newline at end of file

From 28ec68e91be9321879042139b15e6faf23a914c3 Mon Sep 17 00:00:00 2001
From: Chris Meyers
Date: Thu, 8 Sep 2016 13:08:54 -0400
Subject: [PATCH 07/12] workflow dependency tests

---
 .../unit/commands/test_run_task_system.py | 191 ++++++++++++++++++
 1 file changed, 191 insertions(+)
 create mode 100644 awx/main/tests/unit/commands/test_run_task_system.py

diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py
new file mode 100644
index
0000000000..4d28bfd2e0 --- /dev/null +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -0,0 +1,191 @@ +from awx.main.management.commands.run_task_system import ( + SimpleDAG, + WorkflowDAG, +) +from awx.main.models import Job +from awx.main.models.workflow import WorkflowNode +import pytest + +@pytest.fixture +def dag_root(): + dag = SimpleDAG() + data = [ + { 1: 1 }, + { 2: 2 }, + { 3: 3 }, + { 4: 4 }, + { 5: 5 }, + { 6: 6 }, + ] + # Add all the nodes to the DAG + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1]) + dag.add_edge(data[2], data[3]) + dag.add_edge(data[4], data[5]) + + return dag + +@pytest.fixture +def dag_simple_edge_labels(): + dag = SimpleDAG() + data = [ + { 1: 1 }, + { 2: 2 }, + { 3: 3 }, + { 4: 4 }, + { 5: 5 }, + { 6: 6 }, + ] + # Add all the nodes to the DAG + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'one') + dag.add_edge(data[2], data[3], 'two') + dag.add_edge(data[4], data[5], 'three') + + return dag + +class TestSimpleDAG(object): + def test_get_root_nodes(self, dag_root): + leafs = dag_root.get_leaf_nodes() + for l in leafs: + print(l) + + roots = dag_root.get_root_nodes() + for n in roots: + print(n) + + def test_get_labeled_edges(self, dag_simple_edge_labels): + dag = dag_simple_edge_labels + nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one') + nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two') + print("Matching nodes: ") + for n in nodes: + print(n) + +@pytest.fixture +def factory_node(): + def fn(id, status): + wfn = WorkflowNode(id=id) + if status: + j = Job(status=status) + wfn.job = j + return wfn + return fn + +@pytest.fixture +def workflow_dag_multiple_roots(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, None), + factory_node(2, None), + factory_node(3, None), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + return dag + +@pytest.fixture +def workflow_dag_level_2(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, 'success'), + factory_node(2, 'success'), + factory_node(3, 'success'), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + return (dag, data[3:6], False) + +@pytest.fixture +def workflow_dag_multiple_roots(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, None), + factory_node(2, None), + factory_node(3, None), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + expected = data[0:3] + return (dag, expected, False) + +@pytest.fixture +def workflow_dag_multiple_edges_labeled(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(0, 'failed'), + factory_node(1, None), + factory_node(2, 'failed'), + factory_node(3, None), + factory_node(4, 'failed'), + factory_node(5, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'success') + dag.add_edge(data[0], data[2], 'failure') + dag.add_edge(data[2], data[3], 'success') + dag.add_edge(data[2], data[4], 'failure') + dag.add_edge(data[4], data[5], 
'failure') + + expected = data[5:6] + return (dag, expected, False) + +@pytest.fixture +def workflow_dag_finished(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(0, 'failed'), + factory_node(1, None), + factory_node(2, 'failed'), + factory_node(3, None), + factory_node(4, 'failed'), + factory_node(5, 'success'), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'success') + dag.add_edge(data[0], data[2], 'failure') + dag.add_edge(data[2], data[3], 'success') + dag.add_edge(data[2], data[4], 'failure') + dag.add_edge(data[4], data[5], 'failure') + + expected = [] + return (dag, expected, True) + +@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2', 'workflow_dag_multiple_edges_labeled', 'workflow_dag_finished']) +def workflow_dag(request): + return request.getfuncargvalue(request.param) + +class TestWorkflowDAG(): + def test_bfs_nodes_to_run(self, workflow_dag): + dag, expected, is_done = workflow_dag + assert dag.bfs_nodes_to_run() == expected + + def test_is_workflow_done(self, workflow_dag): + dag, expected, is_done = workflow_dag + assert dag.is_workflow_done() == is_done + From e4025a7effcee2c7ad6c2ab806702dcd1e5e2f00 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 14:26:30 -0400 Subject: [PATCH 08/12] flake8 fixes --- awx/api/serializers.py | 2 +- awx/api/views.py | 1 - awx/main/access.py | 7 +- .../management/commands/run_task_system.py | 16 +++- awx/main/models/workflow.py | 8 +- awx/main/tests/factories/fixtures.py | 3 +- awx/main/tests/factories/tower.py | 1 - awx/main/tests/manual/workflows/linear.py | 1 - awx/main/tests/manual/workflows/parallel.py | 1 - .../unit/commands/test_run_task_system.py | 94 +++++++------------ 10 files changed, 60 insertions(+), 74 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 59912c3468..d65fbd0ea9 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2187,7 +2187,7 @@ class WorkflowJobSerializer(UnifiedJobSerializer): res = super(WorkflowJobSerializer, self).get_related(obj) if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', - args=(obj.workflow_job_template.pk,)) + args=(obj.workflow_job_template.pk,)) # TODO: #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,)) res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,)) diff --git a/awx/api/views.py b/awx/api/views.py index ce63713707..517ad0a3d7 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -11,7 +11,6 @@ import socket import sys import errno import logging -import copy from base64 import b64encode from collections import OrderedDict diff --git a/awx/main/access.py b/awx/main/access.py index 8ac2fe8377..588041c6b9 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1172,9 +1172,6 @@ class WorkflowJobTemplateAccess(BaseAccess): model = WorkflowJobTemplate - def can_start(self, obj): - return self.can_read(obj) - def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: qs = self.model.objects.all() @@ -1234,7 +1231,9 @@ class WorkflowJobTemplateAccess(BaseAccess): if self.user.is_superuser: return True - return self.user in obj.execute_role + return self.can_read(obj) + # TODO: We should use execute role rather than read role + #return self.user in obj.execute_role def can_change(self, obj, data): data_for_change = data diff --git a/awx/main/management/commands/run_task_system.py 
b/awx/main/management/commands/run_task_system.py index a03c38ffea..855491f08c 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -220,7 +220,7 @@ class WorkflowDAG(SimpleDAG): children_always = self.get_dependencies(obj, 'always_nodes') children_all = children_failed + children_always nodes.extend(children_all) - elif job.status in ['successfult']: + elif job.status in ['successful']: children_success = self.get_dependencies(obj, 'success_nodes') nodes.extend(children_success) else: @@ -260,12 +260,22 @@ def do_spawn_workflow_jobs(): dag = WorkflowDAG(workflow_job) spawn_nodes = dag.bfs_nodes_to_run() for spawn_node in spawn_nodes: - # TODO: Inject job template template params as kwargs + # TODO: Inject job template template params as kwargs. + # Make sure to take into account extra_vars merge logic kv = {} job = spawn_node.unified_job_template.create_unified_job(**kv) spawn_node.job = job spawn_node.save() - result = job.signal_start(**kv) + can_start = job.signal_start(**kv) + if not can_start: + job.status = 'failed' + job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials" + job.save(update_fields=['status', 'job_explanation']) + job.socketio_emit_status("failed") + + # TODO: should we emit a status on the socket here similar to tasks.py tower_periodic_scheduler() ? + #emit_websocket_notification('/socket.io/jobs', '', dict(id=)) + def rebuild_graph(message): """Regenerate the task graph by refreshing known tasks from Tower, purging diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index cc764e48af..48cdcee27b 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -1,6 +1,9 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. +# Python +#import urlparse + # Django from django.db import models from django.core.urlresolvers import reverse @@ -202,8 +205,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow def get_absolute_url(self): return reverse('api:workflow_job_detail', args=(self.pk,)) - def get_ui_url(self): - return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) + # TODO: Ask UI if this is needed ? + #def get_ui_url(self): + # return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) def is_blocked_by(self, obj): return True diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 1f32d76739..809e71b1bb 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -14,6 +14,7 @@ from awx.main.models import ( Inventory, Label, WorkflowJobTemplate, + WorkflowNode, ) # mk methods should create only a single object of a single type. 
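# (Illustration, not in the diff: with the unified_job_template fix in the
# hunk below, a test can build an in-memory node like
#
#     node = mk_workflow_node(workflow_job_template=wfjt,
#                             unified_job_template=jt,
#                             persisted=False)
#
# where wfjt and jt are factory-made template objects.)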
@@ -169,7 +170,7 @@ def mk_workflow_node(workflow_job_template=None, unified_job_template=None, success_nodes=None, failure_nodes=None, always_nodes=None, job=None, persisted=True): workflow_node = WorkflowNode(workflow_job_template=workflow_job_template, - unified_job_template=job_template, + unified_job_template=unified_job_template, success_nodes=success_nodes, failure_nodes=failure_nodes, always_nodes=always_nodes, diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 953cb2d26e..d7c45e73e2 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -9,7 +9,6 @@ from awx.main.models import ( Inventory, Job, Label, - WorkflowJobTemplate, ) from .objects import ( diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py index dd9918a933..2b096fb8cd 100644 --- a/awx/main/tests/manual/workflows/linear.py +++ b/awx/main/tests/manual/workflows/linear.py @@ -2,7 +2,6 @@ from awx.main.models import ( WorkflowNode, WorkflowJobTemplate, - WorkflowJob, ) from awx.main.models.jobs import JobTemplate diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py index bd33f350ba..071d4e1e94 100644 --- a/awx/main/tests/manual/workflows/parallel.py +++ b/awx/main/tests/manual/workflows/parallel.py @@ -2,7 +2,6 @@ from awx.main.models import ( WorkflowNode, WorkflowJobTemplate, - WorkflowJob, ) from awx.main.models.jobs import JobTemplate diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py index 4d28bfd2e0..0c9468c737 100644 --- a/awx/main/tests/unit/commands/test_run_task_system.py +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -10,12 +10,12 @@ import pytest def dag_root(): dag = SimpleDAG() data = [ - { 1: 1 }, - { 2: 2 }, - { 3: 3 }, - { 4: 4 }, - { 5: 5 }, - { 6: 6 }, + {1: 1}, + {2: 2}, + {3: 3}, + {4: 4}, + {5: 5}, + {6: 6}, ] # Add all the nodes to the DAG [dag.add_node(d) for d in data] @@ -30,12 +30,12 @@ def dag_root(): def dag_simple_edge_labels(): dag = SimpleDAG() data = [ - { 1: 1 }, - { 2: 2 }, - { 3: 3 }, - { 4: 4 }, - { 5: 5 }, - { 6: 6 }, + {1: 1}, + {2: 2}, + {3: 3}, + {4: 4}, + {5: 5}, + {6: 6}, ] # Add all the nodes to the DAG [dag.add_node(d) for d in data] @@ -46,23 +46,18 @@ def dag_simple_edge_labels(): return dag +''' class TestSimpleDAG(object): def test_get_root_nodes(self, dag_root): leafs = dag_root.get_leaf_nodes() - for l in leafs: - print(l) roots = dag_root.get_root_nodes() - for n in roots: - print(n) def test_get_labeled_edges(self, dag_simple_edge_labels): dag = dag_simple_edge_labels nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one') nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two') - print("Matching nodes: ") - for n in nodes: - print(n) +''' @pytest.fixture def factory_node(): @@ -74,41 +69,22 @@ def factory_node(): return wfn return fn -@pytest.fixture -def workflow_dag_multiple_roots(factory_node): - dag = WorkflowDAG() - data = [ - factory_node(1, None), - factory_node(2, None), - factory_node(3, None), - factory_node(4, None), - factory_node(5, None), - factory_node(6, None), - ] - [dag.add_node(d) for d in data] - - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') - - return dag - @pytest.fixture def workflow_dag_level_2(factory_node): dag = WorkflowDAG() data = [ - factory_node(1, 'success'), - factory_node(2, 'success'), 
- factory_node(3, 'success'), + factory_node(0, 'successful'), + factory_node(1, 'successful'), + factory_node(2, 'successful'), + factory_node(3, None), factory_node(4, None), factory_node(5, None), - factory_node(6, None), ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') + dag.add_edge(data[0], data[3], 'success_nodes') + dag.add_edge(data[1], data[4], 'success_nodes') + dag.add_edge(data[2], data[5], 'success_nodes') return (dag, data[3:6], False) @@ -125,9 +101,9 @@ def workflow_dag_multiple_roots(factory_node): ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') + dag.add_edge(data[0], data[3], 'success_nodes') + dag.add_edge(data[1], data[4], 'success_nodes') + dag.add_edge(data[2], data[5], 'success_nodes') expected = data[0:3] return (dag, expected, False) @@ -145,11 +121,11 @@ def workflow_dag_multiple_edges_labeled(factory_node): ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[1], 'success') - dag.add_edge(data[0], data[2], 'failure') - dag.add_edge(data[2], data[3], 'success') - dag.add_edge(data[2], data[4], 'failure') - dag.add_edge(data[4], data[5], 'failure') + dag.add_edge(data[0], data[1], 'success_nodes') + dag.add_edge(data[0], data[2], 'failure_nodes') + dag.add_edge(data[2], data[3], 'success_nodes') + dag.add_edge(data[2], data[4], 'failure_nodes') + dag.add_edge(data[4], data[5], 'failure_nodes') expected = data[5:6] return (dag, expected, False) @@ -163,15 +139,15 @@ def workflow_dag_finished(factory_node): factory_node(2, 'failed'), factory_node(3, None), factory_node(4, 'failed'), - factory_node(5, 'success'), + factory_node(5, 'successful'), ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[1], 'success') - dag.add_edge(data[0], data[2], 'failure') - dag.add_edge(data[2], data[3], 'success') - dag.add_edge(data[2], data[4], 'failure') - dag.add_edge(data[4], data[5], 'failure') + dag.add_edge(data[0], data[1], 'success_nodes') + dag.add_edge(data[0], data[2], 'failure_nodes') + dag.add_edge(data[2], data[3], 'success_nodes') + dag.add_edge(data[2], data[4], 'failure_nodes') + dag.add_edge(data[4], data[5], 'failure_nodes') expected = [] return (dag, expected, True) From cea5ebadb728db7e80e62e16f4c39873d108f037 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 11:04:20 -0400 Subject: [PATCH 09/12] split Node in job and job template node --- awx/api/serializers.py | 43 ++++++--- awx/api/urls.py | 25 ++++-- awx/api/views.py | 73 +++++++++++----- awx/main/access.py | 41 ++++++++- .../migrations/0033_v310_add_workflows.py | 51 ++++++++--- awx/main/models/activity_stream.py | 3 +- awx/main/models/workflow.py | 87 ++++++++++--------- awx/main/tests/manual/workflows/linear.py | 16 ++-- awx/main/tests/manual/workflows/parallel.py | 4 +- 9 files changed, 232 insertions(+), 111 deletions(-) mode change 100644 => 100755 awx/main/tests/manual/workflows/linear.py mode change 100644 => 100755 awx/main/tests/manual/workflows/parallel.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d65fbd0ea9..3f49cca391 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2228,31 +2228,51 @@ class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer): class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): pass -class WorkflowNodeSerializer(BaseSerializer): 
+class WorkflowNodeBaseSerializer(BaseSerializer):
     #workflow_job_template = UnifiedJobTemplateSerializer()

     class Meta:
-        model = WorkflowNode
         # TODO: workflow_job and job read-only
-        fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'job',)
+        fields = ('id', 'url', 'related', 'success_nodes', 'failure_nodes', 'always_nodes',)
+
+class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
+    class Meta:
+        model = WorkflowJobTemplateNode
+        fields = ('*', 'workflow_job_template', 'unified_job_template',)

     def get_related(self, obj):
-        res = super(WorkflowNodeSerializer, self).get_related(obj)
+        res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj)
+        res['success_nodes'] = reverse('api:workflow_job_template_node_success_nodes_list', args=(obj.pk,))
+        res['failure_nodes'] = reverse('api:workflow_job_template_node_failure_nodes_list', args=(obj.pk,))
+        res['always_nodes'] = reverse('api:workflow_job_template_node_always_nodes_list', args=(obj.pk,))
         if obj.workflow_job_template:
             res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,))
         if obj.unified_job_template:
             res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
+        return res
+
+class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):
+    class Meta:
+        model = WorkflowJobNode
+        fields = ('*', 'workflow_job_template', 'unified_job_template', 'job', 'workflow_job',)
+
+    def get_related(self, obj):
+        res = super(WorkflowJobNodeSerializer, self).get_related(obj)
+        res['success_nodes'] = reverse('api:workflow_job_node_success_nodes_list', args=(obj.pk,))
+        res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,))
+        res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,))
+        if obj.workflow_job_template:
+            res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,))
         if obj.job:
             res['job'] = reverse('api:job_detail', args=(obj.job.pk,))
         if obj.workflow_job:
             res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,))
-        res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,))
-        res['failure_nodes'] = reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,))
-        res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,))
-
         return res

-class WorkflowNodeDetailSerializer(WorkflowNodeSerializer):
+class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer):
+    pass
+
+class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer):

     '''
     Influence the api browser sample data to not include workflow_job_template
     when editing a WorkflowNode.

     Note: I was not able to accomplish this trough the use of extra_kwargs.
     Maybe something to do with workflow_job_template being a relational field?
''' def build_relational_field(self, field_name, relation_info): - field_class, field_kwargs = super(WorkflowNodeDetailSerializer, self).build_relational_field(field_name, relation_info) + field_class, field_kwargs = super(WorkflowJobTemplateNodeDetailSerializer, self).build_relational_field(field_name, relation_info) if self.instance and field_name == 'workflow_job_template': field_kwargs['read_only'] = True field_kwargs.pop('queryset', None) return field_class, field_kwargs - -class WorkflowNodeListSerializer(WorkflowNodeSerializer): +class WorkflowJobTemplateNodeListSerializer(WorkflowJobTemplateNodeSerializer): pass class JobListSerializer(JobSerializer, UnifiedJobListSerializer): diff --git a/awx/api/urls.py b/awx/api/urls.py index e7240e39e2..af81d227d7 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -262,8 +262,8 @@ workflow_job_template_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/launch/$', 'workflow_job_template_launch'), url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'), # url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_template_cancel'), - #url(r'^(?P[0-9]+)/nodes/$', 'workflow_job_template_node_list'), ) + workflow_job_urls = patterns('awx.api.views', url(r'^$', 'workflow_job_list'), url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), @@ -290,12 +290,20 @@ label_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'label_detail'), ) -workflow_node_urls = patterns('awx.api.views', - url(r'^$', 'workflow_node_list'), - url(r'^(?P[0-9]+)/$', 'workflow_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_node_always_nodes_list'), +workflow_job_template_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_template_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_template_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_job_template_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_job_template_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_job_template_node_always_nodes_list'), +) + +workflow_job_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_job_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_job_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_job_node_always_nodes_list'), ) schedule_urls = patterns('awx.api.views', @@ -350,7 +358,8 @@ v1_urls = patterns('awx.api.views', url(r'^workflow_job_templates/',include(workflow_job_template_urls)), url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), - url(r'^workflow_nodes/', include(workflow_node_urls)), + url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), + #url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 517ad0a3d7..d42aae1127 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2615,31 +2615,31 @@ class JobTemplateObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) # TODO: -class WorkflowNodeList(ListCreateAPIView): +class 
WorkflowJobTemplateNodeList(ListCreateAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer new_in_310 = True # TODO: -class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): +class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeDetailSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeDetailSerializer new_in_310 = True -class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): +class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer always_allow_superuser = True # TODO: RBAC - parent_model = WorkflowNode + parent_model = WorkflowJobTemplateNode relationship = '' enforce_parent_relationship = 'workflow_job_template' new_in_310 = True ''' - Limit the set of WorkflowNodes to the related nodes of specified by + Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by 'relationship' ''' def get_queryset(self): @@ -2647,18 +2647,46 @@ class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreate self.check_parent_access(parent) return getattr(parent, self.relationship).all() -class WorkflowNodeSuccessNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeSuccessNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'success_nodes' -class WorkflowNodeFailureNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeFailureNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'failure_nodes' -class WorkflowNodeAlwaysNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeAlwaysNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'always_nodes' +''' +class WorkflowJobNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJobTemplateNode + relationship = '' + enforce_parent_relationship = 'workflow_job_template' + new_in_310 = True + + # + #Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by + #'relationship' + # + def get_queryset(self): + parent = self.get_parent_object() + self.check_parent_access(parent) + return getattr(parent, self.relationship).all() + +class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 'success_nodes' + +class WorkflowJobNodeFailureNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 'failure_nodes' + +class WorkflowJobNodeAlwaysNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 'always_nodes' +''' + + # TODO: class WorkflowJobTemplateList(ListCreateAPIView): @@ -2705,11 +2733,11 @@ class WorkflowJobTemplateLaunch(GenericAPIView): # TODO: class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJobTemplate - relationship = 'workflow_nodes' + relationship = 
'workflow_job_template_nodes' parent_key = 'workflow_job_template' # TODO: @@ -2740,14 +2768,13 @@ class WorkflowJobDetail(RetrieveDestroyAPIView): class WorkflowJobWorkflowNodesList(SubListAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJob relationship = 'workflow_job_nodes' parent_key = 'workflow_job' - class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/access.py b/awx/main/access.py index 588041c6b9..c7eb368cad 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1133,11 +1133,43 @@ class SystemJobAccess(BaseAccess): model = SystemJob # TODO: -class WorkflowNodeAccess(BaseAccess): +class WorkflowJobTemplateNodeAccess(BaseAccess): ''' - I can see/use a WorkflowNode if I have permission to associated Workflow Job Template + I can see/use a WorkflowJobTemplateNode if I have permission to associated Workflow Job Template ''' - model = WorkflowNode + model = WorkflowJobTemplateNode + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + return self.model.objects.all() + + @check_superuser + def can_read(self, obj): + return True + + @check_superuser + def can_add(self, data): + if not data: # So the browseable API will work + return True + + return True + + @check_superuser + def can_change(self, obj, data): + if self.can_add(data) is False: + return False + + return True + + def can_delete(self, obj): + return self.can_change(obj, None) + +# TODO: +class WorkflowJobNodeAccess(BaseAccess): + ''' + I can see/use a WorkflowJobNode if I have permission to associated Workflow Job + ''' + model = WorkflowJobNode def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: @@ -1863,6 +1895,7 @@ register_access(Role, RoleAccess) register_access(NotificationTemplate, NotificationTemplateAccess) register_access(Notification, NotificationAccess) register_access(Label, LabelAccess) -register_access(WorkflowNode, WorkflowNodeAccess) +register_access(WorkflowJobTemplateNode, WorkflowJobTemplateNodeAccess) +register_access(WorkflowJobNode, WorkflowJobNodeAccess) register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess) register_access(WorkflowJob, WorkflowJobAccess) diff --git a/awx/main/migrations/0033_v310_add_workflows.py b/awx/main/migrations/0033_v310_add_workflows.py index 10f4879537..1ca0462edf 100644 --- a/awx/main/migrations/0033_v310_add_workflows.py +++ b/awx/main/migrations/0033_v310_add_workflows.py @@ -26,6 +26,21 @@ class Migration(migrations.Migration): }, bases=('main.unifiedjob', models.Model, awx.main.models.notifications.JobNotificationMixin, awx.main.models.workflow.WorkflowJobInheritNodesMixin), ), + migrations.CreateModel( + name='WorkflowJobNode', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('always_nodes', models.ManyToManyField(related_name='workflowjobnodes_always', to='main.WorkflowJobNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='workflowjobnodes_failure', to='main.WorkflowJobNode', blank=True)), + ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + 
('success_nodes', models.ManyToManyField(related_name='workflowjobnodes_success', to='main.WorkflowJobNode', blank=True)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='WorkflowJobTemplate', fields=[ @@ -36,19 +51,30 @@ class Migration(migrations.Migration): bases=('main.unifiedjobtemplate', models.Model), ), migrations.CreateModel( - name='WorkflowNode', + name='WorkflowJobTemplateNode', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), - ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), - ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), - ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), - ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), - ('workflow_job', models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True)), - ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), + ('always_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_always', to='main.WorkflowJobTemplateNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_failure', to='main.WorkflowJobTemplateNode', blank=True)), + ('success_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_success', to='main.WorkflowJobTemplateNode', blank=True)), + ('unified_job_template', models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_job_template_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), ], + options={ + 'abstract': False, + }, + ), + migrations.AddField( + model_name='workflowjobnode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True), ), migrations.AddField( model_name='workflowjob', @@ -60,6 +86,11 @@ class Migration(migrations.Migration): name='workflow_job', field=models.ManyToManyField(to='main.WorkflowJob', blank=True), ), + migrations.AddField( + model_name='activitystream', + name='workflow_job_node', + field=models.ManyToManyField(to='main.WorkflowJobNode', blank=True), + ), migrations.AddField( model_name='activitystream', name='workflow_job_template', @@ -67,7 +98,7 @@ class Migration(migrations.Migration): ), 
migrations.AddField( model_name='activitystream', - name='workflow_node', - field=models.ManyToManyField(to='main.WorkflowNode', blank=True), + name='workflow_job_template_node', + field=models.ManyToManyField(to='main.WorkflowJobTemplateNode', blank=True), ), ] diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index bcc5cef0c7..b0d58fc031 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -49,7 +49,8 @@ class ActivityStream(models.Model): permission = models.ManyToManyField("Permission", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True) job = models.ManyToManyField("Job", blank=True) - workflow_node = models.ManyToManyField("WorkflowNode", blank=True) + workflow_job_template_node = models.ManyToManyField("WorkflowJobTemplateNode", blank=True) + workflow_job_node = models.ManyToManyField("WorkflowJobNode", blank=True) workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True) workflow_job = models.ManyToManyField("WorkflowJob", blank=True) unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+') diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 48cdcee27b..af97b9b2c8 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -18,11 +18,11 @@ from awx.main.models.rbac import ( ) from awx.main.fields import ImplicitRoleField -__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowNode'] - -class WorkflowNode(CreatedModifiedModel): +__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] +class WorkflowNodeBase(CreatedModifiedModel): class Meta: + abstract = True app_label = 'main' # TODO: RBAC @@ -31,41 +31,55 @@ class WorkflowNode(CreatedModifiedModel): parent_role='workflow_job_template.admin_role', ) ''' - - # TODO: Ensure the API forces workflow_job_template being set - workflow_job_template = models.ForeignKey( - 'WorkflowJobTemplate', - related_name='workflow_nodes', + success_nodes = models.ManyToManyField( + 'self', blank=True, - null=True, - default=None, - on_delete=models.CASCADE, + symmetrical=False, + related_name='%(class)ss_success', + ) + failure_nodes = models.ManyToManyField( + 'self', + blank=True, + symmetrical=False, + related_name='%(class)ss_failure', + ) + always_nodes = models.ManyToManyField( + 'self', + blank=True, + symmetrical=False, + related_name='%(class)ss_always', ) unified_job_template = models.ForeignKey( 'UnifiedJobTemplate', - related_name='unified_jt_workflow_nodes', + related_name='%(class)ss', blank=True, null=True, default=None, on_delete=models.SET_NULL, ) - success_nodes = models.ManyToManyField( - 'self', - related_name='parent_success_nodes', + +class WorkflowJobTemplateNode(WorkflowNodeBase): + # TODO: Ensure the API forces workflow_job_template being set + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='workflow_job_template_nodes', blank=True, - symmetrical=False, + null=True, + default=None, + on_delete=models.CASCADE, ) - failure_nodes = models.ManyToManyField( - 'self', - related_name='parent_failure_nodes', + + def get_absolute_url(self): + return reverse('api:workflow_job_template_node_detail', args=(self.pk,)) + +class WorkflowJobNode(WorkflowNodeBase): + job = models.ForeignKey( + 'UnifiedJob', + related_name='unified_job_nodes', blank=True, - symmetrical=False, - ) - 
always_nodes = models.ManyToManyField( - 'self', - related_name='parent_always_nodes', - blank=True, - symmetrical=False, + null=True, + default=None, + on_delete=models.SET_NULL, ) workflow_job = models.ForeignKey( 'WorkflowJob', @@ -75,17 +89,9 @@ class WorkflowNode(CreatedModifiedModel): default=None, on_delete=models.SET_NULL, ) - job = models.ForeignKey( - 'UnifiedJob', - related_name='unified_job_nodes', - blank=True, - null=True, - default=None, - on_delete=models.SET_NULL, - ) def get_absolute_url(self): - return reverse('api:workflow_node_detail', args=(self.pk,)) + return reverse('api:workflow_job_node_detail', args=(self.pk,)) class WorkflowJobOptions(BaseModel): class Meta: @@ -147,22 +153,17 @@ class WorkflowJobInheritNodesMixin(object): for old_related_node in old_related_nodes: new_related_node_id = node_ids_map[old_related_node.id] - new_related_node = WorkflowNode.objects.get(id=new_related_node_id) + new_related_node = WorkflowJobNode.objects.get(id=new_related_node_id) new_node_type_mgr.add(new_related_node) def inherit_jt_workflow_nodes(self): new_nodes = [] - old_nodes = self.workflow_job_template.workflow_nodes.all() + old_nodes = self.workflow_job_template.workflow_job_template_nodes.all() node_ids_map = {} for old_node in old_nodes: - new_node = WorkflowNode.objects.get(id=old_node.pk) - new_node.workflow_job = self - new_node.job = None - new_node.workflow_job_template = None - new_node.pk = None - new_node.save() + new_node = WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) new_nodes.append(new_node) node_ids_map[old_node.id] = new_node.id diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py old mode 100644 new mode 100755 index 2b096fb8cd..ebb2c99b45 --- a/awx/main/tests/manual/workflows/linear.py +++ b/awx/main/tests/manual/workflows/linear.py @@ -1,6 +1,6 @@ # AWX from awx.main.models import ( - WorkflowNode, + WorkflowJobTemplateNode, WorkflowJobTemplate, ) from awx.main.models.jobs import JobTemplate @@ -10,16 +10,16 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never wfjt.delete() wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") print(wfjt.id) - WorkflowNode.objects.all().delete() + WorkflowJobTemplateNode.objects.all().delete() if created: nodes_success = [] nodes_fail = [] nodes_never = [] for i in range(0, 2): - nodes_success.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) - nodes_fail.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) - nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) - nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_success.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) + nodes_fail.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) + nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) nodes_fail[1].delete() nodes_success[0].success_nodes.add(nodes_fail[0]) @@ -32,8 +32,8 @@ def 
do_init_workflow(job_template_success, job_template_fail, job_template_never def do_init(): jt_success = JobTemplate.objects.get(id=5) - jt_fail= JobTemplate.objects.get(id=9) - jt_never= JobTemplate.objects.get(id=11) + jt_fail= JobTemplate.objects.get(id=6) + jt_never= JobTemplate.objects.get(id=7) do_init_workflow(jt_success, jt_fail, jt_never) if __name__ == "__main__": diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py old mode 100644 new mode 100755 index 071d4e1e94..47c35a9839 --- a/awx/main/tests/manual/workflows/parallel.py +++ b/awx/main/tests/manual/workflows/parallel.py @@ -32,8 +32,8 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never def do_init(): jt_success = JobTemplate.objects.get(id=5) - jt_fail= JobTemplate.objects.get(id=9) - jt_never= JobTemplate.objects.get(id=11) + jt_fail= JobTemplate.objects.get(id=6) + jt_never= JobTemplate.objects.get(id=7) jt_parallel = [] jt_parallel.append(JobTemplate.objects.get(id=16)) From 4dc5c334429c7ac73781882ae1a48d17689d3c92 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 12:19:36 -0400 Subject: [PATCH 10/12] refactor to be more testable --- awx/main/models/workflow.py | 32 +++++++++++----- awx/main/tests/factories/fixtures.py | 55 +++++++++++++++++++++++----- awx/main/tests/factories/tower.py | 29 +++++++++++++-- 3 files changed, 93 insertions(+), 23 deletions(-) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index af97b9b2c8..0e00e26e8c 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -143,7 +143,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): #def create_workflow_job(self, **kwargs): #workflow_job = self.create_unified_job(**kwargs) workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs) - workflow_job.inherit_jt_workflow_nodes() + workflow_job.inherit_job_template_workflow_nodes() return workflow_job class WorkflowJobInheritNodesMixin(object): @@ -152,21 +152,33 @@ class WorkflowJobInheritNodesMixin(object): new_node_type_mgr = getattr(new_node, node_type) for old_related_node in old_related_nodes: - new_related_node_id = node_ids_map[old_related_node.id] - new_related_node = WorkflowJobNode.objects.get(id=new_related_node_id) + new_related_node = self._get_workflowJob_node_by_id(node_ids_map[old_related_node.id]) new_node_type_mgr.add(new_related_node) - def inherit_jt_workflow_nodes(self): - new_nodes = [] - old_nodes = self.workflow_job_template.workflow_job_template_nodes.all() + ''' + Create a WorkflowJobNode for each WorkflowJobTemplateNode + ''' + def _create_workflow_job_nodes(self, old_nodes): + return [WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) for old_node in old_nodes] + def _map_workflow_job_nodes(self, old_nodes, new_nodes): node_ids_map = {} - for old_node in old_nodes: - new_node = WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) - new_nodes.append(new_node) + for i, old_node in enumerate(old_nodes): + node_ids_map[old_node.id] = new_nodes[i].id - node_ids_map[old_node.id] = new_node.id + return node_ids_map + + def _get_workflow_job_template_nodes(self): + return self.workflow_job_template.workflow_job_template_nodes.all() + + def _get_workflowJob_node_by_id(self, id): + return WorkflowJobNode.objects.get(id=id) + + def inherit_job_template_workflow_nodes(self): + old_nodes = 
self._get_workflow_job_template_nodes() + new_nodes = self._create_workflow_job_nodes(old_nodes) + node_ids_map = self._map_workflow_job_nodes(old_nodes, new_nodes) for index, old_node in enumerate(old_nodes): new_node = new_nodes[index] diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 809e71b1bb..c51c29e83c 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -14,7 +14,9 @@ from awx.main.models import ( Inventory, Label, WorkflowJobTemplate, - WorkflowNode, + WorkflowJob, + WorkflowJobNode, + WorkflowJobTemplateNode, ) # mk methods should create only a single object of a single type. @@ -155,7 +157,20 @@ def mk_job_template(name, job_type='run', jt.save() return jt +def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={}, + persisted=True): + job = WorkflowJob(status=status, extra_vars=json.dumps(extra_vars)) + + job.workflow_job_template = workflow_job_template + + if persisted: + job.save() + return job + def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): + if extra_vars: + extra_vars = json.dumps(extra_vars) + wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars) wfjt.survey_spec = spec @@ -166,15 +181,35 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): wfjt.save() return wfjt -def mk_workflow_node(workflow_job_template=None, unified_job_template=None, - success_nodes=None, failure_nodes=None, always_nodes=None, - job=None, persisted=True): - workflow_node = WorkflowNode(workflow_job_template=workflow_job_template, - unified_job_template=unified_job_template, - success_nodes=success_nodes, - failure_nodes=failure_nodes, - always_nodes=always_nodes, - job=job) +def mk_workflow_job_template_node(workflow_job_template=None, + unified_job_template=None, + success_nodes=None, + failure_nodes=None, + always_nodes=None, + persisted=True): + workflow_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template, + unified_job_template=unified_job_template, + success_nodes=success_nodes, + failure_nodes=failure_nodes, + always_nodes=always_nodes) if persisted: workflow_node.save() return workflow_node + +def mk_workflow_job_node(unified_job_template=None, + success_nodes=None, + failure_nodes=None, + always_nodes=None, + workflow_job=None, + job=None, + persisted=True): + workflow_node = WorkflowJobNode(unified_job_template=unified_job_template, + success_nodes=success_nodes, + failure_nodes=failure_nodes, + always_nodes=always_nodes, + workflow_job=workflow_job, + job=job) + if persisted: + workflow_node.save() + return workflow_node + diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index d7c45e73e2..6bbb2b0e36 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -29,6 +29,7 @@ from .fixtures import ( mk_label, mk_notification_template, mk_workflow_job_template, + #mk_workflow_job_template_node, ) @@ -344,8 +345,16 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs): users=_Mapped(users), superusers=_Mapped(superusers), teams=teams) +''' +def generate_workflow_job_template_nodes(workflow_job_template, + unified_job_template, + persisted=True, + **kwargs): +''' -def create_workflow_job_template(name, persisted=True, **kwargs): +# TODO: Implement survey +''' +def create_workflow_job(name, persisted=True, **kwargs): Objects = generate_objects(["workflow_job_template", "survey",], kwargs) @@ -353,13 +362,27 @@ def 
create_workflow_job_template(name, persisted=True, **kwargs): jobs = None extra_vars = kwargs.get('extra_vars', '') +''' + + +# TODO: Implement survey +def create_workflow_job_template(name, persisted=True, **kwargs): + Objects = generate_objects(["workflow_job_template", + "survey",], kwargs) + + spec = None + #jobs = None + + extra_vars = kwargs.get('extra_vars', '') if 'survey' in kwargs: spec = create_survey_spec(kwargs['survey']) wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars, persisted=persisted) + #workflow_nodes = generate_workflow_job_template_nodes(wfjt, persisted, workflow_nodes=kwargs.get('workflow_nodes')) + ''' if 'jobs' in kwargs: for i in kwargs['jobs']: if type(i) is Job: @@ -367,8 +390,8 @@ def create_workflow_job_template(name, persisted=True, **kwargs): else: # TODO: Create the job raise RuntimeError("Currently, only already created jobs are supported") - + ''' return Objects(workflow_job_template=wfjt, - jobs=jobs, + #jobs=jobs, survey=spec,) From 9c12b234b12dc19af8e405a636a0f8684afd7626 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 12:26:07 -0400 Subject: [PATCH 11/12] remove workflow_job_template from workflow job node serializer --- awx/api/serializers.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 3f49cca391..9cfcfe0d6d 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2229,16 +2229,21 @@ class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): pass class WorkflowNodeBaseSerializer(BaseSerializer): - #workflow_job_template = UnifiedJobTemplateSerializer() class Meta: # TODO: workflow_job and job read-only - fields = ('id', 'url', 'related', 'success_nodes', 'failure_nodes', 'always_nodes',) + fields = ('id', 'url', 'related', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + + def get_related(self, obj): + res = super(WorkflowNodeBaseSerializer, self).get_related(obj) + if obj.unified_job_template: + res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + return res class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): class Meta: model = WorkflowJobTemplateNode - fields = ('*', 'workflow_job_template', 'unified_job_template',) + fields = ('*', 'workflow_job_template',) def get_related(self, obj): res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj) @@ -2247,22 +2252,18 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): res['always_nodes'] = reverse('api:workflow_job_template_node_always_nodes_list', args=(obj.pk,)) if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) - if obj.unified_job_template: - res['unified_job_template'] = obj.unified_job_template.get_absolute_url() return res class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): class Meta: model = WorkflowJobTemplateNode - fields = ('*', 'workflow_job_template', 'unified_job_template', 'job', 'workflow_job',) + fields = ('*', 'job', 'workflow_job',) def get_related(self, obj): res = super(WorkflowJobNodeSerializer, self).get_related(obj) res['success_nodes'] = reverse('api:workflow_job_node_success_nodes_list', args=(obj.pk,)) res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,)) res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,)) - if obj.workflow_job_template: - 
res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.job: res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) if obj.workflow_job: From 32461574ae2adbd5bf1e40f9eef94bcb32af9625 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 13 Sep 2016 13:31:10 -0400 Subject: [PATCH 12/12] add unit tests --- awx/api/serializers.py | 51 ++-- awx/api/urls.py | 2 +- awx/api/views.py | 25 +- awx/main/models/workflow.py | 9 +- awx/main/tests/factories/tower.py | 49 ++-- awx/main/tests/manual/workflows/parallel.py | 10 +- .../tests/unit/api/serializers/__init__.py | 0 .../tests/unit/api/serializers/conftest.py | 46 ++++ .../serializers/test_inventory_serializers.py | 47 ++++ .../api/serializers/test_job_serializers.py | 91 +++++++ .../test_job_template_serializers.py | 108 ++++++++ .../serializers/test_workflow_serializers.py | 154 ++++++++++++ awx/main/tests/unit/api/test_serializers.py | 235 ------------------ .../unit/commands/test_run_task_system.py | 4 +- .../tests/unit/models/test_workflow_unit.py | 81 ++++++ 15 files changed, 617 insertions(+), 295 deletions(-) create mode 100644 awx/main/tests/unit/api/serializers/__init__.py create mode 100644 awx/main/tests/unit/api/serializers/conftest.py create mode 100644 awx/main/tests/unit/api/serializers/test_inventory_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_job_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_job_template_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_workflow_serializers.py delete mode 100644 awx/main/tests/unit/api/test_serializers.py create mode 100644 awx/main/tests/unit/models/test_workflow_unit.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 9cfcfe0d6d..1a6684ce47 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2174,7 +2174,29 @@ class SystemJobCancelSerializer(SystemJobSerializer): class Meta: fields = ('can_cancel',) +class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer): + class Meta: + model = WorkflowJobTemplate + fields = ('*',) + def get_related(self, obj): + res = super(WorkflowJobTemplateSerializer, self).get_related(obj) + res.update(dict( + jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)), + #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)), + launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)), + workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)), + # TODO: Implement notifications + #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)), + #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)), + #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)), + + )) + return res + +# TODO: +class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer): + pass # TODO: class WorkflowJobSerializer(UnifiedJobSerializer): @@ -2198,36 +2220,10 @@ class WorkflowJobSerializer(UnifiedJobSerializer): ''' return res - # TODO: class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer): pass -# TODO: -class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer): - - class Meta: - model = WorkflowJobTemplate - fields = ('*',) - - def get_related(self, obj): - res = 
super(WorkflowJobTemplateListSerializer, self).get_related(obj) - res.update(dict( - jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)), - #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)), - launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)), - workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)), - # TODO: Implement notifications - #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)), - #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)), - #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)), - - )) - return res - -class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): - pass - class WorkflowNodeBaseSerializer(BaseSerializer): class Meta: @@ -2273,6 +2269,9 @@ class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer): pass +class WorkflowJobNodeDetailSerializer(WorkflowJobNodeSerializer): + pass + class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer): ''' diff --git a/awx/api/urls.py b/awx/api/urls.py index af81d227d7..b508b6c35c 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -359,7 +359,7 @@ v1_urls = patterns('awx.api.views', url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), - #url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), + url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index d42aae1127..07ecf938aa 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2614,6 +2614,20 @@ class JobTemplateObjectRolesList(SubListAPIView): content_type = ContentType.objects.get_for_model(self.parent_model) return Role.objects.filter(content_type=content_type, object_id=po.pk) +# TODO: +class WorkflowJobNodeList(ListCreateAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer + new_in_310 = True + +# TODO: +class WorkflowJobNodeDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeDetailSerializer + new_in_310 = True + # TODO: class WorkflowJobTemplateNodeList(ListCreateAPIView): @@ -2628,6 +2642,7 @@ class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): serializer_class = WorkflowJobTemplateNodeDetailSerializer new_in_310 = True + class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): model = WorkflowJobTemplateNode @@ -2656,19 +2671,20 @@ class WorkflowJobTemplateNodeFailureNodesList(WorkflowJobTemplateNodeChildrenBas class WorkflowJobTemplateNodeAlwaysNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'always_nodes' -''' -class WorkflowJobNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): +class WorkflowJobNodeChildrenBaseList(SubListAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeListSerializer always_allow_superuser = True # TODO: RBAC - parent_model = WorkflowJobTemplateNode + parent_model = Job 
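+    # NOTE (review): parent_model = Job looks provisional for a WorkflowJobNode
+    # sub-list; the disabled enforce_parent_relationship block below suggests
+    # the final parent wiring is still TODO.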
     relationship = ''
+    '''
     enforce_parent_relationship = 'workflow_job_template'
     new_in_310 = True
+    '''
 
     #
-    #Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by
+    #Limit the set of WorkflowJobNodes to the related nodes specified by
     #'relationship'
     #
     def get_queryset(self):
@@ -2684,7 +2700,6 @@ class WorkflowJobNodeFailureNodesList(WorkflowJobNodeChildrenBaseList):
 
 class WorkflowJobNodeAlwaysNodesList(WorkflowJobNodeChildrenBaseList):
     relationship = 'always_nodes'
-'''
 
 
 # TODO:
diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py
index 0e00e26e8c..0182b40b59 100644
--- a/awx/main/models/workflow.py
+++ b/awx/main/models/workflow.py
@@ -148,11 +148,11 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions):
 
 class WorkflowJobInheritNodesMixin(object):
     def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type):
-        old_related_nodes = getattr(old_node, node_type).all()
+        old_related_nodes = self._get_all_by_type(old_node, node_type)
         new_node_type_mgr = getattr(new_node, node_type)
 
         for old_related_node in old_related_nodes:
-            new_related_node = self._get_workflowJob_node_by_id(node_ids_map[old_related_node.id])
+            new_related_node = self._get_workflow_job_node_by_id(node_ids_map[old_related_node.id])
             new_node_type_mgr.add(new_related_node)
 
     '''
@@ -172,9 +172,12 @@ class WorkflowJobInheritNodesMixin(object):
     def _get_workflow_job_template_nodes(self):
         return self.workflow_job_template.workflow_job_template_nodes.all()
 
-    def _get_workflowJob_node_by_id(self, id):
+    def _get_workflow_job_node_by_id(self, id):
         return WorkflowJobNode.objects.get(id=id)
 
+    def _get_all_by_type(self, node, node_type):
+        return getattr(node, node_type).all()
+
     def inherit_job_template_workflow_nodes(self):
         old_nodes = self._get_workflow_job_template_nodes()
         new_nodes = self._create_workflow_job_nodes(old_nodes)
diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py
index 6bbb2b0e36..5c99c14828 100644
--- a/awx/main/tests/factories/tower.py
+++ b/awx/main/tests/factories/tower.py
@@ -345,29 +345,33 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
                    users=_Mapped(users),
                    superusers=_Mapped(superusers),
                    teams=teams)
-'''
-def generate_workflow_job_template_nodes(workflow_job_template,
-                                         unified_job_template,
-                                         persisted=True,
+def generate_workflow_job_template_nodes(workflow_job_template,
+                                         persisted,
                                          **kwargs):
-'''
+    workflow_job_template_nodes = kwargs.get('workflow_job_template_nodes', [])
+    if len(workflow_job_template_nodes) > 0 and not persisted:
+        raise RuntimeError('workflow job template nodes cannot be used when persisted=False')
 
-# TODO: Implement survey
-'''
-def create_workflow_job(name, persisted=True, **kwargs):
-    Objects = generate_objects(["workflow_job_template",
-                                "survey",], kwargs)
+    new_nodes = []
 
-    spec = None
-    jobs = None
+    for i, node in enumerate(workflow_job_template_nodes):
+        new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template,
+                                           unified_job_template=node['unified_job_template'],
+                                           id=i)
+        new_nodes.append(new_node)
 
-    extra_vars = kwargs.get('extra_vars', '')
-'''
+    node_types = ['success_nodes', 'failure_nodes', 'always_nodes']
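+    # Wire up relations only after every node object exists: each spec dict
+    # refers to its related nodes by index into this same list.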
+    for node_type in node_types:
+        for i, new_node in enumerate(new_nodes):
+            for related_index in workflow_job_template_nodes[i][node_type]:
+                getattr(new_node, node_type).add(new_nodes[related_index])
+
+    return new_nodes
 
-# TODO: Implement survey
+# TODO: Implement survey and jobs
 def create_workflow_job_template(name, persisted=True, **kwargs):
     Objects = generate_objects(["workflow_job_template",
+                                "workflow_job_template_nodes",
                                 "survey",], kwargs)
 
     spec = None
@@ -378,9 +382,16 @@ def create_workflow_job_template(name, persisted=True, **kwargs):
     if 'survey' in kwargs:
         spec = create_survey_spec(kwargs['survey'])
 
-    wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars,
+    wfjt = mk_workflow_job_template(name,
+                                    spec=spec,
+                                    extra_vars=extra_vars,
                                     persisted=persisted)
-    #workflow_nodes = generate_workflow_job_template_nodes(wfjt, persisted, workflow_nodes=kwargs.get('workflow_nodes'))
+
+    workflow_jt_nodes = generate_workflow_job_template_nodes(wfjt,
+                                                             persisted,
+                                                             workflow_job_template_nodes=kwargs.get('workflow_job_template_nodes', []))
 
     '''
     if 'jobs' in kwargs:
@@ -393,5 +404,7 @@ def create_workflow_job_template(name, persisted=True, **kwargs):
     '''
     return Objects(workflow_job_template=wfjt,
                    #jobs=jobs,
+                   workflow_job_template_nodes=workflow_jt_nodes,
                    survey=spec,)
+
diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py
index 47c35a9839..303554d793 100755
--- a/awx/main/tests/manual/workflows/parallel.py
+++ b/awx/main/tests/manual/workflows/parallel.py
@@ -1,6 +1,6 @@
 # AWX
 from awx.main.models import (
-    WorkflowNode,
+    WorkflowJobTemplateNode,
     WorkflowJobTemplate,
 )
 from awx.main.models.jobs import JobTemplate
@@ -10,17 +10,17 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never
         wfjt.delete()
     wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow")
     print(wfjt.id)
-    WorkflowNode.objects.all().delete()
+    WorkflowJobTemplateNode.objects.all().delete()
     if created:
-        node_success = WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)
+        node_success = WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)
 
         nodes_never = []
         for x in range(0, 3):
-            nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
+            nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never))
 
         nodes_parallel = []
         for jt in jts_parallel:
-            nodes_parallel.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt))
+            nodes_parallel.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt))
 
         node_success.success_nodes.add(nodes_parallel[0])
         node_success.success_nodes.add(nodes_parallel[1])
diff --git a/awx/main/tests/unit/api/serializers/__init__.py b/awx/main/tests/unit/api/serializers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/main/tests/unit/api/serializers/conftest.py b/awx/main/tests/unit/api/serializers/conftest.py
new file mode 100644
index 0000000000..3b056a149f
--- /dev/null
+++ b/awx/main/tests/unit/api/serializers/conftest.py
@@ -0,0 +1,46 @@
+
+import pytest
+
+@pytest.fixture
+def get_related_assert():
+    def fn(model_obj, related, resource_name, related_resource_name):
+        assert related_resource_name in related
+        assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name)
+
return fn + +@pytest.fixture +def get_related_mock_and_run(): + def fn(serializer_class, model_obj): + serializer = serializer_class() + related = serializer.get_related(model_obj) + return related + return fn + +@pytest.fixture +def test_get_related(get_related_assert, get_related_mock_and_run): + def fn(serializer_class, model_obj, resource_name, related_resource_name): + related = get_related_mock_and_run(serializer_class, model_obj) + get_related_assert(model_obj, related, resource_name, related_resource_name) + return related + return fn + +@pytest.fixture +def get_summary_fields_assert(): + def fn(summary, summary_field_name): + assert summary_field_name in summary + return fn + +@pytest.fixture +def get_summary_fields_mock_and_run(): + def fn(serializer_class, model_obj): + serializer = serializer_class() + return serializer.get_summary_fields(model_obj) + return fn + +@pytest.fixture +def test_get_summary_fields(get_summary_fields_mock_and_run, get_summary_fields_assert): + def fn(serializer_class, model_obj, summary_field_name): + summary = get_summary_fields_mock_and_run(serializer_class, model_obj) + get_summary_fields_assert(summary, summary_field_name) + return summary + return fn diff --git a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py new file mode 100644 index 0000000000..0208105179 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py @@ -0,0 +1,47 @@ +# Python +import pytest +import mock +from mock import PropertyMock + +# AWX +from awx.api.serializers import ( + CustomInventoryScriptSerializer, +) +from awx.main.models import ( + CustomInventoryScript, + User, +) + +#DRF +from rest_framework.request import Request +from rest_framework.test import ( + APIRequestFactory, + force_authenticate, +) + +class TestCustomInventoryScriptSerializer(object): + + @pytest.mark.parametrize("superuser,sysaudit,admin_role,value", + ((True, False, False, '#!/python'), + (False, True, False, '#!/python'), + (False, False, True, '#!/python'), + (False, False, False, None))) + def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value): + with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}): + User.add_to_class('is_system_auditor', sysaudit) + user = User(username="root", is_superuser=superuser) + roles = [user] if admin_role else [] + + with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles): + cis = CustomInventoryScript(pk=1, script='#!/python') + serializer = CustomInventoryScriptSerializer() + + factory = APIRequestFactory() + wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json") + force_authenticate(wsgi_request, user) + + request = Request(wsgi_request) + serializer.context['request'] = request + + representation = serializer.to_representation(cis) + assert representation['script'] == value diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py new file mode 100644 index 0000000000..a9eaecd2e9 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -0,0 +1,91 @@ +# Python +import pytest +import mock +import json + +# AWX +from awx.api.serializers import ( + JobSerializer, + JobOptionsSerializer, +) +from awx.main.models import ( + Label, + Job, +) + +def mock_JT_resource_data(): + return ({}, []) + +@pytest.fixture +def 
job_template(mocker): + mock_jt = mocker.MagicMock(pk=5) + mock_jt.resource_validation_data = mock_JT_resource_data + return mock_jt + +@pytest.fixture +def job(mocker, job_template): + return mocker.MagicMock(pk=5, job_template=job_template) + +@pytest.fixture +def labels(mocker): + return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] + +@pytest.fixture +def jobs(mocker): + return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobSerializerGetRelated(): + @pytest.mark.parametrize("related_resource_name", [ + 'job_events', + 'job_plays', + 'job_tasks', + 'relaunch', + 'labels', + ]) + def test_get_related(self, test_get_related, job, related_resource_name): + test_get_related(JobSerializer, job, 'jobs', related_resource_name) + + def test_job_template_absent(self, job): + job.job_template = None + serializer = JobSerializer() + related = serializer.get_related(job) + assert 'job_template' not in related + + def test_job_template_present(self, get_related_mock_and_run, job): + related = get_related_mock_and_run(JobSerializer, job) + assert 'job_template' in related + assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk) + +@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: { + 'extra_vars': obj.extra_vars}) +class TestJobSerializerSubstitution(): + + def test_survey_password_hide(self, mocker): + job = mocker.MagicMock(**{ + 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}', + 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'}) + serializer = JobSerializer(job) + rep = serializer.to_representation(job) + extra_vars = json.loads(rep['extra_vars']) + assert extra_vars['secret_key'] == '$encrypted$' + job.display_extra_vars.assert_called_once_with() + assert 'my_password' not in extra_vars + +@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) +class TestJobOptionsSerializerGetSummaryFields(): + def test__summary_field_labels_10_max(self, mocker, job_template, labels): + job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels}) + job_template.labels.all.return_value = job_template.labels.all + + serializer = JobOptionsSerializer() + summary_labels = serializer._summary_field_labels(job_template) + + job_template.labels.all.order_by.assert_called_with('name') + assert len(summary_labels['results']) == 10 + assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]] + + def test_labels_exists(self, test_get_summary_fields, job_template): + test_get_summary_fields(JobOptionsSerializer, job_template, 'labels') + diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py new file mode 100644 index 0000000000..dc0c672a70 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -0,0 +1,108 @@ +# Python +import pytest +import mock + +# AWX +from awx.api.serializers import ( + JobTemplateSerializer, +) +from awx.main.models import ( + Job, +) + +#DRF +from rest_framework import serializers + +def mock_JT_resource_data(): + return ({}, []) + +@pytest.fixture +def job_template(mocker): + mock_jt = mocker.MagicMock(pk=5) + mock_jt.resource_validation_data = mock_JT_resource_data + return 
mock_jt + +@pytest.fixture +def job(mocker, job_template): + return mocker.MagicMock(pk=5, job_template=job_template) + +@pytest.fixture +def jobs(mocker): + return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobTemplateSerializerGetRelated(): + @pytest.mark.parametrize("related_resource_name", [ + 'jobs', + 'schedules', + 'activity_stream', + 'launch', + 'notification_templates_any', + 'notification_templates_success', + 'notification_templates_error', + 'survey_spec', + 'labels', + 'callback', + ]) + def test_get_related(self, test_get_related, job_template, related_resource_name): + test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name) + + def test_callback_absent(self, get_related_mock_and_run, job_template): + job_template.host_config_key = None + related = get_related_mock_and_run(JobTemplateSerializer, job_template) + assert 'callback' not in related + +class TestJobTemplateSerializerGetSummaryFields(): + def test__recent_jobs(self, mocker, job_template, jobs): + + job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) + job_template.jobs.all.return_value = job_template.jobs.all + + serializer = JobTemplateSerializer() + recent_jobs = serializer._recent_jobs(job_template) + + job_template.jobs.all.assert_called_once_with() + job_template.jobs.all.order_by.assert_called_once_with('-created') + assert len(recent_jobs) == 10 + for x in jobs[:10]: + assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] + + def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template): + job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'} + test_get_summary_fields(JobTemplateSerializer, job_template, 'survey') + + def test_survey_spec_absent(self, get_summary_fields_mock_and_run, job_template): + job_template.survey_spec = None + summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template) + assert 'survey' not in summary + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_false(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_false(self, mocker, job_template): + pass + +class TestJobTemplateSerializerValidation(object): + + good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"] + bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"] + + def test_validate_extra_vars(self): + serializer = JobTemplateSerializer() + for ev in self.good_extra_vars: + serializer.validate_extra_vars(ev) + for ev in self.bad_extra_vars: + with pytest.raises(serializers.ValidationError): + serializer.validate_extra_vars(ev) + diff --git a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py new file mode 100644 index 0000000000..371b02c7b8 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py @@ -0,0 +1,154 @@ +# Python +import pytest +import mock + +# AWX +from awx.api.serializers import ( + WorkflowJobTemplateSerializer, + WorkflowNodeBaseSerializer, + 
WorkflowJobTemplateNodeSerializer, + WorkflowJobNodeSerializer, +) +from awx.main.models import ( + Job, + WorkflowJobTemplateNode, + WorkflowJob, + WorkflowJobNode, +) + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobTemplateSerializerGetRelated(): + @pytest.fixture + def workflow_job_template(self, workflow_job_template_factory): + wfjt = workflow_job_template_factory('hello world', persisted=False).workflow_job_template + wfjt.pk = 3 + return wfjt + + @pytest.mark.parametrize("related_resource_name", [ + 'jobs', + 'launch', + 'workflow_nodes', + ]) + def test_get_related(self, mocker, test_get_related, workflow_job_template, related_resource_name): + test_get_related(WorkflowJobTemplateSerializer, + workflow_job_template, + 'workflow_job_templates', + related_resource_name) + +@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowNodeBaseSerializerGetRelated(): + @pytest.fixture + def job_template(self, job_template_factory): + jt = job_template_factory(name="blah", persisted=False).job_template + jt.pk = 1 + return jt + + @pytest.fixture + def workflow_job_template_node_related(self, job_template): + return WorkflowJobTemplateNode(pk=1, unified_job_template=job_template) + + @pytest.fixture + def workflow_job_template_node(self): + return WorkflowJobTemplateNode(pk=1) + + def test_workflow_unified_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related): + related = get_related_mock_and_run(WorkflowNodeBaseSerializer, workflow_job_template_node_related) + assert 'unified_job_template' in related + assert related['unified_job_template'] == '/api/v1/%s/%d/' % ('job_templates', workflow_job_template_node_related.unified_job_template.pk) + + def test_workflow_unified_job_template_absent(self, workflow_job_template_node): + related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node) + assert 'unified_job_template' not in related + +@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobTemplateNodeSerializerGetRelated(): + @pytest.fixture + def workflow_job_template_node(self): + return WorkflowJobTemplateNode(pk=1) + + @pytest.fixture + def workflow_job_template(self, workflow_job_template_factory): + wfjt = workflow_job_template_factory("bliggity", persisted=False).workflow_job_template + wfjt.pk = 1 + return wfjt + + @pytest.fixture + def job_template(self, job_template_factory): + jt = job_template_factory(name="blah", persisted=False).job_template + jt.pk = 1 + return jt + + @pytest.fixture + def workflow_job_template_node_related(self, workflow_job_template_node, workflow_job_template): + workflow_job_template_node.workflow_job_template = workflow_job_template + return workflow_job_template_node + + @pytest.mark.parametrize("related_resource_name", [ + 'success_nodes', + 'failure_nodes', + 'always_nodes', + ]) + def test_get_related(self, test_get_related, workflow_job_template_node, related_resource_name): + test_get_related(WorkflowJobTemplateNodeSerializer, + workflow_job_template_node, + 'workflow_job_template_nodes', + related_resource_name) + + def test_workflow_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related): + related = get_related_mock_and_run(WorkflowJobTemplateNodeSerializer, workflow_job_template_node_related) + assert 'workflow_job_template' in related + assert related['workflow_job_template'] == 
'/api/v1/%s/%d/' % ('workflow_job_templates', workflow_job_template_node_related.workflow_job_template.pk) + + def test_workflow_job_template_absent(self, workflow_job_template_node): + related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node) + assert 'workflow_job_template' not in related + + +@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobNodeSerializerGetRelated(): + @pytest.fixture + def workflow_job_node(self): + return WorkflowJobNode(pk=1) + + @pytest.fixture + def workflow_job(self): + return WorkflowJob(pk=1) + + @pytest.fixture + def job(self): + return Job(name="blah", pk=1) + + @pytest.fixture + def workflow_job_node_related(self, workflow_job_node, workflow_job, job): + workflow_job_node.workflow_job = workflow_job + workflow_job_node.job = job + return workflow_job_node + + @pytest.mark.parametrize("related_resource_name", [ + 'success_nodes', + 'failure_nodes', + 'always_nodes', + ]) + def test_get_related(self, test_get_related, workflow_job_node, related_resource_name): + test_get_related(WorkflowJobNodeSerializer, + workflow_job_node, + 'workflow_job_nodes', + related_resource_name) + + def test_workflow_job_present(self, get_related_mock_and_run, workflow_job_node_related): + related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related) + assert 'workflow_job' in related + assert related['workflow_job'] == '/api/v1/%s/%d/' % ('workflow_jobs', workflow_job_node_related.workflow_job.pk) + + def test_workflow_job_absent(self, workflow_job_node): + related = WorkflowJobNodeSerializer().get_related(workflow_job_node) + assert 'workflow_job' not in related + + def test_job_present(self, get_related_mock_and_run, workflow_job_node_related): + related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related) + assert 'job' in related + assert related['job'] == '/api/v1/%s/%d/' % ('jobs', workflow_job_node_related.job.pk) + + def test_job_absent(self, workflow_job_node): + related = WorkflowJobNodeSerializer().get_related(workflow_job_node) + assert 'job' not in related diff --git a/awx/main/tests/unit/api/test_serializers.py b/awx/main/tests/unit/api/test_serializers.py deleted file mode 100644 index 2496ba9a2d..0000000000 --- a/awx/main/tests/unit/api/test_serializers.py +++ /dev/null @@ -1,235 +0,0 @@ -# Python -import pytest -import mock -from mock import PropertyMock -import json - -# AWX -from awx.api.serializers import ( - JobTemplateSerializer, - JobSerializer, - JobOptionsSerializer, - CustomInventoryScriptSerializer, -) -from awx.main.models import ( - Label, - Job, - CustomInventoryScript, - User, -) - -#DRF -from rest_framework.request import Request -from rest_framework import serializers -from rest_framework.test import ( - APIRequestFactory, - force_authenticate, -) - - -def mock_JT_resource_data(): - return ({}, []) - -@pytest.fixture -def job_template(mocker): - mock_jt = mocker.MagicMock(pk=5) - mock_jt.resource_validation_data = mock_JT_resource_data - return mock_jt - -@pytest.fixture -def job(mocker, job_template): - return mocker.MagicMock(pk=5, job_template=job_template) - -@pytest.fixture -def labels(mocker): - return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] - -@pytest.fixture -def jobs(mocker): - return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] - -class GetRelatedMixin: - def _assert(self, model_obj, related, resource_name, related_resource_name): - assert related_resource_name in 
related - assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name) - - def _mock_and_run(self, serializer_class, model_obj): - serializer = serializer_class() - related = serializer.get_related(model_obj) - return related - - def _test_get_related(self, serializer_class, model_obj, resource_name, related_resource_name): - related = self._mock_and_run(serializer_class, model_obj) - self._assert(model_obj, related, resource_name, related_resource_name) - return related - -class GetSummaryFieldsMixin: - def _assert(self, summary, summary_field_name): - assert summary_field_name in summary - - def _mock_and_run(self, serializer_class, model_obj): - serializer = serializer_class() - return serializer.get_summary_fields(model_obj) - - def _test_get_summary_fields(self, serializer_class, model_obj, summary_field_name): - summary = self._mock_and_run(serializer_class, model_obj) - self._assert(summary, summary_field_name) - return summary - -@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) -@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) -class TestJobTemplateSerializerGetRelated(GetRelatedMixin): - @pytest.mark.parametrize("related_resource_name", [ - 'jobs', - 'schedules', - 'activity_stream', - 'launch', - 'notification_templates_any', - 'notification_templates_success', - 'notification_templates_error', - 'survey_spec', - 'labels', - 'callback', - ]) - def test_get_related(self, job_template, related_resource_name): - self._test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name) - - def test_callback_absent(self, job_template): - job_template.host_config_key = None - related = self._mock_and_run(JobTemplateSerializer, job_template) - assert 'callback' not in related - -class TestJobTemplateSerializerGetSummaryFields(GetSummaryFieldsMixin): - def test__recent_jobs(self, mocker, job_template, jobs): - - job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) - job_template.jobs.all.return_value = job_template.jobs.all - - serializer = JobTemplateSerializer() - recent_jobs = serializer._recent_jobs(job_template) - - job_template.jobs.all.assert_called_once_with() - job_template.jobs.all.order_by.assert_called_once_with('-created') - assert len(recent_jobs) == 10 - for x in jobs[:10]: - assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] - - def test_survey_spec_exists(self, mocker, job_template): - job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'} - self._test_get_summary_fields(JobTemplateSerializer, job_template, 'survey') - - def test_survey_spec_absent(self, mocker, job_template): - job_template.survey_spec = None - summary = self._mock_and_run(JobTemplateSerializer, job_template) - assert 'survey' not in summary - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_copy_true(self, mocker, job_template): - pass - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_copy_false(self, mocker, job_template): - pass - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_edit_true(self, mocker, job_template): - pass - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_edit_false(self, mocker, job_template): - pass - -@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) -@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) 
-class TestJobSerializerGetRelated(GetRelatedMixin): - @pytest.mark.parametrize("related_resource_name", [ - 'job_events', - 'job_plays', - 'job_tasks', - 'relaunch', - 'labels', - ]) - def test_get_related(self, mocker, job, related_resource_name): - self._test_get_related(JobSerializer, job, 'jobs', related_resource_name) - - def test_job_template_absent(self, mocker, job): - job.job_template = None - serializer = JobSerializer() - related = serializer.get_related(job) - assert 'job_template' not in related - - def test_job_template_present(self, job): - related = self._mock_and_run(JobSerializer, job) - assert 'job_template' in related - assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk) - -@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: { - 'extra_vars': obj.extra_vars}) -class TestJobSerializerSubstitution(): - - def test_survey_password_hide(self, mocker): - job = mocker.MagicMock(**{ - 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}', - 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'}) - serializer = JobSerializer(job) - rep = serializer.to_representation(job) - extra_vars = json.loads(rep['extra_vars']) - assert extra_vars['secret_key'] == '$encrypted$' - job.display_extra_vars.assert_called_once_with() - assert 'my_password' not in extra_vars - -@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) -class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin): - def test__summary_field_labels_10_max(self, mocker, job_template, labels): - job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels}) - job_template.labels.all.return_value = job_template.labels.all - - serializer = JobOptionsSerializer() - summary_labels = serializer._summary_field_labels(job_template) - - job_template.labels.all.order_by.assert_called_with('name') - assert len(summary_labels['results']) == 10 - assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]] - - def test_labels_exists(self, mocker, job_template): - self._test_get_summary_fields(JobOptionsSerializer, job_template, 'labels') - -class TestJobTemplateSerializerValidation(object): - - good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"] - bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"] - - def test_validate_extra_vars(self): - serializer = JobTemplateSerializer() - for ev in self.good_extra_vars: - serializer.validate_extra_vars(ev) - for ev in self.bad_extra_vars: - with pytest.raises(serializers.ValidationError): - serializer.validate_extra_vars(ev) - -class TestCustomInventoryScriptSerializer(object): - - @pytest.mark.parametrize("superuser,sysaudit,admin_role,value", - ((True, False, False, '#!/python'), - (False, True, False, '#!/python'), - (False, False, True, '#!/python'), - (False, False, False, None))) - def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value): - with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}): - User.add_to_class('is_system_auditor', sysaudit) - user = User(username="root", is_superuser=superuser) - roles = [user] if admin_role else [] - - with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles): - cis = CustomInventoryScript(pk=1, script='#!/python') - serializer = CustomInventoryScriptSerializer() - - factory = APIRequestFactory() - wsgi_request = 
factory.post("/inventory_script/1", {'id':1}, format="json") - force_authenticate(wsgi_request, user) - - request = Request(wsgi_request) - serializer.context['request'] = request - - representation = serializer.to_representation(cis) - assert representation['script'] == value diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py index 0c9468c737..bc62394b21 100644 --- a/awx/main/tests/unit/commands/test_run_task_system.py +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -3,7 +3,7 @@ from awx.main.management.commands.run_task_system import ( WorkflowDAG, ) from awx.main.models import Job -from awx.main.models.workflow import WorkflowNode +from awx.main.models.workflow import WorkflowJobNode import pytest @pytest.fixture @@ -62,7 +62,7 @@ class TestSimpleDAG(object): @pytest.fixture def factory_node(): def fn(id, status): - wfn = WorkflowNode(id=id) + wfn = WorkflowJobNode(id=id) if status: j = Job(status=status) wfn.job = j diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py new file mode 100644 index 0000000000..58ea591299 --- /dev/null +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -0,0 +1,81 @@ +import pytest + +from awx.main.models.jobs import JobTemplate +from awx.main.models.workflow import WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJobNode + +class TestWorkflowJobInheritNodesMixin(): + class TestCreateWorkflowJobNodes(): + @pytest.fixture + def job_templates(self): + return [JobTemplate() for i in range(0, 10)] + + @pytest.fixture + def job_template_nodes(self, job_templates): + return [WorkflowJobTemplateNode(unified_job_template=job_templates[i]) for i in range(0, 10)] + + def test__create_workflow_job_nodes(self, mocker, job_template_nodes): + workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobNode.objects.create') + + mixin = WorkflowJobInheritNodesMixin() + mixin._create_workflow_job_nodes(job_template_nodes) + + for job_template_node in job_template_nodes: + workflow_job_node_create.assert_any_call(workflow_job=mixin, + unified_job_template=job_template_node.unified_job_template) + + class TestMapWorkflowJobNodes(): + @pytest.fixture + def job_template_nodes(self): + return [WorkflowJobTemplateNode(id=i) for i in range(0, 20)] + + @pytest.fixture + def job_nodes(self): + return [WorkflowJobNode(id=i) for i in range(100, 120)] + + def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes): + mixin = WorkflowJobInheritNodesMixin() + + node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes) + assert len(node_ids_map) == len(job_template_nodes) + + for i, job_template_node in enumerate(job_template_nodes): + assert node_ids_map[job_template_node.id] == job_nodes[i].id + + class TestInheritRelationship(): + @pytest.fixture + def job_template_nodes(self, mocker): + nodes = [mocker.MagicMock(id=i) for i in range(0, 10)] + + for i in range(0, 9): + nodes[i].success_nodes = [mocker.MagicMock(id=i + 1)] + + return nodes + + @pytest.fixture + def job_nodes(self, mocker): + nodes = [mocker.MagicMock(id=i) for i in range(100, 110)] + return nodes + + @pytest.fixture + def job_nodes_dict(self, job_nodes): + _map = {} + for n in job_nodes: + _map[n.id] = n + return _map + + + def test__inherit_relationship(self, mocker, job_template_nodes, job_nodes, job_nodes_dict): + mixin = WorkflowJobInheritNodesMixin() + + mixin._get_workflow_job_node_by_id = lambda x: job_nodes_dict[x] 
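+            # Stub out the two lookup helpers so the mixin can be exercised
+            # without touching the database.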
+            mixin._get_all_by_type = lambda x, node_type: x.success_nodes
+
+            node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes)
+
+            for i, job_template_node in enumerate(job_template_nodes):
+                mixin._inherit_relationship(job_template_node, job_nodes[i], node_ids_map, 'success_nodes')
+
+            for i in range(0, 9):
+                job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1])
+
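
Reviewer note: the factory plumbing added in this series is easiest to follow with a
small usage sketch. This is a hedged illustration rather than code from the patches:
the workflow_job_template_factory fixture name comes from test_workflow_serializers.py,
the kwarg shape from generate_workflow_job_template_nodes() in tower.py, and the three
job templates (jt_deploy, jt_verify, jt_cleanup) are hypothetical stand-ins assumed to
exist already. Node links are indices into the same list, each node dict must supply
all three relation keys, and persisted=True is required whenever nodes are passed:

    objects = workflow_job_template_factory(
        'deploy-verify-cleanup',
        persisted=True,
        workflow_job_template_nodes=[
            # index 0: run deploy; verify on success, clean up on failure
            dict(unified_job_template=jt_deploy,
                 success_nodes=[1], failure_nodes=[2], always_nodes=[]),
            # index 1: verify; clean up if verification fails
            dict(unified_job_template=jt_verify,
                 success_nodes=[], failure_nodes=[2], always_nodes=[]),
            # index 2: cleanup is a leaf node
            dict(unified_job_template=jt_cleanup,
                 success_nodes=[], failure_nodes=[], always_nodes=[]),
        ])
    wfjt = objects.workflow_job_template

At launch time, WorkflowJobTemplate.create_unified_job() calls
inherit_job_template_workflow_nodes() on the new WorkflowJob, which copies this graph
one WorkflowJobNode per WorkflowJobTemplateNode and re-links the success/failure/always
edges through the id map built by _map_workflow_job_nodes().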