From 43e4e45ea526ab59d282cf645ce4f5c8b9518f22 Mon Sep 17 00:00:00 2001
From: nitzmahone
Date: Mon, 8 Aug 2016 10:45:01 -0700
Subject: [PATCH 01/47] update requirements versions

- current Ansible stuff (2.1.1) requires azure SDK 2.0.0rc5
- requests 2.10.0 is the published minimum version for pywinrm, as it
  contains a bugfix for catastrophic SSL tunnel failure on large payloads
  that pywinrm hits frequently; 2.11.0 is the best-tested version.
---
 requirements/requirements_ansible.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements/requirements_ansible.txt b/requirements/requirements_ansible.txt
index b35cb6fcbb..e62e3e5ead 100644
--- a/requirements/requirements_ansible.txt
+++ b/requirements/requirements_ansible.txt
@@ -1,7 +1,7 @@
 anyjson==0.3.3
 apache-libcloud==0.20.1
 appdirs==1.4.0
-azure==2.0.0rc2
+azure==2.0.0rc5
 Babel==2.2.0
 boto==2.40.0
 cliff==1.15.0
@@ -69,7 +69,7 @@ rackspace-auth-openstack==1.3
 rackspace-novaclient==1.5
 rax-default-network-flags-python-novaclient-ext==0.3.2
 rax-scheduled-images-python-novaclient-ext==0.3.1
-requests==2.5.1
+requests==2.11.0
 requestsexceptions==1.1.1
 shade==1.4.0
 simplejson==3.8.1

From 3f0311a969fc0da45778bb074cb9e683547df5a6 Mon Sep 17 00:00:00 2001
From: Matthew Jones
Date: Fri, 26 Aug 2016 14:58:20 -0400
Subject: [PATCH 02/47] Integrate packaging for qpid/memcached

Also:
* Remove redis packaging
* Fix a typo in contributing
---
 requirements/requirements.txt | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 34176e14ed..ea7376531e 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -21,8 +21,6 @@ django-extensions==1.5.9
 git+https://github.com/chrismeyersfsu/django-jsonbfield@fix-sqlite_serialization#egg=jsonbfield
 django-polymorphic==0.7.2
 django-radius==1.0.0
-# NOTE: Remove when we transition packaging
-django-redis-cache==1.6.5
 djangorestframework==3.3.2
 djangorestframework-yaml==1.0.2
 django-split-settings==0.1.1
@@ -115,8 +113,6 @@ rackspace-auth-openstack==1.3
 rackspace-novaclient==1.5
 rax-default-network-flags-python-novaclient-ext==0.3.2
 rax-scheduled-images-python-novaclient-ext==0.3.1
-# NOTE: Remove this when we transition packaging
-redis==2.10.3
 requests-oauthlib==0.5.0
 requests==2.9.1
 requestsexceptions==1.1.1

From 5e626cfe2e90d03dfe4ceac65be973fde27f5784 Mon Sep 17 00:00:00 2001
From: Alan Rominger
Date: Mon, 29 Aug 2016 15:54:45 -0400
Subject: [PATCH 03/47] test_credential bug fixes

The credential detail view was looked up with the organization's
primary key. That works when the database arbitrarily gives both
objects pk=1 in an isolated test, but it is not a great thing to
depend on.
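To make the failure mode concrete, here is a sketch (hypothetical test
code, not part of this patch) of why the old lookup only passed by
accident:

    org = Organization.objects.create(name='test-org')      # often gets pk=1
    cred = Credential.objects.create(name='test-cred')      # often gets pk=1 too
    url = reverse('api:credential_detail', args=(org.id,))  # resolves "correctly"
    # ...but only while the two primary keys happen to collide:
    assert url == reverse('api:credential_detail', args=(cred.id,))

Passing credential.id instead makes the intent explicit and keeps the
tests passing when the primary keys diverge.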
--- awx/main/tests/functional/api/test_credential.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/awx/main/tests/functional/api/test_credential.py b/awx/main/tests/functional/api/test_credential.py index 3c79e62e33..54f0bb0e20 100644 --- a/awx/main/tests/functional/api/test_credential.py +++ b/awx/main/tests/functional/api/test_credential.py @@ -317,18 +317,18 @@ def test_cant_change_organization(patch, credential, organization, org_admin): credential.organization = organization credential.save() - response = patch(reverse('api:credential_detail', args=(organization.id,)), { + response = patch(reverse('api:credential_detail', args=(credential.id,)), { 'name': 'Some new name', }, org_admin) assert response.status_code == 200 - response = patch(reverse('api:credential_detail', args=(organization.id,)), { + response = patch(reverse('api:credential_detail', args=(credential.id,)), { 'name': 'Some new name2', 'organization': organization.id, # fine for it to be the same }, org_admin) assert response.status_code == 200 - response = patch(reverse('api:credential_detail', args=(organization.id,)), { + response = patch(reverse('api:credential_detail', args=(credential.id,)), { 'name': 'Some new name3', 'organization': None }, org_admin) @@ -337,7 +337,7 @@ def test_cant_change_organization(patch, credential, organization, org_admin): @pytest.mark.django_db def test_cant_add_organization(patch, credential, organization, org_admin): assert credential.organization is None - response = patch(reverse('api:credential_detail', args=(organization.id,)), { + response = patch(reverse('api:credential_detail', args=(credential.id,)), { 'name': 'Some new name', 'organization': organization.id }, org_admin) From cf81199bbfc5a5ce27f7e6cfa98e7c9fee09c199 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 22 Aug 2016 14:27:39 -0400 Subject: [PATCH 04/47] Remove extra project-related DB hit, follow same pattern as unified jobs --- awx/api/serializers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2d2e38a8f5..c5e8559486 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -285,7 +285,8 @@ class BaseSerializer(serializers.ModelSerializer): # because it results in additional queries. 
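            # Likewise, skip 'project' for InventorySource and Project objects;
            # resolving that relation costs an extra query per serialized object
            # (the same pattern as the 'job' exclusion for UnifiedJob below).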
            if fk == 'job' and isinstance(obj, UnifiedJob):
                continue
-            if fk == 'project' and isinstance(obj, InventorySource):
+            if fk == 'project' and (isinstance(obj, InventorySource) or
+                                    isinstance(obj, Project)):
                 continue
 
             fkval = getattr(obj, fk, None)

From c112fc3cf4d4f99763246748e7ccf85045a47d11 Mon Sep 17 00:00:00 2001
From: Matthew Jones
Date: Tue, 30 Aug 2016 10:46:13 -0400
Subject: [PATCH 05/47] Update qpid packaging, remove migrations

* Remove old system migrations
* Update qpid install deps for RH
---
 pytest.ini | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 03c814599c..2993b1f577 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,7 +1,7 @@
 [pytest]
 DJANGO_SETTINGS_MODULE = awx.settings.development
-python_paths = venv/tower/lib/python2.7/site-packages
-site_dirs = venv/tower/lib/python2.7/site-packages
+python_paths = /venv/tower/lib/python2.7/site-packages
+site_dirs = /venv/tower/lib/python2.7/site-packages
 python_files = *.py
 addopts = --reuse-db --nomigrations --tb=native
 markers =

From 4c876b40e46107013ad56096024e905fc210e150 Mon Sep 17 00:00:00 2001
From: Chris Meyers
Date: Tue, 16 Aug 2016 17:45:18 -0400
Subject: [PATCH 06/47] initial models and endpoints added for workflows

---
 awx/api/serializers.py                        |  91 ++++++++++
 awx/api/urls.py                               |  28 +++
 awx/api/views.py                              | 168 ++++++++++++++++--
 awx/main/access.py                            | 139 +++++++++++++++
 .../management/commands/run_task_system.py    |   3 +
 .../migrations/0033_v301_workflow_create.py   |  70 ++++++++
 awx/main/models/__init__.py                   |   1 +
 awx/main/models/activity_stream.py            |   3 +
 awx/main/models/workflow.py                   | 160 +++++++++++++++++
 awx/main/tasks.py                             |  51 +++++-
 awx/main/tests/conftest.py                    |   5 +
 awx/main/tests/factories/__init__.py          |   2 +
 awx/main/tests/factories/fixtures.py          |  26 +++
 awx/main/tests/factories/tower.py             |  34 ++++
 awx/main/tests/unit/api/test_views.py         |   2 +
 tools/git_hooks/pre-commit                    |   2 +-
 16 files changed, 766 insertions(+), 19 deletions(-)
 create mode 100644 awx/main/migrations/0033_v301_workflow_create.py
 create mode 100644 awx/main/models/workflow.py

diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index 2d2e38a8f5..2195f544cc 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -528,6 +528,8 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
             serializer_class = JobTemplateSerializer
         elif isinstance(obj, SystemJobTemplate):
             serializer_class = SystemJobTemplateSerializer
+        elif isinstance(obj, WorkflowJobTemplate):
+            serializer_class = WorkflowJobTemplateSerializer
         if serializer_class:
             serializer = serializer_class(instance=obj, context=self.context)
             return serializer.to_representation(obj)
@@ -2168,6 +2170,95 @@ class SystemJobCancelSerializer(SystemJobSerializer):
 
     class Meta:
         fields = ('can_cancel',)
+
+
+
+# TODO:
+class WorkflowJobSerializer(UnifiedJobSerializer):
+
+    class Meta:
+        model = WorkflowJob
+        fields = ('*', 'workflow_job_template', 'extra_vars')
+
+    def get_related(self, obj):
+        res = super(WorkflowJobSerializer, self).get_related(obj)
+        if obj.system_job_template:
+            res['workflow_job_template'] = reverse('api:workflow_job_template_detail',
+                                                   args=(obj.workflow_job_template.pk,))
+        # TODO:
+        #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
+        if obj.can_cancel or True:
+            res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,))
+        return res
+
+
+# TODO:
+class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
+    pass
+
+# TODO:
+class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer):
+
+    class Meta:
+        model = WorkflowJobTemplate
+        fields = ('*',)
+
+    def get_related(self, obj):
+        res = super(WorkflowJobTemplateListSerializer, self).get_related(obj)
+        res.update(dict(
+            jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)),
+            #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)),
+            launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)),
+            workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)),
+            # TODO: Implement notifications
+            #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)),
+            #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)),
+            #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)),
+
+        ))
+        return res
+
+class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer):
+    pass
+
+class WorkflowNodeSerializer(BaseSerializer):
+    #workflow_job_template = UnifiedJobTemplateSerializer()
+
+    class Meta:
+        model = WorkflowNode
+        fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',)
+
+    def get_related(self, obj):
+        res = super(WorkflowNodeSerializer, self).get_related(obj)
+        res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,))
+        if obj.unified_job_template:
+            res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
+        res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,))
+        res['failure_nodes'] = reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,))
+        res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,))
+
+        return res
+
+class WorkflowNodeDetailSerializer(WorkflowNodeSerializer):
+
+    '''
+    Influence the api browser sample data to not include workflow_job_template
+    when editing a WorkflowNode.
+
+    Note: I was not able to accomplish this through the use of extra_kwargs.
+    Maybe something to do with workflow_job_template being a relational field?
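+
+    With an existing instance, build_relational_field() below flags the
+    field read_only (and drops its queryset), so the browsable API stops
+    suggesting workflow_job_template in PUT/PATCH bodies for saved nodes.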
+ ''' + def build_relational_field(self, field_name, relation_info): + field_class, field_kwargs = super(WorkflowNodeDetailSerializer, self).build_relational_field(field_name, relation_info) + if self.instance and field_name == 'workflow_job_template': + field_kwargs['read_only'] = True + field_kwargs.pop('queryset', None) + return field_class, field_kwargs + + +class WorkflowNodeListSerializer(WorkflowNodeSerializer): + pass + class JobListSerializer(JobSerializer, UnifiedJobListSerializer): pass diff --git a/awx/api/urls.py b/awx/api/urls.py index 97acc317bd..ec08ec7706 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -255,6 +255,23 @@ system_job_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/notifications/$', 'system_job_notifications_list'), ) +workflow_job_template_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_template_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_template_detail'), + url(r'^(?P[0-9]+)/jobs/$', 'workflow_job_template_jobs_list'), + url(r'^(?P[0-9]+)/launch/$', 'workflow_job_template_launch'), + url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'), +# url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_template_cancel'), + #url(r'^(?P[0-9]+)/nodes/$', 'workflow_job_template_node_list'), +) +workflow_job_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), +# url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_cancel'), + #url(r'^(?P[0-9]+)/notifications/$', 'workflow_job_notifications_list'), +) + + notification_template_urls = patterns('awx.api.views', url(r'^$', 'notification_template_list'), url(r'^(?P[0-9]+)/$', 'notification_template_detail'), @@ -272,6 +289,14 @@ label_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'label_detail'), ) +workflow_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_node_always_nodes_list'), +) + schedule_urls = patterns('awx.api.views', url(r'^$', 'schedule_list'), url(r'^(?P[0-9]+)/$', 'schedule_detail'), @@ -321,7 +346,10 @@ v1_urls = patterns('awx.api.views', url(r'^system_jobs/', include(system_job_urls)), url(r'^notification_templates/', include(notification_template_urls)), url(r'^notifications/', include(notification_urls)), + url(r'^workflow_job_templates/',include(workflow_job_template_urls)), + url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), + url(r'^workflow_nodes/', include(workflow_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 9686387f0c..2fc19e168a 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -11,6 +11,7 @@ import socket import sys import errno import logging +import copy from base64 import b64encode from collections import OrderedDict @@ -145,6 +146,8 @@ class ApiV1RootView(APIView): data['unified_job_templates'] = reverse('api:unified_job_template_list') data['unified_jobs'] = reverse('api:unified_job_list') data['activity_stream'] = reverse('api:activity_stream_list') + data['workflow_job_templates'] = reverse('api:workflow_job_template_list') + data['workflow_jobs'] = reverse('api:workflow_job_list') return 
Response(data) @@ -1747,16 +1750,24 @@ class GroupList(ListCreateAPIView): model = Group serializer_class = GroupSerializer -class GroupChildrenList(SubListCreateAttachDetachAPIView): +''' +Useful when you have a self-refering ManyToManyRelationship. +* Tower uses a shallow (2-deep only) url pattern. For example: - model = Group - serializer_class = GroupSerializer - parent_model = Group - relationship = 'children' +When an object hangs off of a parent object you would have the url of the +form /api/v1/parent_model/34/child_model. If you then wanted a child of the +child model you would NOT do /api/v1/parent_model/34/child_model/87/child_child_model +Instead, you would access the child_child_model via /api/v1/child_child_model/87/ +and you would create child_child_model's off of /api/v1/child_model/87/child_child_model_set +Now, when creating child_child_model related to child_model you still want to +link child_child_model to parent_model. That's what this class is for +''' +class EnforceParentRelationshipMixin(object): + enforce_parent_relationship = '' def update_raw_data(self, data): - data.pop('inventory', None) - return super(GroupChildrenList, self).update_raw_data(data) + data.pop(self.enforce_parent_relationship, None) + return super(EnforceParentRelationshipMixin, self).update_raw_data(data) def create(self, request, *args, **kwargs): # Inject parent group inventory ID into new group data. @@ -1764,16 +1775,16 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView): # HACK: Make request data mutable. if getattr(data, '_mutable', None) is False: data._mutable = True - data['inventory'] = self.get_parent_object().inventory_id - return super(GroupChildrenList, self).create(request, *args, **kwargs) + data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % relationship) + return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs) - def unattach(self, request, *args, **kwargs): - sub_id = request.data.get('id', None) - if sub_id is not None: - return super(GroupChildrenList, self).unattach(request, *args, **kwargs) - parent = self.get_parent_object() - parent.delete() - return Response(status=status.HTTP_204_NO_CONTENT) +class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = Group + serializer_class = GroupSerializer + parent_model = Group + relationship = 'children' + enforce_parent_relationship = 'inventory' class GroupPotentialChildrenList(SubListAPIView): @@ -2604,6 +2615,131 @@ class JobTemplateObjectRolesList(SubListAPIView): content_type = ContentType.objects.get_for_model(self.parent_model) return Role.objects.filter(content_type=content_type, object_id=po.pk) +# TODO: +class WorkflowNodeList(ListCreateAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeSerializer + new_in_310 = True + +# TODO: +class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeDetailSerializer + parent_model = WorkflowJobTemplate + relationship = 'workflow_job_template' + new_in_310 = True + +class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowNode + relationship = '' + enforce_parent_relationship = 'workflow_job_template' + new_in_310 = True + + ''' + Limit the set of WorkflowNodes to the related nodes of specified by + 'relationship' 
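+    (e.g. for WorkflowNodeSuccessNodesList this resolves to
+    parent.success_nodes.all())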
+ ''' + def get_queryset(self): + parent = self.get_parent_object() + self.check_parent_access(parent) + return getattr(parent, self.relationship).all() + +class WorkflowNodeSuccessNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'success_nodes' + +class WorkflowNodeFailureNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'failure_nodes' + +class WorkflowNodeAlwaysNodesList(WorkflowNodeChildrenBaseList): + + relationship = 'always_nodes' + +# TODO: +class WorkflowJobTemplateList(ListCreateAPIView): + + model = WorkflowJobTemplate + serializer_class = WorkflowJobTemplateListSerializer + always_allow_superuser = False + + # TODO: RBAC + ''' + def post(self, request, *args, **kwargs): + ret = super(WorkflowJobTemplateList, self).post(request, *args, **kwargs) + if ret.status_code == 201: + workflow_job_template = WorkflowJobTemplate.objects.get(id=ret.data['id']) + workflow_job_template.admin_role.members.add(request.user) + return ret + ''' + +# TODO: +class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowJobTemplate + serializer_class = WorkflowJobTemplateSerializer + always_allow_superuser = False + +# TODO: +class WorkflowJobTemplateLaunch(GenericAPIView): + + model = WorkflowJobTemplate + serializer_class = EmptySerializer + + def get(self, request, *args, **kwargs): + return Response({}) + + def post(self, request, *args, **kwargs): + obj = self.get_object() + if not request.user.can_access(self.model, 'start', obj): + raise PermissionDenied() + + new_job = obj.create_unified_job(**request.data) + new_job.signal_start(**request.data) + data = dict(system_job=new_job.id) + return Response(data, status=status.HTTP_201_CREATED) + +# TODO: +class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJobTemplate + relationship = 'workflow_nodes' + parent_key = 'workflow_job_template' + +# TODO: +class WorkflowJobTemplateJobsList(SubListAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobListSerializer + parent_model = WorkflowJobTemplate + relationship = 'jobs' + parent_key = 'workflow_job_template' +# TODO: +class WorkflowJobList(ListCreateAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobListSerializer + + def get(self, request, *args, **kwargs): + if not request.user.is_superuser and not request.user.is_system_auditor: + raise PermissionDenied("Superuser privileges needed.") + return super(WorkflowJobList, self).get(request, *args, **kwargs) + +# TODO: +class WorkflowJobDetail(RetrieveDestroyAPIView): + + model = WorkflowJob + serializer_class = WorkflowJobSerializer + class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/access.py b/awx/main/access.py index 5fa3b76274..e6597797da 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1132,6 +1132,142 @@ class SystemJobAccess(BaseAccess): ''' model = SystemJob +# TODO: +class WorkflowNodeAccess(BaseAccess): + ''' + I can see/use a WorkflowNode if I have permission to associated Workflow Job Template + ''' + model = WorkflowNode + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + return self.model.objects.all() + + @check_superuser + def can_read(self, obj): + return True + + @check_superuser + def can_add(self, data): + if not data: # So the browseable API will work + return True + + return True + + @check_superuser + def 
can_change(self, obj, data): + if self.can_add(data) is False: + return False + + return True + + def can_delete(self, obj): + return self.can_change(obj, None) + +# TODO: +class WorkflowJobTemplateAccess(BaseAccess): + ''' + I can only see/manage Workflow Job Templates if I'm a super user + ''' + + model = WorkflowJobTemplate + + def can_start(self, obj): + return self.can_read(obj) + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + qs = self.model.objects.all() + else: + qs = self.model.accessible_objects(self.user, 'read_role') + return qs.select_related('created_by', 'modified_by', 'next_schedule').all() + + @check_superuser + def can_read(self, obj): + return self.user in obj.read_role + + def can_add(self, data): + ''' + a user can create a job template if they are a superuser, an org admin + of any org that the project is a member, or if they have user or team + based permissions tying the project to the inventory source for the + given action as well as the 'create' deploy permission. + Users who are able to create deploy jobs can also run normal and check (dry run) jobs. + ''' + if not data: # So the browseable API will work + return True + + # if reference_obj is provided, determine if it can be coppied + reference_obj = data.pop('reference_obj', None) + + if 'survey_enabled' in data and data['survey_enabled']: + self.check_license(feature='surveys') + + if self.user.is_superuser: + return True + + def get_value(Class, field): + if reference_obj: + return getattr(reference_obj, field, None) + else: + pk = get_pk_from_dict(data, field) + if pk: + return get_object_or_400(Class, pk=pk) + else: + return None + + return False + + def can_start(self, obj, validate_license=True): + # TODO: Are workflows allowed for all licenses ?? + # Check license. 
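        # For now the license checks below stay commented out; workflow
        # licensing has not been decided yet, so only the superuser and
        # execute_role gates further down are enforced.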
+ ''' + if validate_license: + self.check_license() + if obj.job_type == PERM_INVENTORY_SCAN: + self.check_license(feature='system_tracking') + if obj.survey_enabled: + self.check_license(feature='surveys') + ''' + + # Super users can start any job + if self.user.is_superuser: + return True + + return self.user in obj.execute_role + + def can_change(self, obj, data): + data_for_change = data + if self.user not in obj.admin_role and not self.user.is_superuser: + return False + if data is not None: + data = dict(data) + + if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']: + self.check_license(feature='surveys') + return True + + return self.can_read(obj) and self.can_add(data_for_change) + + def can_delete(self, obj): + is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role + if not is_delete_allowed: + return False + active_jobs = [dict(type="job", id=o.id) + for o in obj.jobs.filter(status__in=ACTIVE_STATES)] + if len(active_jobs) > 0: + raise StateConflict({"conflict": "Resource is being used by running jobs", + "active_jobs": active_jobs}) + return True + + + +class WorkflowJobAccess(BaseAccess): + ''' + I can only see Workflow Jobs if I'm a super user + ''' + model = WorkflowJob + class AdHocCommandAccess(BaseAccess): ''' I can only see/run ad hoc commands when: @@ -1724,3 +1860,6 @@ register_access(Role, RoleAccess) register_access(NotificationTemplate, NotificationTemplateAccess) register_access(Notification, NotificationAccess) register_access(Label, LabelAccess) +register_access(WorkflowNode, WorkflowNodeAccess) +register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess) +register_access(WorkflowJob, WorkflowJobAccess) diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index f91309030c..437f0cdf6e 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -110,6 +110,8 @@ class SimpleDAG(object): return "project_update" elif type(obj) == SystemJob: return "system_job" + elif type(obj) == WorkflowJob: + return "workflow_job" return "unknown" def get_dependencies(self, obj): @@ -149,6 +151,7 @@ def get_tasks(): ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)] graph_system_jobs = [sj for sj in SystemJob.objects.filter(status__in=RELEVANT_JOBS)] + all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates + graph_project_updates + graph_system_jobs, key=lambda task: task.created) diff --git a/awx/main/migrations/0033_v301_workflow_create.py b/awx/main/migrations/0033_v301_workflow_create.py new file mode 100644 index 0000000000..258bdc797d --- /dev/null +++ b/awx/main/migrations/0033_v301_workflow_create.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion +import awx.main.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0032_v302_credential_permissions_update'), + ] + + operations = [ + migrations.CreateModel( + name='WorkflowJob', + fields=[ + ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')), + ('extra_vars', models.TextField(default=b'', blank=True)), + ], + options={ + 'ordering': ('id',), + }, + bases=('main.unifiedjob', models.Model), + ), + migrations.CreateModel( + name='WorkflowJobTemplate', + fields=[ + 
('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')), + ('extra_vars', models.TextField(default=b'', blank=True)), + ('admin_role', awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'singleton:system_administrator', to='main.Role', null=b'True')), + ], + bases=('main.unifiedjobtemplate', models.Model), + ), + migrations.CreateModel( + name='WorkflowNode', + fields=[ + ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), + ('job', models.ForeignKey(related_name='workflow_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), + ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', to='main.WorkflowJobTemplate')), + ], + ), + migrations.AddField( + model_name='workflowjob', + name='workflow_job_template', + field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJobTemplate', null=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_job', + field=models.ManyToManyField(to='main.WorkflowJob', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_job_template', + field=models.ManyToManyField(to='main.WorkflowJobTemplate', blank=True), + ), + migrations.AddField( + model_name='activitystream', + name='workflow_node', + field=models.ManyToManyField(to='main.WorkflowNode', blank=True), + ), + ] diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 1e320e6238..1c019ce01a 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -22,6 +22,7 @@ from awx.main.models.mixins import * # noqa from awx.main.models.notifications import * # noqa from awx.main.models.fact import * # noqa from awx.main.models.label import * # noqa +from awx.main.models.workflow import * # noqa # Monkeypatch Django serializer to ignore django-taggit fields (which break # the dumpdata command; see https://github.com/alex/django-taggit/issues/155). 
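The graph structure these migrations and models define is easiest to see
with a small sketch. The snippet below is illustrative only (the job
template names are hypothetical and the code is not part of this patch);
it builds a three-node workflow in which success and failure of the first
node fan out to different children:

    from awx.main.models import JobTemplate, WorkflowJobTemplate, WorkflowNode

    wfjt = WorkflowJobTemplate.objects.create(name='deploy-and-verify')

    deploy = WorkflowNode.objects.create(
        workflow_job_template=wfjt,
        unified_job_template=JobTemplate.objects.get(name='deploy'))
    verify = WorkflowNode.objects.create(
        workflow_job_template=wfjt,
        unified_job_template=JobTemplate.objects.get(name='verify'))
    cleanup = WorkflowNode.objects.create(
        workflow_job_template=wfjt,
        unified_job_template=JobTemplate.objects.get(name='cleanup'))

    # Edges are ordinary directed M2Ms (the node relations are declared
    # with symmetrical=False in workflow.py below):
    deploy.success_nodes.add(verify)    # run verify only if deploy succeeds
    deploy.failure_nodes.add(cleanup)   # run cleanup only if deploy fails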
diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 8ff285cb45..bcc5cef0c7 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -49,6 +49,9 @@ class ActivityStream(models.Model): permission = models.ManyToManyField("Permission", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True) job = models.ManyToManyField("Job", blank=True) + workflow_node = models.ManyToManyField("WorkflowNode", blank=True) + workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True) + workflow_job = models.ManyToManyField("WorkflowJob", blank=True) unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+') unified_job = models.ManyToManyField("UnifiedJob", blank=True, related_name='activity_stream_as_unified_job+') ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py new file mode 100644 index 0000000000..91f710a733 --- /dev/null +++ b/awx/main/models/workflow.py @@ -0,0 +1,160 @@ +# Copyright (c) 2016 Ansible, Inc. +# All Rights Reserved. + +# Django +from django.db import models +from django.core.urlresolvers import reverse +#from django import settings as tower_settings + +# AWX +from awx.main.models import UnifiedJobTemplate, UnifiedJob +from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty +from awx.main.models.rbac import ( + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, +) +from awx.main.fields import ImplicitRoleField + +__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowNode'] + +class WorkflowNode(CreatedModifiedModel): + + class Meta: + app_label = 'main' + + # TODO: RBAC + ''' + admin_role = ImplicitRoleField( + parent_role='workflow_job_template.admin_role', + ) + ''' + + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='workflow_nodes', + on_delete=models.CASCADE, + ) + unified_job_template = models.ForeignKey( + 'UnifiedJobTemplate', + related_name='unified_jt_workflow_nodes', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + success_nodes = models.ManyToManyField( + 'self', + related_name='parent_success_nodes', + blank=True, + symmetrical=False, + ) + failure_nodes = models.ManyToManyField( + 'self', + related_name='parent_failure_nodes', + blank=True, + symmetrical=False, + ) + always_nodes = models.ManyToManyField( + 'self', + related_name='parent_always_nodes', + blank=True, + symmetrical=False, + ) + job = models.ForeignKey( + 'UnifiedJob', + related_name='workflow_node', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + + def get_absolute_url(self): + return reverse('api:workflow_node_detail', args=(self.pk,)) + +class WorkflowJobOptions(BaseModel): + class Meta: + abstract = True + + extra_vars = models.TextField( + blank=True, + default='', + ) + +class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): + + class Meta: + app_label = 'main' + + admin_role = ImplicitRoleField( + parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + ) + + @classmethod + def _get_unified_job_class(cls): + return WorkflowJob + + @classmethod + def _get_unified_job_field_names(cls): + # TODO: ADD LABELS + return ['name', 'description', 'extra_vars', 'workflow_nodes'] + + def get_absolute_url(self): + return reverse('api:workflow_job_template_detail', args=(self.pk,)) + + 
@property + def cache_timeout_blocked(self): + # TODO: don't allow running of job template if same workflow template running + return False + + # TODO: Notifications + # TODO: Surveys + + def create_job(self, **kwargs): + ''' + Create a new job based on this template. + ''' + return self.create_unified_job(**kwargs) + + +class WorkflowJob(UnifiedJob, WorkflowJobOptions): + + class Meta: + app_label = 'main' + ordering = ('id',) + + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='jobs', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + + extra_vars_dict = VarsDictProperty('extra_vars', True) + + @classmethod + def _get_parent_field_name(cls): + return 'workflow_job_template' + + @classmethod + def _get_task_class(cls): + from awx.main.tasks import RunWorkflowJob + return RunWorkflowJob + + def socketio_emit_data(self): + return {} + + def get_absolute_url(self): + return reverse('api:workflow_job_detail', args=(self.pk,)) + + def get_ui_url(self): + return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) + + def is_blocked_by(self, obj): + return True + + @property + def task_impact(self): + return 0 + diff --git a/awx/main/tasks.py b/awx/main/tasks.py index b77275c0fd..9f05d68209 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -55,8 +55,10 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', - 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', - 'update_inventory_computed_fields', 'send_notifications', 'run_administrative_checks'] + 'RunAdHocCommand', 'RunWorkflowJob', 'handle_work_error', + 'handle_work_success', 'update_inventory_computed_fields', + 'send_notifications', 'run_administrative_checks', + 'run_workflow_job'] HIDDEN_PASSWORD = '**********' @@ -1658,3 +1660,48 @@ class RunSystemJob(BaseTask): def build_cwd(self, instance, **kwargs): return settings.BASE_DIR +class RunWorkflowJob(BaseTask): + + name = 'awx.main.tasks.run_workflow_job' + model = WorkflowJob + + def run(self, pk, **kwargs): + ''' + Run the job/task and capture its output. + ''' + instance = self.update_model(pk, status='running', celery_task_id=self.request.id) + + instance.socketio_emit_status("running") + status, rc, tb = 'error', None, '' + output_replacements = [] + try: + self.pre_run_hook(instance, **kwargs) + if instance.cancel_flag: + instance = self.update_model(instance.pk, status='canceled') + if instance.status != 'running': + if hasattr(settings, 'CELERY_UNIT_TEST'): + return + else: + # Stop the task chain and prevent starting the job if it has + # already been canceled. 
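+                    # update_model() with no field changes re-reads the
+                    # instance, so the RuntimeError below reports the
+                    # authoritative status.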
+                    instance = self.update_model(pk)
+                    status = instance.status
+                    raise RuntimeError('not starting %s task' % instance.status)
+            #status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle)
+            # TODO: Do the workflow logic here
+        except Exception:
+            if status != 'canceled':
+                tb = traceback.format_exc()
+        instance = self.update_model(pk, status=status, result_traceback=tb)
+        self.post_run_hook(instance, **kwargs)
+        instance.socketio_emit_status(status)
+        if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
+            # Raising an exception will mark the job as 'failed' in celery
+            # and will stop a task chain from continuing to execute
+            if status == 'canceled':
+                raise Exception("Task %s(pk:%s) was canceled (rc=%s)" % (str(self.model.__class__), str(pk), str(rc)))
+            else:
+                raise Exception("Task %s(pk:%s) encountered an error (rc=%s)" % (str(self.model.__class__), str(pk), str(rc)))
+        if not hasattr(settings, 'CELERY_UNIT_TEST'):
+            self.signal_finished(pk)
+
diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py
index 035b627922..9b2b00455c 100644
--- a/awx/main/tests/conftest.py
+++ b/awx/main/tests/conftest.py
@@ -7,6 +7,7 @@ from awx.main.tests.factories import (
     create_job_template,
     create_notification_template,
     create_survey_spec,
+    create_workflow_job_template,
 )
 
 @pytest.fixture
@@ -40,6 +41,10 @@ def job_template_with_survey_passwords_factory(job_template_factory):
 def job_with_secret_key_unit(job_with_secret_key_factory):
     return job_with_secret_key_factory(persisted=False)
 
+@pytest.fixture
+def workflow_job_template_factory():
+    return create_workflow_job_template
+
 @pytest.fixture
 def get_ssh_version(mocker):
     return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8')
diff --git a/awx/main/tests/factories/__init__.py b/awx/main/tests/factories/__init__.py
index 81a1144a52..4c039c63b9 100644
--- a/awx/main/tests/factories/__init__.py
+++ b/awx/main/tests/factories/__init__.py
@@ -3,6 +3,7 @@ from .tower import (
     create_job_template,
     create_notification_template,
     create_survey_spec,
+    create_workflow_job_template,
 )
 
 from .exc import (
@@ -14,5 +15,6 @@ __all__ = [
     'create_job_template',
     'create_notification_template',
     'create_survey_spec',
+    'create_workflow_job_template',
     'NotUnique',
 ]
diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py
index feca114410..cdbfac6531 100644
--- a/awx/main/tests/factories/fixtures.py
+++ b/awx/main/tests/factories/fixtures.py
@@ -13,6 +13,8 @@ from awx.main.models import (
     Credential,
     Inventory,
     Label,
+    WorkflowJobTemplate,
+    WorkflowNode,
 )
 
 # mk methods should create only a single object of a single type.
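# The workflow factories added below follow the same rule: for example,
#   mk_workflow_job_template('test-wfjt', persisted=False)
# builds just a WorkflowJobTemplate; graph edges between nodes are wired
# up separately through mk_workflow_node().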
@@ -152,3 +153,28 @@
     if persisted:
         jt.save()
     return jt
+
+def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True):
+    wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars)
+
+    wfjt.survey_spec = spec
+    if wfjt.survey_spec is not None:
+        wfjt.survey_enabled = True
+
+    if persisted:
+        wfjt.save()
+    return wfjt
+
+def mk_workflow_node(workflow_job_template=None, unified_job_template=None,
+                     success_nodes=None, failure_nodes=None, always_nodes=None,
+                     job=None, persisted=True):
+    workflow_node = WorkflowNode(workflow_job_template=workflow_job_template,
+                                 unified_job_template=unified_job_template,
+                                 success_nodes=success_nodes,
+                                 failure_nodes=failure_nodes,
+                                 always_nodes=always_nodes,
+                                 job=job)
+    if persisted:
+        workflow_node.save()
+    return workflow_node
+
diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py
index 8116ec83bf..3813bf2faa 100644
--- a/awx/main/tests/factories/tower.py
+++ b/awx/main/tests/factories/tower.py
@@ -9,6 +9,7 @@ from awx.main.models import (
     Inventory,
     Job,
     Label,
+    WorkflowJobTemplate,
 )
 
 from .objects import (
@@ -28,6 +29,7 @@ from .fixtures import (
     mk_project,
     mk_label,
     mk_notification_template,
+    mk_workflow_job_template,
 )
 
 
@@ -343,3 +345,35 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
                    users=_Mapped(users),
                    superusers=_Mapped(superusers),
                    teams=teams)
+
+def create_workflow_job_template(name, persisted=True, **kwargs):
+    Objects = generate_objects(["workflow_job_template",
+                                "survey",], kwargs)
+
+    spec = None
+    jobs = {}
+
+    extra_vars = kwargs.get('extra_vars', '')
+
+    if 'survey' in kwargs:
+        spec = create_survey_spec(kwargs['survey'])
+
+    wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars,
+                                    persisted=persisted)
+
+    if 'jobs' in kwargs:
+        for i in kwargs['jobs']:
+            if type(i) is Job:
+                jobs[i.pk] = i
+            else:
+                # Fill in default survey answers
+                job_extra_vars = {}
+                for question in spec['spec']:
+                    job_extra_vars[question['variable']] = question['default']
+                jobs[i] = mk_job(job_template=wfjt, extra_vars=job_extra_vars,
+                                 persisted=persisted)
+
+    return Objects(workflow_job_template=wfjt,
+                   #jobs=jobs,
+                   survey=spec,)
+
diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py
index 6a97831f02..798c30bf6f 100644
--- a/awx/main/tests/unit/api/test_views.py
+++ b/awx/main/tests/unit/api/test_views.py
@@ -43,6 +43,8 @@ class TestApiV1RootView:
             'unified_job_templates',
             'unified_jobs',
             'activity_stream',
+            'workflow_job_templates',
+            'workflow_jobs',
         ]
         view = ApiV1RootView()
         ret = view.get(mocker.MagicMock())
diff --git a/tools/git_hooks/pre-commit b/tools/git_hooks/pre-commit
index a4f4045b4d..3f4ece929d 100755
--- a/tools/git_hooks/pre-commit
+++ b/tools/git_hooks/pre-commit
@@ -1,2 +1,2 @@
 #!/bin/bash
-ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml
+#ansible-playbook -i "127.0.0.1," tools/git_hooks/pre_commit.yml

From 2cfdee3b215937bfc67d8fb935317f2b33d58ee9 Mon Sep 17 00:00:00 2001
From: Chris Meyers
Date: Tue, 30 Aug 2016 15:04:21 -0400
Subject: [PATCH 07/47] turn job templates into jobs via launch

---
 awx/api/serializers.py                        |  7 +-
 awx/api/urls.py                               |  1 +
 awx/api/views.py                              | 15 ++++-
 .../management/commands/run_task_system.py    | 13 +++-
 .../migrations/0034_auto_20160830_1716.py     | 20 ++++++
 awx/main/models/workflow.py                   | 64 ++++++++++++++---
 awx/main/tasks.py                             |  6 +-
 tools/docker-compose.yml                      |  1 +
 8 files changed, 108 insertions(+), 19 deletions(-)
 create mode 100644 
awx/main/migrations/0034_auto_20160830_1716.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2195f544cc..42489bc1b0 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2172,7 +2172,6 @@ class SystemJobCancelSerializer(SystemJobSerializer): - # TODO: class WorkflowJobSerializer(UnifiedJobSerializer): @@ -2182,13 +2181,17 @@ class WorkflowJobSerializer(UnifiedJobSerializer): def get_related(self, obj): res = super(WorkflowJobSerializer, self).get_related(obj) - if obj.system_job_template: + if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) # TODO: #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,)) + res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,)) + # TODO: Cancel job + ''' if obj.can_cancel or True: res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,)) + ''' return res diff --git a/awx/api/urls.py b/awx/api/urls.py index ec08ec7706..e7240e39e2 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -267,6 +267,7 @@ workflow_job_template_urls = patterns('awx.api.views', workflow_job_urls = patterns('awx.api.views', url(r'^$', 'workflow_job_list'), url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), + url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'), # url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_cancel'), #url(r'^(?P[0-9]+)/notifications/$', 'workflow_job_notifications_list'), ) diff --git a/awx/api/views.py b/awx/api/views.py index 2fc19e168a..69a38fde2f 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -1775,7 +1775,7 @@ class EnforceParentRelationshipMixin(object): # HACK: Make request data mutable. if getattr(data, '_mutable', None) is False: data._mutable = True - data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % relationship) + data[self.enforce_parent_relationship] = getattr(self.get_parent_object(), '%s_id' % self.enforce_parent_relationship) return super(EnforceParentRelationshipMixin, self).create(request, *args, **kwargs) class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -2702,7 +2702,7 @@ class WorkflowJobTemplateLaunch(GenericAPIView): new_job = obj.create_unified_job(**request.data) new_job.signal_start(**request.data) - data = dict(system_job=new_job.id) + data = dict(workflow_job=new_job.id) return Response(data, status=status.HTTP_201_CREATED) # TODO: @@ -2723,6 +2723,7 @@ class WorkflowJobTemplateJobsList(SubListAPIView): parent_model = WorkflowJobTemplate relationship = 'jobs' parent_key = 'workflow_job_template' + # TODO: class WorkflowJobList(ListCreateAPIView): @@ -2740,6 +2741,16 @@ class WorkflowJobDetail(RetrieveDestroyAPIView): model = WorkflowJob serializer_class = WorkflowJobSerializer +class WorkflowJobWorkflowNodesList(SubListAPIView): + + model = WorkflowNode + serializer_class = WorkflowNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJob + relationship = 'workflow_job_nodes' + parent_key = 'job' + + class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 437f0cdf6e..0cdc3c1556 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -137,6 +137,13 @@ class SimpleDAG(object): leafs.append(n) return leafs + def 
get_root_nodes(self): + roots = [] + for n in self.nodes: + if len(self.get_dependents(n['node_object'])) < 1: + roots.append(n) + return roots + def get_tasks(): """Fetch all Tower tasks that are relevant to the task management system. @@ -151,9 +158,11 @@ def get_tasks(): ProjectUpdate.objects.filter(status__in=RELEVANT_JOBS)] graph_system_jobs = [sj for sj in SystemJob.objects.filter(status__in=RELEVANT_JOBS)] - + graph_workflow_jobs = [wf for wf in + WorkflowJob.objects.filter(status__in=RELEVANT_JOBS)] all_actions = sorted(graph_jobs + graph_ad_hoc_commands + graph_inventory_updates + - graph_project_updates + graph_system_jobs, + graph_project_updates + graph_system_jobs + + graph_workflow_jobs, key=lambda task: task.created) return all_actions diff --git a/awx/main/migrations/0034_auto_20160830_1716.py b/awx/main/migrations/0034_auto_20160830_1716.py new file mode 100644 index 0000000000..a285a0d9dd --- /dev/null +++ b/awx/main/migrations/0034_auto_20160830_1716.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0033_v301_workflow_create'), + ] + + operations = [ + migrations.AlterField( + model_name='workflownode', + name='job', + field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), + ), + ] diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 91f710a733..c77ed0c43d 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -8,6 +8,7 @@ from django.core.urlresolvers import reverse # AWX from awx.main.models import UnifiedJobTemplate, UnifiedJob +from awx.main.models.notifications import JobNotificationMixin from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty from awx.main.models.rbac import ( ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, @@ -61,7 +62,7 @@ class WorkflowNode(CreatedModifiedModel): ) job = models.ForeignKey( 'UnifiedJob', - related_name='workflow_node', + related_name='workflow_job_nodes', blank=True, null=True, default=None, @@ -96,7 +97,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): @classmethod def _get_unified_job_field_names(cls): # TODO: ADD LABELS - return ['name', 'description', 'extra_vars', 'workflow_nodes'] + return ['name', 'description', 'extra_vars',] def get_absolute_url(self): return reverse('api:workflow_job_template_detail', args=(self.pk,)) @@ -109,14 +110,53 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): # TODO: Notifications # TODO: Surveys - def create_job(self, **kwargs): - ''' - Create a new job based on this template. - ''' - return self.create_unified_job(**kwargs) + #def create_job(self, **kwargs): + # ''' + # Create a new job based on this template. + # ''' + # return self.create_unified_job(**kwargs) + # TODO: Delete create_unified_job here and explicitly call create_workflow_job() .. 
figure out where the call is + def create_unified_job(self, **kwargs): -class WorkflowJob(UnifiedJob, WorkflowJobOptions): + #def create_workflow_job(self, **kwargs): + #workflow_job = self.create_unified_job(**kwargs) + workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs) + workflow_job.inherit_jt_workflow_nodes() + return workflow_job + +class WorkflowJobInheritNodesMixin(object): + def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type): + old_related_nodes = getattr(old_node, node_type).all() + new_node_type_mgr = getattr(new_node, node_type) + + for old_related_node in old_related_nodes: + new_related_node_id = node_ids_map[old_related_node.id] + new_related_node = WorkflowNode.objects.get(id=new_related_node_id) + new_node_type_mgr.add(new_related_node) + + def inherit_jt_workflow_nodes(self): + new_nodes = [] + old_nodes = self.workflow_job_template.workflow_nodes.all() + + node_ids_map = {} + + for old_node in old_nodes: + new_node = WorkflowNode.objects.get(id=old_node.pk) + new_node.job = self + new_node.pk = None + new_node.save() + new_nodes.append(new_node) + + node_ids_map[old_node.id] = new_node.id + + for index, old_node in enumerate(old_nodes): + new_node = new_nodes[index] + for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']: + self._inherit_relationship(old_node, new_node, node_ids_map, node_type) + + +class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, WorkflowJobInheritNodesMixin): class Meta: app_label = 'main' @@ -158,3 +198,11 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions): def task_impact(self): return 0 + # TODO: workflow job notifications + def get_notification_templates(self): + return [] + + # TODO: workflow job notifications + def get_notification_friendly_name(self): + return "Workflow Job" + diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 9f05d68209..25479ae5ca 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -191,7 +191,6 @@ def notify_task_runner(metadata_dict): def _send_notification_templates(instance, status_str): if status_str not in ['succeeded', 'failed']: raise ValueError("status_str must be either succeeded or failed") - print("Instance has some shit in it %s" % instance) notification_templates = instance.get_notification_templates() if notification_templates: all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', [])) @@ -239,8 +238,6 @@ def handle_work_error(self, task_id, subtasks=None): instance.socketio_emit_status("failed") if first_instance: - print("Instance type is %s" % first_instance_type) - print("Instance passing along %s" % first_instance.name) _send_notification_templates(first_instance, 'failed') @task() @@ -1675,7 +1672,6 @@ class RunWorkflowJob(BaseTask): status, rc, tb = 'error', None, '' output_replacements = [] try: - self.pre_run_hook(instance, **kwargs) if instance.cancel_flag: instance = self.update_model(instance.pk, status='canceled') if instance.status != 'running': @@ -1692,8 +1688,8 @@ class RunWorkflowJob(BaseTask): except Exception: if status != 'canceled': tb = traceback.format_exc() + status = 'successful' instance = self.update_model(pk, status=status, result_traceback=tb) - self.post_run_hook(instance, **kwargs) instance.socketio_emit_status(status) if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'): # Raising an exception will mark the job as 'failed' in celery diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 
638926b727..4db4d2eb50 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -15,6 +15,7 @@ services: # - sync volumes: - "../:/tower_devel" + privileged: true # Postgres Database Container postgres: From f0b762267249f4d5e1bc00b31249ee940c7e61c5 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 31 Aug 2016 14:47:20 -0400 Subject: [PATCH 08/47] Update development environment for rabbit --- awx/settings/local_settings.py.docker_compose | 2 +- requirements/requirements.txt | 1 - requirements/requirements_dev.txt | 1 - tools/docker-compose.yml | 7 +++---- tools/docker-compose/start_development.sh | 2 +- 5 files changed, 5 insertions(+), 8 deletions(-) diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose index e4b47f2ebc..e1169e4291 100644 --- a/awx/settings/local_settings.py.docker_compose +++ b/awx/settings/local_settings.py.docker_compose @@ -64,7 +64,7 @@ else: } # Celery AMQP configuration. -BROKER_URL = 'qpid://qpid:5672' +BROKER_URL = 'amqp://guest:guest@rabbitmq//' # Mongo host configuration MONGO_HOST = NotImplemented diff --git a/requirements/requirements.txt b/requirements/requirements.txt index ea7376531e..1a3ba9e7f3 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -108,7 +108,6 @@ python-troveclient==1.4.0 pytz==2015.7 PyYAML==3.11 pyzmq==14.5.0 -qpid-python==0.32.1 rackspace-auth-openstack==1.3 rackspace-novaclient==1.5 rax-default-network-flags-python-novaclient-ext==0.3.2 diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index d848638306..d7906ce28f 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -10,4 +10,3 @@ pytest-cov pytest-django pytest-pythonpath pytest-mock -qpid-tools diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 638926b727..73a5c12bb9 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -9,7 +9,7 @@ services: links: - postgres - memcached - - qpid + - rabbitmq # - sync # volumes_from: # - sync @@ -23,9 +23,8 @@ services: memcached: image: memcached:alpine - qpid: - image: fedora/qpid:latest - entrypoint: qpidd --auth=no + rabbitmq: + image: rabbitmq:3-management # Source Code Synchronization Container # sync: diff --git a/tools/docker-compose/start_development.sh b/tools/docker-compose/start_development.sh index b75ef757c1..96812974fa 100755 --- a/tools/docker-compose/start_development.sh +++ b/tools/docker-compose/start_development.sh @@ -4,7 +4,7 @@ set +x # Wait for the databases to come up ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=memcached port=11211" all -ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=qpid port=5672" all +ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=rabbitmq port=5672" all # In case Tower in the container wants to connect to itself, use "docker exec" to attach to the container otherwise # TODO: FIX From f3b7fe18ce8b304feb5fb0b3fe4d0d4b14ed5a33 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 1 Sep 2016 11:41:20 -0400 Subject: [PATCH 09/47] Integrate memcached into setup playbook --- awx/settings/defaults.py | 6 +++--- awx/settings/local_settings.py.docker_compose | 15 --------------- 2 files changed, 3 insertions(+), 18 deletions(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 18e44cdc72..97157e4acd 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ 
-351,7 +351,7 @@ CELERYBEAT_SCHEDULE = { }, } -# Use Redis as cache backend (except when testing). +# Django Caching Configuration if is_testing(): CACHES = { 'default': { @@ -361,8 +361,8 @@ if is_testing(): else: CACHES = { 'default': { - 'BACKEND': 'redis_cache.RedisCache', - 'LOCATION': BROKER_URL, + 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', + 'LOCATION': 'memcached:11211', }, } diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose index e1169e4291..c85d89cb21 100644 --- a/awx/settings/local_settings.py.docker_compose +++ b/awx/settings/local_settings.py.docker_compose @@ -48,21 +48,6 @@ if is_testing(sys.argv): MONGO_DB = 'system_tracking_test' -# Django Caching Configuration -if is_testing(): - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - }, - } -else: - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'memcached:11211', - }, - } - # Celery AMQP configuration. BROKER_URL = 'amqp://guest:guest@rabbitmq//' From 39ac2c047b21da97c69f20f0eb36028d92fd93ad Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 1 Sep 2016 14:11:20 -0400 Subject: [PATCH 10/47] launch workflows --- awx/api/serializers.py | 10 +- awx/api/views.py | 4 +- .../management/commands/run_task_system.py | 144 ++++++++++++++++-- .../migrations/0035_auto_20160831_2008.py | 19 +++ .../migrations/0036_auto_20160831_2052.py | 25 +++ awx/main/models/workflow.py | 18 ++- awx/main/tasks.py | 46 ++---- awx/main/tests/factories/fixtures.py | 1 - awx/main/tests/factories/tower.py | 10 +- 9 files changed, 217 insertions(+), 60 deletions(-) create mode 100644 awx/main/migrations/0035_auto_20160831_2008.py create mode 100644 awx/main/migrations/0036_auto_20160831_2052.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 42489bc1b0..7ef6efb74a 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2229,13 +2229,19 @@ class WorkflowNodeSerializer(BaseSerializer): class Meta: model = WorkflowNode - fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + # TODO: workflow_job and job read-only + fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'job',) def get_related(self, obj): res = super(WorkflowNodeSerializer, self).get_related(obj) - res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) + if obj.workflow_job_template: + res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.unified_job_template: res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + if obj.job: + res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) + if obj.workflow_job: + res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,)) res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,)) res['failure_nodes'] = reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,)) res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,)) diff --git a/awx/api/views.py b/awx/api/views.py index 69a38fde2f..ce63713707 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2627,8 +2627,6 @@ class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): model = WorkflowNode 
serializer_class = WorkflowNodeDetailSerializer - parent_model = WorkflowJobTemplate - relationship = 'workflow_job_template' new_in_310 = True class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -2748,7 +2746,7 @@ class WorkflowJobWorkflowNodesList(SubListAPIView): always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJob relationship = 'workflow_job_nodes' - parent_key = 'job' + parent_key = 'workflow_job' class SystemJobTemplateList(ListAPIView): diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 0cdc3c1556..3e11b3511d 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -54,6 +54,8 @@ class SimpleDAG(object): type_str = "Inventory" elif type(obj) == ProjectUpdate: type_str = "Project" + elif type(obj) == WorkflowJob: + type_str = "Workflow" else: type_str = "Unknown" type_str += "%s" % str(obj.id) @@ -68,10 +70,11 @@ class SimpleDAG(object): short_string_obj(n['node_object']), "red" if n['node_object'].status == 'running' else "black", ) - for from_node, to_node in self.edges: - doc += "%s -> %s;\n" % ( + for from_node, to_node, label in self.edges: + doc += "%s -> %s [ label=\"%s\" ];\n" % ( short_string_obj(self.nodes[from_node]['node_object']), short_string_obj(self.nodes[to_node]['node_object']), + label, ) doc += "}\n" gv_file = open('/tmp/graph.gv', 'w') @@ -82,16 +85,16 @@ class SimpleDAG(object): if self.find_ord(obj) is None: self.nodes.append(dict(node_object=obj, metadata=metadata)) - def add_edge(self, from_obj, to_obj): + def add_edge(self, from_obj, to_obj, label=None): from_obj_ord = self.find_ord(from_obj) to_obj_ord = self.find_ord(to_obj) if from_obj_ord is None or to_obj_ord is None: raise LookupError("Object not found") - self.edges.append((from_obj_ord, to_obj_ord)) + self.edges.append((from_obj_ord, to_obj_ord, label)) def add_edges(self, edgelist): for edge_pair in edgelist: - self.add_edge(edge_pair[0], edge_pair[1]) + self.add_edge(edge_pair[0], edge_pair[1], edge_pair[2]) def find_ord(self, obj): for idx in range(len(self.nodes)): @@ -114,20 +117,28 @@ class SimpleDAG(object): return "workflow_job" return "unknown" - def get_dependencies(self, obj): + def get_dependencies(self, obj, label=None): antecedents = [] this_ord = self.find_ord(obj) - for node, dep in self.edges: - if node == this_ord: - antecedents.append(self.nodes[dep]) + for node, dep, lbl in self.edges: + if label: + if node == this_ord and lbl == label: + antecedents.append(self.nodes[dep]) + else: + if node == this_ord: + antecedents.append(self.nodes[dep]) return antecedents - def get_dependents(self, obj): + def get_dependents(self, obj, label=None): decendents = [] this_ord = self.find_ord(obj) - for node, dep in self.edges: - if dep == this_ord: - decendents.append(self.nodes[node]) + for node, dep, lbl in self.edges: + if label: + if dep == this_ord and lbl == label: + decendents.append(self.nodes[node]) + else: + if dep == this_ord: + decendents.append(self.nodes[node]) return decendents def get_leaf_nodes(self): @@ -144,6 +155,83 @@ class SimpleDAG(object): roots.append(n) return roots +class WorkflowDAG(SimpleDAG): + def __init__(self, workflow_job=None): + super(WorkflowDAG, self).__init__() + if workflow_job: + self._init_graph(workflow_job) + + def _init_graph(self, workflow_job): + workflow_nodes = workflow_job.workflow_job_nodes.all() + for workflow_node in workflow_nodes: + 
self.add_node(workflow_node) + + for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']: + for workflow_node in workflow_nodes: + related_nodes = getattr(workflow_node, node_type).all() + for related_node in related_nodes: + self.add_edge(workflow_node, related_node, node_type) + + def bfs_nodes_to_run(self): + root_nodes = self.get_root_nodes() + nodes = root_nodes + nodes_found = [] + + for index, n in enumerate(nodes): + obj = n['node_object'] + job = obj.job + print("\t\tExamining node %s job %s" % (obj, job)) + + if not job: + print("\t\tNo job for node %s" % obj) + nodes_found.append(n) + # Job is about to run or is running. Hold our horses and wait for + # the job to finish. We can't proceed down the graph path until we + # have the job result. + elif job.status not in ['failed', 'error', 'successful']: + print("\t\tJob status not 'failed' 'error' nor 'successful' %s" % job.status) + continue + elif job.status in ['failed', 'error']: + print("\t\tJob status is failed or error %s" % job.status) + children_failed = self.get_dependencies(obj, 'failure_nodes') + children_always = self.get_dependencies(obj, 'always_nodes') + children_all = children_failed + children_always + nodes.extend(children_all) + elif job.status in ['successful']: + print("\t\tJob status is successful %s" % job.status) + children_success = self.get_dependencies(obj, 'success_nodes') + nodes.extend(children_success) + else: + logger.warn("Incorrect graph structure") + return [n['node_object'] for n in nodes_found] + + def is_workflow_done(self): + root_nodes = self.get_root_nodes() + nodes = root_nodes + + for index, n in enumerate(nodes): + obj = n['node_object'] + job = obj.job + + if not job: + return False + # Job is about to run or is running. Hold our horses and wait for + # the job to finish. We can't proceed down the graph path until we + # have the job result. + elif job.status not in ['failed', 'error', 'successful']: + return False + elif job.status in ['failed', 'error']: + children_failed = self.get_dependencies(obj, 'failure_nodes') + children_always = self.get_dependencies(obj, 'always_nodes') + children_all = children_failed + children_always + nodes.extend(children_all) + elif job.status in ['successful']: + children_success = self.get_dependencies(obj, 'success_nodes') + nodes.extend(children_success) + else: + logger.warn("Incorrect graph structure") + return True + def get_tasks(): """Fetch all Tower tasks that are relevant to the task management system. 
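To make the traversal rules of the WorkflowDAG added above concrete, a small
illustrative sketch (the Stub* classes are hypothetical stand-ins for
WorkflowNode and Job; only the add_node/add_edge/bfs_nodes_to_run/
is_workflow_done API from this patch is assumed):

    class StubJob(object):
        def __init__(self, status):
            self.status = status

    class StubNode(object):
        def __init__(self, job=None):
            self.job = job  # None means the node has not spawned a job yet

    root, on_success, on_failure = StubNode(StubJob('successful')), StubNode(), StubNode()
    dag = WorkflowDAG()
    for node in (root, on_success, on_failure):
        dag.add_node(node)
    dag.add_edge(root, on_success, 'success_nodes')
    dag.add_edge(root, on_failure, 'failure_nodes')

    # root finished successfully, so only the success branch is runnable next;
    # the workflow is not done because that branch has no job result yet.
    assert dag.bfs_nodes_to_run() == [on_success]
    assert dag.is_workflow_done() is False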
@@ -166,6 +254,33 @@ def get_tasks(): key=lambda task: task.created) return all_actions +def get_running_workflow_jobs(): + graph_workflow_jobs = [wf for wf in + WorkflowJob.objects.filter(status='running')] + return graph_workflow_jobs + +def do_spawn_workflow_jobs(): + workflow_jobs = get_running_workflow_jobs() + print("Set of workflow jobs to process %s" % workflow_jobs) + for workflow_job in workflow_jobs: + print("Building the dag") + dag = WorkflowDAG(workflow_job) + print("Imported the workflow job dag") + for n in dag.nodes: + print("\tWorkflow dag node %s" % n) + for f, to, label in dag.edges: + print("\tWorkflow dag edge <%s,%s,%s>" % (f, to, label)) + spawn_nodes = dag.bfs_nodes_to_run() + for spawn_node in spawn_nodes: + print("Spawning job %s" % spawn_node) + # TODO: Inject job template template params as kwargs + kv = {} + job = spawn_node.unified_job_template.create_unified_job(**kv) + print("Started new job %s" % job.id) + spawn_node.job = job + spawn_node.save() + result = job.signal_start(**kv) + def rebuild_graph(message): """Regenerate the task graph by refreshing known tasks from Tower, purging orphaned running tasks, and creating dependencies for new tasks before @@ -182,6 +297,8 @@ def rebuild_graph(message): logger.warn("Ignoring celery task inspector") active_task_queues = None + do_spawn_workflow_jobs() + all_sorted_tasks = get_tasks() if not len(all_sorted_tasks): return None @@ -196,6 +313,7 @@ def rebuild_graph(message): # as a whole that celery appears to be down. if not hasattr(settings, 'CELERY_UNIT_TEST'): return None + running_tasks = filter(lambda t: t.status == 'running', all_sorted_tasks) waiting_tasks = filter(lambda t: t.status != 'running', all_sorted_tasks) new_tasks = filter(lambda t: t.status == 'pending', all_sorted_tasks) diff --git a/awx/main/migrations/0035_auto_20160831_2008.py b/awx/main/migrations/0035_auto_20160831_2008.py new file mode 100644 index 0000000000..6297a29824 --- /dev/null +++ b/awx/main/migrations/0035_auto_20160831_2008.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0034_auto_20160830_1716'), + ] + + operations = [ + migrations.AlterField( + model_name='workflownode', + name='workflow_job_template', + field=models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True), + ), + ] diff --git a/awx/main/migrations/0036_auto_20160831_2052.py b/awx/main/migrations/0036_auto_20160831_2052.py new file mode 100644 index 0000000000..ad16af0e4a --- /dev/null +++ b/awx/main/migrations/0036_auto_20160831_2052.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0035_auto_20160831_2008'), + ] + + operations = [ + migrations.AddField( + model_name='workflownode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True), + ), + migrations.AlterField( + model_name='workflownode', + name='job', + field=models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), + ), + ] diff --git a/awx/main/models/workflow.py 
b/awx/main/models/workflow.py index c77ed0c43d..cc764e48af 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -29,9 +29,13 @@ class WorkflowNode(CreatedModifiedModel): ) ''' + # TODO: Ensure the API forces workflow_job_template being set workflow_job_template = models.ForeignKey( 'WorkflowJobTemplate', related_name='workflow_nodes', + blank=True, + null=True, + default=None, on_delete=models.CASCADE, ) unified_job_template = models.ForeignKey( @@ -60,9 +64,17 @@ class WorkflowNode(CreatedModifiedModel): blank=True, symmetrical=False, ) + workflow_job = models.ForeignKey( + 'WorkflowJob', + related_name='workflow_job_nodes', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) job = models.ForeignKey( 'UnifiedJob', - related_name='workflow_job_nodes', + related_name='unified_job_nodes', blank=True, null=True, default=None, @@ -143,7 +155,9 @@ class WorkflowJobInheritNodesMixin(object): for old_node in old_nodes: new_node = WorkflowNode.objects.get(id=old_node.pk) - new_node.job = self + new_node.workflow_job = self + new_node.job = None + new_node.workflow_job_template = None new_node.pk = None new_node.save() new_nodes.append(new_node) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 25479ae5ca..713874ba3a 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1663,41 +1663,23 @@ class RunWorkflowJob(BaseTask): model = WorkflowJob def run(self, pk, **kwargs): + from awx.main.management.commands.run_task_system import WorkflowDAG ''' Run the job/task and capture its output. ''' + pass instance = self.update_model(pk, status='running', celery_task_id=self.request.id) - instance.socketio_emit_status("running") - status, rc, tb = 'error', None, '' - output_replacements = [] - try: - if instance.cancel_flag: - instance = self.update_model(instance.pk, status='canceled') - if instance.status != 'running': - if hasattr(settings, 'CELERY_UNIT_TEST'): - return - else: - # Stop the task chain and prevent starting the job if it has - # already been canceled. - instance = self.update_model(pk) - status = instance.status - raise RuntimeError('not starting %s task' % instance.status) - #status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle) - # TODO: Do the workflow logic here - except Exception: - if status != 'canceled': - tb = traceback.format_exc() - status = 'successful' - instance = self.update_model(pk, status=status, result_traceback=tb) - instance.socketio_emit_status(status) - if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'): - # Raising an exception will mark the job as 'failed' in celery - # and will stop a task chain from continuing to execute - if status == 'canceled': - raise Exception("Task %s(pk:%s) was canceled (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) - else: - raise Exception("Task %s(pk:%s) encountered an error (rc=%s)" % (str(self.model.__class__), str(pk), str(rc))) - if not hasattr(settings, 'CELERY_UNIT_TEST'): - self.signal_finished(pk) + + # FIXME: Detect workflow run completion + while True: + dag = WorkflowDAG(instance) + print("Deciding if workflow is done") + if dag.is_workflow_done(): + # TODO: update with accurate finish status (i.e. canceled, error, etc.) 
+ instance = self.update_model(instance.pk, status='success') + print("Workflow IS done") + return + time.sleep(1) + # TODO: Handle cancel diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index cdbfac6531..1f32d76739 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -177,4 +177,3 @@ def mk_workflow_node(workflow_job_template=None, unified_job_template=None, if persisted: workflow_node.save() return workflow_node - diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 3813bf2faa..953cb2d26e 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -366,14 +366,10 @@ def create_workflow_job_template(name, persisted=True, **kwargs): if type(i) is Job: jobs[i.pk] = i else: - # Fill in default survey answers - job_extra_vars = {} - for question in spec['spec']: - job_extra_vars[question['variable']] = question['default'] - jobs[i] = mk_job(job_template=wfjt, extra_vars=job_extra_vars, - persisted=persisted) + # TODO: Create the job + raise RuntimeError("Currently, only already created jobs are supported") return Objects(workflow_job_template=wfjt, - #jobs=jobs, + jobs=jobs, survey=spec,) From 26ce3a477341e4535cb6f006a9cbacdb311c154f Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 22 Aug 2016 14:41:55 -0400 Subject: [PATCH 11/47] avoid lookup of content_type for description when summarizing object_roles --- awx/api/serializers.py | 2 +- awx/main/models/rbac.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2d2e38a8f5..f0daae75af 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -326,7 +326,7 @@ class BaseSerializer(serializers.ModelSerializer): roles[field.name] = { 'id': role.id, 'name': role.name, - 'description': role.description, + 'description': role.get_description(reference_content_object=obj), } if len(roles) > 0: summary_fields['object_roles'] = roles diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 5e040b85a1..3cb016ffde 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -164,17 +164,22 @@ class Role(models.Model): global role_names return role_names[self.role_field] - @property - def description(self): + def get_description(self, reference_content_object=None): global role_descriptions description = role_descriptions[self.role_field] - if '%s' in description and self.content_type: - model = self.content_type.model_class() + if reference_content_object: + content_type = ContentType.objects.get_for_model(reference_content_object) + else: + content_type = self.content_type + if '%s' in description and content_type: + model = content_type.model_class() model_name = re.sub(r'([a-z])([A-Z])', r'\1 \2', model.__name__).lower() description = description % model_name return description + description = property(get_description) + @staticmethod def rebuild_role_ancestor_list(additions, removals): ''' From 83c5b3323f3c66d8e3ddd664abde1c6e5728dfbd Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Fri, 2 Sep 2016 09:04:49 -0400 Subject: [PATCH 12/47] add workflow to unified endpoints --- awx/api/serializers.py | 10 +++++++--- awx/main/access.py | 8 ++++++-- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 7ef6efb74a..59912c3468 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -513,7 +513,7 @@ class 
UnifiedJobTemplateSerializer(BaseSerializer): def get_types(self): if type(self) is UnifiedJobTemplateSerializer: - return ['project', 'inventory_source', 'job_template', 'system_job_template'] + return ['project', 'inventory_source', 'job_template', 'system_job_template', 'workflow_job_template',] else: return super(UnifiedJobTemplateSerializer, self).get_types() @@ -528,7 +528,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer): serializer_class = JobTemplateSerializer elif isinstance(obj, SystemJobTemplate): serializer_class = SystemJobTemplateSerializer - elif isinstance(obj, WorkflowJobTemplateSerializer): + elif isinstance(obj, WorkflowJobTemplate): serializer_class = WorkflowJobTemplateSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) @@ -560,7 +560,7 @@ class UnifiedJobSerializer(BaseSerializer): def get_types(self): if type(self) is UnifiedJobSerializer: - return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job'] + return ['project_update', 'inventory_update', 'job', 'ad_hoc_command', 'system_job', 'workflow_job',] else: return super(UnifiedJobSerializer, self).get_types() @@ -593,6 +593,8 @@ class UnifiedJobSerializer(BaseSerializer): serializer_class = AdHocCommandSerializer elif isinstance(obj, SystemJob): serializer_class = SystemJobSerializer + elif isinstance(obj, WorkflowJob): + serializer_class = WorkflowJobSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) ret = serializer.to_representation(obj) @@ -640,6 +642,8 @@ class UnifiedJobListSerializer(UnifiedJobSerializer): serializer_class = AdHocCommandListSerializer elif isinstance(obj, SystemJob): serializer_class = SystemJobListSerializer + elif isinstance(obj, WorkflowJob): + serializer_class = WorkflowJobSerializer if serializer_class: serializer = serializer_class(instance=obj, context=self.context) ret = serializer.to_representation(obj) diff --git a/awx/main/access.py b/awx/main/access.py index e6597797da..8ac2fe8377 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1431,10 +1431,12 @@ class UnifiedJobTemplateAccess(BaseAccess): inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES) job_template_qs = self.user.get_queryset(JobTemplate) system_job_template_qs = self.user.get_queryset(SystemJobTemplate) + workflow_job_template_qs = self.user.get_queryset(WorkflowJobTemplate) qs = qs.filter(Q(Project___in=project_qs) | Q(InventorySource___in=inventory_source_qs) | Q(JobTemplate___in=job_template_qs) | - Q(systemjobtemplate__in=system_job_template_qs)) + Q(systemjobtemplate__in=system_job_template_qs) | + Q(workflowjobtemplate__in=workflow_job_template_qs)) qs = qs.select_related( 'created_by', 'modified_by', @@ -1470,11 +1472,13 @@ class UnifiedJobAccess(BaseAccess): job_qs = self.user.get_queryset(Job) ad_hoc_command_qs = self.user.get_queryset(AdHocCommand) system_job_qs = self.user.get_queryset(SystemJob) + workflow_job_qs = self.user.get_queryset(WorkflowJob) qs = qs.filter(Q(ProjectUpdate___in=project_update_qs) | Q(InventoryUpdate___in=inventory_update_qs) | Q(Job___in=job_qs) | Q(AdHocCommand___in=ad_hoc_command_qs) | - Q(SystemJob___in=system_job_qs)) + Q(SystemJob___in=system_job_qs) | + Q(WorkflowJob___in=workflow_job_qs)) qs = qs.select_related( 'created_by', 'modified_by', From ebf103f345b5f4662074a5189cdccf2e57940cb3 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 8 Sep 2016 10:18:14 -0400 Subject: [PATCH 13/47] 
Initial Docker Compose workflow for Tower cluster The goal is to share a common pattern with the existing development work --- Makefile | 3 + awx/settings/local_settings.py.docker_compose | 35 ++++++++++- tools/docker-compose-cluster.yml | 59 +++++++++++++++++++ tools/docker-compose.yml | 5 ++ tools/docker-compose/Dockerfile-haproxy | 2 + tools/docker-compose/haproxy.cfg | 36 +++++++++++ 6 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 tools/docker-compose-cluster.yml create mode 100644 tools/docker-compose/Dockerfile-haproxy create mode 100644 tools/docker-compose/haproxy.cfg diff --git a/Makefile b/Makefile index 0dc2f5bbf1..8a320cd469 100644 --- a/Makefile +++ b/Makefile @@ -731,6 +731,9 @@ docker-auth: docker-compose: docker-auth TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate +docker-compose-cluster: docker-auth + TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml up + docker-compose-test: docker-auth cd tools && TAG=$(COMPOSE_TAG) docker-compose run --rm --service-ports tower /bin/bash diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose index c85d89cb21..4c20102746 100644 --- a/awx/settings/local_settings.py.docker_compose +++ b/awx/settings/local_settings.py.docker_compose @@ -11,6 +11,36 @@ ############################################################################### # MISC PROJECT SETTINGS ############################################################################### +import os + +def patch_broken_pipe_error(): + """Monkey Patch BaseServer.handle_error to not write + a stacktrace to stderr on broken pipe. + http://stackoverflow.com/a/22618740/362702""" + import sys + from SocketServer import BaseServer + from wsgiref import handlers + + handle_error = BaseServer.handle_error + log_exception = handlers.BaseHandler.log_exception + + def is_broken_pipe_error(): + type, err, tb = sys.exc_info() + return "Connection reset by peer" in repr(err) + + def my_handle_error(self, request, client_address): + if not is_broken_pipe_error(): + handle_error(self, request, client_address) + + def my_log_exception(self, exc_info): + if not is_broken_pipe_error(): + log_exception(self, exc_info) + + BaseServer.handle_error = my_handle_error + handlers.BaseHandler.log_exception = my_log_exception + +patch_broken_pipe_error() + ADMINS = ( # ('Your Name', 'your_email@domain.com'), @@ -49,7 +79,10 @@ if is_testing(sys.argv): MONGO_DB = 'system_tracking_test' # Celery AMQP configuration. 
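# (Aside on the BROKER_URL change just below: os.environ.get() without a
# default returns None, so an unset variable yields a literal
# "amqp://None:None@None/None". A hedged variant with explicit single-node
# fallbacks, matching the compose values above:)
import os

BROKER_URL = "amqp://{}:{}@{}/{}".format(
    os.environ.get("RABBITMQ_USER", "guest"),
    os.environ.get("RABBITMQ_PASS", "guest"),
    os.environ.get("RABBITMQ_HOST", "rabbitmq"),
    os.environ.get("RABBITMQ_VHOST", "/"),
)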
-BROKER_URL = 'amqp://guest:guest@rabbitmq//' +BROKER_URL = "amqp://{}:{}@{}/{}".format(os.environ.get("RABBITMQ_USER"), + os.environ.get("RABBITMQ_PASS"), + os.environ.get("RABBITMQ_HOST"), + os.environ.get("RABBITMQ_VHOST")) # Mongo host configuration MONGO_HOST = NotImplemented diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml new file mode 100644 index 0000000000..0ac007d96a --- /dev/null +++ b/tools/docker-compose-cluster.yml @@ -0,0 +1,59 @@ +version: '2' +services: + haproxy: + build: + context: ./docker-compose + dockerfile: Dockerfile-haproxy + depends_on: + - "tower_1" + - "tower_2" + - "tower_3" + ports: + - "8013:8013" + - "1936:1936" + tower_1: + image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + environment: + RABBITMQ_HOST: rabbitmq_1 + RABBITMQ_USER: guest + RABBITMQ_PASS: guest + RABBITMQ_VHOST: / + volumes: + - "../:/tower_devel" + tower_2: + image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + environment: + RABBITMQ_HOST: rabbitmq_2 + RABBITMQ_USER: guest + RABBITMQ_PASS: guest + RABBITMQ_VHOST: / + volumes: + - "../:/tower_devel" + tower_3: + image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + environment: + RABBITMQ_HOST: rabbitmq_3 + RABBITMQ_USER: guest + RABBITMQ_PASS: guest + RABBITMQ_VHOST: / + volumes: + - "../:/tower_devel" + rabbitmq_1: + image: gcr.io/ansible-tower-engineering/rabbit_cluster_node:latest + hostname: rabbitmq_1 + rabbitmq_2: + image: gcr.io/ansible-tower-engineering/rabbit_cluster_node:latest + hostname: rabbitmq_2 + environment: + - CLUSTERED=true + - CLUSTER_WITH=rabbitmq_1 + rabbitmq_3: + image: gcr.io/ansible-tower-engineering/rabbit_cluster_node:latest + hostname: rabbitmq_3 + environment: + - CLUSTERED=true + - CLUSTER_WITH=rabbitmq_1 + postgres: + image: postgres:9.4.1 + memcached: + image: memcached:alpine diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 73a5c12bb9..d3804a1c95 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -3,6 +3,11 @@ services: # Primary Tower Development Container tower: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + environment: + RABBITMQ_HOST: rabbitmq + RABBITMQ_USER: guest + RABBITMQ_PASS: guest + RABBITMQ_VHOST: tower ports: - "8080:8080" - "8013:8013" diff --git a/tools/docker-compose/Dockerfile-haproxy b/tools/docker-compose/Dockerfile-haproxy new file mode 100644 index 0000000000..9d38924939 --- /dev/null +++ b/tools/docker-compose/Dockerfile-haproxy @@ -0,0 +1,2 @@ +FROM haproxy:1.6-alpine +COPY haproxy.cfg /usr/local/etc/haproxy/haproxy.cfg diff --git a/tools/docker-compose/haproxy.cfg b/tools/docker-compose/haproxy.cfg new file mode 100644 index 0000000000..cfbb3965f7 --- /dev/null +++ b/tools/docker-compose/haproxy.cfg @@ -0,0 +1,36 @@ +global + debug + stats socket /tmp/admin.sock + stats timeout 30s + +defaults + log global + mode http + option httplog + option dontlognull + timeout connect 5000 + timeout client 50000 + timeout server 50000 + +frontend localnodes + bind *:8013 + mode http + default_backend nodes + +backend nodes + mode http + balance roundrobin + option forwardfor + option http-pretend-keepalive + http-request set-header X-Forwarded-Port %[dst_port] + http-request add-header X-Forwarded-Proto https if { ssl_fc } + option httpchk HEAD / HTTP/1.1\r\nHost:localhost + server tower_1 tower_1:8013 check + server tower_2 tower_2:8013 check + server tower_3 tower_3:8013 check + +listen stats + bind *:1936 + stats enable + stats uri / + From 
eafb6c92b58b88dffd7625493a0900338ee8790d Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 8 Sep 2016 11:02:51 -0400 Subject: [PATCH 14/47] Docker compose improvements * Make sure we explicitly set a hostname for tower nodes * Switch rabbit vhost to use the root --- tools/docker-compose-cluster.yml | 3 +++ tools/docker-compose.yml | 5 ++--- tools/docker-compose/start_development.sh | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 0ac007d96a..86027f8849 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -13,6 +13,7 @@ services: - "1936:1936" tower_1: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + hostname: tower_1 environment: RABBITMQ_HOST: rabbitmq_1 RABBITMQ_USER: guest @@ -22,6 +23,7 @@ services: - "../:/tower_devel" tower_2: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + hostname: tower_2 environment: RABBITMQ_HOST: rabbitmq_2 RABBITMQ_USER: guest @@ -31,6 +33,7 @@ services: - "../:/tower_devel" tower_3: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + hostname: tower_3 environment: RABBITMQ_HOST: rabbitmq_3 RABBITMQ_USER: guest diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index d3804a1c95..f34bb25766 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -3,11 +3,12 @@ services: # Primary Tower Development Container tower: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} + hostname: tower environment: RABBITMQ_HOST: rabbitmq RABBITMQ_USER: guest RABBITMQ_PASS: guest - RABBITMQ_VHOST: tower + RABBITMQ_VHOST: / ports: - "8080:8080" - "8013:8013" @@ -24,10 +25,8 @@ services: # Postgres Database Container postgres: image: postgres:9.4.1 - memcached: image: memcached:alpine - rabbitmq: image: rabbitmq:3-management diff --git a/tools/docker-compose/start_development.sh b/tools/docker-compose/start_development.sh index 96812974fa..d0191dc2f4 100755 --- a/tools/docker-compose/start_development.sh +++ b/tools/docker-compose/start_development.sh @@ -4,7 +4,7 @@ set +x # Wait for the databases to come up ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=memcached port=11211" all -ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=rabbitmq port=5672" all +ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=${RABBITMQ_HOST} port=5672" all # In case Tower in the container wants to connect to itself, use "docker exec" to attach to the container otherwise # TODO: FIX From 0e68481a85540f273086bb4e6b79a280305f1d91 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 11:48:11 -0400 Subject: [PATCH 15/47] consolidate workflow migration --- .../management/commands/run_task_system.py | 16 +----------- ...w_create.py => 0033_v310_add_workflows.py} | 9 ++++--- .../migrations/0034_auto_20160830_1716.py | 20 --------------- .../migrations/0035_auto_20160831_2008.py | 19 -------------- .../migrations/0036_auto_20160831_2052.py | 25 ------------------- awx/main/tasks.py | 7 +++--- 6 files changed, 10 insertions(+), 86 deletions(-) rename awx/main/migrations/{0033_v301_workflow_create.py => 0033_v310_add_workflows.py} (82%) delete mode 100644 awx/main/migrations/0034_auto_20160830_1716.py delete mode 100644 awx/main/migrations/0035_auto_20160831_2008.py delete mode 100644 awx/main/migrations/0036_auto_20160831_2052.py diff --git a/awx/main/management/commands/run_task_system.py 
b/awx/main/management/commands/run_task_system.py
index 3e11b3511d..a03c38ffea 100644
--- a/awx/main/management/commands/run_task_system.py
+++ b/awx/main/management/commands/run_task_system.py
@@ -180,25 +180,20 @@ class WorkflowDAG(SimpleDAG):
         for index, n in enumerate(nodes):
             obj = n['node_object']
             job = obj.job
-            print("\t\tExamining node %s job %s" % (obj, job))
 
             if not job:
-                print("\t\tNo job for node %s" % obj)
                 nodes_found.append(n)
             # Job is about to run or is running. Hold our horses and wait for
             # the job to finish. We can't proceed down the graph path until we
             # have the job result.
             elif job.status not in ['failed', 'error', 'successful']:
-                print("\t\tJob status not 'failed' 'error' nor 'successful' %s" % job.status)
                 continue
             elif job.status in ['failed', 'error']:
-                print("\t\tJob status is failed or error %s" % job.status)
                 children_failed = self.get_dependencies(obj, 'failure_nodes')
                 children_always = self.get_dependencies(obj, 'always_nodes')
                 children_all = children_failed + children_always
                 nodes.extend(children_all)
             elif job.status in ['successful']:
-                print("\t\tJob status is successful %s" % job.status)
                 children_success = self.get_dependencies(obj, 'success_nodes')
                 nodes.extend(children_success)
             else:
@@ -225,7 +220,7 @@ class WorkflowDAG(SimpleDAG):
                 children_always = self.get_dependencies(obj, 'always_nodes')
                 children_all = children_failed + children_always
                 nodes.extend(children_all)
-            elif job.status in ['successful']:
+            elif job.status in ['successful']:
                 children_success = self.get_dependencies(obj, 'success_nodes')
                 nodes.extend(children_success)
             else:
@@ -261,22 +256,13 @@ def get_running_workflow_jobs():
 
 def do_spawn_workflow_jobs():
     workflow_jobs = get_running_workflow_jobs()
-    print("Set of workflow jobs to process %s" % workflow_jobs)
     for workflow_job in workflow_jobs:
-        print("Building the dag")
         dag = WorkflowDAG(workflow_job)
-        print("Imported the workflow job dag")
-        for n in dag.nodes:
-            print("\tWorkflow dag node %s" % n)
-        for f, to, label in dag.edges:
-            print("\tWorkflow dag edge <%s,%s,%s>" % (f, to, label))
         spawn_nodes = dag.bfs_nodes_to_run()
         for spawn_node in spawn_nodes:
-            print("Spawning job %s" % spawn_node)
             # TODO: Inject job template template params as kwargs
             kv = {}
             job = spawn_node.unified_job_template.create_unified_job(**kv)
-            print("Started new job %s" % job.id)
             spawn_node.job = job
             spawn_node.save()
             result = job.signal_start(**kv)
diff --git a/awx/main/migrations/0033_v301_workflow_create.py b/awx/main/migrations/0033_v310_add_workflows.py
similarity index 82%
rename from awx/main/migrations/0033_v301_workflow_create.py
rename to awx/main/migrations/0033_v310_add_workflows.py
index 258bdc797d..10f4879537 100644
--- a/awx/main/migrations/0033_v301_workflow_create.py
+++ b/awx/main/migrations/0033_v310_add_workflows.py
@@ -2,7 +2,9 @@
 from __future__ import unicode_literals
 
 from django.db import migrations, models
+import awx.main.models.notifications
 import django.db.models.deletion
+import awx.main.models.workflow
 import awx.main.fields
 
 
@@ -22,7 +24,7 @@ class Migration(migrations.Migration):
             options={
                 'ordering': ('id',),
             },
-            bases=('main.unifiedjob', models.Model),
+            bases=('main.unifiedjob', models.Model, awx.main.models.notifications.JobNotificationMixin, awx.main.models.workflow.WorkflowJobInheritNodesMixin),
         ),
         migrations.CreateModel(
             name='WorkflowJobTemplate',
@@ -41,10 +43,11 @@ class Migration(migrations.Migration):
                 ('modified', models.DateTimeField(default=None, editable=False)),
                 ('always_nodes',
models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), - ('job', models.ForeignKey(related_name='workflow_node', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), - ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', to='main.WorkflowJobTemplate')), + ('workflow_job', models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), ], ), migrations.AddField( diff --git a/awx/main/migrations/0034_auto_20160830_1716.py b/awx/main/migrations/0034_auto_20160830_1716.py deleted file mode 100644 index a285a0d9dd..0000000000 --- a/awx/main/migrations/0034_auto_20160830_1716.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0033_v301_workflow_create'), - ] - - operations = [ - migrations.AlterField( - model_name='workflownode', - name='job', - field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), - ), - ] diff --git a/awx/main/migrations/0035_auto_20160831_2008.py b/awx/main/migrations/0035_auto_20160831_2008.py deleted file mode 100644 index 6297a29824..0000000000 --- a/awx/main/migrations/0035_auto_20160831_2008.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0034_auto_20160830_1716'), - ] - - operations = [ - migrations.AlterField( - model_name='workflownode', - name='workflow_job_template', - field=models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True), - ), - ] diff --git a/awx/main/migrations/0036_auto_20160831_2052.py b/awx/main/migrations/0036_auto_20160831_2052.py deleted file mode 100644 index ad16af0e4a..0000000000 --- a/awx/main/migrations/0036_auto_20160831_2052.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0035_auto_20160831_2008'), - ] - - operations = [ - migrations.AddField( - model_name='workflownode', - name='workflow_job', - field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', 
null=True), - ), - migrations.AlterField( - model_name='workflownode', - name='job', - field=models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True), - ), - ] diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 713874ba3a..6c99350af5 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1674,12 +1674,11 @@ class RunWorkflowJob(BaseTask): # FIXME: Detect workflow run completion while True: dag = WorkflowDAG(instance) - print("Deciding if workflow is done") if dag.is_workflow_done(): # TODO: update with accurate finish status (i.e. canceled, error, etc.) - instance = self.update_model(instance.pk, status='success') - print("Workflow IS done") - return + instance = self.update_model(instance.pk, status='successful') + break time.sleep(1) + instance.socketio_emit_status(instance.status) # TODO: Handle cancel From cae48cfb779ef44dd1125f5043cb203ff095cd0e Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 13:04:56 -0400 Subject: [PATCH 16/47] instantiate db with sample workflows --- awx/main/tests/manual/workflows/linear.py | 41 +++++++++++++++++ awx/main/tests/manual/workflows/linear.svg | 1 + awx/main/tests/manual/workflows/parallel.py | 46 ++++++++++++++++++++ awx/main/tests/manual/workflows/parallel.svg | 1 + 4 files changed, 89 insertions(+) create mode 100644 awx/main/tests/manual/workflows/linear.py create mode 100644 awx/main/tests/manual/workflows/linear.svg create mode 100644 awx/main/tests/manual/workflows/parallel.py create mode 100644 awx/main/tests/manual/workflows/parallel.svg diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py new file mode 100644 index 0000000000..dd9918a933 --- /dev/null +++ b/awx/main/tests/manual/workflows/linear.py @@ -0,0 +1,41 @@ +# AWX +from awx.main.models import ( + WorkflowNode, + WorkflowJobTemplate, + WorkflowJob, +) +from awx.main.models.jobs import JobTemplate + +def do_init_workflow(job_template_success, job_template_fail, job_template_never): + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") + wfjt.delete() + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") + print(wfjt.id) + WorkflowNode.objects.all().delete() + if created: + nodes_success = [] + nodes_fail = [] + nodes_never = [] + for i in range(0, 2): + nodes_success.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) + nodes_fail.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) + nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_fail[1].delete() + + nodes_success[0].success_nodes.add(nodes_fail[0]) + nodes_success[0].failure_nodes.add(nodes_never[0]) + + nodes_fail[0].failure_nodes.add(nodes_success[1]) + nodes_fail[0].success_nodes.add(nodes_never[1]) + + nodes_success[1].failure_nodes.add(nodes_never[2]) + +def do_init(): + jt_success = JobTemplate.objects.get(id=5) + jt_fail= JobTemplate.objects.get(id=9) + jt_never= JobTemplate.objects.get(id=11) + do_init_workflow(jt_success, jt_fail, jt_never) + +if __name__ == "__main__": + do_init() diff --git a/awx/main/tests/manual/workflows/linear.svg b/awx/main/tests/manual/workflows/linear.svg new file mode 
100644 index 0000000000..521cc0a9f2 --- /dev/null +++ b/awx/main/tests/manual/workflows/linear.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py new file mode 100644 index 0000000000..bd33f350ba --- /dev/null +++ b/awx/main/tests/manual/workflows/parallel.py @@ -0,0 +1,46 @@ +# AWX +from awx.main.models import ( + WorkflowNode, + WorkflowJobTemplate, + WorkflowJob, +) +from awx.main.models.jobs import JobTemplate + +def do_init_workflow(job_template_success, job_template_fail, job_template_never, jts_parallel): + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow") + wfjt.delete() + wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow") + print(wfjt.id) + WorkflowNode.objects.all().delete() + if created: + node_success = WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success) + + nodes_never = [] + for x in range(0, 3): + nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + + nodes_parallel = [] + for jt in jts_parallel: + nodes_parallel.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt)) + + node_success.success_nodes.add(nodes_parallel[0]) + node_success.success_nodes.add(nodes_parallel[1]) + node_success.success_nodes.add(nodes_parallel[2]) + + # Add a failure node for each paralell node + for i, n in enumerate(nodes_parallel): + n.failure_nodes.add(nodes_never[i]) + +def do_init(): + jt_success = JobTemplate.objects.get(id=5) + jt_fail= JobTemplate.objects.get(id=9) + jt_never= JobTemplate.objects.get(id=11) + + jt_parallel = [] + jt_parallel.append(JobTemplate.objects.get(id=16)) + jt_parallel.append(JobTemplate.objects.get(id=17)) + jt_parallel.append(JobTemplate.objects.get(id=18)) + do_init_workflow(jt_success, jt_fail, jt_never, jt_parallel) + +if __name__ == "__main__": + do_init() diff --git a/awx/main/tests/manual/workflows/parallel.svg b/awx/main/tests/manual/workflows/parallel.svg new file mode 100644 index 0000000000..7d480f7308 --- /dev/null +++ b/awx/main/tests/manual/workflows/parallel.svg @@ -0,0 +1 @@ + \ No newline at end of file From 28ec68e91be9321879042139b15e6faf23a914c3 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 13:08:54 -0400 Subject: [PATCH 17/47] workflow dependency tests --- .../unit/commands/test_run_task_system.py | 191 ++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 awx/main/tests/unit/commands/test_run_task_system.py diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py new file mode 100644 index 0000000000..4d28bfd2e0 --- /dev/null +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -0,0 +1,191 @@ +from awx.main.management.commands.run_task_system import ( + SimpleDAG, + WorkflowDAG, +) +from awx.main.models import Job +from awx.main.models.workflow import WorkflowNode +import pytest + +@pytest.fixture +def dag_root(): + dag = SimpleDAG() + data = [ + { 1: 1 }, + { 2: 2 }, + { 3: 3 }, + { 4: 4 }, + { 5: 5 }, + { 6: 6 }, + ] + # Add all the nodes to the DAG + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1]) + dag.add_edge(data[2], data[3]) + dag.add_edge(data[4], data[5]) + + return dag + +@pytest.fixture +def dag_simple_edge_labels(): + dag = SimpleDAG() + data = [ + { 1: 1 }, + { 2: 2 }, + { 3: 3 }, + { 4: 4 }, + 
{ 5: 5 }, + { 6: 6 }, + ] + # Add all the nodes to the DAG + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'one') + dag.add_edge(data[2], data[3], 'two') + dag.add_edge(data[4], data[5], 'three') + + return dag + +class TestSimpleDAG(object): + def test_get_root_nodes(self, dag_root): + leafs = dag_root.get_leaf_nodes() + for l in leafs: + print(l) + + roots = dag_root.get_root_nodes() + for n in roots: + print(n) + + def test_get_labeled_edges(self, dag_simple_edge_labels): + dag = dag_simple_edge_labels + nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one') + nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two') + print("Matching nodes: ") + for n in nodes: + print(n) + +@pytest.fixture +def factory_node(): + def fn(id, status): + wfn = WorkflowNode(id=id) + if status: + j = Job(status=status) + wfn.job = j + return wfn + return fn + +@pytest.fixture +def workflow_dag_multiple_roots(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, None), + factory_node(2, None), + factory_node(3, None), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + return dag + +@pytest.fixture +def workflow_dag_level_2(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, 'success'), + factory_node(2, 'success'), + factory_node(3, 'success'), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + return (dag, data[3:6], False) + +@pytest.fixture +def workflow_dag_multiple_roots(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(1, None), + factory_node(2, None), + factory_node(3, None), + factory_node(4, None), + factory_node(5, None), + factory_node(6, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[3], 'success') + dag.add_edge(data[1], data[4], 'success') + dag.add_edge(data[2], data[5], 'success') + + expected = data[0:3] + return (dag, expected, False) + +@pytest.fixture +def workflow_dag_multiple_edges_labeled(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(0, 'failed'), + factory_node(1, None), + factory_node(2, 'failed'), + factory_node(3, None), + factory_node(4, 'failed'), + factory_node(5, None), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'success') + dag.add_edge(data[0], data[2], 'failure') + dag.add_edge(data[2], data[3], 'success') + dag.add_edge(data[2], data[4], 'failure') + dag.add_edge(data[4], data[5], 'failure') + + expected = data[5:6] + return (dag, expected, False) + +@pytest.fixture +def workflow_dag_finished(factory_node): + dag = WorkflowDAG() + data = [ + factory_node(0, 'failed'), + factory_node(1, None), + factory_node(2, 'failed'), + factory_node(3, None), + factory_node(4, 'failed'), + factory_node(5, 'success'), + ] + [dag.add_node(d) for d in data] + + dag.add_edge(data[0], data[1], 'success') + dag.add_edge(data[0], data[2], 'failure') + dag.add_edge(data[2], data[3], 'success') + dag.add_edge(data[2], data[4], 'failure') + dag.add_edge(data[4], data[5], 'failure') + + expected = [] + return (dag, expected, True) + +@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2', 'workflow_dag_multiple_edges_labeled', 
'workflow_dag_finished'])
+def workflow_dag(request):
+    return request.getfuncargvalue(request.param)
+
+class TestWorkflowDAG():
+    def test_bfs_nodes_to_run(self, workflow_dag):
+        dag, expected, is_done = workflow_dag
+        assert dag.bfs_nodes_to_run() == expected
+
+    def test_is_workflow_done(self, workflow_dag):
+        dag, expected, is_done = workflow_dag
+        assert dag.is_workflow_done() == is_done
+

From 0c1e1fa2fbae45ebae84b04c18bf8551023aad50 Mon Sep 17 00:00:00 2001
From: Matthew Jones
Date: Thu, 8 Sep 2016 13:37:53 -0400
Subject: [PATCH 18/47] Refactor Tower HA Instance logic and models

* Gut the HA middleware
* Purge concept of primary and secondary.
* UUID is no longer the primary host identifier; identification is now based
  mostly on the hostname. Some work probably still left to do to make sure
  this is legit. Also removed the unique constraint from the uuid field. This
  might become the cluster ident now... or it may just be deprecated.
* No more secondary -> primary redirection
* Initial revision of /api/v1/ping
* Revise and gut tower-manage register_instance
* Rename awx/main/socket.py to awx/main/socket_queue.py to prevent conflict
  with the "socket" module from python base
* Revisit/gut the Instance manager... not sure if this manager is really
  needed anymore
---
 Makefile                                      |  4 +-
 awx/api/views.py                              | 21 +-----
 .../management/commands/_base_instance.py     | 70 ++-----------------
 .../management/commands/register_instance.py  | 48 +++----------
 .../commands/run_callback_receiver.py         |  2 +-
 .../commands/run_fact_cache_receiver.py       |  2 +-
 .../commands/run_socketio_service.py          |  2 +-
 awx/main/managers.py                          | 28 ++------
 awx/main/middleware.py                        | 35 ----------
 .../0033_v310_modify_ha_instance.py           | 23 ++++++
 awx/main/models/ha.py                         | 28 +-------
 awx/main/{socket.py => socket_queue.py}       |  0
 awx/main/utils.py                             |  2 +-
 awx/settings/defaults.py                      |  1 -
 14 files changed, 56 insertions(+), 210 deletions(-)
 create mode 100644 awx/main/migrations/0033_v310_modify_ha_instance.py
 rename awx/main/{socket.py => socket_queue.py} (100%)

diff --git a/Makefile b/Makefile
index 8a320cd469..beea1eebd6 100644
--- a/Makefile
+++ b/Makefile
@@ -15,6 +15,8 @@ COMPOSE_TAG ?= devel
 # NOTE: This defaults the container image version to the branch that's active
 # COMPOSE_TAG ?= $(GIT_BRANCH)
 
+COMPOSE_HOST ?= $(shell hostname)
+
 VENV_BASE ?= /venv
 SCL_PREFIX ?=
 CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
@@ -325,7 +327,7 @@ init:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/tower/bin/activate; \
 	fi; \
-	tower-manage register_instance --primary --hostname=127.0.0.1; \
+	tower-manage register_instance --hostname=$(COMPOSE_HOST); \
 
 # Refresh development environment after pulling new code.
 refresh: clean requirements_dev version_file develop migrate
diff --git a/awx/api/views.py b/awx/api/views.py
index 39db4cd23c..a5e1b4fa00 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -166,28 +166,13 @@ class ApiV1PingView(APIView):
         # Most of this response is canned; just build the dictionary.
         response = {
             'ha': is_ha_environment(),
-            'role': Instance.objects.my_role(),
             'version': get_awx_version(),
         }
 
-        # If this is an HA environment, we also include the IP address of
-        # all of the instances.
-        #
-        # Set up a default structure.
-        response['instances'] = {
-            'primary': None,
-            'secondaries': [],
-        }
-
-        # Add all of the instances into the structure.
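# (The replacement lines that follow flatten this structure; a hedged sketch
# of the resulting /api/v1/ping payload, with illustrative values only:)
# {
#     "ha": True,
#     "version": "3.1.0",                               # hypothetical version string
#     "instances": ["tower_1", "tower_2", "tower_3"],   # sorted hostnames
# }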
+ response['instances'] = [] for instance in Instance.objects.all(): - if instance.primary: - response['instances']['primary'] = instance.hostname - else: - response['instances']['secondaries'].append(instance.hostname) - response['instances']['secondaries'].sort() - - # Done; return the response. + response['instances'].append(instance.hostname) + response['instances'].sort() return Response(response) diff --git a/awx/main/management/commands/_base_instance.py b/awx/main/management/commands/_base_instance.py index c92fa3b640..807abfb76d 100644 --- a/awx/main/management/commands/_base_instance.py +++ b/awx/main/management/commands/_base_instance.py @@ -1,6 +1,7 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. +import socket from optparse import make_option from django.core.management.base import BaseCommand, CommandError @@ -21,13 +22,9 @@ class BaseCommandInstance(BaseCommand): def __init__(self): super(BaseCommandInstance, self).__init__() - self.enforce_primary_role = False - self.enforce_roles = False self.enforce_hostname_set = False self.enforce_unique_find = False - self.option_primary = False - self.option_secondary = False self.option_hostname = None self.option_uuid = None @@ -38,48 +35,24 @@ class BaseCommandInstance(BaseCommand): def generate_option_hostname(): return make_option('--hostname', dest='hostname', - default='', + default=socket.gethostname(), help='Find instance by specified hostname.') @staticmethod def generate_option_hostname_set(): return make_option('--hostname', dest='hostname', - default='', + default=socket.gethostname(), help='Hostname to assign to the new instance.') - @staticmethod - def generate_option_primary(): - return make_option('--primary', - action='store_true', - default=False, - dest='primary', - help='Register instance as primary.') - - @staticmethod - def generate_option_secondary(): - return make_option('--secondary', - action='store_true', - default=False, - dest='secondary', - help='Register instance as secondary.') - @staticmethod def generate_option_uuid(): + #TODO: Likely deprecated, maybe uuid becomes the cluster ident? 
return make_option('--uuid',
                           dest='uuid',
                           default='',
                           help='Find instance by specified uuid.')
 
-    def include_option_primary_role(self):
-        BaseCommand.option_list += ( BaseCommandInstance.generate_option_primary(), )
-        self.enforce_primary_role = True
-
-    def include_options_roles(self):
-        self.include_option_primary_role()
-        BaseCommand.option_list += ( BaseCommandInstance.generate_option_secondary(), )
-        self.enforce_roles = True
-
     def include_option_hostname_set(self):
         BaseCommand.option_list += ( BaseCommandInstance.generate_option_hostname_set(), )
         self.enforce_hostname_set = True
@@ -94,12 +67,6 @@ class BaseCommandInstance(BaseCommand):
     def get_option_uuid(self):
         return self.option_uuid
 
-    def is_option_primary(self):
-        return self.option_primary
-
-    def is_option_secondary(self):
-        return self.option_secondary
-
     def get_UUID(self):
         return self.UUID
 
@@ -109,31 +76,13 @@ class BaseCommandInstance(BaseCommand):
 
     @property
     def usage_error(self):
-        if self.enforce_roles and self.enforce_hostname_set:
-            return CommandError('--hostname and one of --primary or --secondary is required.')
-        elif self.enforce_hostname_set:
+        if self.enforce_hostname_set:
             return CommandError('--hostname is required.')
-        elif self.enforce_primary_role:
-            return CommandError('--primary is required.')
-        elif self.enforce_roles:
-            return CommandError('One of --primary or --secondary is required.')
 
     def handle(self, *args, **options):
         if self.enforce_hostname_set and self.enforce_unique_find:
             raise OptionEnforceError('Can not enforce --hostname as a setter and --hostname as a getter')
 
-        if self.enforce_roles:
-            self.option_primary = options['primary']
-            self.option_secondary = options['secondary']
-
-            if self.is_option_primary() and self.is_option_secondary() or not (self.is_option_primary() or self.is_option_secondary()):
-                raise self.usage_error
-        elif self.enforce_primary_role:
-            if options['primary']:
-                self.option_primary = options['primary']
-            else:
-                raise self.usage_error
-
         if self.enforce_hostname_set:
             if options['hostname']:
                 self.option_hostname = options['hostname']
@@ -162,11 +111,4 @@ class BaseCommandInstance(BaseCommand):
 
     @staticmethod
     def instance_str(instance):
-        return BaseCommandInstance.__instance_str(instance, ('uuid', 'hostname', 'role'))
-
-    def update_projects(self, instance):
-        """Update all projects, ensuring the job runs against this instance,
-        which is the primary instance.
-        """
-        for project in Project.objects.all():
-            project.update()
+        return BaseCommandInstance.__instance_str(instance, ('uuid', 'hostname'))
diff --git a/awx/main/management/commands/register_instance.py b/awx/main/management/commands/register_instance.py
index 942eb9af4d..a7fc2f8011 100644
--- a/awx/main/management/commands/register_instance.py
+++ b/awx/main/management/commands/register_instance.py
@@ -9,22 +9,14 @@ from awx.main.models import Instance
 instance_str = BaseCommandInstance.instance_str
 
 class Command(BaseCommandInstance):
-    """Internal tower command.
+    """
+    Internal tower command.
     Register this instance with the database for HA tracking.
     This command is idempotent.
-
-    This command will error out in the following conditions:
-
-    * Attempting to register a secondary machine with no primary machines.
-    * Attempting to register a primary instance when a different primary
-      instance exists.
-    * Attempting to re-register an instance with changed values.
""" def __init__(self): super(Command, self).__init__() - - self.include_options_roles() self.include_option_hostname_set() def handle(self, *args, **options): @@ -32,32 +24,10 @@ class Command(BaseCommandInstance): uuid = self.get_UUID() - # Is there an existing record for this machine? If so, retrieve that record and look for issues. - try: - instance = Instance.objects.get(uuid=uuid) - if instance.hostname != self.get_option_hostname(): - raise CommandError('Instance already registered with a different hostname %s.' % instance_str(instance)) - print("Instance already registered %s" % instance_str(instance)) - except Instance.DoesNotExist: - # Get a status on primary machines (excluding this one, regardless of its status). - other_instances = Instance.objects.exclude(uuid=uuid) - primaries = other_instances.filter(primary=True).count() - - # If this instance is being set to primary and a *different* primary machine alreadyexists, error out. - if self.is_option_primary() and primaries: - raise CommandError('Another instance is already registered as primary.') - - # Lastly, if there are no primary machines at all, then don't allow this to be registered as a secondary machine. - if self.is_option_secondary() and not primaries: - raise CommandError('Unable to register a secondary machine until another primary machine has been registered.') - - # Okay, we've checked for appropriate errata; perform the registration. - instance = Instance(uuid=uuid, primary=self.is_option_primary(), hostname=self.get_option_hostname()) - instance.save() - - # If this is a primary instance, update projects. - if instance.primary: - self.update_projects(instance) - - # Done! - print('Successfully registered instance %s.' % instance_str(instance)) + instance = Instance.objects.filter(hostname=self.get_option_hostname()) + if instance.exists(): + print("Instance already registered %s" % instance_str(instance[0])) + return + instance = Instance(uuid=uuid, hostname=self.get_option_hostname()) + instance.save() + print('Successfully registered instance %s.' 
% instance_str(instance)) diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py index e6080fa419..15b9b8f483 100644 --- a/awx/main/management/commands/run_callback_receiver.py +++ b/awx/main/management/commands/run_callback_receiver.py @@ -21,7 +21,7 @@ from django.db import connection # AWX from awx.main.models import * # noqa -from awx.main.socket import Socket +from awx.main.socket_queue import Socket logger = logging.getLogger('awx.main.commands.run_callback_receiver') diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py index 4241a2000c..90252e2a7d 100644 --- a/awx/main/management/commands/run_fact_cache_receiver.py +++ b/awx/main/management/commands/run_fact_cache_receiver.py @@ -14,7 +14,7 @@ from django.utils import timezone # AWX from awx.main.models.fact import Fact from awx.main.models.inventory import Host -from awx.main.socket import Socket +from awx.main.socket_queue import Socket logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver') diff --git a/awx/main/management/commands/run_socketio_service.py b/awx/main/management/commands/run_socketio_service.py index 0e3df4ccaf..4c233aa312 100644 --- a/awx/main/management/commands/run_socketio_service.py +++ b/awx/main/management/commands/run_socketio_service.py @@ -16,7 +16,7 @@ from django.core.management.base import NoArgsCommand # AWX import awx from awx.main.models import * # noqa -from awx.main.socket import Socket +from awx.main.socket_queue import Socket # socketio from socketio import socketio_manage diff --git a/awx/main/managers.py b/awx/main/managers.py index 4825a74cf8..ca4578daf4 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -2,6 +2,7 @@ # All Rights Reserved. import sys +import socket from django.conf import settings from django.db import models @@ -28,31 +29,12 @@ class InstanceManager(models.Manager): # If we are running unit tests, return a stub record. if len(sys.argv) >= 2 and sys.argv[1] == 'test': return self.model(id=1, primary=True, + hostname='localhost', uuid='00000000-0000-0000-0000-000000000000') # Return the appropriate record from the database. - return self.get(uuid=settings.SYSTEM_UUID) + return self.get(hostname=socket.gethostname()) def my_role(self): - """Return the role of the currently active instance, as a string - ('primary' or 'secondary'). - """ - # If we are running unit tests, we are primary, because reasons. - if len(sys.argv) >= 2 and sys.argv[1] == 'test': - return 'primary' - - # Check if this instance is primary; if so, return "primary", otherwise - # "secondary". - if self.me().primary: - return 'primary' - return 'secondary' - - def primary(self): - """Return the primary instance.""" - # If we are running unit tests, return a stub record. - if len(sys.argv) >= 2 and sys.argv[1] == 'test': - return self.model(id=1, primary=True, - uuid='00000000-0000-0000-0000-000000000000') - - # Return the appropriate record from the database. 
- return self.get(primary=True) + # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing + return "tower" diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 37903886ac..fda98f1176 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -71,41 +71,6 @@ class ActivityStreamMiddleware(threading.local): if instance.id not in self.instance_ids: self.instance_ids.append(instance.id) - -class HAMiddleware(object): - """A middleware class that checks to see whether the request is being - served on a secondary instance, and redirects the request back to the - primary instance if so. - """ - def process_request(self, request): - """Process the request, and redirect if this is a request on a - secondary node. - """ - # Is this the primary node? If so, we can just return None and be done; - # we just want normal behavior in this case. - if Instance.objects.my_role() == 'primary': - return None - - # Always allow the /ping/ endpoint. - if request.path.startswith('/api/v1/ping'): - return None - - # Get the primary instance. - primary = Instance.objects.primary() - - # If this is a request to /, then we return a special landing page that - # informs the user that they are on the secondary instance and will - # be redirected. - if request.path == '/': - return TemplateResponse(request, 'ha/redirect.html', { - 'primary': primary, - 'redirect_seconds': 30, - 'version': version, - }) - - # Redirect to the base page of the primary instance. - return HttpResponseRedirect('http://%s%s' % (primary.hostname, request.path)) - class AuthTokenTimeoutMiddleware(object): """Presume that when the user includes the auth header, they go through the authentication mechanism. Further, that mechanism is presumed to extend diff --git a/awx/main/migrations/0033_v310_modify_ha_instance.py b/awx/main/migrations/0033_v310_modify_ha_instance.py new file mode 100644 index 0000000000..e4321f0235 --- /dev/null +++ b/awx/main/migrations/0033_v310_modify_ha_instance.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0032_v302_credential_permissions_update'), + ] + + operations = [ + migrations.RemoveField( + model_name='instance', + name='primary', + ), + migrations.AlterField( + model_name='instance', + name='uuid', + field=models.CharField(max_length=40), + ), + ] diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 3725e6afe5..a645c318e4 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -22,9 +22,8 @@ class Instance(models.Model): """ objects = InstanceManager() - uuid = models.CharField(max_length=40, unique=True) + uuid = models.CharField(max_length=40) hostname = models.CharField(max_length=250, unique=True) - primary = models.BooleanField(default=False) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) @@ -33,29 +32,8 @@ class Instance(models.Model): @property def role(self): - """Return the role of this instance, as a string.""" - if self.primary: - return 'primary' - return 'secondary' - - @functools.wraps(models.Model.save) - def save(self, *args, **kwargs): - """Save the instance. If this is a secondary instance, then ensure - that any currently-running jobs that this instance started are - canceled. - """ - # Perform the normal save. 
- result = super(Instance, self).save(*args, **kwargs) - - # If this is not a primary instance, then kill any jobs that this - # instance was responsible for starting. - if not self.primary: - for job in UnifiedJob.objects.filter(job_origin__instance=self, - status__in=CAN_CANCEL): - job.cancel() - - # Return back the original result. - return result + # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing + return "tower" class JobOrigin(models.Model): diff --git a/awx/main/socket.py b/awx/main/socket_queue.py similarity index 100% rename from awx/main/socket.py rename to awx/main/socket_queue.py diff --git a/awx/main/utils.py b/awx/main/utils.py index 63235ffca3..270d62e50f 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -425,7 +425,7 @@ def get_system_task_capacity(): def emit_websocket_notification(endpoint, event, payload, token_key=None): - from awx.main.socket import Socket + from awx.main.socket_queue import Socket try: with Socket('websocket', 'w', nowait=True, logger=logger) as websocket: diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 97157e4acd..4c3d605f4c 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -152,7 +152,6 @@ MIDDLEWARE_CLASSES = ( # NOQA 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', - 'awx.main.middleware.HAMiddleware', 'awx.main.middleware.ActivityStreamMiddleware', 'awx.sso.middleware.SocialAuthMiddleware', 'crum.CurrentRequestUserMiddleware', From e4025a7effcee2c7ad6c2ab806702dcd1e5e2f00 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 8 Sep 2016 14:26:30 -0400 Subject: [PATCH 19/47] flake8 fixes --- awx/api/serializers.py | 2 +- awx/api/views.py | 1 - awx/main/access.py | 7 +- .../management/commands/run_task_system.py | 16 +++- awx/main/models/workflow.py | 8 +- awx/main/tests/factories/fixtures.py | 3 +- awx/main/tests/factories/tower.py | 1 - awx/main/tests/manual/workflows/linear.py | 1 - awx/main/tests/manual/workflows/parallel.py | 1 - .../unit/commands/test_run_task_system.py | 94 +++++++------------ 10 files changed, 60 insertions(+), 74 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 59912c3468..d65fbd0ea9 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2187,7 +2187,7 @@ class WorkflowJobSerializer(UnifiedJobSerializer): res = super(WorkflowJobSerializer, self).get_related(obj) if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', - args=(obj.workflow_job_template.pk,)) + args=(obj.workflow_job_template.pk,)) # TODO: #res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,)) res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,)) diff --git a/awx/api/views.py b/awx/api/views.py index ce63713707..517ad0a3d7 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -11,7 +11,6 @@ import socket import sys import errno import logging -import copy from base64 import b64encode from collections import OrderedDict diff --git a/awx/main/access.py b/awx/main/access.py index 8ac2fe8377..588041c6b9 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1172,9 +1172,6 @@ class WorkflowJobTemplateAccess(BaseAccess): model = WorkflowJobTemplate - def can_start(self, obj): - return self.can_read(obj) - def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: qs = self.model.objects.all() 
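For context, a minimal sketch (not part of the patch itself) of the access-class pattern these hunks adjust, following the BaseAccess, check_superuser, and register_access conventions that appear later in this series; the Example model and its queryset filtering are hypothetical:

    from awx.main.access import BaseAccess, check_superuser, register_access

    class ExampleAccess(BaseAccess):
        # Gates API operations for a hypothetical Example model.
        model = Example

        def get_queryset(self):
            # Superusers and system auditors see every object; other users
            # would receive a role-filtered queryset instead.
            if self.user.is_superuser or self.user.is_system_auditor:
                return self.model.objects.all()
            return self.model.objects.none()

        @check_superuser
        def can_start(self, obj):
            # Mirrors the hunk below: fall back to the read check until an
            # execute_role membership test is wired up (see its TODO).
            return self.can_read(obj)

    register_access(Example, ExampleAccess)
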
@@ -1234,7 +1231,9 @@ class WorkflowJobTemplateAccess(BaseAccess): if self.user.is_superuser: return True - return self.user in obj.execute_role + return self.can_read(obj) + # TODO: We should use execute role rather than read role + #return self.user in obj.execute_role def can_change(self, obj, data): data_for_change = data diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index a03c38ffea..855491f08c 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -220,7 +220,7 @@ class WorkflowDAG(SimpleDAG): children_always = self.get_dependencies(obj, 'always_nodes') children_all = children_failed + children_always nodes.extend(children_all) - elif job.status in ['successfult']: + elif job.status in ['successful']: children_success = self.get_dependencies(obj, 'success_nodes') nodes.extend(children_success) else: @@ -260,12 +260,22 @@ def do_spawn_workflow_jobs(): dag = WorkflowDAG(workflow_job) spawn_nodes = dag.bfs_nodes_to_run() for spawn_node in spawn_nodes: - # TODO: Inject job template template params as kwargs + # TODO: Inject job template template params as kwargs. + # Make sure to take into account extra_vars merge logic kv = {} job = spawn_node.unified_job_template.create_unified_job(**kv) spawn_node.job = job spawn_node.save() - result = job.signal_start(**kv) + can_start = job.signal_start(**kv) + if not can_start: + job.status = 'failed' + job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials" + job.save(update_fields=['status', 'job_explanation']) + job.socketio_emit_status("failed") + + # TODO: should we emit a status on the socket here similar to tasks.py tower_periodic_scheduler() ? + #emit_websocket_notification('/socket.io/jobs', '', dict(id=)) + def rebuild_graph(message): """Regenerate the task graph by refreshing known tasks from Tower, purging diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index cc764e48af..48cdcee27b 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -1,6 +1,9 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. +# Python +#import urlparse + # Django from django.db import models from django.core.urlresolvers import reverse @@ -202,8 +205,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, JobNotificationMixin, Workflow def get_absolute_url(self): return reverse('api:workflow_job_detail', args=(self.pk,)) - def get_ui_url(self): - return urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) + # TODO: Ask UI if this is needed ? + #def get_ui_url(self): + # return urlparse.urljoin(tower_settings.TOWER_URL_BASE, "/#/workflow_jobs/{}".format(self.pk)) def is_blocked_by(self, obj): return True diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 1f32d76739..809e71b1bb 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -14,6 +14,7 @@ from awx.main.models import ( Inventory, Label, WorkflowJobTemplate, + WorkflowNode, ) # mk methods should create only a single object of a single type. 
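An illustrative usage sketch (not part of the patch itself) for the fixture the next hunk repairs; mk_job_template is assumed to be the sibling helper defined earlier in fixtures.py:

    wfjt = mk_workflow_job_template('example-wf', persisted=True)
    jt = mk_job_template('example-jt')

    # Pre-fix, mk_workflow_node referenced `job_template`, a name not defined
    # in its scope, so any call raised NameError. Post-fix, the
    # unified_job_template argument flows through to the created node.
    node = mk_workflow_node(workflow_job_template=wfjt,
                            unified_job_template=jt,
                            persisted=True)
    assert node.unified_job_template == jt
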
@@ -169,7 +170,7 @@ def mk_workflow_node(workflow_job_template=None, unified_job_template=None, success_nodes=None, failure_nodes=None, always_nodes=None, job=None, persisted=True): workflow_node = WorkflowNode(workflow_job_template=workflow_job_template, - unified_job_template=job_template, + unified_job_template=unified_job_template, success_nodes=success_nodes, failure_nodes=failure_nodes, always_nodes=always_nodes, diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 953cb2d26e..d7c45e73e2 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -9,7 +9,6 @@ from awx.main.models import ( Inventory, Job, Label, - WorkflowJobTemplate, ) from .objects import ( diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py index dd9918a933..2b096fb8cd 100644 --- a/awx/main/tests/manual/workflows/linear.py +++ b/awx/main/tests/manual/workflows/linear.py @@ -2,7 +2,6 @@ from awx.main.models import ( WorkflowNode, WorkflowJobTemplate, - WorkflowJob, ) from awx.main.models.jobs import JobTemplate diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py index bd33f350ba..071d4e1e94 100644 --- a/awx/main/tests/manual/workflows/parallel.py +++ b/awx/main/tests/manual/workflows/parallel.py @@ -2,7 +2,6 @@ from awx.main.models import ( WorkflowNode, WorkflowJobTemplate, - WorkflowJob, ) from awx.main.models.jobs import JobTemplate diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py index 4d28bfd2e0..0c9468c737 100644 --- a/awx/main/tests/unit/commands/test_run_task_system.py +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -10,12 +10,12 @@ import pytest def dag_root(): dag = SimpleDAG() data = [ - { 1: 1 }, - { 2: 2 }, - { 3: 3 }, - { 4: 4 }, - { 5: 5 }, - { 6: 6 }, + {1: 1}, + {2: 2}, + {3: 3}, + {4: 4}, + {5: 5}, + {6: 6}, ] # Add all the nodes to the DAG [dag.add_node(d) for d in data] @@ -30,12 +30,12 @@ def dag_root(): def dag_simple_edge_labels(): dag = SimpleDAG() data = [ - { 1: 1 }, - { 2: 2 }, - { 3: 3 }, - { 4: 4 }, - { 5: 5 }, - { 6: 6 }, + {1: 1}, + {2: 2}, + {3: 3}, + {4: 4}, + {5: 5}, + {6: 6}, ] # Add all the nodes to the DAG [dag.add_node(d) for d in data] @@ -46,23 +46,18 @@ def dag_simple_edge_labels(): return dag +''' class TestSimpleDAG(object): def test_get_root_nodes(self, dag_root): leafs = dag_root.get_leaf_nodes() - for l in leafs: - print(l) roots = dag_root.get_root_nodes() - for n in roots: - print(n) def test_get_labeled_edges(self, dag_simple_edge_labels): dag = dag_simple_edge_labels nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one') nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two') - print("Matching nodes: ") - for n in nodes: - print(n) +''' @pytest.fixture def factory_node(): @@ -74,41 +69,22 @@ def factory_node(): return wfn return fn -@pytest.fixture -def workflow_dag_multiple_roots(factory_node): - dag = WorkflowDAG() - data = [ - factory_node(1, None), - factory_node(2, None), - factory_node(3, None), - factory_node(4, None), - factory_node(5, None), - factory_node(6, None), - ] - [dag.add_node(d) for d in data] - - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') - - return dag - @pytest.fixture def workflow_dag_level_2(factory_node): dag = WorkflowDAG() data = [ - factory_node(1, 'success'), - factory_node(2, 'success'), 
- factory_node(3, 'success'), + factory_node(0, 'successful'), + factory_node(1, 'successful'), + factory_node(2, 'successful'), + factory_node(3, None), factory_node(4, None), factory_node(5, None), - factory_node(6, None), ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') + dag.add_edge(data[0], data[3], 'success_nodes') + dag.add_edge(data[1], data[4], 'success_nodes') + dag.add_edge(data[2], data[5], 'success_nodes') return (dag, data[3:6], False) @@ -125,9 +101,9 @@ def workflow_dag_multiple_roots(factory_node): ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[3], 'success') - dag.add_edge(data[1], data[4], 'success') - dag.add_edge(data[2], data[5], 'success') + dag.add_edge(data[0], data[3], 'success_nodes') + dag.add_edge(data[1], data[4], 'success_nodes') + dag.add_edge(data[2], data[5], 'success_nodes') expected = data[0:3] return (dag, expected, False) @@ -145,11 +121,11 @@ def workflow_dag_multiple_edges_labeled(factory_node): ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[1], 'success') - dag.add_edge(data[0], data[2], 'failure') - dag.add_edge(data[2], data[3], 'success') - dag.add_edge(data[2], data[4], 'failure') - dag.add_edge(data[4], data[5], 'failure') + dag.add_edge(data[0], data[1], 'success_nodes') + dag.add_edge(data[0], data[2], 'failure_nodes') + dag.add_edge(data[2], data[3], 'success_nodes') + dag.add_edge(data[2], data[4], 'failure_nodes') + dag.add_edge(data[4], data[5], 'failure_nodes') expected = data[5:6] return (dag, expected, False) @@ -163,15 +139,15 @@ def workflow_dag_finished(factory_node): factory_node(2, 'failed'), factory_node(3, None), factory_node(4, 'failed'), - factory_node(5, 'success'), + factory_node(5, 'successful'), ] [dag.add_node(d) for d in data] - dag.add_edge(data[0], data[1], 'success') - dag.add_edge(data[0], data[2], 'failure') - dag.add_edge(data[2], data[3], 'success') - dag.add_edge(data[2], data[4], 'failure') - dag.add_edge(data[4], data[5], 'failure') + dag.add_edge(data[0], data[1], 'success_nodes') + dag.add_edge(data[0], data[2], 'failure_nodes') + dag.add_edge(data[2], data[3], 'success_nodes') + dag.add_edge(data[2], data[4], 'failure_nodes') + dag.add_edge(data[4], data[5], 'failure_nodes') expected = [] return (dag, expected, True) From 13a0fd749f03b79116ef35bd47a62e81da062b21 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Fri, 9 Sep 2016 15:17:16 -0400 Subject: [PATCH 20/47] Purge old munin monitors and tools --- tools/munin_monitors/callbackr_alive | 16 --------------- tools/munin_monitors/celery_alive | 16 --------------- tools/munin_monitors/postgres_alive | 16 --------------- tools/munin_monitors/redis_alive | 16 --------------- tools/munin_monitors/socketio_alive | 16 --------------- tools/munin_monitors/taskmanager_alive | 16 --------------- tools/munin_monitors/tower_jobs | 27 -------------------------- 7 files changed, 123 deletions(-) delete mode 100755 tools/munin_monitors/callbackr_alive delete mode 100755 tools/munin_monitors/celery_alive delete mode 100755 tools/munin_monitors/postgres_alive delete mode 100755 tools/munin_monitors/redis_alive delete mode 100755 tools/munin_monitors/socketio_alive delete mode 100755 tools/munin_monitors/taskmanager_alive delete mode 100755 tools/munin_monitors/tower_jobs diff --git a/tools/munin_monitors/callbackr_alive b/tools/munin_monitors/callbackr_alive deleted file mode 100755 index 25fb029be8..0000000000 --- 
a/tools/munin_monitors/callbackr_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title Callback Receiver Processes -graph_vlabel num processes -graph_category tower -callbackr.label Callback Receiver Processes -EOM - exit 0;; -esac - -printf "callbackr.value " -ps ax | grep run_callback_receiver | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/celery_alive b/tools/munin_monitors/celery_alive deleted file mode 100755 index d96bdedf41..0000000000 --- a/tools/munin_monitors/celery_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title Celery Processes -graph_vlabel num processes -graph_category tower -celeryd.label Celery Processes -EOM - exit 0;; -esac - -printf "celeryd.value " -ps ax | grep celeryd | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/postgres_alive b/tools/munin_monitors/postgres_alive deleted file mode 100755 index 2a8115dcb6..0000000000 --- a/tools/munin_monitors/postgres_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title Postmaster Processes -graph_vlabel num processes -graph_category tower -postmaster.label Postmaster Processes -EOM - exit 0;; -esac - -printf "postmaster.value " -ps ax | grep postmaster | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/redis_alive b/tools/munin_monitors/redis_alive deleted file mode 100755 index 3f3573a006..0000000000 --- a/tools/munin_monitors/redis_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title Redis Processes -graph_vlabel num processes -graph_category tower -redis.label Redis Processes -EOM - exit 0;; -esac - -printf "redis.value " -ps ax | grep redis | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/socketio_alive b/tools/munin_monitors/socketio_alive deleted file mode 100755 index d035be40ea..0000000000 --- a/tools/munin_monitors/socketio_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title SocketIO Service Processes -graph_vlabel num processes -graph_category tower -socketio.label SocketIO Service Processes -EOM - exit 0;; -esac - -printf "socketio.value " -ps ax | grep run_socketio_service | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/taskmanager_alive b/tools/munin_monitors/taskmanager_alive deleted file mode 100755 index 25b2054208..0000000000 --- a/tools/munin_monitors/taskmanager_alive +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -graph_title Task Manager Processes -graph_vlabel num processes -graph_category tower -taskm.label Task Manager Processes -EOM - exit 0;; -esac - -printf "taskm.value " -ps ax | grep run_task_system | grep -v grep | wc -l -printf "\n" diff --git a/tools/munin_monitors/tower_jobs b/tools/munin_monitors/tower_jobs deleted file mode 100755 index 8781fc6b76..0000000000 --- a/tools/munin_monitors/tower_jobs +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh - -case $1 in - config) - cat <<'EOM' -multigraph tower_jobs -graph_title Running Jobs breakdown -graph_vlabel job count -graph_category tower -running.label Running jobs -waiting.label Waiting jobs -pending.label Pending jobs -EOM - exit 0;; -esac - -printf "running.value " -awx-manage stats --stat jobs_running -printf "\n" - -printf "waiting.value " -awx-manage stats --stat jobs_waiting -printf "\n" - -printf "pending.value " -awx-manage stats --stat jobs_pending -printf "\n" From 
807cced57133cefc4c24189e52b0667650f17fb8 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Fri, 9 Sep 2016 15:18:18 -0400 Subject: [PATCH 21/47] Implement a more dynamic celery queue system * Meant to be a starting point to more efficiently manage work routing and to balance work across all tower nodes * Integrate flower as a dev tool that starts alongside other nodes. Helpful for observing and monitoring the queues/exchanges * For the moment, force the task manager to only run on one node (not sure if this is needed) * Define queues and routes for all task work * Bump celery version to 3.1.23 * Expose flower through haproxy --- Makefile | 15 +++++++++++++-- Procfile | 1 + awx/main/tasks.py | 16 ++++++++-------- awx/settings/defaults.py | 20 ++++++++++++++++++++ requirements/requirements.txt | 2 +- requirements/requirements_dev.txt | 1 + tools/docker-compose-cluster.yml | 1 + tools/docker-compose.yml | 1 + tools/docker-compose/haproxy.cfg | 17 +++++++++++++++++ 9 files changed, 63 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index beea1eebd6..87a78a13d3 100644 --- a/Makefile +++ b/Makefile @@ -378,6 +378,12 @@ honcho: fi; \ honcho start +flower: + @if [ "$(VENV_BASE)" ]; then \ + . $(VENV_BASE)/tower/bin/activate; \ + fi; \ + $(PYTHON) manage.py celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672// + # Run the built-in development webserver (by default on http://localhost:8013). runserver: @if [ "$(VENV_BASE)" ]; then \ @@ -390,7 +396,8 @@ celeryd: @if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/tower/bin/activate; \ fi; \ - $(PYTHON) manage.py celeryd -l DEBUG -B --autoscale=20,2 -Ofair --schedule=$(CELERY_SCHEDULE_FILE) + $(PYTHON) manage.py celeryd -l DEBUG -B --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default + #$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE) # Run to start the zeromq callback receiver receiver: @@ -403,7 +410,11 @@ taskmanager: @if [ "$(VENV_BASE)" ]; then \ . 
$(VENV_BASE)/tower/bin/activate; \ fi; \ - $(PYTHON) manage.py run_task_system + if [ "$(COMPOSE_HOST)" == "tower_1" ] || [ "$(COMPOSE_HOST)" == "tower" ]; then \ + $(PYTHON) manage.py run_task_system; \ + else \ + while true; do sleep 2; done; \ + fi socketservice: @if [ "$(VENV_BASE)" ]; then \ diff --git a/Procfile b/Procfile index a301a6aa1a..433417f70b 100644 --- a/Procfile +++ b/Procfile @@ -4,3 +4,4 @@ taskmanager: make taskmanager receiver: make receiver socketservice: make socketservice factcacher: make factcacher +flower: make flower \ No newline at end of file diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 877ed4b2d2..806a819e3e 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -80,7 +80,7 @@ def celery_startup(conf=None, **kwargs): except Exception as e: logger.error("Failed to rebuild schedule {}: {}".format(sch, e)) -@task() +@task(queue='default') def send_notifications(notification_list, job_id=None): if not isinstance(notification_list, list): raise TypeError("notification_list should be of type list") @@ -101,7 +101,7 @@ def send_notifications(notification_list, job_id=None): if job_id is not None: job_actual.notifications.add(notification) -@task(bind=True) +@task(bind=True, queue='default') def run_administrative_checks(self): if not tower_settings.TOWER_ADMIN_ALERTS: return @@ -122,11 +122,11 @@ def run_administrative_checks(self): tower_admin_emails, fail_silently=True) -@task(bind=True) +@task(bind=True, queue='default') def cleanup_authtokens(self): AuthToken.objects.filter(expires__lt=now()).delete() -@task(bind=True) +@task(bind=True, queue='default') def tower_periodic_scheduler(self): def get_last_run(): if not os.path.exists(settings.SCHEDULE_METADATA_LOCATION): @@ -177,7 +177,7 @@ def tower_periodic_scheduler(self): new_unified_job.socketio_emit_status("failed") emit_websocket_notification('/socket.io/schedules', 'schedule_changed', dict(id=schedule.id)) -@task() +@task(queue='default') def notify_task_runner(metadata_dict): """Add the given task into the Tower task manager's queue, to be consumed by the task system. @@ -185,7 +185,7 @@ def notify_task_runner(metadata_dict): queue = FifoQueue('tower_task_manager') queue.push(metadata_dict) -@task(bind=True) +@task(bind=True, queue='default') def handle_work_success(self, result, task_actual): if task_actual['type'] == 'project_update': instance = ProjectUpdate.objects.get(id=task_actual['id']) @@ -227,7 +227,7 @@ def handle_work_success(self, result, task_actual): for n in all_notification_templates], job_id=task_actual['id']) -@task(bind=True) +@task(bind=True, queue='default') def handle_work_error(self, task_id, subtasks=None): print('Executing error task id %s, subtasks: %s' % (str(self.request.id), str(subtasks))) @@ -294,7 +294,7 @@ def handle_work_error(self, task_id, subtasks=None): job_id=first_task_id) -@task() +@task(queue='default') def update_inventory_computed_fields(inventory_id, should_update_hosts=True): ''' Signal handler and wrapper around inventory.update_computed_fields to diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 4c3d605f4c..325536b535 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -8,6 +8,9 @@ import ldap import djcelery from datetime import timedelta +from kombu import Queue, Exchange +from kombu.common import Broadcast + # Update this module's local settings from the global settings module. 
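A brief sketch (not part of the patch itself) of how the queue and route settings added to this file are consumed, assuming a stock Celery 3.1 application; the task name and broker host are illustrative:

    from celery import Celery

    app = Celery('sketch', broker='amqp://guest:guest@localhost:5672//')

    # A worker started with `-Q projects,jobs,default` (as in the Makefile
    # change above) consumes all three queues; one started with only
    # `-Q default` would never pick up job runs.
    @app.task(queue='jobs')  # per-task queue, as used in awx/main/tasks.py
    def run_example_job(job_id):
        return job_id

    run_example_job.apply_async(args=[42])                   # lands on 'jobs'
    run_example_job.apply_async(args=[42], queue='default')  # explicit override

    # Broadcast('projects') declares a fanout queue: every attached worker
    # gets a copy of each message, which is why run_project_update is routed
    # there -- project checkouts must land on all nodes.
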
from django.conf import global_settings this_module = sys.modules[__name__] @@ -326,6 +329,7 @@ os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199') djcelery.setup_loader() BROKER_URL = 'redis://localhost/' +CELERY_DEFAULT_QUEUE = 'default' CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_ACCEPT_CONTENT = ['json'] @@ -335,6 +339,22 @@ CELERYD_TASK_SOFT_TIME_LIMIT = None CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler' CELERYBEAT_MAX_LOOP_INTERVAL = 60 CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' +CELERY_QUEUES = ( + Queue('default', Exchange('default'), routing_key='default'), + Queue('jobs', Exchange('jobs'), routing_key='jobs'), + Broadcast('projects'), +) +CELERY_ROUTES = ({'awx.main.tasks.run_job': {'queue': 'jobs', + 'routing_key': 'jobs'}, + 'awx.main.tasks.run_project_update': {'queue': 'projects'}, + 'awx.main.tasks.run_inventory_update': {'queue': 'jobs', + 'routing_key': 'jobs'}, + 'awx.main.tasks.run_ad_hoc_command': {'queue': 'jobs', + 'routing_key': 'jobs'}, + 'awx.main.tasks.run_system_job': {'queue': 'jobs', + 'routing_key': 'jobs'} +}) + CELERYBEAT_SCHEDULE = { 'tower_scheduler': { 'task': 'awx.main.tasks.tower_periodic_scheduler', diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 1a3ba9e7f3..fb5872f572 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -6,7 +6,7 @@ azure==2.0.0rc2 Babel==2.2.0 billiard==3.3.0.16 boto==2.40.0 -celery==3.1.10 +celery==3.1.23 cliff==1.15.0 cmd2==0.6.8 d2to1==0.2.11 # TODO: Still needed? diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index d7906ce28f..f7fef4a0d4 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -10,3 +10,4 @@ pytest-cov pytest-django pytest-pythonpath pytest-mock +flower diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 86027f8849..1b1dee4041 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -11,6 +11,7 @@ services: ports: - "8013:8013" - "1936:1936" + - "5555:5555" tower_1: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} hostname: tower_1 diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index f34bb25766..08aec5babd 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -12,6 +12,7 @@ services: ports: - "8080:8080" - "8013:8013" + - "5555:5555" links: - postgres - memcached diff --git a/tools/docker-compose/haproxy.cfg b/tools/docker-compose/haproxy.cfg index cfbb3965f7..01d3c94a4a 100644 --- a/tools/docker-compose/haproxy.cfg +++ b/tools/docker-compose/haproxy.cfg @@ -17,6 +17,11 @@ frontend localnodes mode http default_backend nodes +frontend flower + bind *:5555 + mode http + default_backend flower_nodes + backend nodes mode http balance roundrobin @@ -29,6 +34,18 @@ backend nodes server tower_2 tower_2:8013 check server tower_3 tower_3:8013 check +backend flower_nodes + mode http + balance roundrobin + option forwardfor + option http-pretend-keepalive + http-request set-header X-Forwarded-Port %[dst_port] + http-request add-header X-Forwarded-Proto https if { ssl_fc } + #option httpchk HEAD / HTTP/1.1\r\nHost:localhost + server tower_1 tower_1:5555 + server tower_2 tower_2:5555 + server tower_3 tower_3:5555 + listen stats bind *:1936 stats enable From 988bbc4f4f01844a508d25bff8cc19f0dcbe6e69 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 12 Sep 2016 09:50:27 -0400 Subject: [PATCH 
22/47] Purge more qpid configuration --- awx/settings/defaults.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 97157e4acd..89389c02c7 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -1015,11 +1015,6 @@ LOGGING = { 'level': 'WARNING', 'propagate': False, }, - 'qpid.messaging': { - 'handlers': ['console', 'file', 'tower_warnings'], - 'propagate': False, - 'level': 'WARNING', - }, 'py.warnings': { 'handlers': ['console'], }, From 1bf0cf20bec3c5f06d0b25169bcfcf08ba05f938 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Mon, 12 Sep 2016 10:40:43 -0400 Subject: [PATCH 23/47] Merge branch 'containerized-debs' into devel From 799f321760f9aab07fff9c24c2fa45f1d686b3e9 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 12 Sep 2016 10:43:40 -0400 Subject: [PATCH 24/47] Fix an issue running jobs in the cluster The old VENV_PATH settings were still in place --- awx/settings/defaults.py | 1 + awx/settings/development.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 325536b535..8619d695c5 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -342,6 +342,7 @@ CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' CELERY_QUEUES = ( Queue('default', Exchange('default'), routing_key='default'), Queue('jobs', Exchange('jobs'), routing_key='jobs'), + # Projects use a fanout queue, this isn't super well supported Broadcast('projects'), ) CELERY_ROUTES = ({'awx.main.tasks.run_job': {'queue': 'jobs', diff --git a/awx/settings/development.py b/awx/settings/development.py index 438c152a0a..4c727e0bdc 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -71,9 +71,9 @@ include(optional('/etc/tower/settings.py'), scope=locals()) include(optional('/etc/tower/conf.d/*.py'), scope=locals()) ANSIBLE_USE_VENV = True -ANSIBLE_VENV_PATH = "/tower_devel/venv/ansible" +ANSIBLE_VENV_PATH = "/venv/ansible" TOWER_USE_VENV = True -TOWER_VENV_PATH = "/tower_devel/venv/tower" +TOWER_VENV_PATH = "/venv/tower" # If any local_*.py files are present in awx/settings/, use them to override # default settings for development. 
If not present, we can still run using From cea5ebadb728db7e80e62e16f4c39873d108f037 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 11:04:20 -0400 Subject: [PATCH 25/47] split Node in job and job template node --- awx/api/serializers.py | 43 ++++++--- awx/api/urls.py | 25 ++++-- awx/api/views.py | 73 +++++++++++----- awx/main/access.py | 41 ++++++++- .../migrations/0033_v310_add_workflows.py | 51 ++++++++--- awx/main/models/activity_stream.py | 3 +- awx/main/models/workflow.py | 87 ++++++++++--------- awx/main/tests/manual/workflows/linear.py | 16 ++-- awx/main/tests/manual/workflows/parallel.py | 4 +- 9 files changed, 232 insertions(+), 111 deletions(-) mode change 100644 => 100755 awx/main/tests/manual/workflows/linear.py mode change 100644 => 100755 awx/main/tests/manual/workflows/parallel.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d65fbd0ea9..3f49cca391 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2228,31 +2228,51 @@ class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer): class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): pass -class WorkflowNodeSerializer(BaseSerializer): +class WorkflowNodeBaseSerializer(BaseSerializer): #workflow_job_template = UnifiedJobTemplateSerializer() class Meta: - model = WorkflowNode # TODO: workflow_job and job read-only - fields = ('id', 'url', 'related', 'workflow_job_template', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes', 'job',) + fields = ('id', 'url', 'related', 'success_nodes', 'failure_nodes', 'always_nodes',) + +class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): + class Meta: + model = WorkflowJobTemplateNode + fields = ('*', 'workflow_job_template', 'unified_job_template',) def get_related(self, obj): - res = super(WorkflowNodeSerializer, self).get_related(obj) + res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj) + res['success_nodes'] = reverse('api:workflow_job_template_node_success_nodes_list', args=(obj.pk,)) + res['failure_nodes'] = reverse('api:workflow_job_template_node_failure_nodes_list', args=(obj.pk,)) + res['always_nodes'] = reverse('api:workflow_job_template_node_always_nodes_list', args=(obj.pk,)) if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.unified_job_template: res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + return res + +class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): + class Meta: + model = WorkflowJobTemplateNode + fields = ('*', 'workflow_job_template', 'unified_job_template', 'job', 'workflow_job',) + + def get_related(self, obj): + res = super(WorkflowJobNodeSerializer, self).get_related(obj) + res['success_nodes'] = reverse('api:workflow_job_node_success_nodes_list', args=(obj.pk,)) + res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,)) + res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,)) + if obj.workflow_job_template: + res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.job: res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) if obj.workflow_job: res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,)) - res['success_nodes'] = reverse('api:workflow_node_success_nodes_list', args=(obj.pk,)) - res['failure_nodes'] = 
reverse('api:workflow_node_failure_nodes_list', args=(obj.pk,)) - res['always_nodes'] = reverse('api:workflow_node_always_nodes_list', args=(obj.pk,)) - return res -class WorkflowNodeDetailSerializer(WorkflowNodeSerializer): +class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer): + pass + +class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer): ''' Influence the api browser sample data to not include workflow_job_template @@ -2262,14 +2282,13 @@ class WorkflowNodeDetailSerializer(WorkflowNodeSerializer): Maybe something to do with workflow_job_template being a relational field? ''' def build_relational_field(self, field_name, relation_info): - field_class, field_kwargs = super(WorkflowNodeDetailSerializer, self).build_relational_field(field_name, relation_info) + field_class, field_kwargs = super(WorkflowJobTemplateNodeDetailSerializer, self).build_relational_field(field_name, relation_info) if self.instance and field_name == 'workflow_job_template': field_kwargs['read_only'] = True field_kwargs.pop('queryset', None) return field_class, field_kwargs - -class WorkflowNodeListSerializer(WorkflowNodeSerializer): +class WorkflowJobTemplateNodeListSerializer(WorkflowJobTemplateNodeSerializer): pass class JobListSerializer(JobSerializer, UnifiedJobListSerializer): diff --git a/awx/api/urls.py b/awx/api/urls.py index e7240e39e2..af81d227d7 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -262,8 +262,8 @@ workflow_job_template_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/launch/$', 'workflow_job_template_launch'), url(r'^(?P[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'), # url(r'^(?P[0-9]+)/cancel/$', 'workflow_job_template_cancel'), - #url(r'^(?P[0-9]+)/nodes/$', 'workflow_job_template_node_list'), ) + workflow_job_urls = patterns('awx.api.views', url(r'^$', 'workflow_job_list'), url(r'^(?P[0-9]+)/$', 'workflow_job_detail'), @@ -290,12 +290,20 @@ label_urls = patterns('awx.api.views', url(r'^(?P[0-9]+)/$', 'label_detail'), ) -workflow_node_urls = patterns('awx.api.views', - url(r'^$', 'workflow_node_list'), - url(r'^(?P[0-9]+)/$', 'workflow_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_node_always_nodes_list'), +workflow_job_template_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_template_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_template_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_job_template_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_job_template_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_job_template_node_always_nodes_list'), +) + +workflow_job_node_urls = patterns('awx.api.views', + url(r'^$', 'workflow_job_node_list'), + url(r'^(?P[0-9]+)/$', 'workflow_job_node_detail'), + url(r'^(?P[0-9]+)/success_nodes/$', 'workflow_job_node_success_nodes_list'), + url(r'^(?P[0-9]+)/failure_nodes/$', 'workflow_job_node_failure_nodes_list'), + url(r'^(?P[0-9]+)/always_nodes/$', 'workflow_job_node_always_nodes_list'), ) schedule_urls = patterns('awx.api.views', @@ -350,7 +358,8 @@ v1_urls = patterns('awx.api.views', url(r'^workflow_job_templates/',include(workflow_job_template_urls)), url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), - url(r'^workflow_nodes/', include(workflow_node_urls)), + 
url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), + #url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index 517ad0a3d7..d42aae1127 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2615,31 +2615,31 @@ class JobTemplateObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) # TODO: -class WorkflowNodeList(ListCreateAPIView): +class WorkflowJobTemplateNodeList(ListCreateAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer new_in_310 = True # TODO: -class WorkflowNodeDetail(RetrieveUpdateDestroyAPIView): +class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeDetailSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeDetailSerializer new_in_310 = True -class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): +class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer always_allow_superuser = True # TODO: RBAC - parent_model = WorkflowNode + parent_model = WorkflowJobTemplateNode relationship = '' enforce_parent_relationship = 'workflow_job_template' new_in_310 = True ''' - Limit the set of WorkflowNodes to the related nodes of specified by + Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by 'relationship' ''' def get_queryset(self): @@ -2647,18 +2647,46 @@ class WorkflowNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreate self.check_parent_access(parent) return getattr(parent, self.relationship).all() -class WorkflowNodeSuccessNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeSuccessNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'success_nodes' -class WorkflowNodeFailureNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeFailureNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'failure_nodes' -class WorkflowNodeAlwaysNodesList(WorkflowNodeChildrenBaseList): - +class WorkflowJobTemplateNodeAlwaysNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'always_nodes' +''' +class WorkflowJobNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer + always_allow_superuser = True # TODO: RBAC + parent_model = WorkflowJobTemplateNode + relationship = '' + enforce_parent_relationship = 'workflow_job_template' + new_in_310 = True + + # + #Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by + #'relationship' + # + def get_queryset(self): + parent = self.get_parent_object() + self.check_parent_access(parent) + return getattr(parent, self.relationship).all() + +class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 'success_nodes' + +class WorkflowJobNodeFailureNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 
'failure_nodes' + +class WorkflowJobNodeAlwaysNodesList(WorkflowJobNodeChildrenBaseList): + relationship = 'always_nodes' +''' + + # TODO: class WorkflowJobTemplateList(ListCreateAPIView): @@ -2705,11 +2733,11 @@ class WorkflowJobTemplateLaunch(GenericAPIView): # TODO: class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobTemplateNode + serializer_class = WorkflowJobTemplateNodeListSerializer always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJobTemplate - relationship = 'workflow_nodes' + relationship = 'workflow_job_template_nodes' parent_key = 'workflow_job_template' # TODO: @@ -2740,14 +2768,13 @@ class WorkflowJobDetail(RetrieveDestroyAPIView): class WorkflowJobWorkflowNodesList(SubListAPIView): - model = WorkflowNode - serializer_class = WorkflowNodeListSerializer + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJob relationship = 'workflow_job_nodes' parent_key = 'workflow_job' - class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate diff --git a/awx/main/access.py b/awx/main/access.py index 588041c6b9..c7eb368cad 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1133,11 +1133,43 @@ class SystemJobAccess(BaseAccess): model = SystemJob # TODO: -class WorkflowNodeAccess(BaseAccess): +class WorkflowJobTemplateNodeAccess(BaseAccess): ''' - I can see/use a WorkflowNode if I have permission to associated Workflow Job Template + I can see/use a WorkflowJobTemplateNode if I have permission to associated Workflow Job Template ''' - model = WorkflowNode + model = WorkflowJobTemplateNode + + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + return self.model.objects.all() + + @check_superuser + def can_read(self, obj): + return True + + @check_superuser + def can_add(self, data): + if not data: # So the browseable API will work + return True + + return True + + @check_superuser + def can_change(self, obj, data): + if self.can_add(data) is False: + return False + + return True + + def can_delete(self, obj): + return self.can_change(obj, None) + +# TODO: +class WorkflowJobNodeAccess(BaseAccess): + ''' + I can see/use a WorkflowJobNode if I have permission to associated Workflow Job + ''' + model = WorkflowJobNode def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: @@ -1863,6 +1895,7 @@ register_access(Role, RoleAccess) register_access(NotificationTemplate, NotificationTemplateAccess) register_access(Notification, NotificationAccess) register_access(Label, LabelAccess) -register_access(WorkflowNode, WorkflowNodeAccess) +register_access(WorkflowJobTemplateNode, WorkflowJobTemplateNodeAccess) +register_access(WorkflowJobNode, WorkflowJobNodeAccess) register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess) register_access(WorkflowJob, WorkflowJobAccess) diff --git a/awx/main/migrations/0033_v310_add_workflows.py b/awx/main/migrations/0033_v310_add_workflows.py index 10f4879537..1ca0462edf 100644 --- a/awx/main/migrations/0033_v310_add_workflows.py +++ b/awx/main/migrations/0033_v310_add_workflows.py @@ -26,6 +26,21 @@ class Migration(migrations.Migration): }, bases=('main.unifiedjob', models.Model, awx.main.models.notifications.JobNotificationMixin, awx.main.models.workflow.WorkflowJobInheritNodesMixin), ), + migrations.CreateModel( + name='WorkflowJobNode', + fields=[ + ('id', 
models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('always_nodes', models.ManyToManyField(related_name='workflowjobnodes_always', to='main.WorkflowJobNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='workflowjobnodes_failure', to='main.WorkflowJobNode', blank=True)), + ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), + ('success_nodes', models.ManyToManyField(related_name='workflowjobnodes_success', to='main.WorkflowJobNode', blank=True)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='WorkflowJobTemplate', fields=[ @@ -36,19 +51,30 @@ class Migration(migrations.Migration): bases=('main.unifiedjobtemplate', models.Model), ), migrations.CreateModel( - name='WorkflowNode', + name='WorkflowJobTemplateNode', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('always_nodes', models.ManyToManyField(related_name='parent_always_nodes', to='main.WorkflowNode', blank=True)), - ('failure_nodes', models.ManyToManyField(related_name='parent_failure_nodes', to='main.WorkflowNode', blank=True)), - ('job', models.ForeignKey(related_name='unified_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJob', null=True)), - ('success_nodes', models.ManyToManyField(related_name='parent_success_nodes', to='main.WorkflowNode', blank=True)), - ('unified_job_template', models.ForeignKey(related_name='unified_jt_workflow_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), - ('workflow_job', models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True)), - ('workflow_job_template', models.ForeignKey(related_name='workflow_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), + ('always_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_always', to='main.WorkflowJobTemplateNode', blank=True)), + ('failure_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_failure', to='main.WorkflowJobTemplateNode', blank=True)), + ('success_nodes', models.ManyToManyField(related_name='workflowjobtemplatenodes_success', to='main.WorkflowJobTemplateNode', blank=True)), + ('unified_job_template', models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True)), + ('workflow_job_template', models.ForeignKey(related_name='workflow_job_template_nodes', default=None, blank=True, to='main.WorkflowJobTemplate', null=True)), ], + options={ + 'abstract': False, + }, + ), + migrations.AddField( + model_name='workflowjobnode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.UnifiedJobTemplate', null=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='workflow_job', + 
field=models.ForeignKey(related_name='workflow_job_nodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.WorkflowJob', null=True), ), migrations.AddField( model_name='workflowjob', @@ -60,6 +86,11 @@ class Migration(migrations.Migration): name='workflow_job', field=models.ManyToManyField(to='main.WorkflowJob', blank=True), ), + migrations.AddField( + model_name='activitystream', + name='workflow_job_node', + field=models.ManyToManyField(to='main.WorkflowJobNode', blank=True), + ), migrations.AddField( model_name='activitystream', name='workflow_job_template', @@ -67,7 +98,7 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='activitystream', - name='workflow_node', - field=models.ManyToManyField(to='main.WorkflowNode', blank=True), + name='workflow_job_template_node', + field=models.ManyToManyField(to='main.WorkflowJobTemplateNode', blank=True), ), ] diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index bcc5cef0c7..b0d58fc031 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -49,7 +49,8 @@ class ActivityStream(models.Model): permission = models.ManyToManyField("Permission", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True) job = models.ManyToManyField("Job", blank=True) - workflow_node = models.ManyToManyField("WorkflowNode", blank=True) + workflow_job_template_node = models.ManyToManyField("WorkflowJobTemplateNode", blank=True) + workflow_job_node = models.ManyToManyField("WorkflowJobNode", blank=True) workflow_job_template = models.ManyToManyField("WorkflowJobTemplate", blank=True) workflow_job = models.ManyToManyField("WorkflowJob", blank=True) unified_job_template = models.ManyToManyField("UnifiedJobTemplate", blank=True, related_name='activity_stream_as_unified_job_template+') diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 48cdcee27b..af97b9b2c8 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -18,11 +18,11 @@ from awx.main.models.rbac import ( ) from awx.main.fields import ImplicitRoleField -__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowNode'] - -class WorkflowNode(CreatedModifiedModel): +__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] +class WorkflowNodeBase(CreatedModifiedModel): class Meta: + abstract = True app_label = 'main' # TODO: RBAC @@ -31,41 +31,55 @@ class WorkflowNode(CreatedModifiedModel): parent_role='workflow_job_template.admin_role', ) ''' - - # TODO: Ensure the API forces workflow_job_template being set - workflow_job_template = models.ForeignKey( - 'WorkflowJobTemplate', - related_name='workflow_nodes', + success_nodes = models.ManyToManyField( + 'self', blank=True, - null=True, - default=None, - on_delete=models.CASCADE, + symmetrical=False, + related_name='%(class)ss_success', + ) + failure_nodes = models.ManyToManyField( + 'self', + blank=True, + symmetrical=False, + related_name='%(class)ss_failure', + ) + always_nodes = models.ManyToManyField( + 'self', + blank=True, + symmetrical=False, + related_name='%(class)ss_always', ) unified_job_template = models.ForeignKey( 'UnifiedJobTemplate', - related_name='unified_jt_workflow_nodes', + related_name='%(class)ss', blank=True, null=True, default=None, on_delete=models.SET_NULL, ) - success_nodes = models.ManyToManyField( - 'self', - related_name='parent_success_nodes', + +class 
WorkflowJobTemplateNode(WorkflowNodeBase): + # TODO: Ensure the API forces workflow_job_template being set + workflow_job_template = models.ForeignKey( + 'WorkflowJobTemplate', + related_name='workflow_job_template_nodes', blank=True, - symmetrical=False, + null=True, + default=None, + on_delete=models.CASCADE, ) - failure_nodes = models.ManyToManyField( - 'self', - related_name='parent_failure_nodes', + + def get_absolute_url(self): + return reverse('api:workflow_job_template_node_detail', args=(self.pk,)) + +class WorkflowJobNode(WorkflowNodeBase): + job = models.ForeignKey( + 'UnifiedJob', + related_name='unified_job_nodes', blank=True, - symmetrical=False, - ) - always_nodes = models.ManyToManyField( - 'self', - related_name='parent_always_nodes', - blank=True, - symmetrical=False, + null=True, + default=None, + on_delete=models.SET_NULL, ) workflow_job = models.ForeignKey( 'WorkflowJob', @@ -75,17 +89,9 @@ class WorkflowNode(CreatedModifiedModel): default=None, on_delete=models.SET_NULL, ) - job = models.ForeignKey( - 'UnifiedJob', - related_name='unified_job_nodes', - blank=True, - null=True, - default=None, - on_delete=models.SET_NULL, - ) def get_absolute_url(self): - return reverse('api:workflow_node_detail', args=(self.pk,)) + return reverse('api:workflow_job_node_detail', args=(self.pk,)) class WorkflowJobOptions(BaseModel): class Meta: @@ -147,22 +153,17 @@ class WorkflowJobInheritNodesMixin(object): for old_related_node in old_related_nodes: new_related_node_id = node_ids_map[old_related_node.id] - new_related_node = WorkflowNode.objects.get(id=new_related_node_id) + new_related_node = WorkflowJobNode.objects.get(id=new_related_node_id) new_node_type_mgr.add(new_related_node) def inherit_jt_workflow_nodes(self): new_nodes = [] - old_nodes = self.workflow_job_template.workflow_nodes.all() + old_nodes = self.workflow_job_template.workflow_job_template_nodes.all() node_ids_map = {} for old_node in old_nodes: - new_node = WorkflowNode.objects.get(id=old_node.pk) - new_node.workflow_job = self - new_node.job = None - new_node.workflow_job_template = None - new_node.pk = None - new_node.save() + new_node = WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) new_nodes.append(new_node) node_ids_map[old_node.id] = new_node.id diff --git a/awx/main/tests/manual/workflows/linear.py b/awx/main/tests/manual/workflows/linear.py old mode 100644 new mode 100755 index 2b096fb8cd..ebb2c99b45 --- a/awx/main/tests/manual/workflows/linear.py +++ b/awx/main/tests/manual/workflows/linear.py @@ -1,6 +1,6 @@ # AWX from awx.main.models import ( - WorkflowNode, + WorkflowJobTemplateNode, WorkflowJobTemplate, ) from awx.main.models.jobs import JobTemplate @@ -10,16 +10,16 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never wfjt.delete() wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="linear workflow") print(wfjt.id) - WorkflowNode.objects.all().delete() + WorkflowJobTemplateNode.objects.all().delete() if created: nodes_success = [] nodes_fail = [] nodes_never = [] for i in range(0, 2): - nodes_success.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) - nodes_fail.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) - nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) - 
nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_success.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success)) + nodes_fail.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_fail)) + nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) nodes_fail[1].delete() nodes_success[0].success_nodes.add(nodes_fail[0]) @@ -32,8 +32,8 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never def do_init(): jt_success = JobTemplate.objects.get(id=5) - jt_fail= JobTemplate.objects.get(id=9) - jt_never= JobTemplate.objects.get(id=11) + jt_fail= JobTemplate.objects.get(id=6) + jt_never= JobTemplate.objects.get(id=7) do_init_workflow(jt_success, jt_fail, jt_never) if __name__ == "__main__": diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py old mode 100644 new mode 100755 index 071d4e1e94..47c35a9839 --- a/awx/main/tests/manual/workflows/parallel.py +++ b/awx/main/tests/manual/workflows/parallel.py @@ -32,8 +32,8 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never def do_init(): jt_success = JobTemplate.objects.get(id=5) - jt_fail= JobTemplate.objects.get(id=9) - jt_never= JobTemplate.objects.get(id=11) + jt_fail= JobTemplate.objects.get(id=6) + jt_never= JobTemplate.objects.get(id=7) jt_parallel = [] jt_parallel.append(JobTemplate.objects.get(id=16)) From 4dc5c334429c7ac73781882ae1a48d17689d3c92 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 12:19:36 -0400 Subject: [PATCH 26/47] refactor to be more testable --- awx/main/models/workflow.py | 32 +++++++++++----- awx/main/tests/factories/fixtures.py | 55 +++++++++++++++++++++++----- awx/main/tests/factories/tower.py | 29 +++++++++++++-- 3 files changed, 93 insertions(+), 23 deletions(-) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index af97b9b2c8..0e00e26e8c 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -143,7 +143,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): #def create_workflow_job(self, **kwargs): #workflow_job = self.create_unified_job(**kwargs) workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs) - workflow_job.inherit_jt_workflow_nodes() + workflow_job.inherit_job_template_workflow_nodes() return workflow_job class WorkflowJobInheritNodesMixin(object): @@ -152,21 +152,33 @@ class WorkflowJobInheritNodesMixin(object): new_node_type_mgr = getattr(new_node, node_type) for old_related_node in old_related_nodes: - new_related_node_id = node_ids_map[old_related_node.id] - new_related_node = WorkflowJobNode.objects.get(id=new_related_node_id) + new_related_node = self._get_workflowJob_node_by_id(node_ids_map[old_related_node.id]) new_node_type_mgr.add(new_related_node) - def inherit_jt_workflow_nodes(self): - new_nodes = [] - old_nodes = self.workflow_job_template.workflow_job_template_nodes.all() + ''' + Create a WorkflowJobNode for each WorkflowJobTemplateNode + ''' + def _create_workflow_job_nodes(self, old_nodes): + return [WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) 
for old_node in old_nodes] + def _map_workflow_job_nodes(self, old_nodes, new_nodes): node_ids_map = {} - for old_node in old_nodes: - new_node = WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) - new_nodes.append(new_node) + for i, old_node in enumerate(old_nodes): + node_ids_map[old_node.id] = new_nodes[i].id - node_ids_map[old_node.id] = new_node.id + return node_ids_map + + def _get_workflow_job_template_nodes(self): + return self.workflow_job_template.workflow_job_template_nodes.all() + + def _get_workflowJob_node_by_id(self, id): + return WorkflowJobNode.objects.get(id=id) + + def inherit_job_template_workflow_nodes(self): + old_nodes = self._get_workflow_job_template_nodes() + new_nodes = self._create_workflow_job_nodes(old_nodes) + node_ids_map = self._map_workflow_job_nodes(old_nodes, new_nodes) for index, old_node in enumerate(old_nodes): new_node = new_nodes[index] diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 809e71b1bb..c51c29e83c 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -14,7 +14,9 @@ from awx.main.models import ( Inventory, Label, WorkflowJobTemplate, - WorkflowNode, + WorkflowJob, + WorkflowJobNode, + WorkflowJobTemplateNode, ) # mk methods should create only a single object of a single type. @@ -155,7 +157,20 @@ def mk_job_template(name, job_type='run', jt.save() return jt +def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={}, + persisted=True): + job = WorkflowJob(status=status, extra_vars=json.dumps(extra_vars)) + + job.workflow_job_template = workflow_job_template + + if persisted: + job.save() + return job + def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): + if extra_vars: + extra_vars = json.dumps(extra_vars) + wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars) wfjt.survey_spec = spec @@ -166,15 +181,35 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): wfjt.save() return wfjt -def mk_workflow_node(workflow_job_template=None, unified_job_template=None, - success_nodes=None, failure_nodes=None, always_nodes=None, - job=None, persisted=True): - workflow_node = WorkflowNode(workflow_job_template=workflow_job_template, - unified_job_template=unified_job_template, - success_nodes=success_nodes, - failure_nodes=failure_nodes, - always_nodes=always_nodes, - job=job) +def mk_workflow_job_template_node(workflow_job_template=None, + unified_job_template=None, + success_nodes=None, + failure_nodes=None, + always_nodes=None, + persisted=True): + workflow_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template, + unified_job_template=unified_job_template, + success_nodes=success_nodes, + failure_nodes=failure_nodes, + always_nodes=always_nodes) if persisted: workflow_node.save() return workflow_node + +def mk_workflow_job_node(unified_job_template=None, + success_nodes=None, + failure_nodes=None, + always_nodes=None, + workflow_job=None, + job=None, + persisted=True): + workflow_node = WorkflowJobNode(unified_job_template=unified_job_template, + success_nodes=success_nodes, + failure_nodes=failure_nodes, + always_nodes=always_nodes, + workflow_job=workflow_job, + job=job) + if persisted: + workflow_node.save() + return workflow_node + diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index d7c45e73e2..6bbb2b0e36 100644 --- a/awx/main/tests/factories/tower.py +++ 
b/awx/main/tests/factories/tower.py @@ -29,6 +29,7 @@ from .fixtures import ( mk_label, mk_notification_template, mk_workflow_job_template, + #mk_workflow_job_template_node, ) @@ -344,8 +345,16 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs): users=_Mapped(users), superusers=_Mapped(superusers), teams=teams) +''' +def generate_workflow_job_template_nodes(workflow_job_template, + unified_job_template, + persisted=True, + **kwargs): +''' -def create_workflow_job_template(name, persisted=True, **kwargs): +# TODO: Implement survey +''' +def create_workflow_job(name, persisted=True, **kwargs): Objects = generate_objects(["workflow_job_template", "survey",], kwargs) @@ -353,13 +362,27 @@ def create_workflow_job_template(name, persisted=True, **kwargs): jobs = None extra_vars = kwargs.get('extra_vars', '') +''' + + +# TODO: Implement survey +def create_workflow_job_template(name, persisted=True, **kwargs): + Objects = generate_objects(["workflow_job_template", + "survey",], kwargs) + + spec = None + #jobs = None + + extra_vars = kwargs.get('extra_vars', '') if 'survey' in kwargs: spec = create_survey_spec(kwargs['survey']) wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars, persisted=persisted) + #workflow_nodes = generate_workflow_job_template_nodes(wfjt, persisted, workflow_nodes=kwargs.get('workflow_nodes')) + ''' if 'jobs' in kwargs: for i in kwargs['jobs']: if type(i) is Job: @@ -367,8 +390,8 @@ def create_workflow_job_template(name, persisted=True, **kwargs): else: # TODO: Create the job raise RuntimeError("Currently, only already created jobs are supported") - + ''' return Objects(workflow_job_template=wfjt, - jobs=jobs, + #jobs=jobs, survey=spec,) From 9c12b234b12dc19af8e405a636a0f8684afd7626 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 12:26:07 -0400 Subject: [PATCH 27/47] remove workflow_job_template from workflow job node serializer --- awx/api/serializers.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 3f49cca391..9cfcfe0d6d 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2229,16 +2229,21 @@ class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): pass class WorkflowNodeBaseSerializer(BaseSerializer): - #workflow_job_template = UnifiedJobTemplateSerializer() class Meta: # TODO: workflow_job and job read-only - fields = ('id', 'url', 'related', 'success_nodes', 'failure_nodes', 'always_nodes',) + fields = ('id', 'url', 'related', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + + def get_related(self, obj): + res = super(WorkflowNodeBaseSerializer, self).get_related(obj) + if obj.unified_job_template: + res['unified_job_template'] = obj.unified_job_template.get_absolute_url() + return res class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): class Meta: model = WorkflowJobTemplateNode - fields = ('*', 'workflow_job_template', 'unified_job_template',) + fields = ('*', 'workflow_job_template',) def get_related(self, obj): res = super(WorkflowJobTemplateNodeSerializer, self).get_related(obj) @@ -2247,22 +2252,18 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): res['always_nodes'] = reverse('api:workflow_job_template_node_always_nodes_list', args=(obj.pk,)) if obj.workflow_job_template: res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) - if 
obj.unified_job_template: - res['unified_job_template'] = obj.unified_job_template.get_absolute_url() return res class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): class Meta: model = WorkflowJobTemplateNode - fields = ('*', 'workflow_job_template', 'unified_job_template', 'job', 'workflow_job',) + fields = ('*', 'job', 'workflow_job',) def get_related(self, obj): res = super(WorkflowJobNodeSerializer, self).get_related(obj) res['success_nodes'] = reverse('api:workflow_job_node_success_nodes_list', args=(obj.pk,)) res['failure_nodes'] = reverse('api:workflow_job_node_failure_nodes_list', args=(obj.pk,)) res['always_nodes'] = reverse('api:workflow_job_node_always_nodes_list', args=(obj.pk,)) - if obj.workflow_job_template: - res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) if obj.job: res['job'] = reverse('api:job_detail', args=(obj.job.pk,)) if obj.workflow_job: From 0dfe3b197a8c09cd18fdbef338dff094457efc6a Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 12:45:01 -0400 Subject: [PATCH 28/47] bump pyflakes version --- requirements/requirements_dev.txt | 2 +- requirements/requirements_jenkins.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index 5fa57df995..c301be2528 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -4,7 +4,7 @@ ipython unittest2 pep8 flake8 -pyflakes==1.0.0 # Pinned until PR merges https://gitlab.com/pycqa/flake8/merge_requests/56 +pyflakes pytest==2.9.2 pytest-cov pytest-django diff --git a/requirements/requirements_jenkins.txt b/requirements/requirements_jenkins.txt index 287a714939..1546b0ae3b 100644 --- a/requirements/requirements_jenkins.txt +++ b/requirements/requirements_jenkins.txt @@ -1,6 +1,6 @@ ansible==1.9.4 coverage -pyflakes==1.0.0 # Pinned until PR merges https://gitlab.com/pycqa/flake8/merge_requests/56 +pyflakes pep8 pylint flake8 From c91b10e62de6fd11eb3e832efd3b58fcc4dccffb Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 12 Sep 2016 14:06:20 -0400 Subject: [PATCH 29/47] use flake8 inside tower env --- Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Makefile b/Makefile index 5cbc9ac400..64ecb71433 100644 --- a/Makefile +++ b/Makefile @@ -425,6 +425,9 @@ pep8: reports @(set -o pipefail && $@ | tee reports/$@.report) flake8: reports + @if [ "$(VENV_BASE)" ]; then \ + . $(VENV_BASE)/tower/bin/activate; \ + fi; \ @$@ --output-file=reports/$@.report pyflakes: reports From 7d95b15812086d001d8fc3f98fa506a2bf249ab0 Mon Sep 17 00:00:00 2001 From: James Laska Date: Tue, 13 Sep 2016 10:59:45 -0400 Subject: [PATCH 30/47] Ensure make docker-clean exits cleanly --- Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 5cbc9ac400..d9c1d5f0f0 100644 --- a/Makefile +++ b/Makefile @@ -761,8 +761,7 @@ MACHINE?=default docker-clean: rm -f awx/lib/.deps_built eval $$(docker-machine env $(MACHINE)) - docker stop $$(docker ps -a -q) - -docker rm $$(docker ps -f name=tools_tower -a -q) + $(foreach container_id,$(shell docker ps -f name=tools_tower -aq),docker stop $(container_id); docker rm -f $(container_id);) -docker images | grep "tower_devel" | awk '{print $3}' | xargs docker rmi docker-refresh: docker-clean docker-compose From 183660a133f5c883ff280384776b2ed66cfb4e17 Mon Sep 17 00:00:00 2001 From: James Laska Date: Tue, 13 Sep 2016 11:00:40 -0400 Subject: [PATCH 31/47] Fix '..' 
typo when installing jenkins requirements --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index d9c1d5f0f0..feedbab684 100644 --- a/Makefile +++ b/Makefile @@ -299,7 +299,7 @@ requirements_jenkins: . $(VENV_BASE)/tower/bin/activate; \ $(VENV_BASE)/tower/bin/pip install -Ir requirements/requirements_jenkins.txt; \ else \ - pip install -Ir requirements/requirements_jenkins..txt; \ + pip install -Ir requirements/requirements_jenkins.txt; \ fi && \ $(NPM_BIN) install csslint From 32461574ae2adbd5bf1e40f9eef94bcb32af9625 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Tue, 13 Sep 2016 13:31:10 -0400 Subject: [PATCH 32/47] add unit tests --- awx/api/serializers.py | 51 ++-- awx/api/urls.py | 2 +- awx/api/views.py | 25 +- awx/main/models/workflow.py | 9 +- awx/main/tests/factories/tower.py | 49 ++-- awx/main/tests/manual/workflows/parallel.py | 10 +- .../tests/unit/api/serializers/__init__.py | 0 .../tests/unit/api/serializers/conftest.py | 46 ++++ .../serializers/test_inventory_serializers.py | 47 ++++ .../api/serializers/test_job_serializers.py | 91 +++++++ .../test_job_template_serializers.py | 108 ++++++++ .../serializers/test_workflow_serializers.py | 154 ++++++++++++ awx/main/tests/unit/api/test_serializers.py | 235 ------------------ .../unit/commands/test_run_task_system.py | 4 +- .../tests/unit/models/test_workflow_unit.py | 81 ++++++ 15 files changed, 617 insertions(+), 295 deletions(-) create mode 100644 awx/main/tests/unit/api/serializers/__init__.py create mode 100644 awx/main/tests/unit/api/serializers/conftest.py create mode 100644 awx/main/tests/unit/api/serializers/test_inventory_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_job_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_job_template_serializers.py create mode 100644 awx/main/tests/unit/api/serializers/test_workflow_serializers.py delete mode 100644 awx/main/tests/unit/api/test_serializers.py create mode 100644 awx/main/tests/unit/models/test_workflow_unit.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 9cfcfe0d6d..1a6684ce47 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2174,7 +2174,29 @@ class SystemJobCancelSerializer(SystemJobSerializer): class Meta: fields = ('can_cancel',) +class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer): + class Meta: + model = WorkflowJobTemplate + fields = ('*',) + def get_related(self, obj): + res = super(WorkflowJobTemplateSerializer, self).get_related(obj) + res.update(dict( + jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)), + #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)), + launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)), + workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)), + # TODO: Implement notifications + #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)), + #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)), + #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)), + + )) + return res + +# TODO: +class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer): + pass # TODO: class WorkflowJobSerializer(UnifiedJobSerializer): @@ -2198,36 +2220,10 @@ class WorkflowJobSerializer(UnifiedJobSerializer): ''' return 
res - # TODO: class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer): pass -# TODO: -class WorkflowJobTemplateListSerializer(UnifiedJobTemplateSerializer): - - class Meta: - model = WorkflowJobTemplate - fields = ('*',) - - def get_related(self, obj): - res = super(WorkflowJobTemplateListSerializer, self).get_related(obj) - res.update(dict( - jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)), - #schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)), - launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)), - workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)), - # TODO: Implement notifications - #notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)), - #notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)), - #notification_templates_error = reverse('api:system_job_template_notification_templates_error_list', args=(obj.pk,)), - - )) - return res - -class WorkflowJobTemplateSerializer(WorkflowJobTemplateListSerializer): - pass - class WorkflowNodeBaseSerializer(BaseSerializer): class Meta: @@ -2273,6 +2269,9 @@ class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer): pass +class WorkflowJobNodeDetailSerializer(WorkflowJobNodeSerializer): + pass + class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer): ''' diff --git a/awx/api/urls.py b/awx/api/urls.py index af81d227d7..b508b6c35c 100644 --- a/awx/api/urls.py +++ b/awx/api/urls.py @@ -359,7 +359,7 @@ v1_urls = patterns('awx.api.views', url(r'^workflow_jobs/' ,include(workflow_job_urls)), url(r'^labels/', include(label_urls)), url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), - #url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), + url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), url(r'^unified_job_templates/$','unified_job_template_list'), url(r'^unified_jobs/$', 'unified_job_list'), url(r'^activity_stream/', include(activity_stream_urls)), diff --git a/awx/api/views.py b/awx/api/views.py index d42aae1127..07ecf938aa 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2614,6 +2614,20 @@ class JobTemplateObjectRolesList(SubListAPIView): content_type = ContentType.objects.get_for_model(self.parent_model) return Role.objects.filter(content_type=content_type, object_id=po.pk) +# TODO: +class WorkflowJobNodeList(ListCreateAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeListSerializer + new_in_310 = True + +# TODO: +class WorkflowJobNodeDetail(RetrieveUpdateDestroyAPIView): + + model = WorkflowJobNode + serializer_class = WorkflowJobNodeDetailSerializer + new_in_310 = True + # TODO: class WorkflowJobTemplateNodeList(ListCreateAPIView): @@ -2628,6 +2642,7 @@ class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): serializer_class = WorkflowJobTemplateNodeDetailSerializer new_in_310 = True + class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): model = WorkflowJobTemplateNode @@ -2656,19 +2671,20 @@ class WorkflowJobTemplateNodeFailureNodesList(WorkflowJobTemplateNodeChildrenBas class WorkflowJobTemplateNodeAlwaysNodesList(WorkflowJobTemplateNodeChildrenBaseList): relationship = 'always_nodes' -''' -class 
WorkflowJobNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): +class WorkflowJobNodeChildrenBaseList(SubListAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeListSerializer always_allow_superuser = True # TODO: RBAC - parent_model = WorkflowJobTemplateNode + parent_model = Job relationship = '' + ''' enforce_parent_relationship = 'workflow_job_template' new_in_310 = True + ''' # - #Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by + #Limit the set of WorkflowJobeNodes to the related nodes of specified by #'relationship' # def get_queryset(self): @@ -2684,7 +2700,6 @@ class WorkflowJobNodeFailureNodesList(WorkflowJobNodeChildrenBaseList): class WorkflowJobNodeAlwaysNodesList(WorkflowJobNodeChildrenBaseList): relationship = 'always_nodes' -''' # TODO: diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 0e00e26e8c..0182b40b59 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -148,11 +148,11 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): class WorkflowJobInheritNodesMixin(object): def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type): - old_related_nodes = getattr(old_node, node_type).all() + old_related_nodes = self._get_all_by_type(old_node, node_type) new_node_type_mgr = getattr(new_node, node_type) for old_related_node in old_related_nodes: - new_related_node = self._get_workflowJob_node_by_id(node_ids_map[old_related_node.id]) + new_related_node = self._get_workflow_job_node_by_id(node_ids_map[old_related_node.id]) new_node_type_mgr.add(new_related_node) ''' @@ -172,9 +172,12 @@ class WorkflowJobInheritNodesMixin(object): def _get_workflow_job_template_nodes(self): return self.workflow_job_template.workflow_job_template_nodes.all() - def _get_workflowJob_node_by_id(self, id): + def _get_workflow_job_node_by_id(self, id): return WorkflowJobNode.objects.get(id=id) + def _get_all_by_type(node, node_type): + return getattr(node, node_type).all() + def inherit_job_template_workflow_nodes(self): old_nodes = self._get_workflow_job_template_nodes() new_nodes = self._create_workflow_job_nodes(old_nodes) diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index 6bbb2b0e36..5c99c14828 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -9,6 +9,7 @@ from awx.main.models import ( Inventory, Job, Label, + WorkflowJobTemplateNode, ) from .objects import ( @@ -29,7 +30,6 @@ from .fixtures import ( mk_label, mk_notification_template, mk_workflow_job_template, - #mk_workflow_job_template_node, ) @@ -345,29 +345,33 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs): users=_Mapped(users), superusers=_Mapped(superusers), teams=teams) -''' -def generate_workflow_job_template_nodes(workflow_job_template, - unified_job_template, - persisted=True, + +def generate_workflow_job_template_nodes(workflow_job_template, + persisted, **kwargs): -''' -# TODO: Implement survey -''' -def create_workflow_job(name, persisted=True, **kwargs): - Objects = generate_objects(["workflow_job_template", - "survey",], kwargs) + workflow_job_template_nodes = kwargs.get('workflow_job_template_nodes', []) + if len(workflow_job_template_nodes) > 0 and not persisted: + raise RuntimeError('workflow job template nodes can not be used when persisted=False') - spec = None - jobs = None + new_nodes = [] - extra_vars = kwargs.get('extra_vars', '') -''' + for i, node in 
enumerate(workflow_job_template_nodes): + new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template, + unified_job_template=node['unified_job_template'], + id=i) + new_nodes.append(new_node) + node_types = ['success_nodes', 'failure_nodes', 'always_nodes'] + for node_type in node_types: + for i, new_node in enumerate(new_nodes): + for related_index in workflow_job_template_nodes[i][node_type]: + getattr(new_node, node_type).add(new_nodes[related_index]) -# TODO: Implement survey +# TODO: Implement survey and jobs def create_workflow_job_template(name, persisted=True, **kwargs): Objects = generate_objects(["workflow_job_template", + "workflow_job_template_nodes", "survey",], kwargs) spec = None @@ -378,9 +382,16 @@ def create_workflow_job_template(name, persisted=True, **kwargs): if 'survey' in kwargs: spec = create_survey_spec(kwargs['survey']) - wfjt = mk_workflow_job_template(name, spec=spec, extra_vars=extra_vars, + wfjt = mk_workflow_job_template(name, + spec=spec, + extra_vars=extra_vars, persisted=persisted) - #workflow_nodes = generate_workflow_job_template_nodes(wfjt, persisted, workflow_nodes=kwargs.get('workflow_nodes')) + + + + workflow_jt_nodes = generate_workflow_job_template_nodes(wfjt, + persisted, + workflow_job_template_nodes=kwargs.get('workflow_job_template_nodes', [])) ''' if 'jobs' in kwargs: @@ -393,5 +404,7 @@ def create_workflow_job_template(name, persisted=True, **kwargs): ''' return Objects(workflow_job_template=wfjt, #jobs=jobs, + workflow_job_template_nodes=workflow_jt_nodes, survey=spec,) + diff --git a/awx/main/tests/manual/workflows/parallel.py b/awx/main/tests/manual/workflows/parallel.py index 47c35a9839..303554d793 100755 --- a/awx/main/tests/manual/workflows/parallel.py +++ b/awx/main/tests/manual/workflows/parallel.py @@ -1,6 +1,6 @@ # AWX from awx.main.models import ( - WorkflowNode, + WorkflowJobTemplateNode, WorkflowJobTemplate, ) from awx.main.models.jobs import JobTemplate @@ -10,17 +10,17 @@ def do_init_workflow(job_template_success, job_template_fail, job_template_never wfjt.delete() wfjt, created = WorkflowJobTemplate.objects.get_or_create(name="parallel workflow") print(wfjt.id) - WorkflowNode.objects.all().delete() + WorkflowJobTemplateNode.objects.all().delete() if created: - node_success = WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success) + node_success = WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_success) nodes_never = [] for x in range(0, 3): - nodes_never.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) + nodes_never.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=job_template_never)) nodes_parallel = [] for jt in jts_parallel: - nodes_parallel.append(WorkflowNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt)) + nodes_parallel.append(WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt, unified_job_template=jt)) node_success.success_nodes.add(nodes_parallel[0]) node_success.success_nodes.add(nodes_parallel[1]) diff --git a/awx/main/tests/unit/api/serializers/__init__.py b/awx/main/tests/unit/api/serializers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/tests/unit/api/serializers/conftest.py b/awx/main/tests/unit/api/serializers/conftest.py new file mode 100644 index 0000000000..3b056a149f --- /dev/null +++ 
b/awx/main/tests/unit/api/serializers/conftest.py @@ -0,0 +1,46 @@ + +import pytest + +@pytest.fixture +def get_related_assert(): + def fn(model_obj, related, resource_name, related_resource_name): + assert related_resource_name in related + assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name) + return fn + +@pytest.fixture +def get_related_mock_and_run(): + def fn(serializer_class, model_obj): + serializer = serializer_class() + related = serializer.get_related(model_obj) + return related + return fn + +@pytest.fixture +def test_get_related(get_related_assert, get_related_mock_and_run): + def fn(serializer_class, model_obj, resource_name, related_resource_name): + related = get_related_mock_and_run(serializer_class, model_obj) + get_related_assert(model_obj, related, resource_name, related_resource_name) + return related + return fn + +@pytest.fixture +def get_summary_fields_assert(): + def fn(summary, summary_field_name): + assert summary_field_name in summary + return fn + +@pytest.fixture +def get_summary_fields_mock_and_run(): + def fn(serializer_class, model_obj): + serializer = serializer_class() + return serializer.get_summary_fields(model_obj) + return fn + +@pytest.fixture +def test_get_summary_fields(get_summary_fields_mock_and_run, get_summary_fields_assert): + def fn(serializer_class, model_obj, summary_field_name): + summary = get_summary_fields_mock_and_run(serializer_class, model_obj) + get_summary_fields_assert(summary, summary_field_name) + return summary + return fn diff --git a/awx/main/tests/unit/api/serializers/test_inventory_serializers.py b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py new file mode 100644 index 0000000000..0208105179 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_inventory_serializers.py @@ -0,0 +1,47 @@ +# Python +import pytest +import mock +from mock import PropertyMock + +# AWX +from awx.api.serializers import ( + CustomInventoryScriptSerializer, +) +from awx.main.models import ( + CustomInventoryScript, + User, +) + +#DRF +from rest_framework.request import Request +from rest_framework.test import ( + APIRequestFactory, + force_authenticate, +) + +class TestCustomInventoryScriptSerializer(object): + + @pytest.mark.parametrize("superuser,sysaudit,admin_role,value", + ((True, False, False, '#!/python'), + (False, True, False, '#!/python'), + (False, False, True, '#!/python'), + (False, False, False, None))) + def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value): + with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}): + User.add_to_class('is_system_auditor', sysaudit) + user = User(username="root", is_superuser=superuser) + roles = [user] if admin_role else [] + + with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles): + cis = CustomInventoryScript(pk=1, script='#!/python') + serializer = CustomInventoryScriptSerializer() + + factory = APIRequestFactory() + wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json") + force_authenticate(wsgi_request, user) + + request = Request(wsgi_request) + serializer.context['request'] = request + + representation = serializer.to_representation(cis) + assert representation['script'] == value diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py new file mode 100644 index 0000000000..a9eaecd2e9 
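The conftest.py added above replaces the deleted GetRelatedMixin/GetSummaryFieldsMixin helper classes with pytest "fixture factories": each fixture returns a plain function, and fixtures compose by requesting one another, so shared test helpers no longer need inheritance. A minimal, self-contained sketch of that pattern; the names below are illustrative, not taken from the patch:

import pytest

@pytest.fixture
def make_detail_url():
    # The fixture returns a helper function rather than a value.
    def fn(resource, pk):
        return '/api/v1/%s/%d/' % (resource, pk)
    return fn

@pytest.fixture
def assert_detail_url(make_detail_url):
    # Fixtures can request other fixtures, mirroring how test_get_related
    # above builds on get_related_mock_and_run and get_related_assert.
    def fn(resource, pk, url):
        assert url == make_detail_url(resource, pk)
    return fn

def test_example(assert_detail_url):
    assert_detail_url('jobs', 5, '/api/v1/jobs/5/')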
--- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -0,0 +1,91 @@ +# Python +import pytest +import mock +import json + +# AWX +from awx.api.serializers import ( + JobSerializer, + JobOptionsSerializer, +) +from awx.main.models import ( + Label, + Job, +) + +def mock_JT_resource_data(): + return ({}, []) + +@pytest.fixture +def job_template(mocker): + mock_jt = mocker.MagicMock(pk=5) + mock_jt.resource_validation_data = mock_JT_resource_data + return mock_jt + +@pytest.fixture +def job(mocker, job_template): + return mocker.MagicMock(pk=5, job_template=job_template) + +@pytest.fixture +def labels(mocker): + return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] + +@pytest.fixture +def jobs(mocker): + return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobSerializerGetRelated(): + @pytest.mark.parametrize("related_resource_name", [ + 'job_events', + 'job_plays', + 'job_tasks', + 'relaunch', + 'labels', + ]) + def test_get_related(self, test_get_related, job, related_resource_name): + test_get_related(JobSerializer, job, 'jobs', related_resource_name) + + def test_job_template_absent(self, job): + job.job_template = None + serializer = JobSerializer() + related = serializer.get_related(job) + assert 'job_template' not in related + + def test_job_template_present(self, get_related_mock_and_run, job): + related = get_related_mock_and_run(JobSerializer, job) + assert 'job_template' in related + assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk) + +@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: { + 'extra_vars': obj.extra_vars}) +class TestJobSerializerSubstitution(): + + def test_survey_password_hide(self, mocker): + job = mocker.MagicMock(**{ + 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}', + 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'}) + serializer = JobSerializer(job) + rep = serializer.to_representation(job) + extra_vars = json.loads(rep['extra_vars']) + assert extra_vars['secret_key'] == '$encrypted$' + job.display_extra_vars.assert_called_once_with() + assert 'my_password' not in extra_vars + +@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) +class TestJobOptionsSerializerGetSummaryFields(): + def test__summary_field_labels_10_max(self, mocker, job_template, labels): + job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels}) + job_template.labels.all.return_value = job_template.labels.all + + serializer = JobOptionsSerializer() + summary_labels = serializer._summary_field_labels(job_template) + + job_template.labels.all.order_by.assert_called_with('name') + assert len(summary_labels['results']) == 10 + assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]] + + def test_labels_exists(self, test_get_summary_fields, job_template): + test_get_summary_fields(JobOptionsSerializer, job_template, 'labels') + diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py new file mode 100644 index 0000000000..dc0c672a70 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -0,0 +1,108 @@ +# Python +import pytest +import mock 
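# A note on the pattern used throughout these new test modules: mock.patch is
# applied with a lambda replacement to stub the parent serializer's
# get_related(), so each test observes only the keys its own class adds.
# A minimal sketch of that technique, assuming only the mock library already
# used in this suite; Base and Child are illustrative stand-ins, not classes
# from the patch:
import mock

class Base(object):
    def get_related(self, obj):
        return {'base': '/api/v1/base/1/'}

class Child(Base):
    def get_related(self, obj):
        res = super(Child, self).get_related(obj)
        res['child'] = '/api/v1/child/1/'
        return res

@mock.patch.object(Base, 'get_related', lambda self, obj: {})
def test_child_keys_only():
    # With the parent stubbed out, only the subclass's contribution remains.
    assert Child().get_related(None) == {'child': '/api/v1/child/1/'}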
+ +# AWX +from awx.api.serializers import ( + JobTemplateSerializer, +) +from awx.main.models import ( + Job, +) + +#DRF +from rest_framework import serializers + +def mock_JT_resource_data(): + return ({}, []) + +@pytest.fixture +def job_template(mocker): + mock_jt = mocker.MagicMock(pk=5) + mock_jt.resource_validation_data = mock_JT_resource_data + return mock_jt + +@pytest.fixture +def job(mocker, job_template): + return mocker.MagicMock(pk=5, job_template=job_template) + +@pytest.fixture +def jobs(mocker): + return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) +class TestJobTemplateSerializerGetRelated(): + @pytest.mark.parametrize("related_resource_name", [ + 'jobs', + 'schedules', + 'activity_stream', + 'launch', + 'notification_templates_any', + 'notification_templates_success', + 'notification_templates_error', + 'survey_spec', + 'labels', + 'callback', + ]) + def test_get_related(self, test_get_related, job_template, related_resource_name): + test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name) + + def test_callback_absent(self, get_related_mock_and_run, job_template): + job_template.host_config_key = None + related = get_related_mock_and_run(JobTemplateSerializer, job_template) + assert 'callback' not in related + +class TestJobTemplateSerializerGetSummaryFields(): + def test__recent_jobs(self, mocker, job_template, jobs): + + job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) + job_template.jobs.all.return_value = job_template.jobs.all + + serializer = JobTemplateSerializer() + recent_jobs = serializer._recent_jobs(job_template) + + job_template.jobs.all.assert_called_once_with() + job_template.jobs.all.order_by.assert_called_once_with('-created') + assert len(recent_jobs) == 10 + for x in jobs[:10]: + assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] + + def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template): + job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'} + test_get_summary_fields(JobTemplateSerializer, job_template, 'survey') + + def test_survey_spec_absent(self, get_summary_fields_mock_and_run, job_template): + job_template.survey_spec = None + summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template) + assert 'survey' not in summary + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_copy_false(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_true(self, mocker, job_template): + pass + + @pytest.mark.skip(reason="RBAC needs to land") + def test_can_edit_false(self, mocker, job_template): + pass + +class TestJobTemplateSerializerValidation(object): + + good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"] + bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"] + + def test_validate_extra_vars(self): + serializer = JobTemplateSerializer() + for ev in self.good_extra_vars: + serializer.validate_extra_vars(ev) + for ev in self.bad_extra_vars: + with pytest.raises(serializers.ValidationError): + serializer.validate_extra_vars(ev) + diff --git a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py 
b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py new file mode 100644 index 0000000000..371b02c7b8 --- /dev/null +++ b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py @@ -0,0 +1,154 @@ +# Python +import pytest +import mock + +# AWX +from awx.api.serializers import ( + WorkflowJobTemplateSerializer, + WorkflowNodeBaseSerializer, + WorkflowJobTemplateNodeSerializer, + WorkflowJobNodeSerializer, +) +from awx.main.models import ( + Job, + WorkflowJobTemplateNode, + WorkflowJob, + WorkflowJobNode, +) + +@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobTemplateSerializerGetRelated(): + @pytest.fixture + def workflow_job_template(self, workflow_job_template_factory): + wfjt = workflow_job_template_factory('hello world', persisted=False).workflow_job_template + wfjt.pk = 3 + return wfjt + + @pytest.mark.parametrize("related_resource_name", [ + 'jobs', + 'launch', + 'workflow_nodes', + ]) + def test_get_related(self, mocker, test_get_related, workflow_job_template, related_resource_name): + test_get_related(WorkflowJobTemplateSerializer, + workflow_job_template, + 'workflow_job_templates', + related_resource_name) + +@mock.patch('awx.api.serializers.BaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowNodeBaseSerializerGetRelated(): + @pytest.fixture + def job_template(self, job_template_factory): + jt = job_template_factory(name="blah", persisted=False).job_template + jt.pk = 1 + return jt + + @pytest.fixture + def workflow_job_template_node_related(self, job_template): + return WorkflowJobTemplateNode(pk=1, unified_job_template=job_template) + + @pytest.fixture + def workflow_job_template_node(self): + return WorkflowJobTemplateNode(pk=1) + + def test_workflow_unified_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related): + related = get_related_mock_and_run(WorkflowNodeBaseSerializer, workflow_job_template_node_related) + assert 'unified_job_template' in related + assert related['unified_job_template'] == '/api/v1/%s/%d/' % ('job_templates', workflow_job_template_node_related.unified_job_template.pk) + + def test_workflow_unified_job_template_absent(self, workflow_job_template_node): + related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node) + assert 'unified_job_template' not in related + +@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobTemplateNodeSerializerGetRelated(): + @pytest.fixture + def workflow_job_template_node(self): + return WorkflowJobTemplateNode(pk=1) + + @pytest.fixture + def workflow_job_template(self, workflow_job_template_factory): + wfjt = workflow_job_template_factory("bliggity", persisted=False).workflow_job_template + wfjt.pk = 1 + return wfjt + + @pytest.fixture + def job_template(self, job_template_factory): + jt = job_template_factory(name="blah", persisted=False).job_template + jt.pk = 1 + return jt + + @pytest.fixture + def workflow_job_template_node_related(self, workflow_job_template_node, workflow_job_template): + workflow_job_template_node.workflow_job_template = workflow_job_template + return workflow_job_template_node + + @pytest.mark.parametrize("related_resource_name", [ + 'success_nodes', + 'failure_nodes', + 'always_nodes', + ]) + def test_get_related(self, test_get_related, workflow_job_template_node, related_resource_name): + test_get_related(WorkflowJobTemplateNodeSerializer, + workflow_job_template_node, + 
'workflow_job_template_nodes', + related_resource_name) + + def test_workflow_job_template_present(self, get_related_mock_and_run, workflow_job_template_node_related): + related = get_related_mock_and_run(WorkflowJobTemplateNodeSerializer, workflow_job_template_node_related) + assert 'workflow_job_template' in related + assert related['workflow_job_template'] == '/api/v1/%s/%d/' % ('workflow_job_templates', workflow_job_template_node_related.workflow_job_template.pk) + + def test_workflow_job_template_absent(self, workflow_job_template_node): + related = WorkflowJobTemplateNodeSerializer().get_related(workflow_job_template_node) + assert 'workflow_job_template' not in related + + +@mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {}) +class TestWorkflowJobNodeSerializerGetRelated(): + @pytest.fixture + def workflow_job_node(self): + return WorkflowJobNode(pk=1) + + @pytest.fixture + def workflow_job(self): + return WorkflowJob(pk=1) + + @pytest.fixture + def job(self): + return Job(name="blah", pk=1) + + @pytest.fixture + def workflow_job_node_related(self, workflow_job_node, workflow_job, job): + workflow_job_node.workflow_job = workflow_job + workflow_job_node.job = job + return workflow_job_node + + @pytest.mark.parametrize("related_resource_name", [ + 'success_nodes', + 'failure_nodes', + 'always_nodes', + ]) + def test_get_related(self, test_get_related, workflow_job_node, related_resource_name): + test_get_related(WorkflowJobNodeSerializer, + workflow_job_node, + 'workflow_job_nodes', + related_resource_name) + + def test_workflow_job_present(self, get_related_mock_and_run, workflow_job_node_related): + related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related) + assert 'workflow_job' in related + assert related['workflow_job'] == '/api/v1/%s/%d/' % ('workflow_jobs', workflow_job_node_related.workflow_job.pk) + + def test_workflow_job_absent(self, workflow_job_node): + related = WorkflowJobNodeSerializer().get_related(workflow_job_node) + assert 'workflow_job' not in related + + def test_job_present(self, get_related_mock_and_run, workflow_job_node_related): + related = get_related_mock_and_run(WorkflowJobNodeSerializer, workflow_job_node_related) + assert 'job' in related + assert related['job'] == '/api/v1/%s/%d/' % ('jobs', workflow_job_node_related.job.pk) + + def test_job_absent(self, workflow_job_node): + related = WorkflowJobNodeSerializer().get_related(workflow_job_node) + assert 'job' not in related diff --git a/awx/main/tests/unit/api/test_serializers.py b/awx/main/tests/unit/api/test_serializers.py deleted file mode 100644 index 2496ba9a2d..0000000000 --- a/awx/main/tests/unit/api/test_serializers.py +++ /dev/null @@ -1,235 +0,0 @@ -# Python -import pytest -import mock -from mock import PropertyMock -import json - -# AWX -from awx.api.serializers import ( - JobTemplateSerializer, - JobSerializer, - JobOptionsSerializer, - CustomInventoryScriptSerializer, -) -from awx.main.models import ( - Label, - Job, - CustomInventoryScript, - User, -) - -#DRF -from rest_framework.request import Request -from rest_framework import serializers -from rest_framework.test import ( - APIRequestFactory, - force_authenticate, -) - - -def mock_JT_resource_data(): - return ({}, []) - -@pytest.fixture -def job_template(mocker): - mock_jt = mocker.MagicMock(pk=5) - mock_jt.resource_validation_data = mock_JT_resource_data - return mock_jt - -@pytest.fixture -def job(mocker, job_template): - return mocker.MagicMock(pk=5, 
job_template=job_template) - -@pytest.fixture -def labels(mocker): - return [Label(id=x, name='label-%d' % x) for x in xrange(0, 25)] - -@pytest.fixture -def jobs(mocker): - return [Job(id=x, name='job-%d' % x) for x in xrange(0, 25)] - -class GetRelatedMixin: - def _assert(self, model_obj, related, resource_name, related_resource_name): - assert related_resource_name in related - assert related[related_resource_name] == '/api/v1/%s/%d/%s/' % (resource_name, model_obj.pk, related_resource_name) - - def _mock_and_run(self, serializer_class, model_obj): - serializer = serializer_class() - related = serializer.get_related(model_obj) - return related - - def _test_get_related(self, serializer_class, model_obj, resource_name, related_resource_name): - related = self._mock_and_run(serializer_class, model_obj) - self._assert(model_obj, related, resource_name, related_resource_name) - return related - -class GetSummaryFieldsMixin: - def _assert(self, summary, summary_field_name): - assert summary_field_name in summary - - def _mock_and_run(self, serializer_class, model_obj): - serializer = serializer_class() - return serializer.get_summary_fields(model_obj) - - def _test_get_summary_fields(self, serializer_class, model_obj, summary_field_name): - summary = self._mock_and_run(serializer_class, model_obj) - self._assert(summary, summary_field_name) - return summary - -@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) -@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) -class TestJobTemplateSerializerGetRelated(GetRelatedMixin): - @pytest.mark.parametrize("related_resource_name", [ - 'jobs', - 'schedules', - 'activity_stream', - 'launch', - 'notification_templates_any', - 'notification_templates_success', - 'notification_templates_error', - 'survey_spec', - 'labels', - 'callback', - ]) - def test_get_related(self, job_template, related_resource_name): - self._test_get_related(JobTemplateSerializer, job_template, 'job_templates', related_resource_name) - - def test_callback_absent(self, job_template): - job_template.host_config_key = None - related = self._mock_and_run(JobTemplateSerializer, job_template) - assert 'callback' not in related - -class TestJobTemplateSerializerGetSummaryFields(GetSummaryFieldsMixin): - def test__recent_jobs(self, mocker, job_template, jobs): - - job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) - job_template.jobs.all.return_value = job_template.jobs.all - - serializer = JobTemplateSerializer() - recent_jobs = serializer._recent_jobs(job_template) - - job_template.jobs.all.assert_called_once_with() - job_template.jobs.all.order_by.assert_called_once_with('-created') - assert len(recent_jobs) == 10 - for x in jobs[:10]: - assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] - - def test_survey_spec_exists(self, mocker, job_template): - job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'} - self._test_get_summary_fields(JobTemplateSerializer, job_template, 'survey') - - def test_survey_spec_absent(self, mocker, job_template): - job_template.survey_spec = None - summary = self._mock_and_run(JobTemplateSerializer, job_template) - assert 'survey' not in summary - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_copy_true(self, mocker, job_template): - pass - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_copy_false(self, mocker, job_template): - pass - - 
@pytest.mark.skip(reason="RBAC needs to land") - def test_can_edit_true(self, mocker, job_template): - pass - - @pytest.mark.skip(reason="RBAC needs to land") - def test_can_edit_false(self, mocker, job_template): - pass - -@mock.patch('awx.api.serializers.UnifiedJobTemplateSerializer.get_related', lambda x,y: {}) -@mock.patch('awx.api.serializers.JobOptionsSerializer.get_related', lambda x,y: {}) -class TestJobSerializerGetRelated(GetRelatedMixin): - @pytest.mark.parametrize("related_resource_name", [ - 'job_events', - 'job_plays', - 'job_tasks', - 'relaunch', - 'labels', - ]) - def test_get_related(self, mocker, job, related_resource_name): - self._test_get_related(JobSerializer, job, 'jobs', related_resource_name) - - def test_job_template_absent(self, mocker, job): - job.job_template = None - serializer = JobSerializer() - related = serializer.get_related(job) - assert 'job_template' not in related - - def test_job_template_present(self, job): - related = self._mock_and_run(JobSerializer, job) - assert 'job_template' in related - assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk) - -@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: { - 'extra_vars': obj.extra_vars}) -class TestJobSerializerSubstitution(): - - def test_survey_password_hide(self, mocker): - job = mocker.MagicMock(**{ - 'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}', - 'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'}) - serializer = JobSerializer(job) - rep = serializer.to_representation(job) - extra_vars = json.loads(rep['extra_vars']) - assert extra_vars['secret_key'] == '$encrypted$' - job.display_extra_vars.assert_called_once_with() - assert 'my_password' not in extra_vars - -@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {}) -class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin): - def test__summary_field_labels_10_max(self, mocker, job_template, labels): - job_template.labels.all = mocker.MagicMock(**{'order_by.return_value': labels}) - job_template.labels.all.return_value = job_template.labels.all - - serializer = JobOptionsSerializer() - summary_labels = serializer._summary_field_labels(job_template) - - job_template.labels.all.order_by.assert_called_with('name') - assert len(summary_labels['results']) == 10 - assert summary_labels['results'] == [{'id': x.id, 'name': x.name} for x in labels[:10]] - - def test_labels_exists(self, mocker, job_template): - self._test_get_summary_fields(JobOptionsSerializer, job_template, 'labels') - -class TestJobTemplateSerializerValidation(object): - - good_extra_vars = ["{\"test\": \"keys\"}", "---\ntest: key"] - bad_extra_vars = ["{\"test\": \"keys\"", "---\ntest: [2"] - - def test_validate_extra_vars(self): - serializer = JobTemplateSerializer() - for ev in self.good_extra_vars: - serializer.validate_extra_vars(ev) - for ev in self.bad_extra_vars: - with pytest.raises(serializers.ValidationError): - serializer.validate_extra_vars(ev) - -class TestCustomInventoryScriptSerializer(object): - - @pytest.mark.parametrize("superuser,sysaudit,admin_role,value", - ((True, False, False, '#!/python'), - (False, True, False, '#!/python'), - (False, False, True, '#!/python'), - (False, False, False, None))) - def test_to_representation_orphan(self, superuser, sysaudit, admin_role, value): - with mock.patch.object(CustomInventoryScriptSerializer, 'get_summary_fields', return_value={}): - User.add_to_class('is_system_auditor', 
sysaudit) - user = User(username="root", is_superuser=superuser) - roles = [user] if admin_role else [] - - with mock.patch('awx.main.models.CustomInventoryScript.admin_role', new_callable=PropertyMock, return_value=roles): - cis = CustomInventoryScript(pk=1, script='#!/python') - serializer = CustomInventoryScriptSerializer() - - factory = APIRequestFactory() - wsgi_request = factory.post("/inventory_script/1", {'id':1}, format="json") - force_authenticate(wsgi_request, user) - - request = Request(wsgi_request) - serializer.context['request'] = request - - representation = serializer.to_representation(cis) - assert representation['script'] == value diff --git a/awx/main/tests/unit/commands/test_run_task_system.py b/awx/main/tests/unit/commands/test_run_task_system.py index 0c9468c737..bc62394b21 100644 --- a/awx/main/tests/unit/commands/test_run_task_system.py +++ b/awx/main/tests/unit/commands/test_run_task_system.py @@ -3,7 +3,7 @@ from awx.main.management.commands.run_task_system import ( WorkflowDAG, ) from awx.main.models import Job -from awx.main.models.workflow import WorkflowNode +from awx.main.models.workflow import WorkflowJobNode import pytest @pytest.fixture @@ -62,7 +62,7 @@ class TestSimpleDAG(object): @pytest.fixture def factory_node(): def fn(id, status): - wfn = WorkflowNode(id=id) + wfn = WorkflowJobNode(id=id) if status: j = Job(status=status) wfn.job = j diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py new file mode 100644 index 0000000000..58ea591299 --- /dev/null +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -0,0 +1,81 @@ +import pytest + +from awx.main.models.jobs import JobTemplate +from awx.main.models.workflow import WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJobNode + +class TestWorkflowJobInheritNodesMixin(): + class TestCreateWorkflowJobNodes(): + @pytest.fixture + def job_templates(self): + return [JobTemplate() for i in range(0, 10)] + + @pytest.fixture + def job_template_nodes(self, job_templates): + return [WorkflowJobTemplateNode(unified_job_template=job_templates[i]) for i in range(0, 10)] + + def test__create_workflow_job_nodes(self, mocker, job_template_nodes): + workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobNode.objects.create') + + mixin = WorkflowJobInheritNodesMixin() + mixin._create_workflow_job_nodes(job_template_nodes) + + for job_template_node in job_template_nodes: + workflow_job_node_create.assert_any_call(workflow_job=mixin, + unified_job_template=job_template_node.unified_job_template) + + class TestMapWorkflowJobNodes(): + @pytest.fixture + def job_template_nodes(self): + return [WorkflowJobTemplateNode(id=i) for i in range(0, 20)] + + @pytest.fixture + def job_nodes(self): + return [WorkflowJobNode(id=i) for i in range(100, 120)] + + def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes): + mixin = WorkflowJobInheritNodesMixin() + + node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes) + assert len(node_ids_map) == len(job_template_nodes) + + for i, job_template_node in enumerate(job_template_nodes): + assert node_ids_map[job_template_node.id] == job_nodes[i].id + + class TestInheritRelationship(): + @pytest.fixture + def job_template_nodes(self, mocker): + nodes = [mocker.MagicMock(id=i) for i in range(0, 10)] + + for i in range(0, 9): + nodes[i].success_nodes = [mocker.MagicMock(id=i + 1)] + + return nodes + + @pytest.fixture + def job_nodes(self, mocker): + nodes = 
[mocker.MagicMock(id=i) for i in range(100, 110)] + return nodes + + @pytest.fixture + def job_nodes_dict(self, job_nodes): + _map = {} + for n in job_nodes: + _map[n.id] = n + return _map + + + def test__inherit_relationship(self, mocker, job_template_nodes, job_nodes, job_nodes_dict): + mixin = WorkflowJobInheritNodesMixin() + + mixin._get_workflow_job_node_by_id = lambda x: job_nodes_dict[x] + mixin._get_all_by_type = lambda x,node_type: x.success_nodes + + node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes) + + for i, job_template_node in enumerate(job_template_nodes): + mixin._inherit_relationship(job_template_node, job_nodes[i], node_ids_map, 'success_nodes') + + for i in range(0, 9): + job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1]) + + From 05add1a751577ad38d04c7512d059d7568675895 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 13 Sep 2016 15:34:13 -0400 Subject: [PATCH 33/47] Add --unsafe-perm to ui-deps-built MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Node thinks we’re stupid. https://github.com/nodejs/node-gyp/issues/454 --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index feedbab684..4323239aff 100644 --- a/Makefile +++ b/Makefile @@ -472,7 +472,7 @@ test_jenkins : test_coverage # -------------------------------------- ui-deps-built: awx/ui/package.json - $(NPM_BIN) --prefix awx/ui install awx/ui + $(NPM_BIN) --unsafe-perm --prefix awx/ui install awx/ui touch awx/ui/.deps_built ui-docker-machine: ui-deps-built From ab395b00095366c8fdba00a531dcb4782af915dd Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 14 Sep 2016 11:42:13 -0400 Subject: [PATCH 34/47] Integrate callback receiver refactoring * Drop ZMQ as the communication mechanism between job_event_callback and callback_receiver * Setup queue and exchange for callback broker communication * Refactor event plugin and callback receiver to efficiently handle message submission and processing * Integrate django caching for parent processing --- .../commands/run_callback_receiver.py | 333 +++++------------- .../migrations/0034_v310_jobevent_uuid.py | 19 + awx/main/models/jobs.py | 5 + awx/main/tasks.py | 3 +- awx/plugins/callback/job_event_callback.py | 99 +++--- awx/settings/development.py | 2 + requirements/requirements.txt | 2 +- requirements/requirements_ansible.txt | 1 + 8 files changed, 171 insertions(+), 293 deletions(-) create mode 100644 awx/main/migrations/0034_v310_jobevent_uuid.py diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py index 15b9b8f483..0e27ba06da 100644 --- a/awx/main/management/commands/run_callback_receiver.py +++ b/awx/main/management/commands/run_callback_receiver.py @@ -8,12 +8,17 @@ import datetime import logging import signal import time -from multiprocessing import Process, Queue -from Queue import Empty as QueueEmpty + +from kombu import Connection, Exchange, Queue +from kombu.mixins import ConsumerMixin +from kombu.log import get_logger +from kombu.utils import kwdict, reprcall +from kombu.utils.debug import setup_logging # Django from django.conf import settings from django.core.management.base import NoArgsCommand +from django.core.cache import cache from django.db import transaction, DatabaseError from django.utils.dateparse import parse_datetime from django.utils.timezone import FixedOffset @@ -25,156 +30,49 @@ from awx.main.socket_queue import Socket logger = 
logging.getLogger('awx.main.commands.run_callback_receiver') -WORKERS = 4 +class CallbackBrokerWorker(ConsumerMixin): -class CallbackReceiver(object): - def __init__(self): - self.parent_mappings = {} + def __init__(self, connection): + self.connection = connection - def run_subscriber(self, use_workers=True): - def shutdown_handler(active_workers): - def _handler(signum, frame): - try: - for active_worker in active_workers: - active_worker.terminate() - signal.signal(signum, signal.SIG_DFL) - os.kill(os.getpid(), signum) # Rethrow signal, this time without catching it - except Exception: - # TODO: LOG - pass - return _handler + def get_consumers(self, Consumer, channel): + return [Consumer(queues=[Queue(settings.CALLBACK_QUEUE, + Exchange(settings.CALLBACK_QUEUE, type='direct'), + routing_key=settings.CALLBACK_QUEUE)], + accept=['json'], + callbacks=[self.process_task])] - def check_pre_handle(data): - event = data.get('event', '') - if event == 'playbook_on_play_start': - return True - return False - - worker_queues = [] - - if use_workers: - connection.close() - for idx in range(WORKERS): - queue_actual = Queue(settings.JOB_EVENT_MAX_QUEUE_SIZE) - w = Process(target=self.callback_worker, args=(queue_actual, idx,)) - w.start() - if settings.DEBUG: - logger.info('Started worker %s' % str(idx)) - worker_queues.append([0, queue_actual, w]) - elif settings.DEBUG: - logger.warn('Started callback receiver (no workers)') - - main_process = Process( - target=self.callback_handler, - args=(use_workers, worker_queues,) - ) - main_process.daemon = True - main_process.start() - - signal.signal(signal.SIGINT, shutdown_handler([p[2] for p in worker_queues] + [main_process])) - signal.signal(signal.SIGTERM, shutdown_handler([p[2] for p in worker_queues] + [main_process])) - while True: - workers_changed = False - idx = 0 - for queue_worker in worker_queues: - if not queue_worker[2].is_alive(): - logger.warn("Worker %s was not alive, restarting" % str(queue_worker)) - workers_changed = True - queue_worker[2].join() - w = Process(target=self.callback_worker, args=(queue_worker[1], idx,)) - w.daemon = True - w.start() - signal.signal(signal.SIGINT, shutdown_handler([w])) - signal.signal(signal.SIGTERM, shutdown_handler([w])) - queue_worker[2] = w - idx += 1 - if workers_changed: - signal.signal(signal.SIGINT, shutdown_handler([p[2] for p in worker_queues] + [main_process])) - signal.signal(signal.SIGTERM, shutdown_handler([p[2] for p in worker_queues] + [main_process])) - if not main_process.is_alive(): - logger.error("Main process is not alive") - for queue_worker in worker_queues: - queue_worker[2].terminate() - break - time.sleep(0.1) - - def write_queue_worker(self, preferred_queue, worker_queues, message): - queue_order = sorted(range(WORKERS), cmp=lambda x, y: -1 if x==preferred_queue else 0) - for queue_actual in queue_order: - try: - worker_actual = worker_queues[queue_actual] - worker_actual[1].put(message, block=True, timeout=2) - worker_actual[0] += 1 - return queue_actual - except Exception: - logger.warn("Could not write to queue %s" % preferred_queue) - continue - return None - - def callback_handler(self, use_workers, worker_queues): - total_messages = 0 - last_parent_events = {} - with Socket('callbacks', 'r') as callbacks: - for message in callbacks.listen(): - total_messages += 1 - if 'ad_hoc_command_id' in message: - self.process_ad_hoc_event(message) - elif not use_workers: - self.process_job_event(message) - else: - job_parent_events = last_parent_events.get(message['job_id'], {}) - 
if message['event'] in ('playbook_on_play_start', 'playbook_on_stats', 'playbook_on_vars_prompt'): - parent = job_parent_events.get('playbook_on_start', None) - elif message['event'] in ('playbook_on_notify', - 'playbook_on_setup', - 'playbook_on_task_start', - 'playbook_on_no_hosts_matched', - 'playbook_on_no_hosts_remaining', - 'playbook_on_include', - 'playbook_on_import_for_host', - 'playbook_on_not_import_for_host'): - parent = job_parent_events.get('playbook_on_play_start', None) - elif message['event'].startswith('runner_on_') or message['event'].startswith('runner_item_on_'): - list_parents = [] - list_parents.append(job_parent_events.get('playbook_on_setup', None)) - list_parents.append(job_parent_events.get('playbook_on_task_start', None)) - list_parents = sorted(filter(lambda x: x is not None, list_parents), cmp=lambda x, y: y.id - x.id) - parent = list_parents[0] if len(list_parents) > 0 else None - else: - parent = None - if parent is not None: - message['parent'] = parent.id - if 'created' in message: - del(message['created']) - if message['event'] in ('playbook_on_start', 'playbook_on_play_start', - 'playbook_on_setup', 'playbook_on_task_start'): - job_parent_events[message['event']] = self.process_job_event(message) - else: - if message['event'] == 'playbook_on_stats': - job_parent_events = {} - - actual_queue = self.write_queue_worker(total_messages % WORKERS, worker_queues, message) - # NOTE: It might be better to recycle the entire callback receiver process if one or more of the queues are too full - # the drawback is that if we under extremely high load we may be legitimately taking a while to process messages - if actual_queue is None: - logger.error("All queues full!") - sys.exit(1) - last_parent_events[message['job_id']] = job_parent_events - - @transaction.atomic - def process_job_event(self, data): - # Sanity check: Do we need to do anything at all? - event = data.get('event', '') - parent_id = data.get('parent', None) - if not event or 'job_id' not in data: - return + def process_task(self, body, message): + try: + if "event" not in body: + raise Exception("Payload does not have an event") + if "job_id" not in body: + raise Exception("Payload does not have a job_id") + if settings.DEBUG: + logger.info("Body: {}".format(body)) + logger.info("Message: {}".format(message)) + self.process_job_event(body) + except Exception as exc: + import traceback + traceback.print_exc() + logger.error('Callback Task Processor Raised Exception: %r', exc) + message.ack() + def process_job_event(self, payload): # Get the correct "verbose" value from the job. # If for any reason there's a problem, just use 0. + if 'ad_hoc_command_id' in payload: + event_type_key = 'ad_hoc_command_id' + event_object_type = AdHocCommand + else: + event_type_key = 'job_id' + event_object_type = Job + try: - verbose = Job.objects.get(id=data['job_id']).verbosity + verbose = event_object_type.objects.get(id=payload[event_type_key]).verbosity except Exception as e: - verbose = 0 + verbose=0 + # TODO: cache # Convert the datetime for the job event's creation appropriately, # and include a time zone for it. @@ -182,120 +80,58 @@ class CallbackReceiver(object): # In the event of any issue, throw it out, and Django will just save # the current time. 
try:
-            if not isinstance(data['created'], datetime.datetime):
-                data['created'] = parse_datetime(data['created'])
-            if not data['created'].tzinfo:
-                data['created'] = data['created'].replace(tzinfo=FixedOffset(0))
+            if not isinstance(payload['created'], datetime.datetime):
+                payload['created'] = parse_datetime(payload['created'])
+            if not payload['created'].tzinfo:
+                payload['created'] = payload['created'].replace(tzinfo=FixedOffset(0))
         except (KeyError, ValueError):
-            data.pop('created', None)
+            payload.pop('created', None)
 
-        # Print the data to stdout if we're in DEBUG mode.
-        if settings.DEBUG:
-            print(data)
+        event_uuid = payload.get("uuid", '')
+        parent_event_uuid = payload.get("parent_uuid", '')
 
         # Sanity check: Don't honor keys that we don't recognize.
-        for key in data.keys():
-            if key not in ('job_id', 'event', 'event_data',
-                           'created', 'counter'):
-                data.pop(key)
+        for key in payload.keys():
+            if key not in (event_type_key, 'event', 'event_data',
+                           'created', 'counter', 'uuid'):
+                payload.pop(key)
 
-        # Save any modifications to the job event to the database.
-        # If we get a database error of some kind, bail out.
         try:
             # If we're not in verbose mode, wipe out any module
             # arguments.
-            res = data['event_data'].get('res', {})
+            res = payload['event_data'].get('res', {})
             if isinstance(res, dict):
                 i = res.get('invocation', {})
                 if verbose == 0 and 'module_args' in i:
                     i['module_args'] = ''
 
-            # Create a new JobEvent object.
-            job_event = JobEvent(**data)
-            if parent_id is not None:
-                job_event.parent = JobEvent.objects.get(id=parent_id)
-            job_event.save(post_process=True)
-
-            # Retrun the job event object.
-            return job_event
+            if 'ad_hoc_command_id' in payload:
+                ad_hoc_command_event = AdHocCommandEvent.objects.create(**payload)
+                return
+
+            j = JobEvent(**payload)
+            if payload['event'] == 'playbook_on_start':
+                j.save()
+                cache.set("{}_{}".format(payload['job_id'], event_uuid), j.id, 300)
+                return
+            else:
+                if parent_event_uuid:
+                    parent_id = cache.get("{}_{}".format(payload['job_id'], parent_event_uuid), None)
+                    if parent_id is None:
+                        parent_id_obj = JobEvent.objects.filter(uuid=parent_event_uuid, job_id=payload['job_id'])
+                        if parent_id_obj.exists(): #Problematic if not there, means the parent hasn't been written yet... TODO
+                            j.parent_id = parent_id_obj[0].id
+                        print("Settings cache: {}_{} with value {}".format(payload['job_id'], parent_event_uuid, j.parent_id))
+                        cache.set("{}_{}".format(payload['job_id'], parent_event_uuid), j.parent_id, 300)
+                    else:
+                        print("Cache hit")
+                        j.parent_id = parent_id
+                j.save()
+                if event_uuid:
+                    cache.set("{}_{}".format(payload['job_id'], event_uuid), j.id, 300)
         except DatabaseError as e:
-            # Log the error and bail out.
-            logger.error('Database error saving job event: %s', e)
-            return None
+            logger.error("Database Error Saving Job Event: {}".format(e))
 
-    @transaction.atomic
-    def process_ad_hoc_event(self, data):
-        # Sanity check: Do we need to do anything at all?
-        event = data.get('event', '')
-        if not event or 'ad_hoc_command_id' not in data:
-            return
-
-        # Get the correct "verbose" value from the job.
-        # If for any reason there's a problem, just use 0.
-        try:
-            verbose = AdHocCommand.objects.get(id=data['ad_hoc_command_id']).verbosity
-        except Exception as e:
-            verbose = 0
-
-        # Convert the datetime for the job event's creation appropriately,
-        # and include a time zone for it.
-        #
-        # In the event of any issue, throw it out, and Django will just save
-        # the current time.
- try: - if not isinstance(data['created'], datetime.datetime): - data['created'] = parse_datetime(data['created']) - if not data['created'].tzinfo: - data['created'] = data['created'].replace(tzinfo=FixedOffset(0)) - except (KeyError, ValueError): - data.pop('created', None) - - # Print the data to stdout if we're in DEBUG mode. - if settings.DEBUG: - print(data) - - # Sanity check: Don't honor keys that we don't recognize. - for key in data.keys(): - if key not in ('ad_hoc_command_id', 'event', 'event_data', - 'created', 'counter'): - data.pop(key) - - # Save any modifications to the ad hoc command event to the database. - # If we get a database error of some kind, bail out. - try: - # If we're not in verbose mode, wipe out any module - # arguments. FIXME: Needed for adhoc? - res = data['event_data'].get('res', {}) - if isinstance(res, dict): - i = res.get('invocation', {}) - if verbose == 0 and 'module_args' in i: - i['module_args'] = '' - - # Create a new AdHocCommandEvent object. - ad_hoc_command_event = AdHocCommandEvent.objects.create(**data) - - # Retrun the ad hoc comamnd event object. - return ad_hoc_command_event - except DatabaseError as e: - # Log the error and bail out. - logger.error('Database error saving ad hoc command event: %s', e) - return None - - def callback_worker(self, queue_actual, idx): - messages_processed = 0 - while True: - try: - message = queue_actual.get(block=True, timeout=1) - except QueueEmpty: - continue - except Exception as e: - logger.error("Exception on listen socket, restarting: " + str(e)) - break - self.process_job_event(message) - messages_processed += 1 - if messages_processed >= settings.JOB_EVENT_RECYCLE_THRESHOLD: - logger.info("Shutting down message receiver") - break class Command(NoArgsCommand): ''' @@ -306,9 +142,10 @@ class Command(NoArgsCommand): help = 'Launch the job callback receiver' def handle_noargs(self, **options): - cr = CallbackReceiver() - try: - cr.run_subscriber() - except KeyboardInterrupt: - pass + with Connection(settings.BROKER_URL) as conn: + try: + worker = CallbackBrokerWorker(conn) + worker.run() + except KeyboardInterrupt: + print('Terminating Callback Receiver') diff --git a/awx/main/migrations/0034_v310_jobevent_uuid.py b/awx/main/migrations/0034_v310_jobevent_uuid.py new file mode 100644 index 0000000000..4feade28ef --- /dev/null +++ b/awx/main/migrations/0034_v310_jobevent_uuid.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0033_v310_modify_ha_instance'), + ] + + operations = [ + migrations.AddField( + model_name='jobevent', + name='uuid', + field=models.CharField(default=b'', max_length=1024, editable=False), + ), + ] diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index c233269ce9..ddff3f9343 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -950,6 +950,11 @@ class JobEvent(CreatedModifiedModel): default=False, editable=False, ) + uuid = models.CharField( + max_length=1024, + default='', + editable=False, + ) host = models.ForeignKey( 'Host', related_name='job_events_as_primary_host', diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 806a819e3e..b4617f505d 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -794,7 +794,8 @@ class RunJob(BaseTask): env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_path env['REST_API_URL'] = settings.INTERNAL_API_URL env['REST_API_TOKEN'] = job.task_auth_token or '' - 
env['CALLBACK_CONSUMER_PORT'] = str(settings.CALLBACK_CONSUMER_PORT) + env['CALLBACK_QUEUE'] = settings.CALLBACK_QUEUE + env['CALLBACK_CONNECTION'] = settings.BROKER_URL if getattr(settings, 'JOB_CALLBACK_DEBUG', False): env['JOB_CALLBACK_DEBUG'] = '2' elif settings.DEBUG: diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index a9c5b712ed..abec176b2f 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -39,37 +39,16 @@ import pwd import urlparse import re from copy import deepcopy +from uuid import uuid4 + +# Kombu +from kombu import Connection, Exchange, Producer # Requests import requests -# ZeroMQ -import zmq - import psutil -# Only use statsd if there's a statsd host in the environment -# otherwise just do a noop. -# NOTE: I've disabled this for the time being until we sort through the venv dependency around this -# if os.environ.get('GRAPHITE_PORT_8125_UDP_ADDR'): -# from statsd import StatsClient -# statsd = StatsClient(host=os.environ['GRAPHITE_PORT_8125_UDP_ADDR'], -# port=8125, -# prefix='tower.job.event_callback', -# maxudpsize=512) -# else: -# from statsd import StatsClient -# class NoStatsClient(StatsClient): -# def __init__(self, *args, **kwargs): -# pass -# def _prepare(self, stat, value, rate): -# pass -# def _send_stat(self, stat, value, rate): -# pass -# def _send(self, *args, **kwargs): -# pass -# statsd = NoStatsClient() - CENSOR_FIELD_WHITELIST = [ 'msg', 'failed', @@ -124,6 +103,7 @@ class TokenAuth(requests.auth.AuthBase): return request +# TODO: non v2_ events are deprecated and should be purge/refactored out class BaseCallbackModule(object): ''' Callback module for logging ansible-playbook job events via the REST API. @@ -132,12 +112,16 @@ class BaseCallbackModule(object): def __init__(self): self.base_url = os.getenv('REST_API_URL', '') self.auth_token = os.getenv('REST_API_TOKEN', '') - self.callback_consumer_port = os.getenv('CALLBACK_CONSUMER_PORT', '') - self.context = None - self.socket = None + self.callback_connection = os.getenv('CALLBACK_CONNECTION', None) + self.connection_queue = os.getenv('CALLBACK_QUEUE', '') + self.connection = None + self.exchange = None self._init_logging() self._init_connection() self.counter = 0 + self.active_playbook = None + self.active_play = None + self.active_task = None def _init_logging(self): try: @@ -158,15 +142,11 @@ class BaseCallbackModule(object): self.logger.propagate = False def _init_connection(self): - self.context = None - self.socket = None + self.connection = None def _start_connection(self): - self.context = zmq.Context() - self.socket = self.context.socket(zmq.REQ) - self.socket.setsockopt(zmq.RCVTIMEO, 4000) - self.socket.setsockopt(zmq.LINGER, 2000) - self.socket.connect(self.callback_consumer_port) + self.connection = Connection(self.callback_connection) + self.exchange = Exchange(self.connection_queue, type='direct') def _post_job_event_queue_msg(self, event, event_data): self.counter += 1 @@ -176,6 +156,29 @@ class BaseCallbackModule(object): 'counter': self.counter, 'created': datetime.datetime.utcnow().isoformat(), } + if event in ('playbook_on_play_start', + 'playbook_on_stats', + 'playbook_on_vars_prompt'): + msg['parent_uuid'] = str(self.active_playbook) + elif event in ('playbook_on_notify', + 'playbook_on_setup', + 'playbook_on_task_start', + 'playbook_on_no_hosts_matched', + 'playbook_on_no_hosts_remaining', + 'playbook_on_include', + 'playbook_on_import_for_host', + 
'playbook_on_not_import_for_host'): + msg['parent_uuid'] = str(self.active_play) + elif event.startswith('runner_on_') or event.startswith('runner_item_on_'): + msg['parent_uuid'] = str(self.active_task) + else: + msg['parent_uuid'] = '' + + if "uuid" in event_data: + msg['uuid'] = str(event_data['uuid']) + else: + msg['uuid'] = '' + if getattr(self, 'job_id', None): msg['job_id'] = self.job_id if getattr(self, 'ad_hoc_command_id', None): @@ -192,11 +195,16 @@ class BaseCallbackModule(object): self.connection_pid = active_pid if self.connection_pid != active_pid: self._init_connection() - if self.context is None: + if self.connection is None: self._start_connection() - self.socket.send_json(msg) - self.socket.recv() + producer = Producer(self.connection) + producer.publish(msg, + serializer='json', + compression='bzip2', + exchange=self.exchange, + declare=[self.exchange], + routing_key=self.connection_queue) return except Exception, e: self.logger.info('Publish Job Event Exception: %r, retry=%d', e, @@ -230,7 +238,7 @@ class BaseCallbackModule(object): if 'res' in event_data: event_data['res'] = censor(deepcopy(event_data['res'])) - if self.callback_consumer_port: + if self.callback_connection: self._post_job_event_queue_msg(event, event_data) else: self._post_rest_api_event(event, event_data) @@ -416,7 +424,9 @@ class JobCallbackModule(BaseCallbackModule): def v2_playbook_on_start(self, playbook): # NOTE: the playbook parameter was added late in Ansible 2.0 development # so we don't currently utilize but could later. - self.playbook_on_start() + # NOTE: Ansible doesn't generate a UUID for playbook_on_start so we'll do it for them + self.active_playbook = str(uuid4()) + self._log_event('playbook_on_start', uuid=self.active_playbook) def playbook_on_notify(self, host, handler): self._log_event('playbook_on_notify', host=host, handler=handler) @@ -446,14 +456,16 @@ class JobCallbackModule(BaseCallbackModule): is_conditional=is_conditional) def v2_playbook_on_task_start(self, task, is_conditional): - self._log_event('playbook_on_task_start', task=task, + self.active_task = task._uuid + self._log_event('playbook_on_task_start', task=task, uuid=str(task._uuid), name=task.get_name(), is_conditional=is_conditional) def v2_playbook_on_cleanup_task_start(self, task): # re-using playbook_on_task_start event here for this v2-specific # event, though we may consider any changes necessary to distinguish # this from a normal task - self._log_event('playbook_on_task_start', task=task, + self.active_task = task._uuid + self._log_event('playbook_on_task_start', task=task, uuid=str(task._uuid), name=task.get_name()) def playbook_on_vars_prompt(self, varname, private=True, prompt=None, @@ -507,7 +519,8 @@ class JobCallbackModule(BaseCallbackModule): play.name = ','.join(play.hosts) else: play.name = play.hosts - self._log_event('playbook_on_play_start', name=play.name, + self.active_play = play._uuid + self._log_event('playbook_on_play_start', name=play.name, uuid=str(play._uuid), pattern=play.hosts) def playbook_on_stats(self, stats): diff --git a/awx/settings/development.py b/awx/settings/development.py index 4c727e0bdc..722f397900 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -28,6 +28,8 @@ if 'celeryd' in sys.argv: CALLBACK_CONSUMER_PORT = "tcp://127.0.0.1:5557" CALLBACK_QUEUE_PORT = "ipc:///tmp/callback_receiver_dev.ipc" +CALLBACK_QUEUE = "callback_tasks" + # Enable PROOT for tower-qa integration tests AWX_PROOT_ENABLED = True diff --git a/requirements/requirements.txt 
b/requirements/requirements.txt index fb5872f572..433ae22e00 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -50,7 +50,7 @@ jsonpatch==1.12 jsonpointer==1.10 jsonschema==2.5.1 keyring==4.1 -kombu==3.0.30 +kombu==3.0.35 apache-libcloud==0.20.1 lxml==3.4.4 Markdown==2.4.1 diff --git a/requirements/requirements_ansible.txt b/requirements/requirements_ansible.txt index b35cb6fcbb..fe9fe45aed 100644 --- a/requirements/requirements_ansible.txt +++ b/requirements/requirements_ansible.txt @@ -25,6 +25,7 @@ jsonpatch==1.12 jsonpointer==1.10 jsonschema==2.5.1 keyring==4.1 +kombu==3.0.35 lxml==3.4.4 mock==1.0.1 monotonic==0.6 From 3969cfa2a0ce210b76ba19e835714d3ee50c7b77 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 08:45:13 -0400 Subject: [PATCH 35/47] Revert "use flake8 inside tower env" This reverts commit 90cdc5c83cb495e0d448f8e9eb7381d5fd84e9d4. --- Makefile | 3 --- 1 file changed, 3 deletions(-) diff --git a/Makefile b/Makefile index 64ecb71433..5cbc9ac400 100644 --- a/Makefile +++ b/Makefile @@ -425,9 +425,6 @@ pep8: reports @(set -o pipefail && $@ | tee reports/$@.report) flake8: reports - @if [ "$(VENV_BASE)" ]; then \ - . $(VENV_BASE)/tower/bin/activate; \ - fi; \ @$@ --output-file=reports/$@.report pyflakes: reports From 46da83b87f0fefb8d8d37cf862878e202269dd4c Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 08:52:12 -0400 Subject: [PATCH 36/47] fix flake8 --- awx/sso/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py index 347aedfeee..6596e4bf78 100644 --- a/awx/sso/__init__.py +++ b/awx/sso/__init__.py @@ -8,7 +8,7 @@ import threading xmlsec_init_lock = threading.Lock() xmlsec_initialized = False -import dm.xmlsec.binding +import dm.xmlsec.binding # noqa original_xmlsec_initialize = dm.xmlsec.binding.initialize def xmlsec_initialize(*args, **kwargs): From 574a0fde058d69d6b894ddf42b074cf61db220d3 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 15 Sep 2016 10:15:14 -0400 Subject: [PATCH 37/47] Rename database migrations for devel integration --- ...v310_modify_ha_instance.py => 0034_v310_modify_ha_instance.py} | 0 .../{0034_v310_jobevent_uuid.py => 0035_v310_jobevent_uuid.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename awx/main/migrations/{0033_v310_modify_ha_instance.py => 0034_v310_modify_ha_instance.py} (100%) rename awx/main/migrations/{0034_v310_jobevent_uuid.py => 0035_v310_jobevent_uuid.py} (100%) diff --git a/awx/main/migrations/0033_v310_modify_ha_instance.py b/awx/main/migrations/0034_v310_modify_ha_instance.py similarity index 100% rename from awx/main/migrations/0033_v310_modify_ha_instance.py rename to awx/main/migrations/0034_v310_modify_ha_instance.py diff --git a/awx/main/migrations/0034_v310_jobevent_uuid.py b/awx/main/migrations/0035_v310_jobevent_uuid.py similarity index 100% rename from awx/main/migrations/0034_v310_jobevent_uuid.py rename to awx/main/migrations/0035_v310_jobevent_uuid.py From 0a24a4f9b7c6682da60433cd80e59e89be85c415 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 10:18:10 -0400 Subject: [PATCH 38/47] ignore import splat --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index ee1d85aa29..ea462fd7fa 100755 --- a/setup.cfg +++ b/setup.cfg @@ -17,5 +17,5 @@ ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W29 
exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data [flake8] -ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293,E731 +ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293,E731,F405 exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker,awx/settings/local_settings.py From cb8b6ea29f895bfb1845496dd128e61c0ea3e597 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 08:52:12 -0400 Subject: [PATCH 39/47] fix flake8 --- awx/sso/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py index 347aedfeee..6596e4bf78 100644 --- a/awx/sso/__init__.py +++ b/awx/sso/__init__.py @@ -8,7 +8,7 @@ import threading xmlsec_init_lock = threading.Lock() xmlsec_initialized = False -import dm.xmlsec.binding +import dm.xmlsec.binding # noqa original_xmlsec_initialize = dm.xmlsec.binding.initialize def xmlsec_initialize(*args, **kwargs): From f06b212266897d2142759443a5450904baeffb44 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 10:25:36 -0400 Subject: [PATCH 40/47] flake8 fixes --- awx/settings/defaults.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 89389c02c7..c05857cbd6 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -973,15 +973,6 @@ LOGGING = { 'backupCount': 5, 'formatter':'simple', }, - 'fact_receiver': { - 'level': 'WARNING', - 'class':'logging.handlers.RotatingFileHandler', - 'filters': ['require_debug_false'], - 'filename': os.path.join(LOG_ROOT, 'fact_receiver.log'), - 'maxBytes': 1024 * 1024 * 5, # 5 MB - 'backupCount': 5, - 'formatter':'simple', - }, 'system_tracking_migrations': { 'level': 'WARNING', 'class':'logging.handlers.RotatingFileHandler', From 3255a17856ac5966ed8162de08ede15124bbe3d8 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 15 Sep 2016 10:46:45 -0400 Subject: [PATCH 41/47] Exposing rabbitmq management console interface on compose --- tools/docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/docker-compose.yml b/tools/docker-compose.yml index 3c8f1def1f..2e5b18460d 100644 --- a/tools/docker-compose.yml +++ b/tools/docker-compose.yml @@ -31,6 +31,8 @@ services: image: memcached:alpine rabbitmq: image: rabbitmq:3-management + ports: + - "15672:15672" # Source Code Synchronization Container # sync: From c6f87607f24840e71164cd21bd505e32e7c5d826 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 15 Sep 2016 10:55:00 -0400 Subject: [PATCH 42/47] Shifting migrations after integration with devel for ha --- awx/main/migrations/0034_v310_modify_ha_instance.py | 2 +- awx/main/migrations/0035_v310_jobevent_uuid.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/awx/main/migrations/0034_v310_modify_ha_instance.py b/awx/main/migrations/0034_v310_modify_ha_instance.py index e4321f0235..ad245ca4b4 100644 --- a/awx/main/migrations/0034_v310_modify_ha_instance.py +++ b/awx/main/migrations/0034_v310_modify_ha_instance.py @@ -7,7 +7,7 @@ from django.db import migrations, models class Migration(migrations.Migration): 
dependencies = [ - ('main', '0032_v302_credential_permissions_update'), + ('main', '0033_v310_add_workflows'), ] operations = [ diff --git a/awx/main/migrations/0035_v310_jobevent_uuid.py b/awx/main/migrations/0035_v310_jobevent_uuid.py index 4feade28ef..bd667792e7 100644 --- a/awx/main/migrations/0035_v310_jobevent_uuid.py +++ b/awx/main/migrations/0035_v310_jobevent_uuid.py @@ -7,7 +7,7 @@ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ - ('main', '0033_v310_modify_ha_instance'), + ('main', '0034_v310_modify_ha_instance'), ] operations = [ From 2872409271a51d26f381f2171d3ee25c88a18488 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 15 Sep 2016 12:06:44 -0400 Subject: [PATCH 43/47] Integrate rabbitmqctl interface to cluster and standalone containers --- tools/docker-compose-cluster.yml | 2 ++ tools/docker-compose/haproxy.cfg | 17 +++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 1b1dee4041..c04e8e275c 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -12,6 +12,7 @@ services: - "8013:8013" - "1936:1936" - "5555:5555" + - "15672:15672" tower_1: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} hostname: tower_1 @@ -22,6 +23,7 @@ services: RABBITMQ_VHOST: / volumes: - "../:/tower_devel" + tower_2: image: gcr.io/ansible-tower-engineering/tower_devel:${TAG} hostname: tower_2 diff --git a/tools/docker-compose/haproxy.cfg b/tools/docker-compose/haproxy.cfg index 01d3c94a4a..d7f0a40094 100644 --- a/tools/docker-compose/haproxy.cfg +++ b/tools/docker-compose/haproxy.cfg @@ -22,6 +22,11 @@ frontend flower mode http default_backend flower_nodes +frontend rabbitctl + bind *:15672 + mode http + default_backend rabbitctl_nodes + backend nodes mode http balance roundrobin @@ -46,6 +51,18 @@ backend flower_nodes server tower_2 tower_2:5555 server tower_3 tower_3:5555 +backend rabbitctl_nodes + mode http + balance roundrobin + option forwardfor + option http-pretend-keepalive + http-request set-header X-Forwarded-Port %[dst_port] + http-request add-header X-Forwarded-Proto https if { ssl_fc } + #option httpchk HEAD / HTTP/1.1\r\nHost:localhost + server rabbitmq_1 rabbitmq_1:15672 + server rabbitmq_2 rabbitmq_2:15672 + server rabbitmq_3 rabbitmq_3:15672 + listen stats bind *:1936 stats enable From 3de4aae54804e54b09805b4016b06da4e2947897 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Thu, 15 Sep 2016 13:51:17 -0400 Subject: [PATCH 44/47] Fixing up HA induced flake8 issues --- awx/main/management/commands/_base_instance.py | 2 -- awx/main/management/commands/register_instance.py | 2 -- .../management/commands/run_callback_receiver.py | 15 +++------------ awx/main/managers.py | 1 - awx/main/middleware.py | 5 +---- awx/main/models/ha.py | 4 +--- awx/plugins/callback/job_event_callback.py | 14 +++++++------- awx/settings/defaults.py | 9 ++++----- 8 files changed, 16 insertions(+), 36 deletions(-) diff --git a/awx/main/management/commands/_base_instance.py b/awx/main/management/commands/_base_instance.py index 807abfb76d..ac42eced2a 100644 --- a/awx/main/management/commands/_base_instance.py +++ b/awx/main/management/commands/_base_instance.py @@ -7,8 +7,6 @@ from optparse import make_option from django.core.management.base import BaseCommand, CommandError from django.conf import settings -from awx.main.models import Project - class OptionEnforceError(Exception): def __init__(self, value): diff --git 
a/awx/main/management/commands/register_instance.py b/awx/main/management/commands/register_instance.py
index a7fc2f8011..e8ba1160f2 100644
--- a/awx/main/management/commands/register_instance.py
+++ b/awx/main/management/commands/register_instance.py
@@ -1,8 +1,6 @@
 # Copyright (c) 2015 Ansible, Inc.
 # All Rights Reserved
 
-from django.core.management.base import CommandError
-
 from awx.main.management.commands._base_instance import BaseCommandInstance
 
 from awx.main.models import Instance
diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py
index 0e27ba06da..dcb3906013 100644
--- a/awx/main/management/commands/run_callback_receiver.py
+++ b/awx/main/management/commands/run_callback_receiver.py
@@ -2,31 +2,22 @@
 # All Rights Reserved.
 
 # Python
-import os
-import sys
 import datetime
 import logging
-import signal
-import time
 
 from kombu import Connection, Exchange, Queue
 from kombu.mixins import ConsumerMixin
-from kombu.log import get_logger
-from kombu.utils import kwdict, reprcall
-from kombu.utils.debug import setup_logging
 
 # Django
 from django.conf import settings
 from django.core.management.base import NoArgsCommand
 from django.core.cache import cache
-from django.db import transaction, DatabaseError
+from django.db import DatabaseError
 from django.utils.dateparse import parse_datetime
 from django.utils.timezone import FixedOffset
-from django.db import connection
 
 # AWX
 from awx.main.models import * # noqa
-from awx.main.socket_queue import Socket
 
 logger = logging.getLogger('awx.main.commands.run_callback_receiver')
 
@@ -106,7 +97,7 @@ class CallbackBrokerWorker(ConsumerMixin):
                     i['module_args'] = ''
 
             if 'ad_hoc_command_id' in payload:
-                ad_hoc_command_event = AdHocCommandEvent.objects.create(**payload)
+                AdHocCommandEvent.objects.create(**payload)
                 return
 
@@ -119,7 +110,7 @@ class CallbackBrokerWorker(ConsumerMixin):
                     parent_id = cache.get("{}_{}".format(payload['job_id'], parent_event_uuid), None)
                     if parent_id is None:
                         parent_id_obj = JobEvent.objects.filter(uuid=parent_event_uuid, job_id=payload['job_id'])
-                        if parent_id_obj.exists(): #Problematic if not there, means the parent hasn't been written yet... TODO
+                        if parent_id_obj.exists(): # Problematic if not there, means the parent hasn't been written yet... 
TODO j.parent_id = parent_id_obj[0].id print("Settings cache: {}_{} with value {}".format(payload['job_id'], parent_event_uuid, j.parent_id)) cache.set("{}_{}".format(payload['job_id'], parent_event_uuid), j.parent_id, 300) diff --git a/awx/main/managers.py b/awx/main/managers.py index ca4578daf4..b7a7f6d908 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -4,7 +4,6 @@ import sys import socket -from django.conf import settings from django.db import models diff --git a/awx/main/middleware.py b/awx/main/middleware.py index fda98f1176..021ff85ad5 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -8,12 +8,9 @@ import uuid from django.contrib.auth.models import User from django.db.models.signals import post_save from django.db import IntegrityError -from django.http import HttpResponseRedirect -from django.template.response import TemplateResponse from django.utils.functional import curry -from awx import __version__ as version -from awx.main.models import ActivityStream, Instance +from awx.main.models import ActivityStream from awx.main.conf import tower_settings from awx.api.authentication import TokenAuthentication diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index a645c318e4..3f92aebc12 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -1,8 +1,6 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. -import functools - from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver @@ -11,7 +9,7 @@ from awx.main.managers import InstanceManager from awx.main.models.inventory import InventoryUpdate from awx.main.models.jobs import Job from awx.main.models.projects import ProjectUpdate -from awx.main.models.unified_jobs import UnifiedJob, CAN_CANCEL +from awx.main.models.unified_jobs import UnifiedJob __all__ = ('Instance', 'JobOrigin') diff --git a/awx/plugins/callback/job_event_callback.py b/awx/plugins/callback/job_event_callback.py index abec176b2f..1f0e41797d 100644 --- a/awx/plugins/callback/job_event_callback.py +++ b/awx/plugins/callback/job_event_callback.py @@ -161,13 +161,13 @@ class BaseCallbackModule(object): 'playbook_on_vars_prompt'): msg['parent_uuid'] = str(self.active_playbook) elif event in ('playbook_on_notify', - 'playbook_on_setup', - 'playbook_on_task_start', - 'playbook_on_no_hosts_matched', - 'playbook_on_no_hosts_remaining', - 'playbook_on_include', - 'playbook_on_import_for_host', - 'playbook_on_not_import_for_host'): + 'playbook_on_setup', + 'playbook_on_task_start', + 'playbook_on_no_hosts_matched', + 'playbook_on_no_hosts_remaining', + 'playbook_on_include', + 'playbook_on_import_for_host', + 'playbook_on_not_import_for_host'): msg['parent_uuid'] = str(self.active_play) elif event.startswith('runner_on_') or event.startswith('runner_item_on_'): msg['parent_uuid'] = str(self.active_task) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 174824d8d3..4b6ce52b7a 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -346,15 +346,14 @@ CELERY_QUEUES = ( Broadcast('projects'), ) CELERY_ROUTES = ({'awx.main.tasks.run_job': {'queue': 'jobs', - 'routing_key': 'jobs'}, + 'routing_key': 'jobs'}, 'awx.main.tasks.run_project_update': {'queue': 'projects'}, 'awx.main.tasks.run_inventory_update': {'queue': 'jobs', - 'routing_key': 'jobs'}, + 'routing_key': 'jobs'}, 'awx.main.tasks.run_ad_hoc_command': {'queue': 'jobs', - 'routing_key': 'jobs'}, + 'routing_key': 'jobs'}, 'awx.main.tasks.run_system_job': {'queue': 'jobs', - 
'routing_key': 'jobs'} -}) + 'routing_key': 'jobs'}}) CELERYBEAT_SCHEDULE = { 'tower_scheduler': { From c35c2d933190a8ee4ec6204ba5782ae33310facb Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 13:51:23 -0400 Subject: [PATCH 45/47] fix class method --- awx/main/models/workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 0182b40b59..3c95fb17e8 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -175,7 +175,7 @@ class WorkflowJobInheritNodesMixin(object): def _get_workflow_job_node_by_id(self, id): return WorkflowJobNode.objects.get(id=id) - def _get_all_by_type(node, node_type): + def _get_all_by_type(self, node, node_type): return getattr(node, node_type).all() def inherit_job_template_workflow_nodes(self): From 77cc7f9d008e9270d0ff27c578b1513864b0a834 Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 15:03:08 -0400 Subject: [PATCH 46/47] functional workflow test for issue #3498 --- .../tests/functional/models/test_workflow.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 awx/main/tests/functional/models/test_workflow.py diff --git a/awx/main/tests/functional/models/test_workflow.py b/awx/main/tests/functional/models/test_workflow.py new file mode 100644 index 0000000000..53e2b75481 --- /dev/null +++ b/awx/main/tests/functional/models/test_workflow.py @@ -0,0 +1,34 @@ + +# Python +import pytest + +# AWX +from awx.main.models.workflow import WorkflowJob, WorkflowJobTemplateNode + +class TestWorkflowJob: + @pytest.fixture + def workflow_job(self, workflow_job_template_factory): + wfjt = workflow_job_template_factory('blah').workflow_job_template + wfj = WorkflowJob.objects.create(workflow_job_template=wfjt) + + nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt) for i in range(0, 5)] + + nodes[0].success_nodes.add(nodes[1]) + nodes[1].success_nodes.add(nodes[2]) + + nodes[0].failure_nodes.add(nodes[3]) + nodes[3].failure_nodes.add(nodes[4]) + + return wfj + + @pytest.mark.django_db + def test_inherit_job_template_workflow_nodes(self, mocker, workflow_job): + workflow_job.inherit_job_template_workflow_nodes() + + nodes = WorkflowJob.objects.get(id=workflow_job.id).workflow_job_nodes.all().order_by('created') + assert nodes[0].success_nodes.filter(id=nodes[1].id).exists() == True + assert nodes[1].success_nodes.filter(id=nodes[2].id).exists() == True + assert nodes[0].failure_nodes.filter(id=nodes[3].id).exists() == True + assert nodes[3].failure_nodes.filter(id=nodes[4].id).exists() == True + + From 7f614a53db193857bdeab450ad1769544dfa470a Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Thu, 15 Sep 2016 15:28:55 -0400 Subject: [PATCH 47/47] flake8 fix --- awx/main/tests/functional/models/test_workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/awx/main/tests/functional/models/test_workflow.py b/awx/main/tests/functional/models/test_workflow.py index 53e2b75481..48b0fecaf5 100644 --- a/awx/main/tests/functional/models/test_workflow.py +++ b/awx/main/tests/functional/models/test_workflow.py @@ -26,9 +26,9 @@ class TestWorkflowJob: workflow_job.inherit_job_template_workflow_nodes() nodes = WorkflowJob.objects.get(id=workflow_job.id).workflow_job_nodes.all().order_by('created') - assert nodes[0].success_nodes.filter(id=nodes[1].id).exists() == True - assert nodes[1].success_nodes.filter(id=nodes[2].id).exists() == True - assert 
nodes[0].failure_nodes.filter(id=nodes[3].id).exists() == True - assert nodes[3].failure_nodes.filter(id=nodes[4].id).exists() == True + assert nodes[0].success_nodes.filter(id=nodes[1].id).exists() + assert nodes[1].success_nodes.filter(id=nodes[2].id).exists() + assert nodes[0].failure_nodes.filter(id=nodes[3].id).exists() + assert nodes[3].failure_nodes.filter(id=nodes[4].id).exists()
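
The kombu wiring that PATCH 34 introduces (and PATCH 44 cleans up) can be exercised outside of Django with a short standalone script. The sketch below is illustrative only: the broker URL assumes the RabbitMQ container from tools/docker-compose.yml with the stock guest account, and the 'callback_tasks' queue name mirrors the CALLBACK_QUEUE value in development.py; neither is resolved from AWX settings here. It publishes one JSON job event to the direct exchange, then drains it with a ConsumerMixin worker shaped like CallbackBrokerWorker.

    # Standalone sketch of the callback transport; BROKER_URL and QUEUE_NAME
    # are assumed values for a local development broker, not AWX settings.
    from kombu import Connection, Exchange, Producer, Queue
    from kombu.mixins import ConsumerMixin

    BROKER_URL = 'amqp://guest:guest@localhost:5672//'  # assumed local RabbitMQ
    QUEUE_NAME = 'callback_tasks'                       # mirrors CALLBACK_QUEUE

    exchange = Exchange(QUEUE_NAME, type='direct')
    queue = Queue(QUEUE_NAME, exchange, routing_key=QUEUE_NAME)


    class EchoWorker(ConsumerMixin):
        """Minimal stand-in for CallbackBrokerWorker: one JSON-only consumer."""

        def __init__(self, connection):
            self.connection = connection

        def get_consumers(self, Consumer, channel):
            return [Consumer(queues=[queue], accept=['json'],
                             callbacks=[self.process_task])]

        def process_task(self, body, message):
            print('received %s for job %s' % (body.get('event'), body.get('job_id')))
            message.ack()  # ack unconditionally, as the receiver above does
            self.should_stop = True  # one message is enough for the demo


    def publish_event(msg):
        # Producer side, as in the callback plugin. Declaring the queue (not
        # just the exchange) binds it up front, so an event published before
        # the worker starts is held by the broker instead of being dropped
        # by the direct exchange.
        with Connection(BROKER_URL) as conn:
            Producer(conn).publish(msg, serializer='json', exchange=exchange,
                                   declare=[queue], routing_key=QUEUE_NAME)


    if __name__ == '__main__':
        publish_event({'event': 'playbook_on_start', 'job_id': 1, 'uuid': 'demo-uuid'})
        with Connection(BROKER_URL) as conn:
            EchoWorker(conn).run()  # exits after the first message

Note the one deliberate difference from the receiver above: the producer passes declare=[queue] rather than declare=[exchange], so the publish-then-consume ordering in __main__ works even though no consumer exists yet when the event is published.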