From 2ffa7a91eccb0f0deb1997dc3877b9945ba0908e Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 20 Sep 2016 17:16:26 -0400 Subject: [PATCH 01/24] Workflow RBAC and prompting basic changes --- awx/api/serializers.py | 105 ++++++++---- awx/api/views.py | 35 ++-- awx/main/access.py | 159 ++++++++++++------ .../0036_v310_workflow_rbac_prompts.py | 82 +++++++++ awx/main/models/rbac.py | 2 +- awx/main/models/workflow.py | 128 ++++++++++++-- awx/main/tests/unit/test_access.py | 16 ++ 7 files changed, 409 insertions(+), 118 deletions(-) create mode 100644 awx/main/migrations/0036_v310_workflow_rbac_prompts.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 30325eba01..ee7784afad 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -110,6 +110,20 @@ def reverse_gfk(content_object): camelcase_to_underscore(content_object.__class__.__name__): content_object.get_absolute_url() } +def vars_validate_or_raise(vars_str): + # vars must be blank, a valid JSON or YAML dict, or ... + try: + json.loads((vars_str or '').strip() or '{}') + return vars_str + except ValueError: + pass + try: + yaml.safe_load(vars_str) + return vars_str + except yaml.YAMLError: + pass + raise serializers.ValidationError('Must be valid JSON or YAML.') + class BaseSerializerMetaclass(serializers.SerializerMetaclass): ''' @@ -996,14 +1010,7 @@ class ProjectUpdateCancelSerializer(ProjectUpdateSerializer): class BaseSerializerWithVariables(BaseSerializer): def validate_variables(self, value): - try: - json.loads(value.strip() or '{}') - except ValueError: - try: - yaml.safe_load(value) - except yaml.YAMLError: - raise serializers.ValidationError('Must be valid JSON or YAML.') - return value + return vars_validate_or_raise(value) class InventorySerializer(BaseSerializerWithVariables): @@ -1326,18 +1333,7 @@ class InventorySourceOptionsSerializer(BaseSerializer): return res def validate_source_vars(self, value): - # source_env must be blank, a valid JSON or YAML dict, or ... - try: - json.loads((value or '').strip() or '{}') - return value - except ValueError: - pass - try: - yaml.safe_load(value) - return value - except yaml.YAMLError: - pass - raise serializers.ValidationError('Must be valid JSON or YAML.') + return vars_validate_or_raise(value) def validate(self, attrs): # TODO: Validate source, validate source_regions @@ -1900,18 +1896,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): return super(JobTemplateSerializer, self).validate(attrs) def validate_extra_vars(self, value): - # extra_vars must be blank, a valid JSON or YAML dict, or ... 
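The vars_validate_or_raise helper introduced above is the pattern this series reuses for every variables field: accept blank input, try JSON first, fall back to YAML, and reject anything that parses as neither. The following is a standalone sketch of the same pattern; the plain ValueError is an assumed stand-in for DRF's serializers.ValidationError so the snippet runs outside the API layer.

import json
import yaml

def validate_vars(vars_str):
    # Blank input counts as an empty JSON dict.
    try:
        json.loads((vars_str or '').strip() or '{}')
        return vars_str
    except ValueError:
        pass
    # Anything that is not valid JSON gets a second chance as YAML.
    try:
        yaml.safe_load(vars_str)
        return vars_str
    except yaml.YAMLError:
        pass
    raise ValueError('Must be valid JSON or YAML.')

validate_vars('{"a": 1}')   # valid JSON passes
validate_vars('a: 1')       # valid YAML passes
validate_vars('')           # blank passes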
- try: - json.loads((value or '').strip() or '{}') - return value - except ValueError: - pass - try: - yaml.safe_load(value) - return value - except yaml.YAMLError: - pass - raise serializers.ValidationError('Must be valid JSON or YAML.') + return vars_validate_or_raise(value) class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): @@ -2178,7 +2163,7 @@ class SystemJobCancelSerializer(SystemJobSerializer): class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer): class Meta: model = WorkflowJobTemplate - fields = ('*',) + fields = ('*', 'extra_vars', 'organization') def get_related(self, obj): res = super(WorkflowJobTemplateSerializer, self).get_related(obj) @@ -2195,6 +2180,9 @@ class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer): )) return res + def validate_extra_vars(self, value): + return vars_validate_or_raise(value) + # TODO: class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer): pass @@ -2226,10 +2214,15 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer) pass class WorkflowNodeBaseSerializer(BaseSerializer): + job_type = serializers.SerializerMethodField() + job_tags = serializers.SerializerMethodField() + limit = serializers.SerializerMethodField() + skip_tags = serializers.SerializerMethodField() class Meta: - # TODO: workflow_job and job read-only - fields = ('id', 'url', 'related', 'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',) + fields = ('id', 'url', 'related', 'unified_job_template', + 'inventory', 'credential', 'job_type', 'job_tags', 'skip_tags', 'limit') + read_only_fields = ('success_nodes', 'failure_nodes', 'always_nodes') def get_related(self, obj): res = super(WorkflowNodeBaseSerializer, self).get_related(obj) @@ -2237,6 +2230,19 @@ class WorkflowNodeBaseSerializer(BaseSerializer): res['unified_job_template'] = obj.unified_job_template.get_absolute_url() return res + def get_job_type(self, obj): + return obj.char_prompts.get('job_type', None) + + def get_job_tags(self, obj): + return obj.char_prompts.get('job_tags', None) + + def get_skip_tags(self, obj): + return obj.char_prompts.get('skip_tags', None) + + def get_limit(self, obj): + return obj.char_prompts.get('limit', None) + + class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): class Meta: model = WorkflowJobTemplateNode @@ -2251,9 +2257,36 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): res['workflow_job_template'] = reverse('api:workflow_job_template_detail', args=(obj.workflow_job_template.pk,)) return res + def to_internal_value(self, data): + internal_value = super(WorkflowNodeBaseSerializer, self).to_internal_value(data) + char_prompts = self.extract_char_prompts(data) + internal_value['char_prompts'] = char_prompts + return internal_value + + def extract_char_prompts(self, data): + char_prompts = {} + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']: + if data.get(fd, None): + char_prompts[fd] = data[fd] + return char_prompts + + def validate(self, attrs): + if 'char_prompts' in attrs: + if 'job_type' in attrs['char_prompts']: + job_types = [t for t, v in JOB_TYPE_CHOICES] + if attrs['char_prompts']['job_type'] not in job_types: + raise serializers.ValidationError({ + "job_type": "%s is not a valid job type. The choices are %s."
% ( attrs['char_prompts']['job_type'], job_types)}) + ujt_obj = attrs.get('unified_job_template', None) + if isinstance(ujt_obj, WorkflowJobTemplate): + raise serializers.ValidationError({"unified_job_template": "Cannot nest Workflow Job Templates inside of Workflow Job Templates"}) + return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs) + class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): class Meta: - model = WorkflowJobTemplateNode + # TODO: workflow_job and job read-only + model = WorkflowJobNode fields = ('*', 'job', 'workflow_job',) def get_related(self, obj): diff --git a/awx/api/views.py index efbbecf10e..e9742c56e9 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2613,34 +2613,45 @@ class JobTemplateObjectRolesList(SubListAPIView): content_type = ContentType.objects.get_for_model(self.parent_model) return Role.objects.filter(content_type=content_type, object_id=po.pk) -# TODO: -class WorkflowJobNodeList(ListCreateAPIView): +class WorkflowJobNodeList(ListAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeListSerializer new_in_310 = True -# TODO: -class WorkflowJobNodeDetail(RetrieveUpdateDestroyAPIView): +class WorkflowJobNodeDetail(RetrieveAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeDetailSerializer new_in_310 = True -# TODO: class WorkflowJobTemplateNodeList(ListCreateAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeListSerializer new_in_310 = True -# TODO: + def update_raw_data(self, data): + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']: + data[fd] = None + return super(WorkflowJobTemplateNodeList, self).update_raw_data(data) + class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeDetailSerializer new_in_310 = True + def update_raw_data(self, data): + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']: + data[fd] = None + try: + obj = self.get_object() + data.update(obj.char_prompts) + except Exception: + pass + return super(WorkflowJobTemplateNodeDetail, self).update_raw_data(data) + class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -2732,7 +2743,10 @@ class WorkflowJobTemplateLaunch(GenericAPIView): serializer_class = EmptySerializer def get(self, request, *args, **kwargs): - return Response({}) + data = {} + obj = self.get_object() + data['warnings'] = obj.get_warnings() + return Response(data) def post(self, request, *args, **kwargs): obj = self.get_object() @@ -2749,7 +2763,6 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeListSerializer - always_allow_superuser = True # TODO: RBAC parent_model = WorkflowJobTemplate relationship = 'workflow_job_template_nodes' parent_key = 'workflow_job_template' @@ -2763,17 +2776,11 @@ class WorkflowJobTemplateJobsList(SubListAPIView): relationship = 'jobs' parent_key = 'workflow_job_template' -# TODO: class WorkflowJobList(ListCreateAPIView): model = WorkflowJob serializer_class = WorkflowJobListSerializer - def get(self, request, *args, **kwargs): - if not request.user.is_superuser and not request.user.is_system_auditor: - raise PermissionDenied("Superuser privileges needed.") - return super(WorkflowJobList, self).get(request, *args, **kwargs) - # TODO: class WorkflowJobDetail(RetrieveDestroyAPIView): diff --git
 a/awx/main/access.py b/awx/main/access.py index c7eb368cad..d86be2fadc 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1141,7 +1141,12 @@ class WorkflowJobTemplateNodeAccess(BaseAccess): def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: - return self.model.objects.all() + qs = self.model.objects.all() + else: + qs = self.model.objects.filter( + workflow_job_template__in=WorkflowJobTemplate.accessible_objects( + self.user, 'read_role')) + return qs @check_superuser def can_read(self, obj): @@ -1164,37 +1169,35 @@ class WorkflowJobTemplateNodeAccess(BaseAccess): def can_delete(self, obj): return self.can_change(obj, None) -# TODO: class WorkflowJobNodeAccess(BaseAccess): ''' - I can see/use a WorkflowJobNode if I have permission to associated Workflow Job + I can see a WorkflowJobNode if I can see + the workflow job template associated with + the workflow job that the node belongs to. + + Any deletion or editing of individual nodes would undermine the integrity + of the graph structure. + Deletion must happen as a cascade delete from the workflow job. ''' model = WorkflowJobNode def get_queryset(self): if self.user.is_superuser or self.user.is_system_auditor: - return self.model.objects.all() + qs = self.model.objects.all() + else: + qs = self.model.objects.filter( + workflow_job__workflow_job_template__in=WorkflowJobTemplate.accessible_objects( + self.user, 'read_role')) + return qs - @check_superuser - def can_read(self, obj): - return True - - @check_superuser def can_add(self, data): - if not data: # So the browseable API will work - return True - - return True + return False - @check_superuser def can_change(self, obj, data): - if self.can_add(data) is False: - return False - - return True + return False def can_delete(self, obj): - return self.can_change(obj, None) + return False # TODO: class WorkflowJobTemplateAccess(BaseAccess): ''' I can only see/manage Workflow Job Templates if I'm a super user @@ -1209,7 +1212,8 @@ qs = self.model.objects.all() else: qs = self.model.accessible_objects(self.user, 'read_role') - return qs.select_related('created_by', 'modified_by', 'next_schedule').all() + return qs.select_related('created_by', 'modified_by', 'next_schedule', + 'admin_role', 'execute_role', 'read_role').all() @check_superuser def can_read(self, obj): @@ -1224,61 +1228,79 @@ class WorkflowJobTemplateAccess(BaseAccess): Users who are able to create deploy jobs can also run normal and check (dry run) jobs.
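Both node access classes above derive visibility from the parent workflow job template's read_role instead of keeping per-node permissions. A pure-Python sketch of that rule, with dicts and id sets as illustrative stand-ins for the Django models and Role membership:

def visible_nodes(nodes, readable_wfjt_ids, is_superuser=False):
    # Superusers and system auditors see everything; everyone else sees only
    # nodes whose parent workflow job template is readable to them.
    if is_superuser:
        return list(nodes)
    return [n for n in nodes if n['workflow_job_template'] in readable_wfjt_ids]

nodes = [
    {'id': 1, 'workflow_job_template': 10},
    {'id': 2, 'workflow_job_template': 20},
]
assert [n['id'] for n in visible_nodes(nodes, {10})] == [1]
assert len(visible_nodes(nodes, set(), is_superuser=True)) == 2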
''' if not data: # So the browseable API will work - return True + return Organization.accessible_objects(self.user, 'admin_role').exists() # if reference_obj is provided, determine if it can be copied reference_obj = data.pop('reference_obj', None) - - if 'survey_enabled' in data and data['survey_enabled']: - self.check_license(feature='surveys') - - if self.user.is_superuser: + if reference_obj: + for node in reference_obj.workflow_job_template_nodes.all(): + if node.inventory and self.user not in node.inventory.use_role: + return False + if node.credential and self.user not in node.credential.use_role: + return False + if node.unified_job_template: + if isinstance(node.unified_job_template, SystemJobTemplate): + if not self.user.is_superuser: + return False + elif isinstance(node.unified_job_template, JobTemplate): + if self.user not in node.unified_job_template.execute_role: + return False + elif isinstance(node.unified_job_template, Project): + if self.user not in node.unified_job_template.update_role: + return False + elif isinstance(node.unified_job_template, InventorySource): + if not self.user.can_access(InventorySource, 'start', node.unified_job_template): + return False + else: + return False return True - def get_value(Class, field): - if reference_obj: - return getattr(reference_obj, field, None) - else: - pk = get_pk_from_dict(data, field) - if pk: - return get_object_or_400(Class, pk=pk) - else: - return None + # will check this if surveys are added to WFJT + # if 'survey_enabled' in data and data['survey_enabled']: + # self.check_license(feature='surveys') - return False + org_pk = get_pk_from_dict(data, 'organization') + if not org_pk: + # only superusers can create or manage orphan WFJTs + return self.user.is_superuser + + org = get_object_or_400(Organization, pk=org_pk) + return self.user in org.admin_role def can_start(self, obj, validate_license=True): - # TODO: Are workflows allowed for all licenses ?? - # Check license.
- ''' if validate_license: + # check basic license, node count self.check_license() - if obj.job_type == PERM_INVENTORY_SCAN: - self.check_license(feature='system_tracking') - if obj.survey_enabled: - self.check_license(feature='surveys') - ''' + # if surveys are added to WFJTs, check license here + # if obj.survey_enabled: + # self.check_license(feature='surveys') # Super users can start any job if self.user.is_superuser: return True - return self.can_read(obj) - # TODO: We should use execute role rather than read role - #return self.user in obj.execute_role + return self.user in obj.execute_role def can_change(self, obj, data): - data_for_change = data - if self.user not in obj.admin_role and not self.user.is_superuser: - return False - if data is not None: - data = dict(data) + # # Check survey license if surveys are added to WFJTs + # if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']: + # self.check_license(feature='surveys') - if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']: - self.check_license(feature='surveys') + if self.user.is_superuser: return True - return self.can_read(obj) and self.can_add(data_for_change) + org_pk = get_pk_from_dict(data, 'organization') + if ('organization' not in data or + (org_pk is None and obj.organization is None) or + (obj.organization and obj.organization.pk == org_pk)): + # The simple case + return self.user in obj.admin_role + + # If it already has an organization set, must be admin of the org to change it + if obj.organization and self.user not in obj.organization.admin_role: + return False + org = get_object_or_400(Organization, pk=org_pk) + return self.user in org.admin_role def can_delete(self, obj): is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role @@ -1295,10 +1317,35 @@ class WorkflowJobTemplateAccess(BaseAccess): class WorkflowJobAccess(BaseAccess): ''' - I can only see Workflow Jobs if I'm a super user + I can only see Workflow Jobs if I can see the associated + workflow job template that it was created from. 
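The can_change branch above treats an organization move as a special case: a same-organization edit only requires the template's admin_role, while re-homing the WFJT requires admin rights on both the current and the target organization. A reduced, runnable sketch of that branch structure, with set membership standing in for the real Role checks:

def can_change_org(user, current_org, new_org, wfjt_admins, org_admins,
                   is_superuser=False):
    if is_superuser:
        return True
    if new_org == current_org:
        # No organization change: WFJT admin access is enough.
        return user in wfjt_admins
    # Moving between organizations requires admin rights on both sides.
    if current_org is not None and user not in org_admins.get(current_org, set()):
        return False
    return user in org_admins.get(new_org, set())

admins = {10: {'alice'}, 20: {'bob'}}
assert can_change_org('alice', 10, 10, {'alice'}, admins)      # same-org edit
assert not can_change_org('alice', 10, 20, {'alice'}, admins)  # not admin of target org
assert can_change_org('bob', None, 20, set(), admins)          # adopting an orphan WFJT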
''' model = WorkflowJob + def get_queryset(self): + if self.user.is_superuser or self.user.is_system_auditor: + qs = self.model.objects.all() + else: + qs = WorkflowJob.objects.filter( + workflow_job_template__in=WorkflowJobTemplate.accessible_objects( + self.user, 'read_role')) + return qs.select_related('created_by', 'modified_by') + + def can_add(self, data): + # Old add-start system for launching jobs is being deprecated, and + # not supported for new types of resources + return False + + def can_change(self, obj, data): + return False + + def can_delete(self, obj): + if obj.workflow_job_template is None: + # only superusers can delete orphaned workflow jobs + return self.user.is_superuser + return self.user in obj.workflow_job_template.admin_role + + class AdHocCommandAccess(BaseAccess): ''' I can only see/run ad hoc commands when: diff --git a/awx/main/migrations/0036_v310_workflow_rbac_prompts.py b/awx/main/migrations/0036_v310_workflow_rbac_prompts.py new file mode 100644 index 0000000000..d126ac9c1d --- /dev/null +++ b/awx/main/migrations/0036_v310_workflow_rbac_prompts.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonfield.fields +import django.db.models.deletion +import awx.main.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0035_v310_jobevent_uuid'), + ] + + operations = [ + migrations.AddField( + model_name='workflowjobnode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='credential', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='execute_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='organization', + field=models.ForeignKey(related_name='workflows', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Organization', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='read_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='credential', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='unified_job_template',
field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', default=None, blank=True, to='main.WorkflowJob', null=True), + ), + migrations.AlterField( + model_name='workflowjobtemplate', + name='admin_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), + ), + migrations.AlterField( + model_name='workflowjobtemplatenode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), + ] diff --git a/awx/main/models/rbac.py index 3cb016ffde..27977e9030 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -51,7 +51,7 @@ role_descriptions = { 'adhoc_role' : 'May run ad hoc commands on an inventory', 'admin_role' : 'Can manage all aspects of the %s', 'auditor_role' : 'Can view all settings for the %s', - 'execute_role' : 'May run the job template', + 'execute_role' : 'May run the %s', 'member_role' : 'User is a member of the %s', 'read_role' : 'May view settings for the %s', 'update_role' : 'May update project or inventory or group using the configured source update system', diff --git a/awx/main/models/workflow.py index 3c95fb17e8..2831244b2a 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -9,28 +9,28 @@ from django.db import models from django.core.urlresolvers import reverse #from django import settings as tower_settings +from jsonfield import JSONField + # AWX from awx.main.models import UnifiedJobTemplate, UnifiedJob from awx.main.models.notifications import JobNotificationMixin from awx.main.models.base import BaseModel, CreatedModifiedModel, VarsDictProperty from awx.main.models.rbac import ( ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + ROLE_SINGLETON_SYSTEM_AUDITOR ) from awx.main.fields import ImplicitRoleField +from awx.main.models.mixins import ResourceMixin __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] +CHAR_PROMPTS_LIST = ['job_type', 'job_tags', 'skip_tags', 'limit'] + class WorkflowNodeBase(CreatedModifiedModel): class Meta: abstract = True app_label = 'main' - # TODO: RBAC - ''' - admin_role = ImplicitRoleField( - parent_role='workflow_job_template.admin_role', - ) - ''' success_nodes = models.ManyToManyField( 'self', blank=True, @@ -52,11 +52,68 @@ class WorkflowNodeBase(CreatedModifiedModel): unified_job_template = models.ForeignKey( 'UnifiedJobTemplate', related_name='%(class)ss', + blank=False, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + # Prompting-related fields + inventory = models.ForeignKey( + 'Inventory', + related_name='%(class)ss', blank=True, null=True, default=None, on_delete=models.SET_NULL, ) + credential = models.ForeignKey( + 'Credential', + related_name='%(class)ss', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + char_prompts = JSONField( + blank=True, + default={} + ) + + def prompts_dict(self): + data = {} + if self.inventory: + data['inventory'] = self.inventory + if self.credential: + data['credential'] = self.credential + for
fd in CHAR_PROMPTS_LIST: + if fd in self.char_prompts: + data[fd] = self.char_prompts[fd] + return data + + def get_prompts_warnings(self): + ujt_obj = self.unified_job_template + if ujt_obj is None: + return {} + prompts_dict = self.prompts_dict() + from awx.main.models import JobTemplate + if not isinstance(ujt_obj, JobTemplate): + return {'ignored': {'all': 'Cannot use prompts with a unified_job_template that is not a job template'}} + ask_for_vars_dict = ujt_obj._ask_for_vars_dict() + ignored_dict = {} + missing_dict = {} + for fd in prompts_dict: + if not ask_for_vars_dict[fd]: + ignored_dict[fd] = 'Workflow node provided field, but job template is not set to ask on launch' + for fd in ask_for_vars_dict: + ujt_field = getattr(ujt_obj, fd) + if ujt_field is None and prompts_dict.get(fd, None) is None: + missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it' + data = {} + if ignored_dict: + data.update(ignored_dict) + if missing_dict: + data.update(missing_dict) + return data class WorkflowJobTemplateNode(WorkflowNodeBase): # TODO: Ensure the API forces workflow_job_template being set @@ -87,7 +144,7 @@ class WorkflowJobNode(WorkflowNodeBase): blank=True, null=True, default=None, - on_delete=models.SET_NULL, + on_delete=models.CASCADE, ) def get_absolute_url(self): @@ -102,14 +159,32 @@ class WorkflowJobOptions(BaseModel): default='', ) -class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): +class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, ResourceMixin): class Meta: app_label = 'main' - admin_role = ImplicitRoleField( - parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + # admin_role = ImplicitRoleField( + # parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + # ) + organization = models.ForeignKey( + 'Organization', + blank=True, + null=True, + on_delete=models.SET_NULL, + related_name='workflows', ) + admin_role = ImplicitRoleField(parent_role=[ + 'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + 'organization.admin_role' + ]) + execute_role = ImplicitRoleField(parent_role=[ + 'admin_role' + ]) + read_role = ImplicitRoleField(parent_role=[ + 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR, + 'organization.auditor_role', 'execute_role', 'admin_role' + ]) @classmethod def _get_unified_job_class(cls): @@ -146,6 +221,17 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions): workflow_job.inherit_job_template_workflow_nodes() return workflow_job + def get_warnings(self): + warning_data = {} + for node in self.workflow_job_template_nodes.all(): + if node.unified_job_template is None: + warning_data[node.pk] = 'Node is missing a linked unified_job_template' + continue + node_prompts_warnings = node.get_prompts_warnings() + if node_prompts_warnings: + warning_data[node.pk] = node_prompts_warnings + return warning_data + class WorkflowJobInheritNodesMixin(object): def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type): old_related_nodes = self._get_all_by_type(old_node, node_type) @@ -159,7 +245,27 @@ ''' Create a WorkflowJobNode for each WorkflowJobTemplateNode ''' def _create_workflow_job_nodes(self, old_nodes): - return [WorkflowJobNode.objects.create(workflow_job=self, unified_job_template=old_node.unified_job_template) for old_node in old_nodes] + new_node_list = [] + for old_node in old_nodes: + kwargs = dict( + workflow_job=self, + unified_job_template=old_node.unified_job_template, + ) + ujt_obj =
old_node.unified_job_template + if ujt_obj: + ask_for_vars_dict = ujt_obj._ask_for_vars_dict() + if ask_for_vars_dict['inventory'] and old_node.inventory: + kwargs['inventory'] = old_node.inventory + if ask_for_vars_dict['credential'] and old_node.credential: + kwargs['credential'] = old_node.credential + for fd in CHAR_PROMPTS_LIST: + new_char_prompts = {} + if ask_for_vars_dict[fd] and old_node.char_prompts.get(fd, None): + new_char_prompts[fd] = old_node.char_prompts[fd] + if new_char_prompts: + kwargs['char_prompts'] = new_char_prompts + new_node_list.append(WorkflowJobNode.objects.create(**kwargs)) + return new_node_list def _map_workflow_job_nodes(self, old_nodes, new_nodes): node_ids_map = {} diff --git a/awx/main/tests/unit/test_access.py b/awx/main/tests/unit/test_access.py index 000d91268c..9e28b00480 100644 --- a/awx/main/tests/unit/test_access.py +++ b/awx/main/tests/unit/test_access.py @@ -8,6 +8,7 @@ from awx.main.access import ( BaseAccess, check_superuser, JobTemplateAccess, + WorkflowJobTemplateAccess, ) from awx.main.models import Credential, Inventory, Project, Role, Organization @@ -110,3 +111,18 @@ def test_jt_can_add_bad_data(user_unit): access = JobTemplateAccess(user_unit) assert not access.can_add({'asdf': 'asdf'}) + +class TestWorkflowAccessMethods: + @pytest.fixture + def workflow(self, workflow_job_template_factory): + objects = workflow_job_template_factory('test_workflow', persisted=False) + return objects.workflow_job_template + + class MockQuerySet(object): + pass + + def test_workflow_can_add(self, workflow, user_unit): + # user_unit.admin_of_organizations = self.MockQuerySet() + access = WorkflowJobTemplateAccess(user_unit) + assert access.can_add({'organization': 1}) + From 9acd50b8f3c77844e76ccf328332d71ac91100ea Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 21 Sep 2016 16:04:43 -0400 Subject: [PATCH 02/24] inject WF node prompts into new jobs, new workflow RBAC tests --- awx/api/views.py | 4 - awx/main/access.py | 96 ++++++++++++++++--- .../management/commands/run_task_system.py | 4 +- .../migrations/0033_v310_add_workflows.py | 67 +++++++++++++ .../0036_v310_workflow_rbac_prompts.py | 82 ---------------- awx/main/models/workflow.py | 33 ++++++- awx/main/tests/factories/fixtures.py | 4 +- awx/main/tests/factories/tower.py | 9 +- .../tests/functional/test_rbac_workflow.py | 73 ++++++++++++++ awx/main/tests/unit/test_access.py | 20 ++-- 10 files changed, 279 insertions(+), 113 deletions(-) delete mode 100644 awx/main/migrations/0036_v310_workflow_rbac_prompts.py create mode 100644 awx/main/tests/functional/test_rbac_workflow.py diff --git a/awx/api/views.py b/awx/api/views.py index e9742c56e9..e6bd86cae4 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -2631,10 +2631,6 @@ class WorkflowJobTemplateNodeList(ListCreateAPIView): serializer_class = WorkflowJobTemplateNodeListSerializer new_in_310 = True - def update_raw_data(self, data): - for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']: - data[fd] = None - return super(WorkflowJobTemplateNodeList, self).update_raw_data(data) class WorkflowJobTemplateNodeDetail(RetrieveUpdateDestroyAPIView): diff --git a/awx/main/access.py b/awx/main/access.py index d86be2fadc..813851d226 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1132,10 +1132,26 @@ class SystemJobAccess(BaseAccess): ''' model = SystemJob -# TODO: class WorkflowJobTemplateNodeAccess(BaseAccess): ''' - I can see/use a WorkflowJobTemplateNode if I have permission to associated Workflow Job Template + I can 
see/use a WorkflowJobTemplateNode if I have read permission + to the associated Workflow Job Template + + In order to add a node, I need: + - admin access to parent WFJT + - execute access to the unified job template being used + - access to any credential or inventory provided as the prompted fields + + In order to do anything to a node, I need admin access to its WFJT + + In order to edit fields on a node, I need: + - execute access to the unified job template of the node + - access to BOTH credential and inventory post-change, if present + + In order to delete a node, I only need admin access to its WFJT + + In order to manage connections (edges) between nodes I do not need anything + beyond the standard admin access to its WFJT ''' model = WorkflowJobTemplateNode @@ -1148,26 +1164,78 @@ class WorkflowJobTemplateNodeAccess(BaseAccess): self.user, 'read_role')) return qs - @check_superuser - def can_read(self, obj): + def can_use_prompted_resources(self, data): + cred_pk = data.get('credential', None) + inv_pk = data.get('inventory', None) + if cred_pk: + credential = get_object_or_400(Credential, pk=cred_pk) + if self.user not in credential.use_role: + return False + if inv_pk: + inventory = get_object_or_400(Inventory, pk=inv_pk) + if self.user not in inventory.use_role: + return False return True @check_superuser def can_add(self, data): if not data: # So the browseable API will work return True - + wfjt_pk = data.get('workflow_job_template', None) + if wfjt_pk: + wfjt = get_object_or_400(WorkflowJobTemplate, pk=wfjt_pk) + if self.user not in wfjt.admin_role: + return False + else: + return False + if not self.can_use_prompted_resources(data): + return False return True - @check_superuser + def wfjt_admin(self, obj): + if not obj.workflow_job_template: + return self.user.is_superuser + else: + return self.user in obj.workflow_job_template.admin_role + + def ujt_execute(self, obj): + if not obj.unified_job_template: + return self.wfjt_admin(obj) + else: + return self.user in obj.unified_job_template.execute_role and self.wfjt_admin(obj) + def can_change(self, obj, data): - if self.can_add(data) is False: + if not data: + return True + + if not self.ujt_execute(obj): + # should not be able to edit the prompts if lacking access to UJT return False + if 'credential' in data or 'inventory' in data: + new_data = dict(data) + if 'credential' not in data: + new_data['credential'] = obj.credential.pk if obj.credential else None + if 'inventory' not in data: + new_data['inventory'] = obj.inventory.pk if obj.inventory else None + return self.can_use_prompted_resources(new_data) return True def can_delete(self, obj): - return self.can_change(obj, None) + return self.wfjt_admin(obj) + + def check_same_WFJT(self, obj, sub_obj): + if type(obj) != self.model or type(sub_obj) != self.model: + raise Exception('Attaching workflow nodes only allowed for other nodes') + if obj.workflow_job_template != sub_obj.workflow_job_template: + return False + return True + + def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False): + return self.wfjt_admin(obj) and self.check_same_WFJT(obj, sub_obj) + + def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False): + return self.wfjt_admin(obj) and self.check_same_WFJT(obj, sub_obj) class WorkflowJobNodeAccess(BaseAccess): ''' @@ -1199,7 +1267,7 @@ def can_delete(self, obj): return False -# TODO: +# TODO: revisit for survey logic, notification attachments?
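can_attach and can_unattach above reduce edge management to two checks: admin access on the graph's WFJT, plus the check_same_WFJT constraint that both endpoints belong to the same template. A minimal sketch of that rule, with plain dicts as stand-ins for the node models:

def can_attach(user, node, sub_node, wfjt_admins):
    # Edges may only connect nodes inside one workflow job template.
    if node['workflow_job_template'] != sub_node['workflow_job_template']:
        return False
    # Managing edges requires admin access to that template.
    return user in wfjt_admins.get(node['workflow_job_template'], set())

a = {'id': 1, 'workflow_job_template': 10}
b = {'id': 2, 'workflow_job_template': 10}
c = {'id': 3, 'workflow_job_template': 20}
assert can_attach('alice', a, b, {10: {'alice'}})
assert not can_attach('alice', a, c, {10: {'alice'}})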
class WorkflowJobTemplateAccess(BaseAccess): ''' I can only see/manage Workflow Job Templates if I'm a super user @@ -1293,7 +1361,7 @@ class WorkflowJobTemplateAccess(BaseAccess): if ('organization' not in data or (org_pk is None and obj.organization is None) or (obj.organization and obj.organization.pk == org_pk)): - # The simple case + # No organization changes return self.user in obj.admin_role # If it already has an organization set, must be admin of the org to change it @@ -1314,11 +1382,13 @@ class WorkflowJobTemplateAccess(BaseAccess): return True - class WorkflowJobAccess(BaseAccess): ''' I can only see Workflow Jobs if I can see the associated workflow job template that it was created from. + I can delete them if I am admin of their workflow job template + I can cancel one if I can delete it + I can also cancel it if I started it ''' model = WorkflowJob @@ -1345,6 +1415,10 @@ class WorkflowJobAccess(BaseAccess): return self.user.is_superuser return self.user in obj.workflow_job_template.admin_role + def can_cancel(self, obj): + if not obj.can_cancel: + return False + return self.can_delete(obj) or self.user == obj.created_by class AdHocCommandAccess(BaseAccess): ''' diff --git a/awx/main/management/commands/run_task_system.py b/awx/main/management/commands/run_task_system.py index 855491f08c..07b3ab3df5 100644 --- a/awx/main/management/commands/run_task_system.py +++ b/awx/main/management/commands/run_task_system.py @@ -260,9 +260,7 @@ def do_spawn_workflow_jobs(): dag = WorkflowDAG(workflow_job) spawn_nodes = dag.bfs_nodes_to_run() for spawn_node in spawn_nodes: - # TODO: Inject job template template params as kwargs. - # Make sure to take into account extra_vars merge logic - kv = {} + kv = spawn_node.get_job_kwargs() job = spawn_node.unified_job_template.create_unified_job(**kv) spawn_node.job = job spawn_node.save() diff --git a/awx/main/migrations/0033_v310_add_workflows.py b/awx/main/migrations/0033_v310_add_workflows.py index 1ca0462edf..3d2e5afc77 100644 --- a/awx/main/migrations/0033_v310_add_workflows.py +++ b/awx/main/migrations/0033_v310_add_workflows.py @@ -2,6 +2,7 @@ from __future__ import unicode_literals from django.db import migrations, models +import jsonfield.fields import awx.main.models.notifications import django.db.models.deletion import awx.main.models.workflow @@ -101,4 +102,70 @@ class Migration(migrations.Migration): name='workflow_job_template_node', field=models.ManyToManyField(to='main.WorkflowJobTemplateNode', blank=True), ), + # RBAC, prompting changes + migrations.AddField( + model_name='workflowjobnode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='credential', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='execute_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='organization', + field=models.ForeignKey(related_name='workflows', 
on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Organization', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='read_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='credential', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', default=None, blank=True, to='main.WorkflowJob', null=True), + ), + migrations.AlterField( + model_name='workflowjobtemplate', + name='admin_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), + ), + migrations.AlterField( + model_name='workflowjobtemplatenode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), ] diff --git a/awx/main/migrations/0036_v310_workflow_rbac_prompts.py b/awx/main/migrations/0036_v310_workflow_rbac_prompts.py deleted file mode 100644 index d126ac9c1d..0000000000 --- a/awx/main/migrations/0036_v310_workflow_rbac_prompts.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -from django.db import migrations, models -import jsonfield.fields -import django.db.models.deletion -import awx.main.fields - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0035_v310_jobevent_uuid'), - ] - - operations = [ - migrations.AddField( - model_name='workflowjobnode', - name='char_prompts', - field=jsonfield.fields.JSONField(default={}, blank=True), - ), - migrations.AddField( - model_name='workflowjobnode', - name='credential', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), - ), - migrations.AddField( - model_name='workflowjobnode', - name='inventory', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='execute_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='organization', - 
field=models.ForeignKey(related_name='workflows', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Organization', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='char_prompts', - field=jsonfield.fields.JSONField(default={}, blank=True), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='credential', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='inventory', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), - ), - migrations.AlterField( - model_name='workflowjobnode', - name='unified_job_template', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), - ), - migrations.AlterField( - model_name='workflowjobnode', - name='workflow_job', - field=models.ForeignKey(related_name='workflow_job_nodes', default=None, blank=True, to='main.WorkflowJob', null=True), - ), - migrations.AlterField( - model_name='workflowjobtemplate', - name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), - ), - migrations.AlterField( - model_name='workflowjobtemplatenode', - name='unified_job_template', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), - ), - ] diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 2831244b2a..683ef741a5 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -22,6 +22,9 @@ from awx.main.models.rbac import ( from awx.main.fields import ImplicitRoleField from awx.main.models.mixins import ResourceMixin +import yaml +import json + __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] CHAR_PROMPTS_LIST = ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags'] @@ -150,6 +153,33 @@ class WorkflowJobNode(WorkflowNodeBase): def get_absolute_url(self): return reverse('api:workflow_job_node_detail', args=(self.pk,)) + def get_job_kwargs(self): + data = {} + # rejecting/accepting prompting variables done with the node copy + if self.inventory: + data['inventory'] = self.inventory + if self.credential: + data['credential'] = self.credential + if self.char_prompts: + data.update(self.char_prompts) + # process extra_vars + extra_vars = {} + if self.workflow_job and self.workflow_job.extra_vars: + try: + WJ_json_extra_vars = json.loads( + (self.workflow_job.extra_vars or '').strip() or '{}') + except ValueError: + try: + WJ_json_extra_vars = yaml.safe_load(self.workflow_job.extra_vars) + except yaml.YAMLError: + WJ_json_extra_vars = {} + extra_vars.update(WJ_json_extra_vars) + # TODO: merge artifacts, add ancestor_artifacts to kwargs + if extra_vars: + 
data['extra_vars'] = extra_vars + return data + class WorkflowJobOptions(BaseModel): class Meta: abstract = True @@ -164,9 +194,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, ResourceMixin) class Meta: app_label = 'main' - # admin_role = ImplicitRoleField( - # parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, - # ) organization = models.ForeignKey( 'Organization', blank=True, diff --git a/awx/main/tests/factories/fixtures.py index e52b627076..ac6c93348d 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -167,11 +167,11 @@ def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={}, job.save() return job -def mk_workflow_job_template(name, extra_vars='', spec=None, persisted=True): +def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True): if extra_vars: extra_vars = json.dumps(extra_vars) - wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars) + wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization) wfjt.survey_spec = spec if wfjt.survey_spec is not None: diff --git a/awx/main/tests/factories/tower.py index 5c99c14828..5a23a01577 100644 --- a/awx/main/tests/factories/tower.py +++ b/awx/main/tests/factories/tower.py @@ -360,16 +360,20 @@ def generate_workflow_job_template_nodes(workflow_job_template, new_node = WorkflowJobTemplateNode(workflow_job_template=workflow_job_template, unified_job_template=node['unified_job_template'], id=i) + if persisted: + new_node.save() new_nodes.append(new_node) node_types = ['success_nodes', 'failure_nodes', 'always_nodes'] for node_type in node_types: for i, new_node in enumerate(new_nodes): + if node_type not in workflow_job_template_nodes[i]: + continue for related_index in workflow_job_template_nodes[i][node_type]: getattr(new_node, node_type).add(new_nodes[related_index]) # TODO: Implement survey and jobs -def create_workflow_job_template(name, persisted=True, **kwargs): +def create_workflow_job_template(name, organization=None, persisted=True, **kwargs): Objects = generate_objects(["workflow_job_template", "workflow_job_template_nodes", "survey",], kwargs) @@ -382,7 +386,8 @@ def create_workflow_job_template(name, persisted=True, **kwargs): if 'survey' in kwargs: spec = create_survey_spec(kwargs['survey']) - wfjt = mk_workflow_job_template(name, + wfjt = mk_workflow_job_template(name, + organization=organization, spec=spec, extra_vars=extra_vars, persisted=persisted) diff --git a/awx/main/tests/functional/test_rbac_workflow.py new file mode 100644 index 0000000000..663af2639a --- /dev/null +++ b/awx/main/tests/functional/test_rbac_workflow.py @@ -0,0 +1,73 @@ +import pytest + +from awx.main.access import ( + WorkflowJobTemplateAccess, + WorkflowJobTemplateNodeAccess, + WorkflowJobAccess, + # WorkflowJobNodeAccess +) + +@pytest.fixture +def wfjt(workflow_job_template_factory, organization): + objects = workflow_job_template_factory('test_workflow', organization=organization, persisted=True) + return objects.workflow_job_template + +@pytest.fixture +def wfjt_with_nodes(workflow_job_template_factory, organization, job_template): + objects = workflow_job_template_factory( + 'test_workflow', organization=organization, workflow_job_template_nodes=[{'unified_job_template': job_template}], persisted=True) + return
objects.workflow_job_template + +@pytest.fixture +def wfjt_node(wfjt_with_nodes): + return wfjt_with_nodes.workflow_job_template_nodes.all()[0] + +@pytest.fixture +def workflow_job(wfjt): + return wfjt.jobs.create(name='test_workflow') + + +@pytest.mark.django_db +class TestWorkflowJobTemplateAccess: + + def test_random_user_no_edit(self, wfjt, rando): + access = WorkflowJobTemplateAccess(rando) + assert not access.can_change(wfjt, {'name': 'new name'}) + + def test_org_admin_edit(self, wfjt, org_admin): + access = WorkflowJobTemplateAccess(org_admin) + assert access.can_change(wfjt, {'name': 'new name'}) + + def test_org_admin_role_inheritance(self, wfjt, org_admin): + assert org_admin in wfjt.admin_role + assert org_admin in wfjt.execute_role + assert org_admin in wfjt.read_role + + def test_jt_blocks_copy(self, wfjt_with_nodes, org_admin): + """I want to copy a workflow JT in my organization, but someone + included a job template that I don't have access to, so I can + not copy the WFJT as-is""" + access = WorkflowJobTemplateAccess(org_admin) + assert not access.can_add({'reference_obj': wfjt_with_nodes}) + +@pytest.mark.django_db +class TestWorkflowJobTemplateNodeAccess: + + def test_jt_access_to_edit(self, wfjt_node, org_admin): + access = WorkflowJobTemplateNodeAccess(org_admin) + assert not access.can_change(wfjt_node, {'job_type': 'scan'}) + +@pytest.mark.django_db +class TestWorkflowJobAccess: + + def test_wfjt_admin_delete(self, wfjt, workflow_job, rando): + wfjt.admin_role.members.add(rando) + access = WorkflowJobAccess(rando) + assert access.can_delete(workflow_job) + + def test_cancel_your_own_job(self, wfjt, workflow_job, rando): + wfjt.execute_role.members.add(rando) + workflow_job.created_by = rando + workflow_job.save() + access = WorkflowJobAccess(rando) + assert access.can_cancel(workflow_job) diff --git a/awx/main/tests/unit/test_access.py b/awx/main/tests/unit/test_access.py index 9e28b00480..b400a09596 100644 --- a/awx/main/tests/unit/test_access.py +++ b/awx/main/tests/unit/test_access.py @@ -118,11 +118,19 @@ class TestWorkflowAccessMethods: objects = workflow_job_template_factory('test_workflow', persisted=False) return objects.workflow_job_template - class MockQuerySet(object): - pass - def test_workflow_can_add(self, workflow, user_unit): - # user_unit.admin_of_organizations = self.MockQuerySet() - access = WorkflowJobTemplateAccess(user_unit) - assert access.can_add({'organization': 1}) + organization = Organization(name='test-org') + workflow.organization = organization + organization.admin_role = Role() + + def mock_get_object(Class, **kwargs): + if Class == Organization: + return organization + else: + raise Exception('Item requested has not been mocked') + + access = WorkflowJobTemplateAccess(user_unit) + with mock.patch('awx.main.models.rbac.Role.__contains__', return_value=True): + with mock.patch('awx.main.access.get_object_or_400', mock_get_object): + assert access.can_add({'organization': 1}) From dd477d8de8e6bfa9506f6525f8b5fda7b24aa232 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 26 Sep 2016 13:26:43 -0400 Subject: [PATCH 03/24] workflow user_capabilities added --- awx/api/serializers.py | 2 ++ awx/main/access.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index fa442192c2..76c7bc6712 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2184,6 +2184,8 @@ class SystemJobCancelSerializer(SystemJobSerializer): fields = ('can_cancel',) class 
WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer): + show_capabilities = ['start', 'edit', 'delete'] + class Meta: model = WorkflowJobTemplate fields = ('*', 'extra_vars', 'organization') diff --git a/awx/main/access.py index fae143a6de..3b8b63d4e6 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1524,6 +1524,10 @@ class WorkflowJobAccess(BaseAccess): return self.user.is_superuser return self.user in obj.workflow_job_template.admin_role + # TODO: add support for relaunching workflow jobs + def can_start(self, obj): + return False + def can_cancel(self, obj): if not obj.can_cancel: return False From ca4f2eb005a488c4132f9960bbcd81d7d8f970e8 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 26 Sep 2016 16:53:46 -0400 Subject: [PATCH 04/24] fix bug where not all char_prompts were copied to workflow job --- awx/main/models/workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index b27f7cd36b..cc89046815 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -278,18 +278,18 @@ class WorkflowJobInheritNodesMixin(object): unified_job_template=old_node.unified_job_template, ) ujt_obj = old_node.unified_job_template - if ujt_obj: + if ujt_obj and hasattr(ujt_obj, '_ask_for_vars_dict'): ask_for_vars_dict = ujt_obj._ask_for_vars_dict() if ask_for_vars_dict['inventory'] and old_node.inventory: kwargs['inventory'] = old_node.inventory if ask_for_vars_dict['credential'] and old_node.credential: kwargs['credential'] = old_node.credential + new_char_prompts = {} for fd in CHAR_PROMPTS_LIST: - new_char_prompts = {} if ask_for_vars_dict[fd] and old_node.char_prompts.get(fd, None): new_char_prompts[fd] = old_node.char_prompts[fd] - if new_char_prompts: - kwargs['char_prompts'] = new_char_prompts + if new_char_prompts: + kwargs['char_prompts'] = new_char_prompts new_node_list.append(WorkflowJobNode.objects.create(**kwargs)) return new_node_list From 1604e2fd025a80a62076fab28c77539342722de2 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 26 Sep 2016 16:57:23 -0400 Subject: [PATCH 05/24] Workflow RBAC and prompting migration moved back to its own migration file --- .../migrations/0033_v310_add_workflows.py | 67 --------------- .../0036_v310_workflow_rbac_prompts.py | 82 +++++++++++++++++++ 2 files changed, 82 insertions(+), 67 deletions(-) create mode 100644 awx/main/migrations/0036_v310_workflow_rbac_prompts.py diff --git a/awx/main/migrations/0033_v310_add_workflows.py b/awx/main/migrations/0033_v310_add_workflows.py index 3d2e5afc77..1ca0462edf 100644 --- a/awx/main/migrations/0033_v310_add_workflows.py +++ b/awx/main/migrations/0033_v310_add_workflows.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import jsonfield.fields import awx.main.models.notifications import django.db.models.deletion import awx.main.models.workflow @@ -102,70 +101,4 @@ class Migration(migrations.Migration): name='workflow_job_template_node', field=models.ManyToManyField(to='main.WorkflowJobTemplateNode', blank=True), ), - # RBAC, prompting changes - migrations.AddField( - model_name='workflowjobnode', - name='char_prompts', - field=jsonfield.fields.JSONField(default={}, blank=True), - ), - migrations.AddField( - model_name='workflowjobnode', - name='credential', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential',
null=True), - ), - migrations.AddField( - model_name='workflowjobnode', - name='inventory', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='execute_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='organization', - field=models.ForeignKey(related_name='workflows', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Organization', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplate', - name='read_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='char_prompts', - field=jsonfield.fields.JSONField(default={}, blank=True), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='credential', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), - ), - migrations.AddField( - model_name='workflowjobtemplatenode', - name='inventory', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), - ), - migrations.AlterField( - model_name='workflowjobnode', - name='unified_job_template', - field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), - ), - migrations.AlterField( - model_name='workflowjobnode', - name='workflow_job', - field=models.ForeignKey(related_name='workflow_job_nodes', default=None, blank=True, to='main.WorkflowJob', null=True), - ), - migrations.AlterField( - model_name='workflowjobtemplate', - name='admin_role', - field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), - ), - migrations.AlterField( - model_name='workflowjobtemplatenode', - name='unified_job_template', - field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), - ), ] diff --git a/awx/main/migrations/0036_v310_workflow_rbac_prompts.py b/awx/main/migrations/0036_v310_workflow_rbac_prompts.py new file mode 100644 index 0000000000..d126ac9c1d --- /dev/null +++ b/awx/main/migrations/0036_v310_workflow_rbac_prompts.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +import jsonfield.fields +import django.db.models.deletion +import awx.main.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0035_v310_jobevent_uuid'), + ] + + operations = [ + migrations.AddField( + model_name='workflowjobnode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='credential', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, 
default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobnode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='execute_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='organization', + field=models.ForeignKey(related_name='workflows', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Organization', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplate', + name='read_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'execute_role', b'admin_role'], to='main.Role', null=b'True'), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='char_prompts', + field=jsonfield.fields.JSONField(default={}, blank=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='credential', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True), + ), + migrations.AddField( + model_name='workflowjobtemplatenode', + name='inventory', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Inventory', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobnodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), + migrations.AlterField( + model_name='workflowjobnode', + name='workflow_job', + field=models.ForeignKey(related_name='workflow_job_nodes', default=None, blank=True, to='main.WorkflowJob', null=True), + ), + migrations.AlterField( + model_name='workflowjobtemplate', + name='admin_role', + field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'), + ), + migrations.AlterField( + model_name='workflowjobtemplatenode', + name='unified_job_template', + field=models.ForeignKey(related_name='workflowjobtemplatenodes', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.UnifiedJobTemplate', null=True), + ), + ] From 549273e90f434e3397938b25549238472cf8442e Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 27 Sep 2016 09:17:51 -0400 Subject: [PATCH 06/24] unit test for node prompt values --- awx/api/serializers.py | 2 +- awx/main/models/workflow.py | 4 +- .../tests/unit/models/test_workflow_unit.py | 41 ++++++++++++++++++- 3 files changed, 43 insertions(+), 4 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 76c7bc6712..3f07657f84 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2290,7 +2290,7 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): def extract_char_prompts(self, data): char_prompts = {} - for fd in ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags']: + for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']: if data.get(fd, None): char_prompts[fd] = data[fd] return 
char_prompts diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index cc89046815..240e5f2841 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -27,7 +27,7 @@ import json __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',] -CHAR_PROMPTS_LIST = ['job_type', 'job_tags', 'skip_tags', 'limit', 'skip_tags'] +CHAR_PROMPTS_LIST = ['job_type', 'job_tags', 'skip_tags', 'limit'] class WorkflowNodeBase(CreatedModifiedModel): class Meta: @@ -173,7 +173,7 @@ class WorkflowJobNode(WorkflowNodeBase): WJ_json_extra_vars = yaml.safe_load(self.workflow_job.extra_vars) except yaml.YAMLError: WJ_json_extra_vars = {} - extra_vars.update(WJ_json_extra_vars) + extra_vars.update(WJ_json_extra_vars) # TODO: merge artifacts, add ancestor_artifacts to kwargs if extra_vars: data['extra_vars'] = extra_vars diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index 58ea591299..e9b5fab280 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -1,7 +1,12 @@ import pytest from awx.main.models.jobs import JobTemplate -from awx.main.models.workflow import WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJobNode +from awx.main.models.inventory import Inventory +from awx.main.models.credential import Credential +from awx.main.models.workflow import ( + WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, + WorkflowJob, WorkflowJobNode +) class TestWorkflowJobInheritNodesMixin(): class TestCreateWorkflowJobNodes(): @@ -79,3 +84,37 @@ class TestWorkflowJobInheritNodesMixin(): job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1]) +class TestWorkflowJobHelperMethods: + + @pytest.fixture + def workflow_job_unit(self): + return WorkflowJob(name='workflow', status='new') + + @pytest.fixture + def workflow_job_node_unit(self, workflow_job_unit, job_template_factory): + # note: factory sets ask_inventory_on_launch to true when not provided + jt = job_template_factory(name='example-jt', persisted=False).job_template + return WorkflowJobNode(workflow_job=workflow_job_unit, unified_job_template=jt) + + def test_null_kwargs(self, workflow_job_node_unit): + assert workflow_job_node_unit.get_job_kwargs() == {} + + def test_inherit_workflow_job_extra_vars(self, workflow_job_node_unit): + workflow_job = workflow_job_node_unit.workflow_job + workflow_job.extra_vars = '{"a": 84}' + assert workflow_job_node_unit.get_job_kwargs() == {'extra_vars': {'a': 84}} + + def test_char_prompts_and_res_node_prompts(self, workflow_job_node_unit): + barnyard_kwargs = dict( + job_type='scan', + job_tags='quack', + limit='duck', + skip_tags='oink' + ) + workflow_job_node_unit.char_prompts = barnyard_kwargs + inv = Inventory(name='example-inv') + cred = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') + workflow_job_node_unit.inventory = inv + workflow_job_node_unit.credential = cred + assert workflow_job_node_unit.get_job_kwargs() == dict( + inventory=inv, credential=cred, **barnyard_kwargs) From fdca3b41adff835a72cf9761615cb37e217a7196 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 27 Sep 2016 11:47:07 -0400 Subject: [PATCH 07/24] copy WFJT node prompted fields to WJ nodes, reject or accept on unified job creation --- awx/main/models/workflow.py | 53 +++----- .../tests/unit/models/test_workflow_unit.py | 122 +++++++++++++----- 2 files changed, 110 insertions(+), 65 
deletions(-) diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 240e5f2841..4306878c93 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -98,9 +98,11 @@ class WorkflowNodeBase(CreatedModifiedModel): if ujt_obj is None: return {} prompts_dict = self.prompts_dict() - from awx.main.models import JobTemplate - if not isinstance(ujt_obj, JobTemplate): - return {'ignored': {'all': 'Can not use prompts on unified_job_template that is not type of job template'}} + if not hasattr(ujt_obj, '_ask_for_vars_dict'): + if prompts_dict: + return {'ignored': {'all': 'Can not use prompts on unified_job_template that is not type of job template'}} + else: + return {} ask_for_vars_dict = ujt_obj._ask_for_vars_dict() ignored_dict = {} missing_dict = {} @@ -113,9 +115,9 @@ class WorkflowNodeBase(CreatedModifiedModel): missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it' data = {} if ignored_dict: - data.update(ignored_dict) + data['ignored'] = ignored_dict if missing_dict: - data.update(missing_dict) + data['missing'] = missing_dict return data class WorkflowJobTemplateNode(WorkflowNodeBase): @@ -154,14 +156,15 @@ class WorkflowJobNode(WorkflowNodeBase): return reverse('api:workflow_job_node_detail', args=(self.pk,)) def get_job_kwargs(self): + # reject/accept prompted fields data = {} - # rejecting/accepting prompting variables done with the node copy - if self.inventory: - data['inventory'] = self.inventory - if self.credential: - data['credential'] = self.credential - if self.char_prompts: - data.update(self.char_prompts) + ujt_obj = self.unified_job_template + if ujt_obj and hasattr(ujt_obj, '_ask_for_vars_dict'): + ask_for_vars_dict = ujt_obj._ask_for_vars_dict() + prompts_dict = self.prompts_dict() + for fd in prompts_dict: + if ask_for_vars_dict.get(fd, False): + data[fd] = prompts_dict[fd] # process extra_vars extra_vars = {} if self.workflow_job and self.workflow_job.extra_vars: @@ -271,27 +274,11 @@ class WorkflowJobInheritNodesMixin(object): Create a WorkflowJobNode for each WorkflowJobTemplateNode ''' def _create_workflow_job_nodes(self, old_nodes): - new_node_list = [] - for old_node in old_nodes: - kwargs = dict( - workflow_job=self, - unified_job_template=old_node.unified_job_template, - ) - ujt_obj = old_node.unified_job_template - if ujt_obj and hasattr(ujt_obj, '_ask_for_vars_dict'): - ask_for_vars_dict = ujt_obj._ask_for_vars_dict() - if ask_for_vars_dict['inventory'] and old_node.inventory: - kwargs['inventory'] = old_node.inventory - if ask_for_vars_dict['credential'] and old_node.credential: - kwargs['credential'] = old_node.credential - new_char_prompts = {} - for fd in CHAR_PROMPTS_LIST: - if ask_for_vars_dict[fd] and old_node.char_prompts.get(fd, None): - new_char_prompts[fd] = old_node.char_prompts[fd] - if new_char_prompts: - kwargs['char_prompts'] = new_char_prompts - new_node_list.append(WorkflowJobNode.objects.create(**kwargs)) - return new_node_list + return [WorkflowJobNode.objects.create( + workflow_job=self, unified_job_template=old_node.unified_job_template, + inventory=old_node.inventory, credential=old_node.credential, + char_prompts=old_node.char_prompts + ) for old_node in old_nodes] def _map_workflow_job_nodes(self, old_nodes, new_nodes): node_ids_map = {} diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index e9b5fab280..e0a627d2a7 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ 
b/awx/main/tests/unit/models/test_workflow_unit.py @@ -1,8 +1,7 @@ import pytest from awx.main.models.jobs import JobTemplate -from awx.main.models.inventory import Inventory -from awx.main.models.credential import Credential +from awx.main.models import Inventory, Credential, Project from awx.main.models.workflow import ( WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJob, WorkflowJobNode @@ -25,8 +24,9 @@ class TestWorkflowJobInheritNodesMixin(): mixin._create_workflow_job_nodes(job_template_nodes) for job_template_node in job_template_nodes: - workflow_job_node_create.assert_any_call(workflow_job=mixin, - unified_job_template=job_template_node.unified_job_template) + workflow_job_node_create.assert_any_call( + workflow_job=mixin, unified_job_template=job_template_node.unified_job_template, + credential=None, inventory=None, char_prompts={}) class TestMapWorkflowJobNodes(): @pytest.fixture @@ -84,37 +84,95 @@ class TestWorkflowJobInheritNodesMixin(): job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1]) -class TestWorkflowJobHelperMethods: +@pytest.fixture +def workflow_job_unit(): + return WorkflowJob(name='workflow', status='new') - @pytest.fixture - def workflow_job_unit(self): - return WorkflowJob(name='workflow', status='new') +@pytest.fixture +def node_no_prompts(workflow_job_unit, job_template_factory): + # note: factory sets ask_xxxx_on_launch to true for inventory & credential + jt = job_template_factory(name='example-jt', persisted=False).job_template + jt.ask_job_type_on_launch = True + jt.ask_skip_tags_on_launch = True + jt.ask_limit_on_launch = True + jt.ask_tags_on_launch = True + return WorkflowJobNode(workflow_job=workflow_job_unit, unified_job_template=jt) - @pytest.fixture - def workflow_job_node_unit(self, workflow_job_unit, job_template_factory): - # note: factory sets ask_inventory_on_launch to true when not provided - jt = job_template_factory(name='example-jt', persisted=False).job_template - return WorkflowJobNode(workflow_job=workflow_job_unit, unified_job_template=jt) +@pytest.fixture +def project_unit(): + return Project(name='example-proj') - def test_null_kwargs(self, workflow_job_node_unit): - assert workflow_job_node_unit.get_job_kwargs() == {} +example_prompts = dict(job_type='scan', job_tags='quack', limit='duck', skip_tags='oink') - def test_inherit_workflow_job_extra_vars(self, workflow_job_node_unit): - workflow_job = workflow_job_node_unit.workflow_job +@pytest.fixture +def node_with_prompts(node_no_prompts): + node_no_prompts.char_prompts = example_prompts + inv = Inventory(name='example-inv') + cred = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') + node_no_prompts.inventory = inv + node_no_prompts.credential = cred + return node_no_prompts + +class TestWorkflowJobNodeJobKWARGS: + """ + Tests for building the keyword arguments that go into creating and + launching a new job that corresponds to a workflow node. 
+ """ + + def test_null_kwargs(self, node_no_prompts): + assert node_no_prompts.get_job_kwargs() == {} + + def test_inherit_workflow_job_extra_vars(self, node_no_prompts): + workflow_job = node_no_prompts.workflow_job workflow_job.extra_vars = '{"a": 84}' - assert workflow_job_node_unit.get_job_kwargs() == {'extra_vars': {'a': 84}} + assert node_no_prompts.get_job_kwargs() == {'extra_vars': {'a': 84}} - def test_char_prompts_and_res_node_prompts(self, workflow_job_node_unit): - barnyard_kwargs = dict( - job_type='scan', - job_tags='quack', - limit='duck', - skip_tags='oink' - ) - workflow_job_node_unit.char_prompts = barnyard_kwargs - inv = Inventory(name='example-inv') - cred = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') - workflow_job_node_unit.inventory = inv - workflow_job_node_unit.credential = cred - assert workflow_job_node_unit.get_job_kwargs() == dict( - inventory=inv, credential=cred, **barnyard_kwargs) + def test_char_prompts_and_res_node_prompts(self, node_with_prompts): + assert node_with_prompts.get_job_kwargs() == dict( + inventory=node_with_prompts.inventory, + credential=node_with_prompts.credential, + **example_prompts) + + def test_reject_some_node_prompts(self, node_with_prompts): + node_with_prompts.unified_job_template.ask_inventory_on_launch = False + node_with_prompts.unified_job_template.ask_job_type_on_launch = False + expect_kwargs = dict(inventory=node_with_prompts.inventory, + credential=node_with_prompts.credential, + **example_prompts) + expect_kwargs.pop('inventory') + expect_kwargs.pop('job_type') + assert node_with_prompts.get_job_kwargs() == expect_kwargs + + def test_no_accepted_project_node_prompts(self, node_with_prompts, project_unit): + node_with_prompts.unified_job_template = project_unit + assert node_with_prompts.get_job_kwargs() == {} + + +class TestWorkflowWarnings: + """ + Tests of warnings that show user errors in the construction of a workflow + """ + + def test_warn_project_node_no_prompts(self, node_no_prompts, project_unit): + node_no_prompts.unified_job_template = project_unit + assert node_no_prompts.get_prompts_warnings() == {} + + def test_warn_project_node_reject_all_prompts(self, node_with_prompts, project_unit): + node_with_prompts.unified_job_template = project_unit + assert 'ignored' in node_with_prompts.get_prompts_warnings() + assert 'all' in node_with_prompts.get_prompts_warnings()['ignored'] + + def test_warn_reject_some_prompts(self, node_with_prompts): + node_with_prompts.unified_job_template.ask_credential_on_launch = False + node_with_prompts.unified_job_template.ask_job_type_on_launch = False + assert 'ignored' in node_with_prompts.get_prompts_warnings() + assert 'job_type' in node_with_prompts.get_prompts_warnings()['ignored'] + assert 'credential' in node_with_prompts.get_prompts_warnings()['ignored'] + assert len(node_with_prompts.get_prompts_warnings()['ignored']) == 2 + + def test_warn_missing_fields(self, node_no_prompts): + node_no_prompts.inventory = None + assert 'missing' in node_no_prompts.get_prompts_warnings() + assert 'inventory' in node_no_prompts.get_prompts_warnings()['missing'] + assert 'credential' in node_no_prompts.get_prompts_warnings()['missing'] + assert len(node_no_prompts.get_prompts_warnings()['missing']) == 2 From a452acf21460a6fdf474554a5f9a86ec941f42a2 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 27 Sep 2016 17:24:09 -0400 Subject: [PATCH 08/24] include scan job_type errors in criteria for rejecting workflow node prompts --- awx/api/serializers.py | 8 
+---- awx/main/models/jobs.py | 24 +++++++++++---- awx/main/models/workflow.py | 30 +++++++++++-------- .../tests/unit/models/test_workflow_unit.py | 24 +++++++++++---- 4 files changed, 54 insertions(+), 32 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 3f07657f84..8d7a2caf20 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2557,13 +2557,7 @@ class JobLaunchSerializer(BaseSerializer): errors['variables_needed_to_start'] = validation_errors # Special prohibited cases for scan jobs - if 'job_type' in data and obj.ask_job_type_on_launch: - if ((obj.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or - (data['job_type'] == PERM_INVENTORY_SCAN and not obj.job_type == PERM_INVENTORY_SCAN)): - errors['job_type'] = 'Can not override job_type to or from a scan job.' - if (obj.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and obj.ask_inventory_on_launch and - obj.inventory != data['inventory']): - errors['inventory'] = 'Inventory can not be changed at runtime for scan jobs.' + errors.update(obj._extra_job_type_errors(data)) if errors: raise serializers.ValidationError(errors) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 34adcb73a4..9fe244f8a6 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -34,6 +34,7 @@ from awx.main.redact import PlainTextCleaner from awx.main.conf import tower_settings from awx.main.fields import ImplicitRoleField from awx.main.models.mixins import ResourceMixin +from awx.main.models.base import PERM_INVENTORY_SCAN logger = logging.getLogger('awx.main.models.jobs') @@ -272,7 +273,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",] # Job type dependent checks - if self.job_type == 'scan': + if self.job_type == PERM_INVENTORY_SCAN: if self.inventory is None or self.ask_inventory_on_launch: validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",] elif self.project is None: @@ -474,13 +475,24 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): else: ignored_fields[field] = kwargs[field] - # Special case to ignore inventory if it is a scan job - if prompted_fields.get('job_type', None) == 'scan' or self.job_type == 'scan': - if 'inventory' in prompted_fields: - ignored_fields['inventory'] = prompted_fields.pop('inventory') - return prompted_fields, ignored_fields + def _extra_job_type_errors(self, data): + """ + Used to enforce 2 special cases around scan jobs and prompting + - the inventory can not be changed on a scan job template + - scan jobs can not be switched to run/check type and vice versa + """ + errors = {} + if 'job_type' in data and self.ask_job_type_on_launch: + if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or + (data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)): + errors['job_type'] = 'Can not override job_type to or from a scan job.' + if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and + self.inventory != data['inventory']): + errors['inventory'] = 'Inventory can not be changed at runtime for scan jobs.' 
+ return errors + @property def cache_timeout_blocked(self): if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() > getattr(tower_settings, 'SCHEDULE_MAX_JOBS', 10): diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 4306878c93..d300347d82 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -85,9 +85,9 @@ class WorkflowNodeBase(CreatedModifiedModel): def prompts_dict(self): data = {} if self.inventory: - data['inventory'] = self.inventory + data['inventory'] = self.inventory.pk if self.credential: - data['credential'] = self.credential + data['credential'] = self.credential.pk for fd in CHAR_PROMPTS_LIST: if fd in self.char_prompts: data[fd] = self.char_prompts[fd] @@ -103,16 +103,20 @@ class WorkflowNodeBase(CreatedModifiedModel): return {'ignored': {'all': 'Can not use prompts on unified_job_template that is not type of job template'}} else: return {} + + accepted_fields, ignored_fields = ujt_obj._accept_or_ignore_job_kwargs(**prompts_dict) ask_for_vars_dict = ujt_obj._ask_for_vars_dict() + ignored_dict = {} missing_dict = {} - for fd in prompts_dict: - if not ask_for_vars_dict[fd]: - ignored_dict[fd] = 'Workflow node provided field, but job template is not set to ask on launch' - for fd in ask_for_vars_dict: - ujt_field = getattr(ujt_obj, fd) - if ujt_field is None and prompts_dict.get(fd, None) is None: + for fd in ignored_fields: + ignored_dict[fd] = 'Workflow node provided field, but job template is not set to ask on launch' + scan_errors = ujt_obj._extra_job_type_errors(accepted_fields) + ignored_dict.update(scan_errors) + for fd in ['inventory', 'credential']: + if getattr(ujt_obj, fd) is None and not (ask_for_vars_dict.get(fd, False) and fd in prompts_dict): missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it' + data = {} if ignored_dict: data['ignored'] = ignored_dict @@ -160,11 +164,11 @@ class WorkflowJobNode(WorkflowNodeBase): data = {} ujt_obj = self.unified_job_template if ujt_obj and hasattr(ujt_obj, '_ask_for_vars_dict'): - ask_for_vars_dict = ujt_obj._ask_for_vars_dict() - prompts_dict = self.prompts_dict() - for fd in prompts_dict: - if ask_for_vars_dict.get(fd, False): - data[fd] = prompts_dict[fd] + accepted_fields, ignored_fields = ujt_obj._accept_or_ignore_job_kwargs(**self.prompts_dict()) + for fd in ujt_obj._extra_job_type_errors(accepted_fields): + accepted_fields.pop(fd) + data.update(accepted_fields) + # TODO: decide what to do in the event of missing fields # process extra_vars extra_vars = {} if self.workflow_job and self.workflow_job.extra_vars: diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index e0a627d2a7..5fe7bfebd7 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -102,7 +102,7 @@ def node_no_prompts(workflow_job_unit, job_template_factory): def project_unit(): return Project(name='example-proj') -example_prompts = dict(job_type='scan', job_tags='quack', limit='duck', skip_tags='oink') +example_prompts = dict(job_type='check', job_tags='quack', limit='duck', skip_tags='oink') @pytest.fixture def node_with_prompts(node_no_prompts): @@ -129,15 +129,15 @@ class TestWorkflowJobNodeJobKWARGS: def test_char_prompts_and_res_node_prompts(self, node_with_prompts): assert node_with_prompts.get_job_kwargs() == dict( - inventory=node_with_prompts.inventory, - 
credential=node_with_prompts.credential, + inventory=node_with_prompts.inventory.pk, + credential=node_with_prompts.credential.pk, **example_prompts) def test_reject_some_node_prompts(self, node_with_prompts): node_with_prompts.unified_job_template.ask_inventory_on_launch = False node_with_prompts.unified_job_template.ask_job_type_on_launch = False - expect_kwargs = dict(inventory=node_with_prompts.inventory, - credential=node_with_prompts.credential, + expect_kwargs = dict(inventory=node_with_prompts.inventory.pk, + credential=node_with_prompts.credential.pk, **example_prompts) expect_kwargs.pop('inventory') expect_kwargs.pop('job_type') @@ -153,7 +153,7 @@ class TestWorkflowWarnings: Tests of warnings that show user errors in the construction of a workflow """ - def test_warn_project_node_no_prompts(self, node_no_prompts, project_unit): + def test_no_warn_project_node_no_prompts(self, node_no_prompts, project_unit): node_no_prompts.unified_job_template = project_unit assert node_no_prompts.get_prompts_warnings() == {} @@ -162,6 +162,9 @@ class TestWorkflowWarnings: assert 'ignored' in node_with_prompts.get_prompts_warnings() assert 'all' in node_with_prompts.get_prompts_warnings()['ignored'] + def test_no_warn_accept_all_prompts(self, node_with_prompts): + assert node_with_prompts.get_prompts_warnings() == {} + def test_warn_reject_some_prompts(self, node_with_prompts): node_with_prompts.unified_job_template.ask_credential_on_launch = False node_with_prompts.unified_job_template.ask_job_type_on_launch = False @@ -170,6 +173,15 @@ class TestWorkflowWarnings: assert 'credential' in node_with_prompts.get_prompts_warnings()['ignored'] assert len(node_with_prompts.get_prompts_warnings()['ignored']) == 2 + def test_warn_scan_errors_node_prompts(self, node_with_prompts): + node_with_prompts.unified_job_template.job_type = 'scan' + node_with_prompts.job_type = 'run' + node_with_prompts.inventory = Inventory(name='different-inventory', pk=23) + assert 'ignored' in node_with_prompts.get_prompts_warnings() + assert 'job_type' in node_with_prompts.get_prompts_warnings()['ignored'] + assert 'inventory' in node_with_prompts.get_prompts_warnings()['ignored'] + assert len(node_with_prompts.get_prompts_warnings()['ignored']) == 2 + def test_warn_missing_fields(self, node_no_prompts): node_no_prompts.inventory = None assert 'missing' in node_no_prompts.get_prompts_warnings() From 6f7e61a5be672291ab12b536b65f4e91a9517a33 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 28 Sep 2016 09:09:26 -0400 Subject: [PATCH 09/24] bump migration number --- ...orkflow_rbac_prompts.py => 0037_v310_workflow_rbac_prompts.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename awx/main/migrations/{0036_v310_workflow_rbac_prompts.py => 0037_v310_workflow_rbac_prompts.py} (100%) diff --git a/awx/main/migrations/0036_v310_workflow_rbac_prompts.py b/awx/main/migrations/0037_v310_workflow_rbac_prompts.py similarity index 100% rename from awx/main/migrations/0036_v310_workflow_rbac_prompts.py rename to awx/main/migrations/0037_v310_workflow_rbac_prompts.py From 8036e294dd156f69e0f6f43b31ecb78d9fe99888 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 28 Sep 2016 09:32:16 -0400 Subject: [PATCH 10/24] move vars validation method into validators.py --- awx/api/serializers.py | 15 +-------------- awx/main/validators.py | 23 +++++++++++++++++++++++ 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 35d6c2348a..b373e492d7 100644 --- 
a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -40,6 +40,7 @@ from awx.main.models import * # noqa from awx.main.access import get_user_capabilities from awx.main.fields import ImplicitRoleField from awx.main.utils import get_type_for_model, get_model_for_type, build_url, timestamp_apiformat, camelcase_to_underscore, getattrd +from awx.main.validators import vars_validate_or_raise from awx.conf.license import feature_enabled from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, EncryptedPasswordField, VerbatimField @@ -110,20 +111,6 @@ def reverse_gfk(content_object): camelcase_to_underscore(content_object.__class__.__name__): content_object.get_absolute_url() } -def vars_validate_or_raise(vars_str): - # vars must be blank, a valid JSON or YAML dict, or ... - try: - json.loads((vars_str or '').strip() or '{}') - return vars_str - except ValueError: - pass - try: - yaml.safe_load(vars_str) - return vars_str - except yaml.YAMLError: - pass - raise serializers.ValidationError('Must be valid JSON or YAML.') - class BaseSerializerMetaclass(serializers.SerializerMetaclass): ''' diff --git a/awx/main/validators.py b/awx/main/validators.py index ca7c851772..438fb98bcd 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -4,11 +4,16 @@ # Python import base64 import re +import yaml +import json # Django from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError +# REST framework +from rest_framework.serializers import ValidationError as RestValidationError + def validate_pem(data, min_keys=0, max_keys=None, min_certs=0, max_certs=None): """ @@ -166,3 +171,21 @@ def validate_ssh_private_key(data): credential. """ return validate_pem(data, min_keys=1) + +def vars_validate_or_raise(vars_str): + """ + Validate that fields like extra_vars or variables on resources like + job templates, inventories, or hosts are either an acceptable + blank string or a valid JSON or YAML dict + """ + try: + json.loads((vars_str or '').strip() or '{}') + return vars_str + except ValueError: + pass + try: + yaml.safe_load(vars_str) + return vars_str + except yaml.YAMLError: + pass + raise RestValidationError('Must be valid JSON or YAML.') From 78fc6df59d7991658cb4af7b8c5062fc9207266a Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 28 Sep 2016 14:34:10 -0400 Subject: [PATCH 11/24] fix up the PATCH scenario with the char_prompts in WFJT node --- awx/api/serializers.py | 17 +++++++-- .../0037_v310_workflow_rbac_prompts.py | 2 +- .../serializers/test_workflow_serializers.py | 36 +++++++++++++++++++ 3 files changed, 52 insertions(+), 3 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index b373e492d7..0254edcb3c 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2270,14 +2270,27 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): def to_internal_value(self, data): internal_value = super(WorkflowNodeBaseSerializer, self).to_internal_value(data) - char_prompts = self.extract_char_prompts(data) + view = self.context.get('view', None) + request_method = None + if view and view.request: + request_method = view.request.method + if request_method in ['PATCH']: + obj = view.get_object() + char_prompts = copy.copy(obj.char_prompts) + char_prompts.update(self.extract_char_prompts(data)) + else: + char_prompts = self.extract_char_prompts(data) + for fd in copy.copy(char_prompts): + if char_prompts[fd] is None: + char_prompts.pop(fd) internal_value['char_prompts'] = 
char_prompts return internal_value def extract_char_prompts(self, data): char_prompts = {} for fd in ['job_type', 'job_tags', 'skip_tags', 'limit']: - if data.get(fd, None): + # Accept null values, if given + if fd in data: char_prompts[fd] = data[fd] return char_prompts diff --git a/awx/main/migrations/0037_v310_workflow_rbac_prompts.py b/awx/main/migrations/0037_v310_workflow_rbac_prompts.py index d126ac9c1d..c134ed36fc 100644 --- a/awx/main/migrations/0037_v310_workflow_rbac_prompts.py +++ b/awx/main/migrations/0037_v310_workflow_rbac_prompts.py @@ -10,7 +10,7 @@ import awx.main.fields class Migration(migrations.Migration): dependencies = [ - ('main', '0035_v310_jobevent_uuid'), + ('main', '0036_v310_remove_tower_settings'), ] operations = [ diff --git a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py index 371b02c7b8..3d86952f8d 100644 --- a/awx/main/tests/unit/api/serializers/test_workflow_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_workflow_serializers.py @@ -104,6 +104,42 @@ class TestWorkflowJobTemplateNodeSerializerGetRelated(): assert 'workflow_job_template' not in related +class FakeView: + def __init__(self, obj): + self.obj = obj + + def get_object(self): + return self.obj + +class FakeRequest: + pass + +class TestWorkflowJobTemplateNodeSerializerCharPrompts(): + + @pytest.fixture + def WFJT_serializer(self): + serializer = WorkflowJobTemplateNodeSerializer() + node = WorkflowJobTemplateNode(pk=1) + node.char_prompts = {'limit': 'webservers'} + view = FakeView(node) + view.request = FakeRequest() + view.request.method = "PATCH" + serializer.context = {'view': view} + return serializer + + def test_change_single_field(self, WFJT_serializer): + "Test that a single prompt field can be changed without affecting other fields" + internal_value = WFJT_serializer.to_internal_value({'job_type': 'check'}) + assert internal_value['char_prompts']['job_type'] == 'check' + assert internal_value['char_prompts']['limit'] == 'webservers' + + def test_null_single_field(self, WFJT_serializer): + "Test that a single prompt field can be removed without affecting other fields" + internal_value = WFJT_serializer.to_internal_value({'job_type': None}) + assert 'job_type' not in internal_value['char_prompts'] + assert internal_value['char_prompts']['limit'] == 'webservers' + + @mock.patch('awx.api.serializers.WorkflowNodeBaseSerializer.get_related', lambda x,y: {}) class TestWorkflowJobNodeSerializerGetRelated(): @pytest.fixture From 37270bfb13760f65d0e10a48e1b52862900645a7 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 28 Sep 2016 15:14:27 -0400 Subject: [PATCH 12/24] block system JT from being used in workflows --- awx/api/serializers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 0254edcb3c..1bdd81c2fc 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2303,8 +2303,9 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): "job_type": "%s is not a valid job type. The choices are %s." 
% ( attrs['char_prompts']['job_type'], job_types)}) ujt_obj = attrs.get('unified_job_template', None) - if isinstance(ujt_obj, WorkflowJobTemplate): - raise serializers.ValidationError({"unified_job_template": "Can not nest Workflow Job Templates inside of Workflow Job Templates"}) + if isinstance(ujt_obj, (WorkflowJobTemplate, SystemJobTemplate)): + raise serializers.ValidationError({ + "unified_job_template": "Can not nest a %s inside a WorkflowJobTemplate" % ujt_obj.__class__.__name__}) return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs) class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): From 2776a56043cf462a0b19a78716b89144038d40ff Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Fri, 30 Sep 2016 09:03:31 -0400 Subject: [PATCH 13/24] bump migration after another one was merged --- ...orkflow_rbac_prompts.py => 0038_v310_workflow_rbac_prompts.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename awx/main/migrations/{0037_v310_workflow_rbac_prompts.py => 0038_v310_workflow_rbac_prompts.py} (100%) diff --git a/awx/main/migrations/0037_v310_workflow_rbac_prompts.py b/awx/main/migrations/0038_v310_workflow_rbac_prompts.py similarity index 100% rename from awx/main/migrations/0037_v310_workflow_rbac_prompts.py rename to awx/main/migrations/0038_v310_workflow_rbac_prompts.py From 020c66f6ce5a059813567549bdefab1d6f468d7d Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 3 Oct 2016 09:56:50 -0400 Subject: [PATCH 14/24] make validate_license common to all can_start methods --- awx/main/access.py | 38 ++++++++++--------- .../functional/api/test_rbac_displays.py | 11 +++++- 2 files changed, 31 insertions(+), 18 deletions(-) diff --git a/awx/main/access.py b/awx/main/access.py index 2be67f0849..a0dd395bda 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -275,8 +275,10 @@ class BaseAccess(object): access_method = getattr(self, "can_%s" % method) if method in ['change']: # 3 args user_capabilities[display_method] = access_method(obj, data) - elif method in ['delete', 'start', 'run_ad_hoc_commands']: # 2 args + elif method in ['delete', 'run_ad_hoc_commands']: user_capabilities[display_method] = access_method(obj) + elif method in ['start']: + user_capabilities[display_method] = access_method(obj, validate_license=False) elif method in ['add']: # 2 args with data user_capabilities[display_method] = access_method(data) elif method in ['attach', 'unattach']: # parent/sub-object call @@ -609,10 +611,10 @@ class GroupAccess(BaseAccess): "active_jobs": active_jobs}) return True - def can_start(self, obj): + def can_start(self, obj, validate_license=True): # Used as another alias to inventory_source start access for user_capabilities if obj and obj.inventory_source: - return self.user.can_access(InventorySource, 'start', obj.inventory_source) + return self.user.can_access(InventorySource, 'start', obj.inventory_source, validate_license=validate_license) return False class InventorySourceAccess(BaseAccess): @@ -651,7 +653,7 @@ class InventorySourceAccess(BaseAccess): else: return False - def can_start(self, obj): + def can_start(self, obj, validate_license=True): if obj and obj.group: return obj.can_update and self.user in obj.group.inventory.update_role elif obj and obj.inventory: @@ -683,11 +685,11 @@ class InventoryUpdateAccess(BaseAccess): # Inventory cascade deletes to inventory update, descends from org admin return self.user in obj.inventory_source.inventory.admin_role - def can_start(self, obj): + def can_start(self, obj, validate_license=True): # For 
relaunching if obj and obj.inventory_source: access = InventorySourceAccess(self.user) - return access.can_start(obj.inventory_source) + return access.can_start(obj.inventory_source, validate_license=validate_license) return False @check_superuser @@ -882,7 +884,7 @@ class ProjectAccess(BaseAccess): return True @check_superuser - def can_start(self, obj): + def can_start(self, obj, validate_license=True): return obj and self.user in obj.update_role class ProjectUpdateAccess(BaseAccess): @@ -911,7 +913,7 @@ class ProjectUpdateAccess(BaseAccess): # Project updates cascade delete with project, admin role descends from org admin return self.user in obj.project.admin_role - def can_start(self, obj): + def can_start(self, obj, validate_license=True): # for relaunching if obj and obj.project: return self.user in obj.project.update_role @@ -1192,8 +1194,9 @@ class JobAccess(BaseAccess): return True return False - def can_start(self, obj): - self.check_license() + def can_start(self, obj, validate_license=True): + if validate_license: + self.check_license() # A super user can relaunch a job if self.user.is_superuser: @@ -1227,7 +1230,7 @@ class SystemJobTemplateAccess(BaseAccess): model = SystemJobTemplate - def can_start(self, obj): + def can_start(self, obj, validate_license=True): return self.can_read(obj) class SystemJobAccess(BaseAccess): @@ -1236,7 +1239,7 @@ class SystemJobAccess(BaseAccess): ''' model = SystemJob - def can_start(self, obj): + def can_start(self, obj, validate_license=True): return False # no relaunching of system jobs # TODO: @@ -1424,11 +1427,12 @@ class AdHocCommandAccess(BaseAccess): inventory_qs = Inventory.accessible_objects(self.user, 'read_role') return qs.filter(inventory__in=inventory_qs) - def can_add(self, data): + def can_add(self, data, validate_license=True): if not data: # So the browseable API will work return True - self.check_license() + if validate_license: + self.check_license() # If a credential is provided, the user should have use access to it. 
credential_pk = get_pk_from_dict(data, 'credential') @@ -1454,11 +1458,11 @@ class AdHocCommandAccess(BaseAccess): def can_delete(self, obj): return obj.inventory is not None and self.user in obj.inventory.organization.admin_role - def can_start(self, obj): + def can_start(self, obj, validate_license=True): return self.can_add({ 'credential': obj.credential_id, 'inventory': obj.inventory_id, - }) + }, validate_license=validate_license) def can_cancel(self, obj): if not obj.can_cancel: @@ -1749,7 +1753,7 @@ class NotificationTemplateAccess(BaseAccess): return self.can_change(obj, None) @check_superuser - def can_start(self, obj): + def can_start(self, obj, validate_license=True): if obj.organization is None: return False return self.user in obj.organization.admin_role diff --git a/awx/main/tests/functional/api/test_rbac_displays.py b/awx/main/tests/functional/api/test_rbac_displays.py index 45b4a8f832..c92a7190ff 100644 --- a/awx/main/tests/functional/api/test_rbac_displays.py +++ b/awx/main/tests/functional/api/test_rbac_displays.py @@ -14,7 +14,7 @@ from awx.api.serializers import JobTemplateSerializer # This file covers special-cases of displays of user_capabilities # general functionality should be covered fully by unit tests, see: -# awx/main/tests/unit/api/test_serializers.py :: +# awx/main/tests/unit/api/serializers/test_job_template_serializers.py :: # TestJobTemplateSerializerGetSummaryFields.test_copy_edit_standard # awx/main/tests/unit/test_access.py :: # test_user_capabilities_method @@ -327,3 +327,12 @@ def test_group_update_capabilities_impossible(group, inventory_source, admin_use capabilities = get_user_capabilities(admin_user, group, method_list=['start']) assert not capabilities['start'] +@pytest.mark.django_db +def test_license_check_not_called(mocker, job_template, project, org_admin, get): + job_template.project = project + job_template.save() # need this to make the JT visible + mock_license_check = mocker.MagicMock() + with mocker.patch('awx.main.access.BaseAccess.check_license', mock_license_check): + get(reverse('api:job_template_detail', args=[job_template.pk]), org_admin, expect=200) + assert not mock_license_check.called + From 959342a5fd33f3b38d294c711b09d23f088e94b7 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Mon, 3 Oct 2016 10:39:59 -0400 Subject: [PATCH 15/24] Fixing up unit tests from HA/Clustering work * Also purge old fact migration tests * Upgrade some services on various container images --- awx/main/tests/base.py | 57 +++++++-- .../tests/functional/migrations/__init__.py | 0 .../tests/functional/migrations/conftest.py | 84 ------------- .../tests/functional/migrations/test_fact.py | 63 ---------- awx/main/tests/job_base.py | 4 +- .../tests/old/commands/commands_monolithic.py | 8 +- .../old/commands/run_socketio_service.py | 116 ------------------ awx/main/tests/old/inventory.py | 4 +- awx/main/tests/old/schedules.py | 4 +- awx/main/tests/old/scripts.py | 4 +- docs/notification_system.md | 2 - tools/docker-compose/Dockerfile | 2 +- tools/docker-compose/unit-tests/Dockerfile | 1 + 13 files changed, 59 insertions(+), 290 deletions(-) delete mode 100644 awx/main/tests/functional/migrations/__init__.py delete mode 100644 awx/main/tests/functional/migrations/conftest.py delete mode 100644 awx/main/tests/functional/migrations/test_fact.py delete mode 100644 awx/main/tests/old/commands/run_socketio_service.py diff --git a/awx/main/tests/base.py b/awx/main/tests/base.py index 21587c8ded..699e9885e2 100644 --- a/awx/main/tests/base.py +++ 
b/awx/main/tests/base.py @@ -41,6 +41,47 @@ TEST_PLAYBOOK = '''- hosts: mygroup command: test 1 = 1 ''' +class QueueTestMixin(object): + def start_queue(self): + self.start_rabbit() + receiver = CallbackReceiver() + self.queue_process = Process(target=receiver.run_subscriber, + args=(False,)) + self.queue_process.start() + + def terminate_queue(self): + if hasattr(self, 'queue_process'): + self.queue_process.terminate() + self.stop_rabbit() + + def start_rabbit(self): + if not getattr(self, 'redis_process', None): + # CentOS 6.5 rabbitmq-server is runnable by a non-root user but is not in a normal user's path by default + env = dict(os.environ) + env['PATH'] = '%s:/usr/sbin/' % env['PATH'] + env['RABBITMQ_NODENAME'] = 'towerunittest' + env['RABBITMQ_NODE_PORT'] = '55672' + self.redis_process = Popen('rabbitmq-server > /dev/null', + shell=True, executable='/bin/bash', + env=env) + + def stop_rabbit(self): + if getattr(self, 'redis_process', None): + self.redis_process.kill() + self.redis_process = None + + +# The observed effect of not calling terminate_queue() after calling start_queue() is +# a hang when test cleanup deletes the database. Thus, to ensure terminate_queue() is called +# whenever start_queue() is called, just inherit from this class when you want to use the queue. +class QueueStartStopTestMixin(QueueTestMixin): + def setUp(self): + super(QueueStartStopTestMixin, self).setUp() + self.start_queue() + + def tearDown(self): + super(QueueStartStopTestMixin, self).tearDown() + self.terminate_queue() class MockCommonlySlowTestMixin(object): def __init__(self, *args, **kwargs): @@ -87,17 +128,9 @@ class BaseTestMixin(MockCommonlySlowTestMixin): # Set flag so that task chain works with unit tests. settings.CELERY_UNIT_TEST = True settings.SYSTEM_UUID='00000000-0000-0000-0000-000000000000' - settings.BROKER_URL='redis://localhost:16379/' + settings.BROKER_URL='redis://localhost:55672/' + settings.CALLBACK_QUEUE = 'callback_tasks_unit' - # Create unique random consumer and queue ports for zeromq callback. - if settings.CALLBACK_CONSUMER_PORT: - callback_port = random.randint(55700, 55799) - settings.CALLBACK_CONSUMER_PORT = 'tcp://127.0.0.1:%d' % callback_port - os.environ['CALLBACK_CONSUMER_PORT'] = settings.CALLBACK_CONSUMER_PORT - callback_queue_path = '/tmp/callback_receiver_test_%d.ipc' % callback_port - self._temp_paths.append(callback_queue_path) - settings.CALLBACK_QUEUE_PORT = 'ipc://%s' % callback_queue_path - settings.TASK_COMMAND_PORT = 'ipc:///tmp/task_command_receiver_%d.ipc' % callback_port # Disable socket notifications for unit tests. settings.SOCKETIO_NOTIFICATION_PORT = None # Make temp job status directory for unit tests. 
@@ -140,7 +173,7 @@ class BaseTestMixin(MockCommonlySlowTestMixin): rnd_str = '____' + str(random.randint(1, 9999999)) return __name__ + '-generated-' + string + rnd_str - def create_test_license_file(self, instance_count=10000, license_date=int(time.time() + 3600), features=None): + def create_test_license_file(self, instance_count=10000, license_date=int(time.time() + 3600), features={}): settings.LICENSE = TaskEnhancer( company_name='AWX', contact_name='AWX Admin', @@ -343,7 +376,7 @@ class BaseTestMixin(MockCommonlySlowTestMixin): return cred def setup_instances(self): - instance = Instance(uuid=settings.SYSTEM_UUID, primary=True, hostname='127.0.0.1') + instance = Instance(uuid=settings.SYSTEM_UUID, hostname='127.0.0.1') instance.save() def setup_users(self, just_super_user=False): diff --git a/awx/main/tests/functional/migrations/__init__.py b/awx/main/tests/functional/migrations/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/awx/main/tests/functional/migrations/conftest.py b/awx/main/tests/functional/migrations/conftest.py deleted file mode 100644 index 0901f548d3..0000000000 --- a/awx/main/tests/functional/migrations/conftest.py +++ /dev/null @@ -1,84 +0,0 @@ -# Python -import pytest -from datetime import timedelta - -# Django -from django.utils import timezone -from django.conf import settings - -# AWX -from awx.fact.models.fact import Fact, FactHost - -# MongoEngine -from mongoengine.connection import ConnectionError - -@pytest.fixture(autouse=True) -def mongo_db(request): - marker = request.keywords.get('mongo_db', None) - if marker: - # Drop mongo database - try: - db = Fact._get_db() - db.connection.drop_database(settings.MONGO_DB) - except ConnectionError: - raise - -@pytest.fixture -def inventories(organization): - def rf(inventory_count=1): - invs = [] - for i in xrange(0, inventory_count): - inv = organization.inventories.create(name="test-inv-%d" % i, description="test-inv-desc") - invs.append(inv) - return invs - return rf - -''' -hosts naming convension should align with hosts_mongo -''' -@pytest.fixture -def hosts(organization): - def rf(host_count=1, inventories=[]): - hosts = [] - for inv in inventories: - for i in xrange(0, host_count): - name = '%s-host-%s' % (inv.name, i) - host = inv.hosts.create(name=name) - hosts.append(host) - return hosts - return rf - -@pytest.fixture -def hosts_mongo(organization): - def rf(host_count=1, inventories=[]): - hosts = [] - for inv in inventories: - for i in xrange(0, host_count): - name = '%s-host-%s' % (inv.name, i) - (host, created) = FactHost.objects.get_or_create(hostname=name, inventory_id=inv.id) - hosts.append(host) - return hosts - return rf - -@pytest.fixture -def fact_scans(organization, fact_ansible_json, fact_packages_json, fact_services_json): - def rf(fact_scans=1, inventories=[], timestamp_epoch=timezone.now()): - facts_json = {} - facts = [] - module_names = ['ansible', 'services', 'packages'] - - facts_json['ansible'] = fact_ansible_json - facts_json['packages'] = fact_packages_json - facts_json['services'] = fact_services_json - - for inv in inventories: - for host_obj in FactHost.objects.filter(inventory_id=inv.id): - timestamp_current = timestamp_epoch - for i in xrange(0, fact_scans): - for module_name in module_names: - facts.append(Fact.add_fact(timestamp_current, facts_json[module_name], host_obj, module_name)) - timestamp_current += timedelta(days=1) - return facts - return rf - - diff --git a/awx/main/tests/functional/migrations/test_fact.py 
b/awx/main/tests/functional/migrations/test_fact.py deleted file mode 100644 index 76dfcc4a40..0000000000 --- a/awx/main/tests/functional/migrations/test_fact.py +++ /dev/null @@ -1,63 +0,0 @@ -import pytest -import datetime - -from django.apps import apps -from django.conf import settings - -from awx.main.models.inventory import Host -from awx.main.models.fact import Fact - -from awx.main.migrations import _system_tracking as system_tracking - -def micro_to_milli(micro): - return micro - (((int)(micro / 1000)) * 1000) - -@pytest.mark.skipif(not getattr(settings, 'MONGO_DB', None), reason="MongoDB not configured") -@pytest.mark.django_db -@pytest.mark.mongo_db -def test_migrate_facts(inventories, hosts, hosts_mongo, fact_scans): - inventory_objs = inventories(2) - hosts(2, inventory_objs) - hosts_mongo(2, inventory_objs) - facts_known = fact_scans(2, inventory_objs) - - (migrated_count, not_migrated_count) = system_tracking.migrate_facts(apps, None) - # 4 hosts w/ 2 fact scans each, 3 modules each scan - assert migrated_count == 24 - assert not_migrated_count == 0 - - - for fact_mongo, fact_version in facts_known: - host = Host.objects.get(inventory_id=fact_mongo.host.inventory_id, name=fact_mongo.host.hostname) - t = fact_mongo.timestamp - datetime.timedelta(microseconds=micro_to_milli(fact_mongo.timestamp.microsecond)) - fact = Fact.objects.filter(host_id=host.id, timestamp=t, module=fact_mongo.module) - - assert len(fact) == 1 - assert fact[0] is not None - -@pytest.mark.skipif(not getattr(settings, 'MONGO_DB', None), reason="MongoDB not configured") -@pytest.mark.django_db -@pytest.mark.mongo_db -def test_migrate_facts_hostname_does_not_exist(inventories, hosts, hosts_mongo, fact_scans): - inventory_objs = inventories(2) - host_objs = hosts(1, inventory_objs) - hosts_mongo(2, inventory_objs) - facts_known = fact_scans(2, inventory_objs) - - (migrated_count, not_migrated_count) = system_tracking.migrate_facts(apps, None) - assert migrated_count == 12 - assert not_migrated_count == 12 - - - for fact_mongo, fact_version in facts_known: - # Facts that don't match the only host will not be migrated - if fact_mongo.host.hostname != host_objs[0].name: - continue - - host = Host.objects.get(inventory_id=fact_mongo.host.inventory_id, name=fact_mongo.host.hostname) - t = fact_mongo.timestamp - datetime.timedelta(microseconds=micro_to_milli(fact_mongo.timestamp.microsecond)) - fact = Fact.objects.filter(host_id=host.id, timestamp=t, module=fact_mongo.module) - - assert len(fact) == 1 - assert fact[0] is not None - diff --git a/awx/main/tests/job_base.py b/awx/main/tests/job_base.py index d215c0fb25..76460e5b68 100644 --- a/awx/main/tests/job_base.py +++ b/awx/main/tests/job_base.py @@ -616,12 +616,12 @@ class BaseJobTestMixin(BaseTestMixin): def setUp(self): super(BaseJobTestMixin, self).setUp() - self.start_redis() + self.start_rabbit() self.setup_instances() self.populate() self.start_queue() def tearDown(self): super(BaseJobTestMixin, self).tearDown() - self.stop_redis() + self.stop_rabbit() self.terminate_queue() diff --git a/awx/main/tests/old/commands/commands_monolithic.py b/awx/main/tests/old/commands/commands_monolithic.py index 39632673c1..28e0d79db1 100644 --- a/awx/main/tests/old/commands/commands_monolithic.py +++ b/awx/main/tests/old/commands/commands_monolithic.py @@ -389,13 +389,13 @@ class CleanupActivityStreamTest(BaseCommandMixin, BaseTest): ''' def setUp(self): - self.start_redis() super(CleanupActivityStreamTest, self).setUp() + self.start_rabbit() 
self.create_test_inventories() def tearDown(self): + self.stop_rabbit() super(CleanupActivityStreamTest, self).tearDown() - self.stop_redis() def test_cleanup(self): # Should already have entries due to test case setup. With no @@ -457,7 +457,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest): def setUp(self): super(InventoryImportTest, self).setUp() - self.start_redis() + self.start_rabbit() self.setup_instances() self.create_test_inventories() self.create_test_ini() @@ -465,7 +465,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest): def tearDown(self): super(InventoryImportTest, self).tearDown() - self.stop_redis() + self.stop_rabbit() def create_test_ini(self, inv_dir=None, ini_content=None): ini_content = ini_content or TEST_INVENTORY_INI diff --git a/awx/main/tests/old/commands/run_socketio_service.py b/awx/main/tests/old/commands/run_socketio_service.py deleted file mode 100644 index be882d6b20..0000000000 --- a/awx/main/tests/old/commands/run_socketio_service.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved - -# Python -from mock import MagicMock, Mock - -# Django -from django.test import SimpleTestCase - -# AWX -from awx.fact.models.fact import * # noqa -from awx.main.management.commands.run_socketio_service import SocketSessionManager, SocketSession, SocketController - -__all__ = ['SocketSessionManagerUnitTest', 'SocketControllerUnitTest',] - -class WeakRefable(): - pass - -class SocketSessionManagerUnitTest(SimpleTestCase): - - def setUp(self): - self.session_manager = SocketSessionManager() - super(SocketSessionManagerUnitTest, self).setUp() - - def create_sessions(self, count, token_key=None): - self.sessions = [] - self.count = count - for i in range(0, count): - self.sessions.append(SocketSession(i, token_key or i, WeakRefable())) - self.session_manager.add_session(self.sessions[i]) - - def test_multiple_session_diff_token(self): - self.create_sessions(10) - - for s in self.sessions: - self.assertIn(s.token_key, self.session_manager.socket_session_token_key_map) - self.assertEqual(s, self.session_manager.socket_session_token_key_map[s.token_key][s.session_id]) - - - def test_multiple_session_same_token(self): - self.create_sessions(10, token_key='foo') - - sessions_dict = self.session_manager.lookup("foo") - self.assertEqual(len(sessions_dict), 10) - for s in self.sessions: - self.assertIn(s.session_id, sessions_dict) - self.assertEqual(s, sessions_dict[s.session_id]) - - def test_prune_sessions_max(self): - self.create_sessions(self.session_manager.SESSIONS_MAX + 10) - - self.assertEqual(len(self.session_manager.socket_sessions), self.session_manager.SESSIONS_MAX) - - -class SocketControllerUnitTest(SimpleTestCase): - - def setUp(self): - self.socket_controller = SocketController(SocketSessionManager()) - server = Mock() - self.socket_controller.set_server(server) - super(SocketControllerUnitTest, self).setUp() - - def create_clients(self, count, token_key=None): - self.sessions = [] - self.sockets =[] - self.count = count - self.sockets_dict = {} - for i in range(0, count): - if isinstance(token_key, list): - token_key_actual = token_key[i] - else: - token_key_actual = token_key or i - socket = MagicMock(session=dict()) - socket_session = SocketSession(i, token_key_actual, socket) - self.sockets.append(socket) - self.sessions.append(socket_session) - self.sockets_dict[i] = socket - self.socket_controller.add_session(socket_session) - - socket.session['socket_session'] = socket_session - 
socket.send_packet = Mock() - self.socket_controller.server.sockets = self.sockets_dict - - def test_broadcast_packet(self): - self.create_clients(10) - packet = { - "hello": "world" - } - self.socket_controller.broadcast_packet(packet) - for s in self.sockets: - s.send_packet.assert_called_with(packet) - - def test_send_packet(self): - self.create_clients(5, token_key=[0, 1, 2, 3, 4]) - packet = { - "hello": "world" - } - self.socket_controller.send_packet(packet, 2) - self.assertEqual(0, len(self.sockets[0].send_packet.mock_calls)) - self.assertEqual(0, len(self.sockets[1].send_packet.mock_calls)) - self.sockets[2].send_packet.assert_called_once_with(packet) - self.assertEqual(0, len(self.sockets[3].send_packet.mock_calls)) - self.assertEqual(0, len(self.sockets[4].send_packet.mock_calls)) - - def test_send_packet_multiple_sessions_one_token(self): - self.create_clients(5, token_key=[0, 1, 1, 1, 2]) - packet = { - "hello": "world" - } - self.socket_controller.send_packet(packet, 1) - self.assertEqual(0, len(self.sockets[0].send_packet.mock_calls)) - self.sockets[1].send_packet.assert_called_once_with(packet) - self.sockets[2].send_packet.assert_called_once_with(packet) - self.sockets[3].send_packet.assert_called_once_with(packet) - self.assertEqual(0, len(self.sockets[4].send_packet.mock_calls)) - diff --git a/awx/main/tests/old/inventory.py b/awx/main/tests/old/inventory.py index efa952e64a..e7e751532c 100644 --- a/awx/main/tests/old/inventory.py +++ b/awx/main/tests/old/inventory.py @@ -43,7 +43,7 @@ print json.dumps(inventory) class InventoryTest(BaseTest): def setUp(self): - self.start_redis() + self.start_rabbit() super(InventoryTest, self).setUp() self.setup_instances() self.setup_users() @@ -63,7 +63,7 @@ class InventoryTest(BaseTest): def tearDown(self): super(InventoryTest, self).tearDown() - self.stop_redis() + self.stop_rabbit() def test_get_inventory_list(self): url = reverse('api:inventory_list') diff --git a/awx/main/tests/old/schedules.py b/awx/main/tests/old/schedules.py index 6433ee1351..916df4e89d 100644 --- a/awx/main/tests/old/schedules.py +++ b/awx/main/tests/old/schedules.py @@ -50,7 +50,7 @@ class ScheduleTest(BaseTest): def setUp(self): super(ScheduleTest, self).setUp() - self.start_redis() + self.start_rabbit() self.setup_instances() self.setup_users() self.organizations = self.make_organizations(self.super_django_user, 2) @@ -92,7 +92,7 @@ class ScheduleTest(BaseTest): def tearDown(self): super(ScheduleTest, self).tearDown() - self.stop_redis() + self.stop_rabbit() def test_schedules_list(self): url = reverse('api:schedule_list') diff --git a/awx/main/tests/old/scripts.py b/awx/main/tests/old/scripts.py index e4cb90042d..09df77ae45 100644 --- a/awx/main/tests/old/scripts.py +++ b/awx/main/tests/old/scripts.py @@ -62,7 +62,7 @@ class InventoryScriptTest(BaseScriptTest): def setUp(self): super(InventoryScriptTest, self).setUp() - self.start_redis() + self.start_rabbit() self.setup_instances() self.setup_users() self.organizations = self.make_organizations(self.super_django_user, 2) @@ -128,7 +128,7 @@ class InventoryScriptTest(BaseScriptTest): def tearDown(self): super(InventoryScriptTest, self).tearDown() - self.stop_redis() + self.stop_rabbit() def run_inventory_script(self, *args, **options): rest_api_url = self.live_server_url diff --git a/docs/notification_system.md b/docs/notification_system.md index fc8f99b9ee..5b4bb6e768 100644 --- a/docs/notification_system.md +++ b/docs/notification_system.md @@ -1,5 +1,3 @@ -Completion pending unit tests and acceptance 
info and instructions. The following documentation will likely be moved to the feature epic card and reproduced in our development documentation. - # Notification System Overview A Notifier is an instance of a notification type (Email, Slack, Webhook, etc) with a name, description, and a defined configuration (A few examples: Username, password, server, recipients for the Email type. Token and list of channels for Slack. Url and Headers for webhooks) diff --git a/tools/docker-compose/Dockerfile b/tools/docker-compose/Dockerfile index ddd54616bb..efa3f7709c 100644 --- a/tools/docker-compose/Dockerfile +++ b/tools/docker-compose/Dockerfile @@ -11,7 +11,7 @@ RUN yum -y update && yum -y install curl epel-release RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash - RUN yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-3.noarch.rpm ADD tools/docker-compose/proot.repo /etc/yum.repos.d/proot.repo -RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel +RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel rabbitmq-server RUN pip install flake8 pytest==2.9.2 pytest-pythonpath pytest-django pytest-cov pytest-mock dateutils django-debug-toolbar==1.4 pyflakes==1.0.0 virtualenv RUN /usr/bin/ssh-keygen -q -t rsa -N "" -f /root/.ssh/id_rsa RUN mkdir -p /etc/tower diff --git a/tools/docker-compose/unit-tests/Dockerfile b/tools/docker-compose/unit-tests/Dockerfile index e4b4dfb83a..cb95cfc7d8 100644 --- a/tools/docker-compose/unit-tests/Dockerfile +++ b/tools/docker-compose/unit-tests/Dockerfile @@ -20,6 +20,7 @@ RUN yum install -y \ /usr/bin/pg_config \ openldap-devel \ postgresql-devel \ + rabbitmq-server \ libtool-ltdl-devel # NOTE: The following steps work for tower-3.0.0 From 9e3528de31fd3437c4c051d7ad21acf674ebc91e Mon Sep 17 00:00:00 2001 From: Chris Meyers Date: Mon, 3 Oct 2016 10:06:44 -0400 Subject: [PATCH 16/24] change migration name to align with version semantic --- ..._allow_simultaneous.py => 0037_v310_job_allow_simultaneous.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename awx/main/migrations/{0037_job_allow_simultaneous.py => 0037_v310_job_allow_simultaneous.py} (100%) diff --git a/awx/main/migrations/0037_job_allow_simultaneous.py b/awx/main/migrations/0037_v310_job_allow_simultaneous.py similarity index 100% rename from awx/main/migrations/0037_job_allow_simultaneous.py rename to awx/main/migrations/0037_v310_job_allow_simultaneous.py From 3d38637b2d37a4c921aaf5ba2c61dff2de5e09dc Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 3 Oct 2016 11:16:08 -0400 Subject: [PATCH 17/24] update migration name --- awx/main/migrations/0038_v310_workflow_rbac_prompts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/migrations/0038_v310_workflow_rbac_prompts.py 
b/awx/main/migrations/0038_v310_workflow_rbac_prompts.py
index 2778311dd8..6fa55f8469 100644
--- a/awx/main/migrations/0038_v310_workflow_rbac_prompts.py
+++ b/awx/main/migrations/0038_v310_workflow_rbac_prompts.py
@@ -10,7 +10,7 @@ import awx.main.fields
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('main', '0037_job_allow_simultaneous'),
+        ('main', '0037_v310_job_allow_simultaneous'),
     ]
 
     operations = [

From ff4d071be99b69aa681a3d48e1c36f1280dc3c0c Mon Sep 17 00:00:00 2001
From: Matthew Jones
Date: Mon, 3 Oct 2016 14:50:09 -0400
Subject: [PATCH 18/24] Adding/updating documentation for Cluster/HA Tower

---
 docs/clustering.md | 178 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 178 insertions(+)
 create mode 100644 docs/clustering.md

diff --git a/docs/clustering.md b/docs/clustering.md
new file mode 100644
index 0000000000..942ea9a998
--- /dev/null
+++ b/docs/clustering.md
@@ -0,0 +1,178 @@
+## Tower Clustering/HA Overview
+
+Prior to 3.1 the Ansible Tower HA solution was not a true high-availability system. In 3.1 we have rewritten this system entirely with a new focus in mind:
+
+* Each node should be able to act as an entrypoint for UI and API access.
+  This should enable Tower administrators to use load balancers in front of as many nodes as they wish
+  and maintain good data visibility.
+* Each node should be able to join the Tower cluster and expand its ability to execute jobs. This is currently
+  a naive system where jobs can and will run anywhere rather than be directed on where to run. *That* work will
+  be done later when building out the Federation/Rampart system.
+* Provisioning new nodes should be as simple as updating the `inventory` file and re-running the setup playbook.
+* Nodes can be deprovisioned with a simple management command.
+
+It's important to point out a few existing things:
+
+* PostgreSQL is still a standalone instance node and is not clustered. We also won't manage replica configuration or,
+  if the user configures standby replicas, database failover.
+* All nodes should be reachable from all other nodes and they should be able to reach the database. It's also important
+  for the hosts to have a stable address and/or hostname (depending on how you configure the Tower host).
+* RabbitMQ is the cornerstone of Tower's clustering system. A lot of our configuration requirements and behavior are dictated
+  by its needs. Thus we are pretty inflexible about customization beyond what our setup playbook allows. Each Tower node has a
+  deployment of RabbitMQ that will cluster with the other nodes' RabbitMQ instances.
+* Existing old-style HA deployments will be transitioned automatically to the new HA system during the upgrade process.
+
+## Important Changes
+
+* There is no concept of primary/secondary in the new Tower system. *All* systems are primary.
+* The setup playbook has changed to configure RabbitMQ and to give hints about the type of network the hosts are on.
+* The `inventory` file for Tower deployments should be saved/persisted. If new nodes are to be provisioned,
+  the passwords and configuration options as well as host names will need to be available to the installer.
+
+## Concepts and Configuration
+
+### Installation and the Inventory File
+The current standalone node configuration doesn't change for a 3.1 deploy. The inventory file does change in some important ways:
+
+* Since there is no primary/secondary configuration those inventory groups go away and are replaced with a
+  single inventory group `tower`.
+  The `database` group remains for specifying an external postgres, however:
+  ```
+  [tower]
+  hostA
+  hostB
+  hostC
+
+  [database]
+  hostDB
+  ```
+* The `redis_password` field is removed from `[all:vars]`.
+* There are various new fields for RabbitMQ:
+  - `rabbitmq_port=5672` - RabbitMQ is installed on each node and is not optional; it's also not possible to externalize. It is
+    possible to configure what port it listens on, and this setting controls that.
+  - `rabbitmq_vhost=tower` - Tower configures a RabbitMQ virtualhost to isolate itself. This controls that setting.
+  - `rabbitmq_username=tower` and `rabbitmq_password=tower` - Each node will be configured with these values and each node's Tower
+    instance will be configured with them as well. This is similar to our other uses of usernames/passwords.
+  - `rabbitmq_cookie=` - This value is unused in a standalone deployment but is critical for clustered deployments.
+    This acts as the secret key that allows RabbitMQ cluster members to identify each other.
+  - `rabbitmq_use_long_name` - RabbitMQ is pretty sensitive to what each node is named. We are flexible enough to allow FQDNs
+    (host01.example.com), short names (host01), or IP addresses (192.168.5.73). Depending on what is used to identify each host
+    in the `inventory` file, this value may need to be changed. For FQDNs and IP addresses this value needs to be `true`; for short
+    names it should be `false`.
+  - `rabbitmq_enable_manager` - Setting this to `true` will expose the RabbitMQ management web console on each node.
+
+The most important field to call out for variability is `rabbitmq_use_long_name`. It's something we can't detect or provide a
+reasonable default for, so be aware of when it needs to be changed.
+
+Other than `rabbitmq_use_long_name` the defaults are pretty reasonable:
+```
+rabbitmq_port=5672
+rabbitmq_vhost=tower
+rabbitmq_username=tower
+rabbitmq_password=''
+rabbitmq_cookie=cookiemonster
+
+# Needs to be true for FQDNs and IP addresses
+rabbitmq_use_long_name=false
+rabbitmq_enable_manager=false
+```
+
+### Provisioning and Deprovisioning Nodes
+
+* Provisioning
+  Provisioning nodes after installation is supported by updating the `inventory` file and re-running the setup playbook. It's important that this file
+  contain all passwords and information used when installing the cluster, or other nodes may be reconfigured (which could be intentional).
+
+* Deprovisioning
+  Tower does not automatically deprovision nodes, since we can't distinguish between a node that was taken offline intentionally and one that failed.
+  Instead, the procedure for deprovisioning a node is to shut it down (or stop the `ansible-tower-service`) and run the Tower deprovision command:
+
+```
+$ tower-manage deprovision-node
+```
+
+### Status and Monitoring
+
+Tower itself reports as much status as it can via the API at `/api/v1/ping` in order to provide validation of the health
+of the cluster. This includes:
+
+* The node servicing the HTTP request
+* The last heartbeat time of all other nodes in the cluster
+* The state of the job queue and any jobs each node is running
+* The RabbitMQ cluster status
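+
+The quickest check from any node is to query the endpoint directly. A minimal sketch (the hostname is a
+placeholder, and the exact response fields aren't enumerated here, so the payload is just pretty-printed):
+
+```python
+import json
+
+import requests
+
+# Any node can answer this request; comparing responses across nodes helps
+# spot missing heartbeats or a degraded RabbitMQ cluster.
+resp = requests.get('https://hostA.example.com/api/v1/ping/', verify=False)
+resp.raise_for_status()
+print(json.dumps(resp.json(), indent=2))
+```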
+
+### Node Services and Failure Behavior
+
+Each Tower node is made up of several different services working collaboratively:
+
+* HTTP Services - This includes the Tower application itself as well as external web services.
+* Callback Receiver - Receives job events from running Ansible jobs.
+* Celery - The worker queue that processes and runs all jobs.
+* RabbitMQ - The message broker, used as a signaling mechanism for Celery and to propagate event data to the application.
+* Memcached - Local caching service for the node it lives on.
+
+Tower is configured in such a way that if any of these services or their components fail, then all services are restarted. If they fail sufficiently
+often in a short span of time, the entire node will be placed offline in an automated fashion in order to allow remediation without causing unexpected
+behavior.
+
+### Job Runtime Behavior
+
+Ideally a regular user of Tower should not notice any semantic difference in the way jobs are run and reported. Behind the scenes it's worth
+pointing out the differences in how the system behaves.
+
+When a job is submitted from the API interface it gets pushed into the Celery queue on RabbitMQ. A single RabbitMQ node is the responsible master for
+individual queues, but each Tower node will connect to and receive jobs from that queue using a fair scheduling algorithm. Any node in the cluster is just
+as likely to receive the work and execute the task. If a node fails while executing jobs then the work is marked as permanently failed.
+
+As Tower nodes are brought online, the work capacity of the Tower system effectively expands; that capacity is measured as one entire unit (the cluster's capacity).
+Conversely, deprovisioning a node will remove capacity from the cluster.
+
+It's important to note that not all nodes are required to be provisioned with an equal capacity.
+
+Project updates behave differently than they did before. Previously they were ordinary jobs that ran on a single node. It's now important that
+they run successfully on any node that could potentially run a job. Project updates will now fan out to all nodes in the cluster, and their success or
+failure will be conditional upon them succeeding on all nodes.
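+
+As a rough sketch of the job-dispatch wiring described above (this is not Tower's actual settings module; the
+broker URL pieces map onto the `rabbitmq_*` inventory fields earlier in this document, and the task is made up):
+
+```python
+from celery import Celery
+
+# Every node points at the same RabbitMQ vhost, so any node's worker may
+# claim a submitted task.
+app = Celery('tower_sketch', broker='amqp://tower:password@hostA:5672/tower')
+
+# Late acks plus a prefetch of 1 approximate fair dispatch: a worker only
+# takes a new task once it has finished the current one.
+app.conf.task_acks_late = True
+app.conf.worker_prefetch_multiplier = 1
+
+@app.task
+def run_job(job_id):
+    pass  # placeholder for dispatching the actual job run
+```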
+
+## Acceptance Criteria
+
+When verifying acceptance we should ensure the following statements are true
+
+* Tower should install as a standalone node
+* Tower should install in a clustered fashion
+* Provisioning should be supported via the setup playbook
+* De-provisioning should be supported via a management command
+* All jobs, inventory updates, and project updates should run successfully
+* Jobs should be able to run on all hosts
+* Project updates should manifest their data on all hosts simultaneously
+* Tower should be able to reasonably survive the removal of all nodes in the cluster
+* Tower should behave in a predictable fashion during network partitioning
+
+## Testing Considerations
+
+* Basic testing should be able to demonstrate parity with a standalone node for all integration testing.
+* We should test behavior of large and small clusters. I would envision small clusters as 2 - 3 nodes and large
+  clusters as 10 - 15 nodes.
+* Failure testing should involve killing single nodes and killing multiple nodes while the cluster is performing work.
+  Job failures during the time period should be predictable and not catastrophic.
+* Node downtime testing should also include recoverability testing: killing single services and ensuring the system can
+  return itself to a working state.
+* Persistent failure should be tested by killing single services in such a way that the cluster node cannot be recovered,
+  and ensuring that the node is properly taken offline.
+* Network partitioning failures will also be important. In order to test this:
+  - Disallow a single node from communicating with the other nodes but allow it to communicate with the database.
+  - Break the link between nodes such that it forms 2 or more groups where groupA and groupB can't communicate but all nodes
+    can communicate with the database.
+* Crucially, when network partitioning is resolved all nodes should recover into a consistent state.
+
+## Performance Testing
+
+Performance testing should be twofold:
+
+* A large volume of simultaneous jobs.
+* Jobs that generate a large amount of output.
+
+These should also be benchmarked against the same playbooks using the 3.0.X Tower release and a stable Ansible version.
+For a large-volume playbook I might recommend a customer-provided one that we've seen recently:
+
+https://gist.github.com/michelleperz/fe3a0eb4eda888221229730e34b28b89
+
+Against 100+ hosts.

From 0154c41eb2411bf685025d20eb2f3d8f6d7dfd06 Mon Sep 17 00:00:00 2001
From: AlanCoding
Date: Mon, 3 Oct 2016 13:43:44 -0400
Subject: [PATCH 19/24] switch to UJT method of creating new jobs

---
 awx/main/models/workflow.py                        |  26 ++-
 .../tests/unit/models/test_workflow_unit.py        | 162 +++++++++++-------
 2 files changed, 121 insertions(+), 67 deletions(-)

diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py
index 94a5706942..91983ca673 100644
--- a/awx/main/models/workflow.py
+++ b/awx/main/models/workflow.py
@@ -124,6 +124,14 @@ class WorkflowNodeBase(CreatedModifiedModel):
             data['missing'] = missing_dict
         return data
 
+    @classmethod
+    def _get_workflow_job_field_names(cls):
+        '''
+        Return field names that should be copied from template node to job node.
+        '''
+        return ['workflow_job', 'unified_job_template',
+                'inventory', 'credential', 'char_prompts']
+
 class WorkflowJobTemplateNode(WorkflowNodeBase):
     # TODO: Ensure the API forces workflow_job_template being set
     workflow_job_template = models.ForeignKey(
@@ -138,6 +146,18 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
     def get_absolute_url(self):
         return reverse('api:workflow_job_template_node_detail', args=(self.pk,))
 
+    def create_workflow_job_node(self, **kwargs):
+        '''
+        Create a new workflow job node based on this workflow node.
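+
+        Illustrative use (names here are hypothetical): given a saved
+        WorkflowJob ``wj``, calling
+
+            job_node = template_node.create_workflow_job_node(workflow_job=wj)
+
+        copies unified_job_template, inventory, credential, and char_prompts
+        over from the template node, per _get_workflow_job_field_names().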
+ ''' + create_kwargs = {} + for field_name in self._get_workflow_job_field_names(): + if field_name in kwargs: + create_kwargs[field_name] = kwargs[field_name] + elif hasattr(self, field_name): + create_kwargs[field_name] = getattr(self, field_name) + return WorkflowJobNode.objects.create(**create_kwargs) + class WorkflowJobNode(WorkflowNodeBase): job = models.ForeignKey( 'UnifiedJob', @@ -278,11 +298,7 @@ class WorkflowJobInheritNodesMixin(object): Create a WorkflowJobNode for each WorkflowJobTemplateNode ''' def _create_workflow_job_nodes(self, old_nodes): - return [WorkflowJobNode.objects.create( - workflow_job=self, unified_job_template=old_node.unified_job_template, - inventory=old_node.inventory, credential=old_node.credential, - char_prompts=old_node.char_prompts - ) for old_node in old_nodes] + return [old_node.create_workflow_job_node(workflow_job=self) for old_node in old_nodes] def _map_workflow_job_nodes(self, old_nodes, new_nodes): node_ids_map = {} diff --git a/awx/main/tests/unit/models/test_workflow_unit.py b/awx/main/tests/unit/models/test_workflow_unit.py index 5fe7bfebd7..4445a758d9 100644 --- a/awx/main/tests/unit/models/test_workflow_unit.py +++ b/awx/main/tests/unit/models/test_workflow_unit.py @@ -3,7 +3,7 @@ import pytest from awx.main.models.jobs import JobTemplate from awx.main.models import Inventory, Credential, Project from awx.main.models.workflow import ( - WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, + WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin, WorkflowJob, WorkflowJobNode ) @@ -18,15 +18,13 @@ class TestWorkflowJobInheritNodesMixin(): return [WorkflowJobTemplateNode(unified_job_template=job_templates[i]) for i in range(0, 10)] def test__create_workflow_job_nodes(self, mocker, job_template_nodes): - workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobNode.objects.create') + workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobTemplateNode.create_workflow_job_node') mixin = WorkflowJobInheritNodesMixin() mixin._create_workflow_job_nodes(job_template_nodes) for job_template_node in job_template_nodes: - workflow_job_node_create.assert_any_call( - workflow_job=mixin, unified_job_template=job_template_node.unified_job_template, - credential=None, inventory=None, char_prompts={}) + workflow_job_node_create.assert_any_call(workflow_job=mixin) class TestMapWorkflowJobNodes(): @pytest.fixture @@ -89,14 +87,18 @@ def workflow_job_unit(): return WorkflowJob(name='workflow', status='new') @pytest.fixture -def node_no_prompts(workflow_job_unit, job_template_factory): +def workflow_job_template_unit(): + return WorkflowJobTemplate(name='workflow') + +@pytest.fixture +def jt_ask(job_template_factory): # note: factory sets ask_xxxx_on_launch to true for inventory & credential jt = job_template_factory(name='example-jt', persisted=False).job_template jt.ask_job_type_on_launch = True jt.ask_skip_tags_on_launch = True jt.ask_limit_on_launch = True jt.ask_tags_on_launch = True - return WorkflowJobNode(workflow_job=workflow_job_unit, unified_job_template=jt) + return jt @pytest.fixture def project_unit(): @@ -105,13 +107,49 @@ def project_unit(): example_prompts = dict(job_type='check', job_tags='quack', limit='duck', skip_tags='oink') @pytest.fixture -def node_with_prompts(node_no_prompts): - node_no_prompts.char_prompts = example_prompts - inv = Inventory(name='example-inv') - cred = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') - node_no_prompts.inventory = inv - 
node_no_prompts.credential = cred - return node_no_prompts +def job_node_no_prompts(workflow_job_unit, jt_ask): + return WorkflowJobNode(workflow_job=workflow_job_unit, unified_job_template=jt_ask) + +@pytest.fixture +def job_node_with_prompts(job_node_no_prompts): + job_node_no_prompts.char_prompts = example_prompts + job_node_no_prompts.inventory = Inventory(name='example-inv') + job_node_no_prompts.credential = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') + return job_node_no_prompts + +@pytest.fixture +def wfjt_node_no_prompts(workflow_job_template_unit, jt_ask): + return WorkflowJobTemplateNode(workflow_job_template=workflow_job_template_unit, unified_job_template=jt_ask) + +@pytest.fixture +def wfjt_node_with_prompts(wfjt_node_no_prompts): + wfjt_node_no_prompts.char_prompts = example_prompts + wfjt_node_no_prompts.inventory = Inventory(name='example-inv') + wfjt_node_no_prompts.credential = Credential(name='example-inv', kind='ssh', username='asdf', password='asdf') + return wfjt_node_no_prompts + +class TestWorkflowJobCreate: + + def test_create_no_prompts(self, wfjt_node_no_prompts, workflow_job_unit, mocker): + mock_create = mocker.MagicMock() + with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create): + wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit) + mock_create.assert_called_once_with( + char_prompts=wfjt_node_no_prompts.char_prompts, + inventory=None, credential=None, + unified_job_template=wfjt_node_no_prompts.unified_job_template, + workflow_job=workflow_job_unit) + + def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, mocker): + mock_create = mocker.MagicMock() + with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create): + wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit) + mock_create.assert_called_once_with( + char_prompts=wfjt_node_with_prompts.char_prompts, + inventory=wfjt_node_with_prompts.inventory, + credential=wfjt_node_with_prompts.credential, + unified_job_template=wfjt_node_with_prompts.unified_job_template, + workflow_job=workflow_job_unit) class TestWorkflowJobNodeJobKWARGS: """ @@ -119,33 +157,33 @@ class TestWorkflowJobNodeJobKWARGS: launching a new job that corresponds to a workflow node. 
""" - def test_null_kwargs(self, node_no_prompts): - assert node_no_prompts.get_job_kwargs() == {} + def test_null_kwargs(self, job_node_no_prompts): + assert job_node_no_prompts.get_job_kwargs() == {} - def test_inherit_workflow_job_extra_vars(self, node_no_prompts): - workflow_job = node_no_prompts.workflow_job + def test_inherit_workflow_job_extra_vars(self, job_node_no_prompts): + workflow_job = job_node_no_prompts.workflow_job workflow_job.extra_vars = '{"a": 84}' - assert node_no_prompts.get_job_kwargs() == {'extra_vars': {'a': 84}} + assert job_node_no_prompts.get_job_kwargs() == {'extra_vars': {'a': 84}} - def test_char_prompts_and_res_node_prompts(self, node_with_prompts): - assert node_with_prompts.get_job_kwargs() == dict( - inventory=node_with_prompts.inventory.pk, - credential=node_with_prompts.credential.pk, + def test_char_prompts_and_res_node_prompts(self, job_node_with_prompts): + assert job_node_with_prompts.get_job_kwargs() == dict( + inventory=job_node_with_prompts.inventory.pk, + credential=job_node_with_prompts.credential.pk, **example_prompts) - def test_reject_some_node_prompts(self, node_with_prompts): - node_with_prompts.unified_job_template.ask_inventory_on_launch = False - node_with_prompts.unified_job_template.ask_job_type_on_launch = False - expect_kwargs = dict(inventory=node_with_prompts.inventory.pk, - credential=node_with_prompts.credential.pk, + def test_reject_some_node_prompts(self, job_node_with_prompts): + job_node_with_prompts.unified_job_template.ask_inventory_on_launch = False + job_node_with_prompts.unified_job_template.ask_job_type_on_launch = False + expect_kwargs = dict(inventory=job_node_with_prompts.inventory.pk, + credential=job_node_with_prompts.credential.pk, **example_prompts) expect_kwargs.pop('inventory') expect_kwargs.pop('job_type') - assert node_with_prompts.get_job_kwargs() == expect_kwargs + assert job_node_with_prompts.get_job_kwargs() == expect_kwargs - def test_no_accepted_project_node_prompts(self, node_with_prompts, project_unit): - node_with_prompts.unified_job_template = project_unit - assert node_with_prompts.get_job_kwargs() == {} + def test_no_accepted_project_node_prompts(self, job_node_no_prompts, project_unit): + job_node_no_prompts.unified_job_template = project_unit + assert job_node_no_prompts.get_job_kwargs() == {} class TestWorkflowWarnings: @@ -153,38 +191,38 @@ class TestWorkflowWarnings: Tests of warnings that show user errors in the construction of a workflow """ - def test_no_warn_project_node_no_prompts(self, node_no_prompts, project_unit): - node_no_prompts.unified_job_template = project_unit - assert node_no_prompts.get_prompts_warnings() == {} + def test_no_warn_project_node_no_prompts(self, job_node_no_prompts, project_unit): + job_node_no_prompts.unified_job_template = project_unit + assert job_node_no_prompts.get_prompts_warnings() == {} - def test_warn_project_node_reject_all_prompts(self, node_with_prompts, project_unit): - node_with_prompts.unified_job_template = project_unit - assert 'ignored' in node_with_prompts.get_prompts_warnings() - assert 'all' in node_with_prompts.get_prompts_warnings()['ignored'] + def test_warn_project_node_reject_all_prompts(self, job_node_with_prompts, project_unit): + job_node_with_prompts.unified_job_template = project_unit + assert 'ignored' in job_node_with_prompts.get_prompts_warnings() + assert 'all' in job_node_with_prompts.get_prompts_warnings()['ignored'] - def test_no_warn_accept_all_prompts(self, node_with_prompts): - assert 
node_with_prompts.get_prompts_warnings() == {} + def test_no_warn_accept_all_prompts(self, job_node_with_prompts): + assert job_node_with_prompts.get_prompts_warnings() == {} - def test_warn_reject_some_prompts(self, node_with_prompts): - node_with_prompts.unified_job_template.ask_credential_on_launch = False - node_with_prompts.unified_job_template.ask_job_type_on_launch = False - assert 'ignored' in node_with_prompts.get_prompts_warnings() - assert 'job_type' in node_with_prompts.get_prompts_warnings()['ignored'] - assert 'credential' in node_with_prompts.get_prompts_warnings()['ignored'] - assert len(node_with_prompts.get_prompts_warnings()['ignored']) == 2 + def test_warn_reject_some_prompts(self, job_node_with_prompts): + job_node_with_prompts.unified_job_template.ask_credential_on_launch = False + job_node_with_prompts.unified_job_template.ask_job_type_on_launch = False + assert 'ignored' in job_node_with_prompts.get_prompts_warnings() + assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored'] + assert 'credential' in job_node_with_prompts.get_prompts_warnings()['ignored'] + assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2 - def test_warn_scan_errors_node_prompts(self, node_with_prompts): - node_with_prompts.unified_job_template.job_type = 'scan' - node_with_prompts.job_type = 'run' - node_with_prompts.inventory = Inventory(name='different-inventory', pk=23) - assert 'ignored' in node_with_prompts.get_prompts_warnings() - assert 'job_type' in node_with_prompts.get_prompts_warnings()['ignored'] - assert 'inventory' in node_with_prompts.get_prompts_warnings()['ignored'] - assert len(node_with_prompts.get_prompts_warnings()['ignored']) == 2 + def test_warn_scan_errors_node_prompts(self, job_node_with_prompts): + job_node_with_prompts.unified_job_template.job_type = 'scan' + job_node_with_prompts.job_type = 'run' + job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23) + assert 'ignored' in job_node_with_prompts.get_prompts_warnings() + assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored'] + assert 'inventory' in job_node_with_prompts.get_prompts_warnings()['ignored'] + assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2 - def test_warn_missing_fields(self, node_no_prompts): - node_no_prompts.inventory = None - assert 'missing' in node_no_prompts.get_prompts_warnings() - assert 'inventory' in node_no_prompts.get_prompts_warnings()['missing'] - assert 'credential' in node_no_prompts.get_prompts_warnings()['missing'] - assert len(node_no_prompts.get_prompts_warnings()['missing']) == 2 + def test_warn_missing_fields(self, job_node_no_prompts): + job_node_no_prompts.inventory = None + assert 'missing' in job_node_no_prompts.get_prompts_warnings() + assert 'inventory' in job_node_no_prompts.get_prompts_warnings()['missing'] + assert 'credential' in job_node_no_prompts.get_prompts_warnings()['missing'] + assert len(job_node_no_prompts.get_prompts_warnings()['missing']) == 2 From 764ed8d576f8862fa970c6e38975dcd05e8bdecd Mon Sep 17 00:00:00 2001 From: Aaron Tan Date: Mon, 3 Oct 2016 15:37:33 -0400 Subject: [PATCH 20/24] Change JT can_change behavior. 
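
Previously a change check always re-ran the full add check; now a call
that carries no request data reduces to a read check. A paraphrase of the
new logic (see the one-line diff below; build_data_for_change stands in
for the existing code that fills required fields in from the object):

```python
def can_change(self, obj, data):
    # A call without a payload is a pure permission probe.
    if data is None:
        return self.can_read(obj)
    data_for_change = build_data_for_change(obj, data)  # hypothetical helper
    return self.can_read(obj) and self.can_add(data_for_change)
```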
--- awx/main/access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/access.py b/awx/main/access.py index a0dd395bda..a47e1a6445 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1066,7 +1066,7 @@ class JobTemplateAccess(BaseAccess): required_obj = getattr(obj, required_field, None) if required_field not in data_for_change and required_obj is not None: data_for_change[required_field] = required_obj.pk - return self.can_read(obj) and self.can_add(data_for_change) + return self.can_read(obj) and (self.can_add(data_for_change) if data is not None else True) def changes_are_non_sensitive(self, obj, data): ''' From 2be5d2f23f372c18411d1c91606820d914e0c473 Mon Sep 17 00:00:00 2001 From: Aaron Tan Date: Mon, 3 Oct 2016 17:32:43 -0400 Subject: [PATCH 21/24] Unit test added. --- awx/main/tests/functional/test_rbac_job_templates.py | 7 +++++++ awx/main/tests/functional/test_rbac_label.py | 1 - 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/awx/main/tests/functional/test_rbac_job_templates.py b/awx/main/tests/functional/test_rbac_job_templates.py index c8cc2b8502..8905eb21a0 100644 --- a/awx/main/tests/functional/test_rbac_job_templates.py +++ b/awx/main/tests/functional/test_rbac_job_templates.py @@ -240,3 +240,10 @@ def test_job_template_creator_access(project, rando, post): jt_obj = JobTemplate.objects.get(pk=jt_pk) # Creating a JT should place the creator in the admin role assert rando in jt_obj.admin_role + +@pytest.mark.django_db +def test_associate_label(label, user, job_template): + access = JobTemplateAccess(user('joe', False)) + job_template.admin_role.members.add(user('joe', False)) + label.organization.read_role.members.add(user('joe', False)) + assert access.can_attach(job_template, label, 'labels', None) diff --git a/awx/main/tests/functional/test_rbac_label.py b/awx/main/tests/functional/test_rbac_label.py index e425d50908..98daa5cdec 100644 --- a/awx/main/tests/functional/test_rbac_label.py +++ b/awx/main/tests/functional/test_rbac_label.py @@ -61,4 +61,3 @@ def test_label_access_user(label, user): assert access.can_read(label) assert access.can_add({'organization': label.organization.id}) - From e1ace0ba0a5f27f748cdde42c8c0526d5d44b007 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 3 Oct 2016 20:32:11 -0400 Subject: [PATCH 22/24] extend license fix to workflow jobs --- awx/main/access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/access.py b/awx/main/access.py index 7ab299d0e9..b0a15b2751 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1527,7 +1527,7 @@ class WorkflowJobAccess(BaseAccess): return self.user in obj.workflow_job_template.admin_role # TODO: add support for relaunching workflow jobs - def can_start(self, obj): + def can_start(self, obj, validate_license=True): return False def can_cancel(self, obj): From bdfed9c87f59de04ed8c97fe00636a1485ce0345 Mon Sep 17 00:00:00 2001 From: Leigh Johnson Date: Tue, 4 Oct 2016 00:47:03 -0400 Subject: [PATCH 23/24] Escape $ in docker-clean target Before: https://gist.github.com/leigh-johnson/98331b280b1679bd4c0e4b02f7c59b00 After: Yay! 
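
Root cause: Make expands `$` sequences in recipes before the shell ever
runs, so awk's `{print $3}` reached the shell as `{print }` and echoed
whole lines instead of the image ID column. Writing `$$3` makes Make pass
a literal `$3` through to awk.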
---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 86685855a8..01219e9c21 100644
--- a/Makefile
+++ b/Makefile
@@ -771,7 +771,7 @@ MACHINE?=default
 docker-clean:
 	eval $$(docker-machine env $(MACHINE))
 	$(foreach container_id,$(shell docker ps -f name=tools_tower -aq),docker stop $(container_id); docker rm -f $(container_id);)
-	-docker images | grep "tower_devel" | awk '{print $3}' | xargs docker rmi
+	-docker images | grep "tower_devel" | awk '{print $$3}' | xargs docker rmi
 
 docker-refresh: docker-clean docker-compose

From 4b57263adffffdebc0cf384e2354332a53fe6e25 Mon Sep 17 00:00:00 2001
From: Matthew Jones
Date: Tue, 4 Oct 2016 12:22:20 -0400
Subject: [PATCH 24/24] Updates to clustering doc

* Notes about routing/dns
* Mention upgrade testing

---
 docs/clustering.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/clustering.md b/docs/clustering.md
index 942ea9a998..196d7fd6e6 100644
--- a/docs/clustering.md
+++ b/docs/clustering.md
@@ -149,6 +149,11 @@ When verifying acceptance we should ensure the following statements are true
 ## Testing Considerations
 
 * Basic testing should be able to demonstrate parity with a standalone node for all integration testing.
+* Basic playbook testing to verify routing differences, including:
+  - Basic FQDN
+  - Short-name resolution
+  - IP addresses
+  - /etc/hosts static routing information
 * We should test behavior of large and small clusters. I would envision small clusters as 2 - 3 nodes and large
   clusters as 10 - 15 nodes.
@@ -162,6 +167,7 @@ When verifying acceptance we should ensure the following statements are true
   - Break the link between nodes such that it forms 2 or more groups where groupA and groupB can't communicate but all nodes
     can communicate with the database.
 * Crucially, when network partitioning is resolved all nodes should recover into a consistent state.
+* Upgrade testing: verify that behavior before and after the upgrade is the same for the end user.
 
 ## Performance Testing