Merge branch 'release_3.0.0' into devel

* release_3.0.0: (638 commits)
  Disallow deleting job templates when there are jobs running
  M2M fix for project activity stream
  flake8
  Made it so org auditors can see all users if ORG_ADMINS_CAN_SEE_ALL_USERS is true
  made label delete find all associated labels by recursively getting the labels
  Fix up organization default exposed by unit test
  Fix flake8 'not in'
  remove unnecessary fixture from org_credential
  fix label dropdown population on edit of JT
  Allow anyone who can read an inventory to see ad hoc commands run against said inventory
  Added role visibility tests
  Typo
  remove duplicate playbook variable
  specify database on local restore
  do not log postgres restore
  add sudo true when sudoing to a user
  ansible 1.9 support
  filter_visible_roles performance enhancement
  fix order_by parameter not being applied to page 2+ of list results (#2786)
  Needed to bind removeSchedulesRefresh to the scope so that we could clear the listener properly
  ...
commit 7337f5227b
Matthew Jones, 2016-07-07 12:40:36 -04:00
438 changed files with 12497 additions and 34238 deletions

View File

@ -18,7 +18,6 @@ include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1
include tools/scripts/ansible-tower-service
include tools/scripts/tower-python
include tools/munin_monitors/*
include tools/sosreport/*
include COPYING
include Makefile

View File

@ -21,6 +21,10 @@ CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
CLIENT_TEST_DIR ?= build_test
# Python packages to install only from source (not from binary wheels)
# Comma separated list
SRC_ONLY_PKGS ?= cffi
# Determine appropriate shasum command
UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Linux)
@ -258,7 +262,9 @@ virtualenv_ansible:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/ansible" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/ansible; \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/ansible && \
$(VENV_BASE)/ansible/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/ansible/bin/pip install -I pip==8.1.1; \
fi; \
fi
@ -268,29 +274,27 @@ virtualenv_tower:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/tower" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/tower; \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/tower && \
$(VENV_BASE)/tower/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/tower/bin/pip install -I pip==8.1.1; \
fi; \
fi
requirements_ansible: virtualenv_ansible
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/ansible/bin/activate; \
$(VENV_BASE)/ansible/bin/pip install -U pip==8.1.1; \
$(VENV_BASE)/ansible/bin/pip install -r requirements/requirements_ansible.txt ;\
$(VENV_BASE)/ansible/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ;\
else \
pip install -U pip==8.1.1; \
pip install -r requirements/requirements_ansible.txt ; \
pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ; \
fi
# Install third-party requirements needed for Tower's environment.
requirements_tower: virtualenv_tower
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
$(VENV_BASE)/tower/bin/pip install -U pip==8.1.1; \
$(VENV_BASE)/tower/bin/pip install -r requirements/requirements.txt ;\
$(VENV_BASE)/tower/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ;\
else \
pip install -U pip==8.1.1; \
pip install -r requirements/requirements.txt ; \
pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ; \
fi
requirements_tower_dev:

View File

@ -58,7 +58,7 @@ class TypeFilterBackend(BaseFilterBackend):
else:
queryset = queryset.none()
return queryset
except FieldError, e:
except FieldError as e:
# Return a 400 for invalid field names.
raise ParseError(*e.args)
@ -139,7 +139,7 @@ class FieldLookupBackend(BaseFilterBackend):
elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
try:
re.compile(value)
except re.error, e:
except re.error as e:
raise ValueError(e.args[0])
else:
value = self.value_to_python_for_field(field, value)
@ -219,11 +219,11 @@ class FieldLookupBackend(BaseFilterBackend):
else:
q = Q(**{k:v})
queryset = queryset.filter(q)
queryset = queryset.filter(*args)
queryset = queryset.filter(*args).distinct()
return queryset
except (FieldError, FieldDoesNotExist, ValueError), e:
except (FieldError, FieldDoesNotExist, ValueError) as e:
raise ParseError(e.args[0])
except ValidationError, e:
except ValidationError as e:
raise ParseError(e.messages)
class OrderByBackend(BaseFilterBackend):
@ -261,6 +261,6 @@ class OrderByBackend(BaseFilterBackend):
new_order_by.append(field)
queryset = queryset.order_by(*new_order_by)
return queryset
except FieldError, e:
except FieldError as e:
# Return a 400 for invalid field names.
raise ParseError(*e.args)

View File

@ -9,6 +9,7 @@ import time
# Django
from django.conf import settings
from django.db import connection
from django.http import QueryDict
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.encoding import smart_text
@ -25,7 +26,6 @@ from rest_framework import views
# AWX
from awx.main.models import * # noqa
from awx.main.models import Label
from awx.main.utils import * # noqa
from awx.api.serializers import ResourceAccessListElementSerializer
@ -328,10 +328,11 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# Make a copy of the data provided (since it's readonly) in order to
# inject additional data.
if hasattr(request.data, 'dict'):
data = request.data.dict()
if hasattr(request.data, 'copy'):
data = request.data.copy()
else:
data = request.data
data = QueryDict('')
data.update(request.data)
# add the parent key to the post data using the pk from the URL
parent_key = getattr(self, 'parent_key', None)
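As background for the read-only request.data handling above: for form-encoded requests, Django hands the view an immutable QueryDict, which is why the code prefers .copy() and only falls back to building a fresh QueryDict. A standalone sketch, not part of the diff (settings.configure() is only there so it runs outside a project):

```python
from django.conf import settings

settings.configure()  # minimal settings so QueryDict can be used standalone
from django.http import QueryDict

qd = QueryDict('a=1')          # form-encoded request.data arrives immutable
try:
    qd['b'] = '2'
except AttributeError as e:    # "This QueryDict instance is immutable"
    print(e)

mutable = qd.copy()            # .copy() returns a mutable copy, safe to inject into
mutable['b'] = '2'
print(mutable)
```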
@ -360,6 +361,13 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
# Base class for a sublist view that allows for creating subobjects and
# attaching/detaching them from the parent.
def get_description_context(self):
d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
d.update({
"has_attach": True,
})
return d
def attach(self, request, *args, **kwargs):
created = False
parent = self.get_parent_object()
@ -416,7 +424,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
sub = get_object_or_400(self.model, pk=sub_id)
if not request.user.can_access(self.parent_model, 'unattach', parent,
sub, self.relationship):
sub, self.relationship, request.data):
raise PermissionDenied()
if parent_key:
@ -441,18 +449,23 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
else:
return self.attach(request, *args, **kwargs)
'''
Models whose last instance should be deleted from the database when the
final disassociate is called should inherit from this class. Additionally,
the model must implement is_detached().
'''
class DeleteLastUnattachLabelMixin(object):
def unattach(self, request, *args, **kwargs):
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request, *args, **kwargs)
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
if res:
return res
res = super(DeleteLastUnattachLabelMixin, self).unattach_by_id(request, sub_id)
label = Label.objects.get(id=sub_id)
obj = self.model.objects.get(id=sub_id)
if label.is_detached():
label.delete()
if obj.is_detached():
obj.delete()
return res
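To make the mixin's contract concrete, here is a minimal, framework-free sketch of "delete the instance once the last disassociate leaves it detached". The names are illustrative only; the real Label model uses Django ORM relations, not in-memory sets.

```python
class FakeLabel:
    """Stand-in for a model that implements is_detached()."""

    def __init__(self, name):
        self.name = name
        self.attached_to = set()   # objects this label is currently attached to
        self.deleted = False

    def is_detached(self):
        # Contract from the docstring above: True once nothing references it.
        return not self.attached_to

    def delete(self):
        self.deleted = True


def unattach(label, parent):
    # Mirrors the mixin's flow: remove the association, then delete if detached.
    label.attached_to.discard(parent)
    if label.is_detached():
        label.delete()


label = FakeLabel("env:prod")
label.attached_to.update({"jt-1", "jt-2"})
unattach(label, "jt-1")
assert not label.deleted    # still attached elsewhere
unattach(label, "jt-2")
assert label.deleted        # last unattach removed it
```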

View File

@ -127,7 +127,7 @@ class ModelAccessPermission(permissions.BasePermission):
view.__class__.__name__, obj)
try:
response = self.check_permissions(request, view, obj)
except Exception, e:
except Exception as e:
logger.debug('has_permission raised %r', e, exc_info=True)
raise
else:
@ -195,13 +195,10 @@ class ProjectUpdatePermission(ModelAccessPermission):
'''
Permission check used by ProjectUpdateView to determine who can update projects
'''
def has_permission(self, request, view, obj=None):
if request.user.is_superuser:
return True
def check_get_permissions(self, request, view, obj=None):
project = get_object_or_400(view.model, pk=view.kwargs['pk'])
if project and request.user in project.update_role:
return True
return check_user_access(request.user, view.model, 'read', project)
return False
def check_post_permissions(self, request, view, obj=None):
project = get_object_or_400(view.model, pk=view.kwargs['pk'])
return check_user_access(request.user, view.model, 'start', project)

View File

@ -39,7 +39,6 @@ from awx.main.constants import SCHEDULEABLE_PROVIDERS
from awx.main.models import * # noqa
from awx.main.fields import ImplicitRoleField
from awx.main.utils import get_type_for_model, get_model_for_type, build_url, timestamp_apiformat, camelcase_to_underscore, getattrd
from awx.main.redact import REPLACE_STR
from awx.main.conf import tower_settings
from awx.api.license import feature_enabled
@ -89,7 +88,8 @@ SUMMARIZABLE_FK_FIELDS = {
'current_job': DEFAULT_SUMMARY_FIELDS + ('status', 'failed', 'license_error'),
'inventory_source': ('source', 'last_updated', 'status'),
'source_script': ('name', 'description'),
'role': ('id', 'role_field')
'role': ('id', 'role_field'),
'notification_template': DEFAULT_SUMMARY_FIELDS,
}
@ -218,8 +218,8 @@ class BaseSerializer(serializers.ModelSerializer):
class Meta:
fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
'modified', 'name', 'description')
summary_fields = () # FIXME: List of field names from this serializer that should be used when included as part of another's summary_fields.
summarizable_fields = () # FIXME: List of field names on this serializer that should be included in summary_fields.
summary_fields = ()
summarizable_fields = ()
# add the URL and related resources
type = serializers.SerializerMethodField()
@ -317,9 +317,6 @@ class BaseSerializer(serializers.ModelSerializer):
summary_fields['modified_by'][field] = getattr(obj.modified_by, field)
# RBAC summary fields
request = self.context.get('request', None)
if request and isinstance(obj, ResourceMixin) and request.user.is_authenticated():
summary_fields['active_roles'] = obj.get_permissions(request.user)
roles = {}
for field in obj._meta.get_fields():
if type(field) is ImplicitRoleField:
@ -331,7 +328,7 @@ class BaseSerializer(serializers.ModelSerializer):
'description': role.description,
}
if len(roles) > 0:
summary_fields['roles'] = roles
summary_fields['object_roles'] = roles
return summary_fields
def get_created(self, obj):
@ -668,24 +665,20 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
else:
return super(UnifiedJobStdoutSerializer, self).get_types()
# TODO: Needed?
#def to_representation(self, obj):
# ret = super(UnifiedJobStdoutSerializer, self).to_representation(obj)
# return ret.get('result_stdout', '')
class UserSerializer(BaseSerializer):
password = serializers.CharField(required=False, default='', write_only=True,
help_text='Write-only field used to change the password.')
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text='Set if the account is managed by an external service')
is_system_auditor = serializers.BooleanField(default=False)
class Meta:
model = User
fields = ('*', '-name', '-description', '-modified',
'-summary_fields', 'username', 'first_name', 'last_name',
'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn')
'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn', 'external_account')
def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
@ -719,6 +712,8 @@ class UserSerializer(BaseSerializer):
getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and obj.social_auth.all():
new_password = None
if obj.pk and getattr(settings, 'RADIUS_SERVER', '') and not obj.has_usable_password():
new_password = None
if new_password:
obj.set_password(new_password)
obj.save(update_fields=['password'])
@ -726,6 +721,24 @@ class UserSerializer(BaseSerializer):
obj.set_unusable_password()
obj.save(update_fields=['password'])
def get_external_account(self, obj):
account_type = None
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
try:
if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
account_type = "ldap"
except AttributeError:
pass
if (getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and obj.social_auth.all():
account_type = "social"
if obj.pk and getattr(settings, 'RADIUS_SERVER', '') and not obj.has_usable_password():
account_type = "radius"
return account_type
def create(self, validated_data):
new_password = validated_data.pop('password', None)
obj = super(UserSerializer, self).create(validated_data)
@ -804,7 +817,7 @@ class OrganizationSerializer(BaseSerializer):
notification_templates_any = reverse('api:organization_notification_templates_any_list', args=(obj.pk,)),
notification_templates_success = reverse('api:organization_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:organization_notification_templates_error_list', args=(obj.pk,)),
roles = reverse('api:organization_roles_list', args=(obj.pk,)),
object_roles = reverse('api:organization_object_roles_list', args=(obj.pk,)),
access_list = reverse('api:organization_access_list', args=(obj.pk,)),
))
return res
@ -890,7 +903,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
notification_templates_success = reverse('api:project_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:project_notification_templates_error_list', args=(obj.pk,)),
access_list = reverse('api:project_access_list', args=(obj.pk,)),
roles = reverse('api:project_roles_list', args=(obj.pk,)),
object_roles = reverse('api:project_object_roles_list', args=(obj.pk,)),
))
if obj.organization:
res['organization'] = reverse('api:organization_detail',
@ -994,7 +1007,7 @@ class InventorySerializer(BaseSerializerWithVariables):
scan_job_templates = reverse('api:inventory_scan_job_template_list', args=(obj.pk,)),
ad_hoc_commands = reverse('api:inventory_ad_hoc_commands_list', args=(obj.pk,)),
access_list = reverse('api:inventory_access_list', args=(obj.pk,)),
roles = reverse('api:inventory_roles_list', args=(obj.pk,)),
object_roles = reverse('api:inventory_object_roles_list', args=(obj.pk,)),
#single_fact = reverse('api:inventory_single_fact_view', args=(obj.pk,)),
))
if obj.organization:
@ -1165,8 +1178,6 @@ class GroupSerializer(BaseSerializerWithVariables):
activity_stream = reverse('api:group_activity_stream_list', args=(obj.pk,)),
inventory_sources = reverse('api:group_inventory_sources_list', args=(obj.pk,)),
ad_hoc_commands = reverse('api:group_ad_hoc_commands_list', args=(obj.pk,)),
access_list = reverse('api:group_access_list', args=(obj.pk,)),
roles = reverse('api:group_roles_list', args=(obj.pk,)),
#single_fact = reverse('api:group_single_fact_view', args=(obj.pk,)),
))
if obj.inventory:
@ -1189,7 +1200,7 @@ class GroupSerializer(BaseSerializerWithVariables):
class GroupTreeSerializer(GroupSerializer):
children = serializers.SerializerMethodField('get_children')
children = serializers.SerializerMethodField()
class Meta:
model = Group
@ -1258,14 +1269,14 @@ class CustomInventoryScriptSerializer(BaseSerializer):
if obj is None:
return ret
request = self.context.get('request', None)
if request is not None and request.user is not None and not request.user.is_superuser:
if request.user not in obj.admin_role:
ret['script'] = None
return ret
def get_related(self, obj):
res = super(CustomInventoryScriptSerializer, self).get_related(obj)
res.update(dict(
roles = reverse('api:inventory_script_roles_list', args=(obj.pk,)),
object_roles = reverse('api:inventory_script_object_roles_list', args=(obj.pk,)),
))
if obj.organization:
@ -1290,7 +1301,6 @@ class InventorySourceOptionsSerializer(BaseSerializer):
def validate_source_vars(self, value):
# source_env must be blank, a valid JSON or YAML dict, or ...
# FIXME: support key=value pairs.
try:
json.loads((value or '').strip() or '{}')
return value
@ -1316,9 +1326,9 @@ class InventorySourceOptionsSerializer(BaseSerializer):
try:
if source_script.organization != self.instance.inventory.organization:
errors['source_script'] = "The 'source_script' does not belong to the same organization as the inventory."
except Exception:
# TODO: Log
except Exception as exc:
errors['source_script'] = "'source_script' doesn't exist."
logger.error(str(exc))
if errors:
raise serializers.ValidationError(errors)
@ -1434,6 +1444,7 @@ class TeamSerializer(BaseSerializer):
users = reverse('api:team_users_list', args=(obj.pk,)),
credentials = reverse('api:team_credentials_list', args=(obj.pk,)),
roles = reverse('api:team_roles_list', args=(obj.pk,)),
object_roles = reverse('api:team_object_roles_list', args=(obj.pk,)),
activity_stream = reverse('api:team_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:team_access_list', args=(obj.pk,)),
))
@ -1509,7 +1520,6 @@ class ResourceAccessListElementSerializer(UserSerializer):
if 'summary_fields' not in ret:
ret['summary_fields'] = {}
ret['summary_fields']['active_roles'] = get_roles_on_resource(obj, user)
def format_role_perm(role):
role_dict = { 'id': role.id, 'name': role.name, 'description': role.description}
@ -1588,17 +1598,13 @@ class ResourceAccessListElementSerializer(UserSerializer):
return ret
class CredentialSerializer(BaseSerializer):
# FIXME: may want to make some fields filtered based on user accessing
class Meta:
model = Credential
fields = ('*', 'kind', 'cloud', 'host', 'username',
'password', 'security_token', 'project', 'domain',
'ssh_key_data', 'ssh_key_unlock',
'ssh_key_data', 'ssh_key_unlock', 'organization',
'become_method', 'become_username', 'become_password',
'vault_password', 'subscription', 'tenant', 'secret', 'client',
'authorize', 'authorize_password')
@ -1613,17 +1619,23 @@ class CredentialSerializer(BaseSerializer):
def get_related(self, obj):
res = super(CredentialSerializer, self).get_related(obj)
if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
res.update(dict(
activity_stream = reverse('api:credential_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:credential_access_list', args=(obj.pk,)),
roles = reverse('api:credential_roles_list', args=(obj.pk,)),
object_roles = reverse('api:credential_object_roles_list', args=(obj.pk,)),
owner_users = reverse('api:credential_owner_users_list', args=(obj.pk,)),
owner_teams = reverse('api:credential_owner_teams_list', args=(obj.pk,)),
))
parents = obj.owner_role.parents.exclude(object_id__isnull=True)
parents = obj.admin_role.parents.exclude(object_id__isnull=True)
if parents.count() > 0:
res.update({parents[0].content_type.name:parents[0].content_object.get_absolute_url()})
elif obj.owner_role.members.count() > 0:
user = obj.owner_role.members.first()
elif obj.admin_role.members.count() > 0:
user = obj.admin_role.members.first()
res.update({'user': reverse('api:user_detail', args=(user.pk,))})
return res
@ -1632,17 +1644,19 @@ class CredentialSerializer(BaseSerializer):
summary_dict = super(CredentialSerializer, self).get_summary_fields(obj)
summary_dict['owners'] = []
for user in obj.owner_role.members.all():
for user in obj.admin_role.members.all():
summary_dict['owners'].append({
'id': user.pk,
'type': 'user',
'name': user.username,
'description': ' '.join([user.first_name, user.last_name]),
'url': reverse('api:user_detail', args=(user.pk,)),
})
for parent in obj.owner_role.parents.exclude(object_id__isnull=True).all():
for parent in obj.admin_role.parents.exclude(object_id__isnull=True).all():
summary_dict['owners'].append({
'id': parent.content_object.pk,
'type': camelcase_to_underscore(parent.content_object.__class__.__name__),
'name': parent.content_object.name,
'description': parent.content_object.description,
'url': parent.content_object.get_absolute_url(),
@ -1671,13 +1685,53 @@ class CredentialSerializerCreate(CredentialSerializer):
class Meta:
model = Credential
fields = ('*', 'user', 'team', 'organization')
fields = ('*', 'user', 'team')
def validate(self, attrs):
owner_fields = set()
for field in ('user', 'team', 'organization'):
if field in attrs:
if attrs[field]:
owner_fields.add(field)
else:
attrs.pop(field)
if not owner_fields:
raise serializers.ValidationError({"detail": "Missing 'user', 'team', or 'organization'."})
elif len(owner_fields) > 1:
raise serializers.ValidationError({"detail": "Expecting exactly one of 'user', 'team', or 'organization'."})
return super(CredentialSerializerCreate, self).validate(attrs)
def create(self, validated_data):
# Remove the user, team, and organization processed in view
for field in ['user', 'team', 'organization']:
validated_data.pop(field, None)
return super(CredentialSerializer, self).create(validated_data)
user = validated_data.pop('user', None)
team = validated_data.pop('team', None)
credential = super(CredentialSerializerCreate, self).create(validated_data)
if user:
credential.admin_role.members.add(user)
if team:
credential.admin_role.parents.add(team.member_role)
return credential
class UserCredentialSerializerCreate(CredentialSerializerCreate):
class Meta:
model = Credential
fields = ('*', '-team', '-organization')
class TeamCredentialSerializerCreate(CredentialSerializerCreate):
class Meta:
model = Credential
fields = ('*', '-user', '-organization')
class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
class Meta:
model = Credential
fields = ('*', '-user', '-team')
class JobOptionsSerializer(BaseSerializer):
@ -1754,7 +1808,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
model = JobTemplate
fields = ('*', 'host_config_key', 'ask_variables_on_launch', 'ask_limit_on_launch',
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled', 'become_enabled')
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'allow_simultaneous')
def get_related(self, obj):
res = super(JobTemplateSerializer, self).get_related(obj)
@ -1766,10 +1820,10 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
notification_templates_any = reverse('api:job_template_notification_templates_any_list', args=(obj.pk,)),
notification_templates_success = reverse('api:job_template_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:job_template_notification_templates_error_list', args=(obj.pk,)),
access_list = reverse('api:job_template_access_list', args=(obj.pk,)),
access_list = reverse('api:job_template_access_list', args=(obj.pk,)),
survey_spec = reverse('api:job_template_survey_spec', args=(obj.pk,)),
labels = reverse('api:job_template_label_list', args=(obj.pk,)),
roles = reverse('api:job_template_roles_list', args=(obj.pk,)),
object_roles = reverse('api:job_template_object_roles_list', args=(obj.pk,)),
))
if obj.host_config_key:
res['callback'] = reverse('api:job_template_callback', args=(obj.pk,))
@ -1783,25 +1837,35 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
if obj.survey_spec is not None and ('name' in obj.survey_spec and 'description' in obj.survey_spec):
d['survey'] = dict(title=obj.survey_spec['name'], description=obj.survey_spec['description'])
request = self.context.get('request', None)
if request is not None and request.user is not None and obj.inventory is not None and obj.project is not None:
d['can_copy'] = request.user.can_access(JobTemplate, 'add',
{'inventory': obj.inventory.pk,
'project': obj.project.pk})
d['can_edit'] = request.user.can_access(JobTemplate, 'change', obj,
{'inventory': obj.inventory.pk,
'project': obj.project.pk})
elif request is not None and request.user is not None and request.user.is_superuser:
d['can_copy'] = True
d['can_edit'] = True
else:
# Check for conditions that would create a validation error if copied
validation_errors, resources_needed_to_start = obj.resource_validation_data()
if request is None or request.user is None:
d['can_copy'] = False
d['can_edit'] = False
elif request.user.is_superuser:
d['can_copy'] = not validation_errors
d['can_edit'] = True
else:
d['can_copy'] = (not validation_errors) and request.user.can_access(JobTemplate, 'add', {"reference_obj": obj})
d['can_edit'] = request.user.can_access(JobTemplate, 'change', obj, {})
d['recent_jobs'] = self._recent_jobs(obj)
return d
def validate(self, attrs):
survey_enabled = attrs.get('survey_enabled', self.instance and self.instance.survey_enabled or False)
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
inventory = attrs.get('inventory', self.instance and self.instance.inventory or None)
project = attrs.get('project', self.instance and self.instance.project or None)
if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
raise serializers.ValidationError({'inventory': 'Scan jobs must be assigned a fixed inventory.'})
elif project is None:
raise serializers.ValidationError({'project': "Job types 'run' and 'check' must have a project assigned."})
if survey_enabled and job_type == PERM_INVENTORY_SCAN:
raise serializers.ValidationError({'survey_enabled': 'Survey Enabled can not be used with scan jobs.'})
@ -1809,7 +1873,6 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
def validate_extra_vars(self, value):
# extra_vars must be blank, a valid JSON or YAML dict, or ...
# FIXME: support key=value pairs.
try:
json.loads((value or '').strip() or '{}')
return value
@ -1899,17 +1962,8 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
return ret
if 'job_template' in ret and not obj.job_template:
ret['job_template'] = None
if obj.job_template and obj.job_template.survey_enabled:
if 'extra_vars' in ret:
try:
extra_vars = json.loads(ret['extra_vars'])
for key in obj.job_template.survey_password_variables():
if key in extra_vars:
extra_vars[key] = REPLACE_STR
ret['extra_vars'] = json.dumps(extra_vars)
except ValueError:
pass
if obj.job_template and obj.job_template.survey_enabled and 'extra_vars' in ret:
ret['extra_vars'] = obj.display_extra_vars()
return ret
@ -2259,12 +2313,14 @@ class JobLaunchSerializer(BaseSerializer):
obj = self.context.get('obj')
data = self.context.get('data')
for field in obj.resources_needed_to_start:
if not (attrs.get(field, False) and obj._ask_for_vars_dict().get(field, False)):
errors[field] = "Job Template '%s' is missing or undefined." % field
if (not obj.ask_credential_on_launch) or (not attrs.get('credential', None)):
credential = obj.credential
else:
credential = attrs.get('credential', None)
if not credential:
errors['credential'] = 'Credential not provided'
# fill passwords dict with request data passwords
if credential and credential.passwords_needed:
@ -2295,11 +2351,6 @@ class JobLaunchSerializer(BaseSerializer):
if validation_errors:
errors['variables_needed_to_start'] = validation_errors
if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None):
errors['project'] = 'Job Template Project is missing or undefined.'
if (obj.inventory is None) and not attrs.get('inventory', None):
errors['inventory'] = 'Job Template Inventory is missing or undefined.'
# Special prohibited cases for scan jobs
if 'job_type' in data and obj.ask_job_type_on_launch:
if ((obj.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or
@ -2369,12 +2420,29 @@ class NotificationTemplateSerializer(BaseSerializer):
return d
def validate(self, attrs):
notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']]
from awx.api.views import NotificationTemplateDetail
notification_type = None
if 'notification_type' in attrs:
notification_type = attrs['notification_type']
elif self.instance:
notification_type = self.instance.notification_type
if 'organization' in attrs:
organization = attrs['organization']
elif self.instance:
organization = self.instance.organization
if not notification_type:
raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type')
if not organization:
raise serializers.ValidationError("Missing 'organization' from required fields")
notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type]
missing_fields = []
incorrect_type_fields = []
error_list = []
if 'notification_configuration' not in attrs:
return attrs
if self.context['view'].kwargs:
if self.context['view'].kwargs and isinstance(self.context['view'], NotificationTemplateDetail):
object_actual = self.context['view'].get_object()
else:
object_actual = None
@ -2388,9 +2456,11 @@ class NotificationTemplateSerializer(BaseSerializer):
if not type(field_val) in expected_types:
incorrect_type_fields.append((field, field_type))
continue
if field_type == "list" and len(field_val) < 1:
error_list.append("No values specified for field '{}'".format(field))
continue
if field_type == "password" and field_val == "$encrypted$" and object_actual is not None:
attrs['notification_configuration'][field] = object_actual.notification_configuration[field]
error_list = []
if missing_fields:
error_list.append("Missing required fields for Notification Configuration: {}.".format(missing_fields))
if incorrect_type_fields:
@ -2496,7 +2566,6 @@ class ScheduleSerializer(BaseSerializer):
try:
rrule.rrulestr(rrule_value)
except Exception:
# TODO: Log
raise serializers.ValidationError("rrule parsing failed validation.")
return value
@ -2531,7 +2600,6 @@ class ActivityStreamSerializer(BaseSerializer):
try:
return json.loads(obj.changes)
except Exception:
# TODO: Log
logger.warn("Error deserializing activity stream json changes")
return {}
@ -2549,8 +2617,8 @@ class ActivityStreamSerializer(BaseSerializer):
for fk, _ in SUMMARIZABLE_FK_FIELDS.items():
if not hasattr(obj, fk):
continue
allm2m = getattr(obj, fk).all()
if allm2m.count() > 0:
allm2m = getattr(obj, fk).distinct()
if getattr(obj, fk).exists():
rel[fk] = []
for thisItem in allm2m:
rel[fk].append(reverse('api:' + fk + '_detail', args=(thisItem.id,)))
@ -2564,8 +2632,8 @@ class ActivityStreamSerializer(BaseSerializer):
try:
if not hasattr(obj, fk):
continue
allm2m = getattr(obj, fk).all()
if allm2m.count() > 0:
allm2m = getattr(obj, fk).distinct()
if getattr(obj, fk).exists():
summary_fields[fk] = []
for thisItem in allm2m:
if fk == 'job':
@ -2703,4 +2771,3 @@ class FactSerializer(BaseFactSerializer):
res = super(FactSerializer, self).get_related(obj)
res['host'] = obj.host.get_absolute_url()
return res

View File

@ -1,5 +1,6 @@
{% for fn, fm in serializer_fields.items %}{% spaceless %}
{% if not write_only or not fm.read_only %}
{% if write_only and fm.read_only or not write_only and fm.write_only or write_only and fn == parent_key %}
{% else %}
* `{{ fn }}`: {{ fm.help_text|capfirst }} ({{ fm.type }}{% if write_only and fm.required %}, required{% endif %}{% if write_only and fm.read_only %}, read-only{% endif %}{% if write_only and not fm.choices and not fm.required %}, default=`{% if fm.type == "string" or fm.type == "email" %}"{% firstof fm.default "" %}"{% else %}{% if fm.type == "field" and not fm.default %}None{% else %}{{ fm.default }}{% endif %}{% endif %}`{% endif %}){% if fm.choices %}{% for c in fm.choices %}
- `{% if c.0 == "" %}""{% else %}{{ c.0 }}{% endif %}`{% if c.1 != c.0 %}: {{ c.1 }}{% endif %}{% if write_only and c.0 == fm.default %} (default){% endif %}{% endfor %}{% endif %}{% endif %}
{% endspaceless %}

View File

@ -3,7 +3,7 @@ POST requests to this resource should include the full specification for a Job T
Here is an example survey specification:
{
"name": "Simple Surveny",
"name": "Simple Survey",
"description": "Description of the simple survey",
"spec": [
{
@ -23,6 +23,7 @@ list of survey items.
Within each survey item `type` must be one of:
* text: For survey questions expecting a textual answer
* password: For survey questions expecting a password or other sensitive information
* integer: For survey questions expecting a whole number answer
* float: For survey questions expecting a decimal number
* multiplechoice: For survey questions where one option from a list is required
@ -116,4 +117,4 @@ Here is a more comprehensive example showing the various question types and thei
"default": ""
}
]
}
}
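For orientation, a small sketch that assembles a survey specification using the item types listed above and POSTs it to a job template's survey_spec endpoint. The host, template id, credentials, and the per-item keys beyond "type" and "default" are illustrative assumptions, not the definitive schema; consult the full example referenced above.

```python
import json

import requests  # any HTTP client works; requests is assumed for brevity

survey_spec = {
    "name": "Simple Survey",
    "description": "Description of the simple survey",
    "spec": [
        {"type": "text", "question_name": "Greeting",         # keys besides "type"/"default"
         "variable": "greeting", "default": "hello"},         # are illustrative assumptions
        {"type": "integer", "question_name": "Replica count",
         "variable": "replicas", "default": 1},
        {"type": "multiplechoice", "question_name": "Region",
         "variable": "region", "choices": ["us-east", "us-west"], "default": "us-east"},
    ],
}

resp = requests.post(
    "https://tower.example.com/api/v1/job_templates/42/survey_spec/",  # hypothetical host/id
    auth=("admin", "password"),                                        # hypothetical credentials
    headers={"Content-Type": "application/json"},
    data=json.dumps(survey_spec),
)
resp.raise_for_status()
```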

View File

@ -12,7 +12,7 @@ fields to create a new {{ model_verbose_name }} associated with this
{% block post_create %}{% endblock %}
{% if view.attach %}
{% if has_attach|default:False %}
{% if parent_key %}
# Remove {{ parent_model_verbose_name|title }} {{ model_verbose_name_plural|title }}:

View File

@ -25,7 +25,7 @@ organization_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'organization_notification_templates_any_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'organization_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'organization_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'organization_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'organization_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'organization_access_list'),
)
@ -55,7 +55,7 @@ project_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'project_notification_templates_any_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'project_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'project_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'project_access_list'),
)
@ -73,6 +73,7 @@ team_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/users/$', 'team_users_list'),
url(r'^(?P<pk>[0-9]+)/credentials/$', 'team_credentials_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'team_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'team_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'team_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'team_access_list'),
)
@ -92,7 +93,7 @@ inventory_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/scan_job_templates/$', 'inventory_scan_job_template_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'inventory_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'inventory_single_fact_view'),
)
@ -126,8 +127,6 @@ group_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'group_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'group_inventory_sources_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'group_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'group_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'group_roles_list'),
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'group_single_fact_view'),
)
@ -155,7 +154,7 @@ inventory_update_urls = patterns('awx.api.views',
inventory_script_urls = patterns('awx.api.views',
url(r'^$', 'inventory_script_list'),
url(r'^(?P<pk>[0-9]+)/$', 'inventory_script_detail'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'inventory_script_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_script_object_roles_list'),
)
credential_urls = patterns('awx.api.views',
@ -163,7 +162,9 @@ credential_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/$', 'credential_detail'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'credential_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'credential_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'credential_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/owner_users/$', 'credential_owner_users_list'),
url(r'^(?P<pk>[0-9]+)/owner_teams/$', 'credential_owner_teams_list'),
# See also credentials resources on users/teams.
)
@ -189,7 +190,7 @@ job_template_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'job_template_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'job_template_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'job_template_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', 'job_template_label_list'),
)

View File

@ -277,7 +277,7 @@ class ApiV1ConfigView(APIView):
for fname in (TEMPORARY_TASK_FILE, TASK_FILE):
try:
os.remove(fname)
except OSError, e:
except OSError as e:
if e.errno != errno.ENOENT:
has_error = e.errno
break
@ -820,7 +820,7 @@ class OrganizationAccessList(ResourceAccessList):
resource_model = Organization
new_in_300 = True
class OrganizationRolesList(SubListAPIView):
class OrganizationObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -867,7 +867,7 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
team = get_object_or_404(Team, pk=self.kwargs['pk'])
if not self.request.user.can_access(Team, 'read', team):
raise PermissionDenied()
return Role.filter_visible_roles(self.request.user, team.member_role.children.all())
return Role.filter_visible_roles(self.request.user, team.member_role.children.all().exclude(pk=team.read_role.pk))
def post(self, request, *args, **kwargs):
# Forbid implicit role creation here
@ -875,8 +875,27 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
if not sub_id:
data = dict(msg="Role 'id' field is missing.")
return Response(data, status=status.HTTP_400_BAD_REQUEST)
role = Role.objects.get(pk=sub_id)
content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == content_type:
data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
return Response(data, status=status.HTTP_400_BAD_REQUEST)
return super(TeamRolesList, self).post(request, *args, **kwargs)
class TeamObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Team
new_in_300 = True
def get_queryset(self):
po = self.get_parent_object()
content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk)
class TeamProjectsList(SubListAPIView):
model = Project
@ -886,10 +905,13 @@ class TeamProjectsList(SubListAPIView):
def get_queryset(self):
team = self.get_parent_object()
self.check_parent_access(team)
team_qs = Project.objects.filter(Q(use_role__parents=team.member_role) | Q(admin_role__parents=team.member_role)).distinct()
user_qs = Project.accessible_objects(self.request.user, 'read_role').distinct()
return team_qs & user_qs
model_ct = ContentType.objects.get_for_model(self.model)
parent_ct = ContentType.objects.get_for_model(self.parent_model)
proj_roles = Role.objects.filter(
Q(ancestors__content_type=parent_ct) & Q(ancestors__object_id=team.pk),
content_type=model_ct
)
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in proj_roles])
class TeamActivityStreamList(SubListAPIView):
@ -967,12 +989,19 @@ class ProjectPlaybooks(RetrieveAPIView):
model = Project
serializer_class = ProjectPlaybooksSerializer
class ProjectTeamsList(SubListCreateAttachDetachAPIView):
class ProjectTeamsList(ListAPIView):
model = Team
serializer_class = TeamSerializer
parent_model = Project
relationship = 'teams'
def get_queryset(self):
p = get_object_or_404(Project, pk=self.kwargs['pk'])
if not self.request.user.can_access(Project, 'read', p):
raise PermissionDenied()
project_ct = ContentType.objects.get_for_model(Project)
team_ct = ContentType.objects.get_for_model(self.model)
all_roles = Role.objects.filter(Q(descendents__content_type=project_ct) & Q(descendents__object_id=p.pk), content_type=team_ct)
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])
class ProjectSchedulesList(SubListCreateAttachDetachAPIView):
@ -1011,7 +1040,7 @@ class ProjectActivityStreamList(SubListAPIView):
return qs
elif parent.credential is None:
return qs.filter(project=parent)
return qs.filter(Q(project=parent) | Q(credential__in=parent.credential))
return qs.filter(Q(project=parent) | Q(credential=parent.credential))
class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
@ -1097,7 +1126,7 @@ class ProjectAccessList(ResourceAccessList):
resource_model = Project
new_in_300 = True
class ProjectRolesList(SubListAPIView):
class ProjectObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -1163,6 +1192,7 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
if not self.request.user.can_access(User, 'read', u):
raise PermissionDenied()
content_type = ContentType.objects.get_for_model(User)
return Role.filter_visible_roles(self.request.user, u.roles.all()) \
.exclude(content_type=content_type, object_id=u.id)
@ -1182,8 +1212,6 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
# We hide roles that shouldn't be seen in our queryset
return True
class UserProjectsList(SubListAPIView):
model = Project
@ -1293,99 +1321,82 @@ class UserAccessList(ResourceAccessList):
resource_model = User
new_in_300 = True
class CredentialList(ListCreateAPIView):
model = Credential
serializer_class = CredentialSerializerCreate
def post(self, request, *args, **kwargs):
# Check the validity of POST data, including special fields
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
class CredentialOwnerUsersList(SubListAPIView):
for field in [x for x in ['user', 'team', 'organization'] if x in request.data and request.data[x] in ('', None)]:
request.data.pop(field)
kwargs.pop(field, None)
model = User
serializer_class = UserSerializer
parent_model = Credential
relationship = 'admin_role.members'
new_in_300 = True
if not any([x in request.data for x in ['user', 'team', 'organization']]):
return Response({"detail": "Missing 'user', 'team', or 'organization'."}, status=status.HTTP_400_BAD_REQUEST)
if sum([1 if x in request.data else 0 for x in ['user', 'team', 'organization']]) != 1:
return Response({"detail": "Expecting exactly one of 'user', 'team', or 'organization'."}, status=status.HTTP_400_BAD_REQUEST)
class CredentialOwnerTeamsList(SubListAPIView):
if 'user' in request.data:
user = User.objects.get(pk=request.data['user'])
can_add_params = {'user': user.id}
if 'team' in request.data:
team = Team.objects.get(pk=request.data['team'])
can_add_params = {'team': team.id}
if 'organization' in request.data:
organization = Organization.objects.get(pk=request.data['organization'])
can_add_params = {'organization': organization.id}
if not self.request.user.can_access(Credential, 'add', can_add_params):
raise PermissionDenied()
ret = super(CredentialList, self).post(request, *args, **kwargs)
credential = Credential.objects.get(id=ret.data['id'])
if 'user' in request.data:
credential.owner_role.members.add(user)
if 'team' in request.data:
credential.owner_role.parents.add(team.member_role)
if 'organization' in request.data:
credential.owner_role.parents.add(organization.admin_role)
return ret
class UserCredentialsList(CredentialList):
model = Credential
serializer_class = CredentialSerializer
model = Team
serializer_class = TeamSerializer
parent_model = Credential
new_in_300 = True
def get_queryset(self):
user = get_object_or_404(User,pk=self.kwargs['pk'])
if not self.request.user.can_access(User, 'read', user):
credential = get_object_or_404(self.parent_model, pk=self.kwargs['pk'])
if not self.request.user.can_access(Credential, 'read', credential):
raise PermissionDenied()
content_type = ContentType.objects.get_for_model(self.model)
teams = [c.content_object.pk for c in credential.admin_role.parents.filter(content_type=content_type)]
return self.model.objects.filter(pk__in=teams)
class UserCredentialsList(SubListCreateAPIView):
model = Credential
serializer_class = UserCredentialSerializerCreate
parent_model = User
parent_key = 'user'
def get_queryset(self):
user = self.get_parent_object()
self.check_parent_access(user)
visible_creds = Credential.accessible_objects(self.request.user, 'read_role')
user_creds = Credential.accessible_objects(user, 'read_role')
return user_creds & visible_creds
def post(self, request, *args, **kwargs):
request.data['user'] = self.kwargs['pk']
# The following post takes care of ensuring the current user can add a cred to this user
return super(UserCredentialsList, self).post(request, args, kwargs)
class TeamCredentialsList(CredentialList):
class TeamCredentialsList(SubListCreateAPIView):
model = Credential
serializer_class = CredentialSerializer
serializer_class = TeamCredentialSerializerCreate
parent_model = Team
parent_key = 'team'
def get_queryset(self):
team = get_object_or_404(Team, pk=self.kwargs['pk'])
if not self.request.user.can_access(Team, 'read', team):
raise PermissionDenied()
team = self.get_parent_object()
self.check_parent_access(team)
visible_creds = Credential.accessible_objects(self.request.user, 'read_role')
team_creds = Credential.objects.filter(owner_role__parents=team.member_role)
team_creds = Credential.objects.filter(admin_role__parents=team.member_role)
return team_creds & visible_creds
def post(self, request, *args, **kwargs):
request.data['team'] = self.kwargs['pk']
# The following post takes care of ensuring the current user can add a cred to this user
return super(TeamCredentialsList, self).post(request, args, kwargs)
class OrganizationCredentialList(CredentialList):
class OrganizationCredentialList(SubListCreateAPIView):
model = Credential
serializer_class = CredentialSerializer
serializer_class = OrganizationCredentialSerializerCreate
parent_model = Organization
parent_key = 'organization'
def get_queryset(self):
organization = Organization.objects.get(pk=self.kwargs['pk'])
if not self.request.user.can_access(Organization, 'read', organization):
raise PermissionDenied()
organization = self.get_parent_object()
self.check_parent_access(organization)
user_visible = Credential.accessible_objects(self.request.user, 'read_role').all()
org_set = Credential.accessible_objects(organization.admin_role, 'read_role').all()
@ -1395,13 +1406,6 @@ class OrganizationCredentialList(CredentialList):
return org_set & user_visible
def post(self, request, *args, **kwargs):
organization = Organization.objects.get(pk=self.kwargs['pk'])
request.data['organization'] = organization.id
# The following post takes care of ensuring the current user can add a cred to this user
return super(OrganizationCredentialList, self).post(request, args, kwargs)
class CredentialDetail(RetrieveUpdateDestroyAPIView):
@ -1432,7 +1436,7 @@ class CredentialAccessList(ResourceAccessList):
resource_model = Credential
new_in_300 = True
class CredentialRolesList(SubListAPIView):
class CredentialObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -1464,7 +1468,7 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
inv_src.save()
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)
class InventoryScriptRolesList(SubListAPIView):
class InventoryScriptObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -1526,7 +1530,7 @@ class InventoryAccessList(ResourceAccessList):
resource_model = Inventory
new_in_300 = True
class InventoryRolesList(SubListAPIView):
class InventoryObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -1739,33 +1743,6 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
parent.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
def _unattach(self, request, *args, **kwargs): # FIXME: Disabled for now for UI support.
'''
Special case for disassociating a child group from the parent. If the
child group has no more parents, then automatically mark it inactive.
'''
sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg="'id' is required to disassociate.")
return Response(data, status=status.HTTP_400_BAD_REQUEST)
parent = self.get_parent_object()
# TODO: flake8 warns, pending removal if unneeded
# parent_key = getattr(self, 'parent_key', None)
relationship = getattr(parent, self.relationship)
sub = get_object_or_400(self.model, pk=sub_id)
if not request.user.can_access(self.parent_model, 'unattach', parent,
sub, self.relationship):
raise PermissionDenied()
if sub.parents.exclude(pk=parent.pk).count() == 0:
sub.delete()
else:
relationship.remove(sub)
return Response(status=status.HTTP_204_NO_CONTENT)
class GroupPotentialChildrenList(SubListAPIView):
model = Group
@ -1867,25 +1844,6 @@ class GroupDetail(RetrieveUpdateDestroyAPIView):
obj.delete_recursive()
return Response(status=status.HTTP_204_NO_CONTENT)
class GroupAccessList(ResourceAccessList):
model = User # needs to be User for AccessLists's
resource_model = Group
new_in_300 = True
class GroupRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Group
new_in_300 = True
def get_queryset(self):
po = self.get_parent_object()
content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk)
class InventoryGroupsList(SubListCreateAttachDetachAPIView):
model = Group
@ -2212,6 +2170,13 @@ class JobTemplateList(ListCreateAPIView):
serializer_class = JobTemplateSerializer
always_allow_superuser = False
def post(self, request, *args, **kwargs):
ret = super(JobTemplateList, self).post(request, *args, **kwargs)
if ret.status_code == 201:
job_template = JobTemplate.objects.get(id=ret.data['id'])
job_template.admin_role.members.add(request.user)
return ret
class JobTemplateDetail(RetrieveUpdateDestroyAPIView):
model = JobTemplate
@ -2223,8 +2188,9 @@ class JobTemplateDetail(RetrieveUpdateDestroyAPIView):
can_delete = request.user.can_access(JobTemplate, 'delete', obj)
if not can_delete:
raise PermissionDenied("Cannot delete job template.")
for pu in obj.jobs.filter(status__in=['new', 'pending', 'waiting', 'running']):
pu.cancel()
if obj.jobs.filter(status__in=['new', 'pending', 'waiting', 'running']).exists():
return Response({"error": "Delete not allowed while there are jobs running"},
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(JobTemplateDetail, self).destroy(request, *args, **kwargs)
@ -2274,12 +2240,12 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
prompted_fields, ignored_fields = obj._accept_or_ignore_job_kwargs(**request.data)
if 'credential' in prompted_fields and prompted_fields['credential'] != getattrd(obj, 'credential.pk', None):
new_credential = Credential.objects.get(pk=prompted_fields['credential'])
new_credential = get_object_or_400(Credential, pk=get_pk_from_dict(prompted_fields, 'credential'))
if request.user not in new_credential.use_role:
raise PermissionDenied()
if 'inventory' in prompted_fields and prompted_fields['inventory'] != getattrd(obj, 'inventory.pk', None):
new_inventory = Inventory.objects.get(pk=prompted_fields['inventory'])
new_inventory = get_object_or_400(Inventory, pk=get_pk_from_dict(prompted_fields, 'inventory'))
if request.user not in new_inventory.use_role:
raise PermissionDenied()
@ -2318,8 +2284,6 @@ class JobTemplateSurveySpec(GenericAPIView):
def get(self, request, *args, **kwargs):
obj = self.get_object()
# Sanity check: Are surveys available on this license?
# If not, do not allow them to be used.
if not feature_enabled('surveys'):
raise LicenseForbids('Your license does not allow '
'adding surveys.')
@ -2339,7 +2303,6 @@ class JobTemplateSurveySpec(GenericAPIView):
try:
obj.survey_spec = json.dumps(request.data)
except ValueError:
# TODO: Log
return Response(dict(error="Invalid JSON when parsing survey spec."), status=status.HTTP_400_BAD_REQUEST)
if "name" not in obj.survey_spec:
return Response(dict(error="'name' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
@ -2419,13 +2382,24 @@ class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIVi
parent_model = JobTemplate
relationship = 'notification_templates_success'
class JobTemplateLabelList(SubListCreateAttachDetachAPIView, DeleteLastUnattachLabelMixin):
class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
model = Label
serializer_class = LabelSerializer
parent_model = JobTemplate
relationship = 'labels'
parent_key = 'job_template'
def post(self, request, *args, **kwargs):
# If a label already exists in the database, attach it instead of returning an
# error that it already exists
if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
if existing.exists():
existing = existing[0]
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
return super(JobTemplateLabelList, self).post(request, *args, **kwargs)
class JobTemplateCallback(GenericAPIView):
@ -2481,7 +2455,6 @@ class JobTemplateCallback(GenericAPIView):
ansible_ssh_host = host.variables_dict.get('ansible_ssh_host', '')
if ansible_ssh_host in remote_hosts:
matches.add(host)
# FIXME: Not entirely sure if this statement will ever be needed?
if host.name != ansible_ssh_host and host.name in remote_hosts:
matches.add(host)
if len(matches) == 1:
@ -2551,17 +2524,14 @@ class JobTemplateCallback(GenericAPIView):
# Check matching hosts.
if not matching_hosts:
data = dict(msg='No matching host could be found!')
# FIXME: Log!
return Response(data, status=status.HTTP_400_BAD_REQUEST)
elif len(matching_hosts) > 1:
data = dict(msg='Multiple hosts matched the request!')
# FIXME: Log!
return Response(data, status=status.HTTP_400_BAD_REQUEST)
else:
host = list(matching_hosts)[0]
if not job_template.can_start_without_user_input():
data = dict(msg='Cannot start automatically, user input required!')
# FIXME: Log!
return Response(data, status=status.HTTP_400_BAD_REQUEST)
limit = host.name
@ -2603,7 +2573,7 @@ class JobTemplateAccessList(ResourceAccessList):
resource_model = JobTemplate
new_in_300 = True
class JobTemplateRolesList(SubListAPIView):
class JobTemplateObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
@ -3451,7 +3421,7 @@ class UnifiedJobStdout(RetrieveAPIView):
response = HttpResponse(FileWrapper(content_fd), content_type='text/plain')
response["Content-Disposition"] = 'attachment; filename="job_%s.txt"' % str(unified_job.id)
return response
except Exception, e:
except Exception as e:
return Response({"error": "Error generating stdout download file: %s" % str(e)}, status=status.HTTP_400_BAD_REQUEST)
elif request.accepted_renderer.format == 'txt':
return Response(unified_job.result_stdout)
@ -3487,6 +3457,15 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
serializer_class = NotificationTemplateSerializer
new_in_300 = True
def delete(self, request, *args, **kwargs):
obj = self.get_object()
if not request.user.can_access(self.model, 'delete', obj):
return Response(status=status.HTTP_404_NOT_FOUND)
if obj.notifications.filter(status='pending').exists():
return Response({"error": "Delete not allowed while there are pending notifications"},
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)
class NotificationTemplateTest(GenericAPIView):
view_name = 'NotificationTemplate Test'
@ -3681,7 +3660,7 @@ class RoleUsersList(SubListCreateAttachDetachAPIView):
return super(RoleUsersList, self).post(request, *args, **kwargs)
class RoleTeamsList(ListAPIView):
class RoleTeamsList(SubListAPIView):
model = Team
serializer_class = TeamSerializer
@ -3691,27 +3670,37 @@ class RoleTeamsList(ListAPIView):
new_in_300 = True
def get_queryset(self):
# TODO: Check
role = get_object_or_404(Role, pk=self.kwargs['pk'])
role = self.get_parent_object()
self.check_parent_access(role)
return Team.objects.filter(member_role__children=role)
def post(self, request, pk, *args, **kwargs):
# Forbid implicit role creation here
# Forbid implicit team creation here
sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg="Role 'id' field is missing.")
data = dict(msg="Team 'id' field is missing.")
return Response(data, status=status.HTTP_400_BAD_REQUEST)
# XXX: Need to pull in can_attach and can_unattach kinda code from SubListCreateAttachDetachAPIView
role = Role.objects.get(pk=self.kwargs['pk'])
content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == content_type:
data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
return Response(data, status=status.HTTP_400_BAD_REQUEST)
team = Team.objects.get(pk=sub_id)
action = 'attach'
if request.data.get('disassociate', None):
action = 'unattach'
if not request.user.can_access(self.parent_model, action, role, team,
self.relationship, request.data,
skip_sub_obj_read_check=False):
raise PermissionDenied()
if request.data.get('disassociate', None):
team.member_role.children.remove(role)
else:
team.member_role.children.add(role)
return Response(status=status.HTTP_204_NO_CONTENT)
# XXX attach/detach needs to ensure we have the appropriate perms
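The rewritten RoleTeamsList.post() above grants a team a role by adding that role to the team's member_role children, refuses organization roles outright, and honors a 'disassociate' flag for removal. A sketch of how a client would exercise it (sub-list URL assumed from the role routing; ids and credentials are hypothetical):

import requests

base = 'https://tower.example.com/api/v1'
auth = ('admin', 'password')

# Grant team 5 the permissions carried by role 12.
requests.post('%s/roles/12/teams/' % base, json={'id': 5}, auth=auth)

# Revoke it again; the view reads the 'disassociate' flag and removes the child role.
requests.post('%s/roles/12/teams/' % base, json={'id': 5, 'disassociate': True}, auth=auth)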
class RoleParentsList(SubListAPIView):
@ -3723,10 +3712,9 @@ class RoleParentsList(SubListAPIView):
new_in_300 = True
def get_queryset(self):
# XXX: This should be the intersection between the roles of the user
# and the roles that the requesting user has access to see
role = Role.objects.get(pk=self.kwargs['pk'])
return role.parents.all()
return Role.filter_visible_roles(self.request.user, role.parents.all())
class RoleChildrenList(SubListAPIView):
@ -3738,11 +3726,8 @@ class RoleChildrenList(SubListAPIView):
new_in_300 = True
def get_queryset(self):
# XXX: This should be the intersection between the roles of the user
# and the roles that the requesting user has access to see
role = Role.objects.get(pk=self.kwargs['pk'])
return role.children.all()
return Role.filter_visible_roles(self.request.user, role.children.all())

View File

@ -63,11 +63,14 @@ def register_access(model_class, access_class):
@property
def user_admin_role(self):
return Role.objects.get(
role = Role.objects.get(
content_type=ContentType.objects.get_for_model(User),
object_id=self.id,
role_field='admin_role'
)
# Trick the user.admin_role so that the signal filtering for RBAC activity stream works as intended.
role.parents = [org.admin_role.pk for org in self.organizations]
return role
def user_accessible_objects(user, role_name):
return ResourceMixin._accessible_objects(User, user, role_name)
@ -170,8 +173,8 @@ class BaseAccess(object):
return bool(self.can_change(obj, None) and
self.user.can_access(type(sub_obj), 'read', sub_obj))
def can_unattach(self, obj, sub_obj, relationship):
return self.can_change(obj, None)
def can_unattach(self, obj, sub_obj, relationship, data=None):
return self.can_change(obj, data)
def check_license(self, add_host=False, feature=None, check_expiration=True):
reader = TaskSerializer()
@ -221,7 +224,8 @@ class UserAccess(BaseAccess):
if self.user.is_superuser:
return User.objects.all()
if tower_settings.ORG_ADMINS_CAN_SEE_ALL_USERS and self.user.admin_of_organizations.exists():
if tower_settings.ORG_ADMINS_CAN_SEE_ALL_USERS and \
(self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
return User.objects.all()
return (
@ -270,6 +274,19 @@ class UserAccess(BaseAccess):
return True
return False
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
"Reverse obj and sub_obj, defer to RoleAccess if this is a role assignment."
if relationship == 'roles':
role_access = RoleAccess(self.user)
return role_access.can_attach(sub_obj, obj, 'members', *args, **kwargs)
return super(UserAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
if relationship == 'roles':
role_access = RoleAccess(self.user)
return role_access.can_unattach(sub_obj, obj, 'members', *args, **kwargs)
return super(UserAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs)
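These overrides mean a role grant initiated from the user side is judged by the same RoleAccess rules as one initiated from the role side. A rough API-level illustration of that symmetry (sub-list URLs assumed; ids and credentials are hypothetical) — both requests end up in the same permission check:

import requests

base = 'https://tower.example.com/api/v1'
auth = ('admin', 'password')

# Attach role 12 to user 3 via the user's roles sub-list...
requests.post('%s/users/3/roles/' % base, json={'id': 12}, auth=auth)

# ...or attach user 3 to role 12 via the role's users sub-list; per the code above
# both paths defer to RoleAccess.can_attach(role, user, 'members', ...).
requests.post('%s/roles/12/users/' % base, json={'id': 3}, auth=auth)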
class OrganizationAccess(BaseAccess):
'''
@ -341,15 +358,7 @@ class InventoryAccess(BaseAccess):
@check_superuser
def can_change(self, obj, data):
# Verify that the user has access to the new organization if moving an
# inventory to a new organization.
org_pk = get_pk_from_dict(data, 'organization')
if obj and org_pk and obj.organization.pk != org_pk:
org = get_object_or_400(Organization, pk=org_pk)
if self.user not in org.admin_role:
return False
# Otherwise, just check for write permission.
return self.user in obj.update_role
return self.can_admin(obj, data)
@check_superuser
def can_admin(self, obj, data):
@ -379,12 +388,7 @@ class HostAccess(BaseAccess):
def get_queryset(self):
inv_qs = Inventory.accessible_objects(self.user, 'read_role')
group_qs = Group.accessible_objects(self.user, 'read_role').exclude(inventory__in=inv_qs)
if group_qs.count():
qs = self.model.objects.filter(Q(inventory__in=inv_qs) | Q(groups__in=group_qs))
else:
qs = self.model.objects.filter(inventory__in=inv_qs)
qs = self.model.objects.filter(inventory__in=inv_qs)
qs = qs.select_related('created_by', 'modified_by', 'inventory',
'last_job__job_template',
'last_job_host_summary__job')
@ -392,7 +396,7 @@ class HostAccess(BaseAccess):
return qs
def can_read(self, obj):
return obj and any(self.user in grp.read_role for grp in obj.groups.all()) or self.user in obj.inventory.read_role
return obj and self.user in obj.inventory.read_role
def can_add(self, data):
if not data or 'inventory' not in data:
@ -401,7 +405,7 @@ class HostAccess(BaseAccess):
# Checks for admin or change permission on inventory.
inventory_pk = get_pk_from_dict(data, 'inventory')
inventory = get_object_or_400(Inventory, pk=inventory_pk)
if self.user not in inventory.update_role:
if self.user not in inventory.admin_role:
return False
# Check to see if we have enough licenses
@ -415,7 +419,7 @@ class HostAccess(BaseAccess):
raise PermissionDenied('Unable to change inventory on a host.')
# Checks for admin or change permission on inventory, controls whether
# the user can edit variable data.
return obj and self.user in obj.inventory.update_role
return obj and self.user in obj.inventory.admin_role
def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
@ -439,7 +443,7 @@ class GroupAccess(BaseAccess):
model = Group
def get_queryset(self):
qs = self.model.accessible_objects(self.user, 'read_role')
qs = Group.objects.filter(inventory__in=Inventory.accessible_objects(self.user, 'read_role'))
qs = qs.select_related('created_by', 'modified_by', 'inventory')
return qs.prefetch_related('parents', 'children', 'inventory_source').all()
@ -452,7 +456,7 @@ class GroupAccess(BaseAccess):
# Checks for admin or change permission on inventory.
inventory_pk = get_pk_from_dict(data, 'inventory')
inventory = get_object_or_400(Inventory, pk=inventory_pk)
return self.user in inventory.update_role
return self.user in inventory.admin_role
def can_change(self, obj, data):
# Prevent moving a group to a different inventory.
@ -461,7 +465,7 @@ class GroupAccess(BaseAccess):
raise PermissionDenied('Unable to change inventory on a group.')
# Checks for admin or change permission on inventory, controls whether
# the user can attach subgroups or edit variable data.
return obj and self.user in obj.inventory.update_role
return obj and self.user in obj.inventory.admin_role
def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
@ -501,9 +505,9 @@ class InventorySourceAccess(BaseAccess):
def can_read(self, obj):
if obj and obj.group:
return self.user in obj.group.read_role
return self.user.can_access(Group, 'read', obj.group)
elif obj and obj.inventory:
return self.user in obj.inventory.read_role
return self.user.can_access(Inventory, 'read', obj.inventory)
else:
return False
@ -514,14 +518,19 @@ class InventorySourceAccess(BaseAccess):
def can_change(self, obj, data):
# Checks for admin or change permission on group.
if obj and obj.group:
return self.user in obj.group.update_role
return self.user.can_access(Group, 'change', obj.group, None)
# Can't change inventory sources attached to only the inventory, since
# these are created automatically from the management command.
else:
return False
def can_start(self, obj):
return self.can_change(obj, {}) and obj.can_update
if obj and obj.group:
return obj.can_update and self.user in obj.group.inventory.update_role
elif obj and obj.inventory:
return obj.can_update and self.user in obj.inventory.update_role
return False
class InventoryUpdateAccess(BaseAccess):
'''
@ -540,7 +549,16 @@ class InventoryUpdateAccess(BaseAccess):
return qs.filter(inventory_source__in=inventory_sources_qs)
def can_cancel(self, obj):
return self.can_change(obj, {}) and obj.can_cancel
if not obj.can_cancel:
return False
if self.user.is_superuser or self.user == obj.created_by:
return True
# Inventory cascade deletes to inventory update, descends from org admin
return self.user in obj.inventory_source.inventory.admin_role
@check_superuser
def can_delete(self, obj):
return self.user in obj.inventory_source.inventory.admin_role
class CredentialAccess(BaseAccess):
'''
@ -588,16 +606,33 @@ class CredentialAccess(BaseAccess):
return check_user_access(self.user, Organization, 'change', organization_obj, None)
return False
@check_superuser
def can_use(self, obj):
return self.user in obj.use_role
@check_superuser
def can_change(self, obj, data):
if not self.can_add(data):
if not obj:
return False
return self.user in obj.owner_role
# Check access to organizations
organization_pk = get_pk_from_dict(data, 'organization')
if data and 'organization' in data and organization_pk != getattr(obj, 'organization_id', None):
if organization_pk:
# admin permission to destination organization is mandatory
new_organization_obj = get_object_or_400(Organization, pk=organization_pk)
if self.user not in new_organization_obj.admin_role:
return False
# admin permission to existing organization is also mandatory
if obj.organization:
if self.user not in obj.organization.admin_role:
return False
if obj.organization:
if self.user in obj.organization.admin_role:
return True
return self.user in obj.admin_role
def can_delete(self, obj):
# Unassociated credentials may be marked deleted by anyone, though we
@ -643,6 +678,24 @@ class TeamAccess(BaseAccess):
def can_delete(self, obj):
return self.can_change(obj, None)
def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
"""Reverse obj and sub_obj, defer to RoleAccess if this is an assignment
of a resource role to the team."""
if isinstance(sub_obj, Role) and isinstance(sub_obj.content_object, ResourceMixin):
role_access = RoleAccess(self.user)
return role_access.can_attach(sub_obj, obj, 'member_role.parents',
*args, **kwargs)
return super(TeamAccess, self).can_attach(obj, sub_obj, relationship,
*args, **kwargs)
def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
if isinstance(sub_obj, Role) and isinstance(sub_obj.content_object, ResourceMixin):
role_access = RoleAccess(self.user)
return role_access.can_unattach(sub_obj, obj, 'member_role.parents',
*args, **kwargs)
return super(TeamAccess, self).can_unattach(obj, sub_obj, relationship,
*args, **kwargs)
class ProjectAccess(BaseAccess):
'''
I can see projects when:
@ -669,8 +722,9 @@ class ProjectAccess(BaseAccess):
@check_superuser
def can_add(self, data):
qs = Organization.accessible_objects(self.user, 'admin_role')
return qs.exists()
organization_pk = get_pk_from_dict(data, 'organization')
org = get_object_or_400(Organization, pk=organization_pk)
return self.user in org.admin_role
@check_superuser
def can_change(self, obj, data):
@ -679,8 +733,9 @@ class ProjectAccess(BaseAccess):
def can_delete(self, obj):
return self.can_change(obj, None)
@check_superuser
def can_start(self, obj):
return self.can_change(obj, {}) and obj.can_update
return obj and self.user in obj.update_role
class ProjectUpdateAccess(BaseAccess):
'''
@ -701,7 +756,12 @@ class ProjectUpdateAccess(BaseAccess):
@check_superuser
def can_cancel(self, obj):
return self.can_change(obj, {}) and obj.can_cancel
if not obj.can_cancel:
return False
if self.user == obj.created_by:
return True
# Project updates cascade delete with project, admin role descends from org admin
return self.user in obj.project.admin_role
@check_superuser
def can_delete(self, obj):
@ -745,6 +805,9 @@ class JobTemplateAccess(BaseAccess):
if not data or '_method' in data: # So the browseable API will work?
return True
# if reference_obj is provided, determine if it can be copied
reference_obj = data.pop('reference_obj', None)
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')
@ -754,42 +817,57 @@ class JobTemplateAccess(BaseAccess):
if self.user.is_superuser:
return True
# If a credential is provided, the user should have read access to it.
credential_pk = get_pk_from_dict(data, 'credential')
if credential_pk:
credential = get_object_or_400(Credential, pk=credential_pk)
if self.user not in credential.read_role:
def get_value(Class, field):
if reference_obj:
return getattr(reference_obj, field, None)
else:
pk = get_pk_from_dict(data, field)
if pk:
return get_object_or_400(Class, pk=pk)
else:
return None
# If a credential is provided, the user should have use access to it.
credential = get_value(Credential, 'credential')
if credential:
if self.user not in credential.use_role:
return False
# If a cloud credential is provided, the user should have read access.
cloud_credential_pk = get_pk_from_dict(data, 'cloud_credential')
if cloud_credential_pk:
cloud_credential = get_object_or_400(Credential,
pk=cloud_credential_pk)
if self.user not in cloud_credential.read_role:
# If a cloud credential is provided, the user should have use access.
cloud_credential = get_value(Credential, 'cloud_credential')
if cloud_credential:
if self.user not in cloud_credential.use_role:
return False
# Check that the given inventory ID is valid.
inventory_pk = get_pk_from_dict(data, 'inventory')
inventory = Inventory.objects.filter(id=inventory_pk)
if not inventory.exists() and not data.get('ask_inventory_on_launch', False):
return False # Does this make sense? Maybe should check read access
# If a network credential is provided, the user should have use access.
network_credential = get_value(Credential, 'network_credential')
if network_credential:
if self.user not in network_credential.use_role:
return False
project_pk = get_pk_from_dict(data, 'project')
# If an inventory is provided, the user should have use access.
inventory = get_value(Inventory, 'inventory')
if inventory:
if self.user not in inventory.use_role:
return False
project = get_value(Project, 'project')
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
org = inventory[0].organization
accessible = self.user in org.admin_role
if not project_pk and accessible:
if inventory:
org = inventory.organization
accessible = self.user in org.admin_role
else:
accessible = False
if not project and accessible:
return True
elif not accessible:
return False
# If the user has admin access to the project (as an org admin), should
# be able to proceed without additional checks.
project = get_object_or_400(Project, pk=project_pk)
if self.user in project.admin_role:
return True
return self.user in project.admin_role and self.user in inventory.read_role
if project:
return self.user in project.use_role
else:
return False
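The get_value() closure above is the crux of the copy support: each related object is resolved either from the template being copied (reference_obj) or from a primary key in the request body. A stripped-down, framework-free sketch of the same idea — the names here are illustrative, not the Tower API, and lookup() stands in for get_object_or_400:

def resolve_related(data, reference_obj, field, lookup):
    # Prefer the object we are copying from; otherwise fall back to the pk sent
    # in the request body.
    if reference_obj is not None:
        return getattr(reference_obj, field, None)
    pk = data.get(field)
    if pk:
        return lookup(pk)
    return None

# e.g. credential = resolve_related(request_data, reference_obj, 'credential',
#                                   lambda pk: Credential.objects.get(pk=pk))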
def can_start(self, obj, validate_license=True):
# Check license.
@ -814,20 +892,80 @@ class JobTemplateAccess(BaseAccess):
def can_change(self, obj, data):
data_for_change = data
if self.user not in obj.admin_role:
if self.user not in obj.admin_role and not self.user.is_superuser:
return False
if data is not None:
data_for_change = dict(data)
for required_field in ('credential', 'cloud_credential', 'inventory', 'project'):
data = dict(data)
if self.changes_are_non_sensitive(obj, data):
if 'job_type' in data and obj.job_type != data['job_type'] and data['job_type'] == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')
if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
self.check_license(feature='surveys')
return True
for required_field in ('credential', 'cloud_credential', 'network_credential', 'inventory', 'project'):
required_obj = getattr(obj, required_field, None)
if required_field not in data_for_change and required_obj is not None:
data_for_change[required_field] = required_obj.pk
return self.can_read(obj) and self.can_add(data_for_change)
def changes_are_non_sensitive(self, obj, data):
'''
Return true if the changes being made are considered nonsensitive, and
thus can be made by a job template administrator who may not have access
to any inventory, project, or credentials associated with the template.
'''
# We are whitelisting fields that can be changed by a job template admin without access to the related resources
field_whitelist = [
'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled'
]
for k, v in data.items():
if hasattr(obj, k) and getattr(obj, k) != v:
if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None):
return False
return True
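Concretely, the whitelist means a user with admin on the template itself can still rename it or tweak limits and tags, while anything that swaps the underlying resources falls through to the full can_add() check. A deliberately simplified, framework-free replica of the loop above (it omits the *_id fallback the real method also allows):

FIELD_WHITELIST = {'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
                   'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch'}

def non_sensitive(current, changes, whitelist=FIELD_WHITELIST):
    # Every changed key must either be whitelisted or leave the stored value untouched.
    return all(k in whitelist or current.get(k) == v for k, v in changes.items())

print(non_sensitive({'inventory': 1, 'limit': ''}, {'limit': 'webservers'}))  # True
print(non_sensitive({'inventory': 1, 'limit': ''}, {'inventory': 2}))         # False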
def can_update_sensitive_fields(self, obj, data):
project_id = data.get('project', obj.project.id if obj.project else None)
inventory_id = data.get('inventory', obj.inventory.id if obj.inventory else None)
credential_id = data.get('credential', obj.credential.id if obj.credential else None)
cloud_credential_id = data.get('cloud_credential', obj.cloud_credential.id if obj.cloud_credential else None)
network_credential_id = data.get('network_credential', obj.network_credential.id if obj.network_credential else None)
if project_id and self.user not in Project.objects.get(pk=project_id).use_role:
return False
if inventory_id and self.user not in Inventory.objects.get(pk=inventory_id).use_role:
return False
if credential_id and self.user not in Credential.objects.get(pk=credential_id).use_role:
return False
if cloud_credential_id and self.user not in Credential.objects.get(pk=cloud_credential_id).use_role:
return False
if network_credential_id and self.user not in Credential.objects.get(pk=network_credential_id).use_role:
return False
return True
@check_superuser
def can_delete(self, obj):
return self.user in obj.admin_role
class JobAccess(BaseAccess):
'''
I can see jobs when:
- I am a superuser.
- I can see its job template
- I am an admin or auditor of the organization which contains its inventory
- I am an admin or auditor of the organization which contains its project
I can delete jobs when:
- I am an admin of the organization which contains its inventory
- I am an admin of the organization which contains its project
'''
model = Job
@ -839,10 +977,20 @@ class JobAccess(BaseAccess):
if self.user.is_superuser:
return qs.all()
return qs.filter(
qs_jt = qs.filter(
job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')
)
org_access_qs = Organization.objects.filter(
Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
if not org_access_qs.exists():
return qs_jt
return qs.filter(
Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) |
Q(inventory__organization__in=org_access_qs) |
Q(project__organization__in=org_access_qs)).distinct()
def can_add(self, data):
if not data or '_method' in data: # So the browseable API will work?
return True
@ -871,7 +1019,11 @@ class JobAccess(BaseAccess):
@check_superuser
def can_delete(self, obj):
return self.user in obj.inventory.admin_role
if obj.inventory is not None and self.user in obj.inventory.organization.admin_role:
return True
if obj.project is not None and self.user in obj.project.organization.admin_role:
return True
return False
def can_start(self, obj):
self.check_license()
@ -894,7 +1046,12 @@ class JobAccess(BaseAccess):
return inventory_access and credential_access and (org_access or project_access)
def can_cancel(self, obj):
return self.can_read(obj) and obj.can_cancel
if not obj.can_cancel:
return False
# Delete access allows org admins to stop running jobs
if self.user == obj.created_by or self.can_delete(obj):
return True
return obj.job_template is not None and self.user in obj.job_template.admin_role
class SystemJobTemplateAccess(BaseAccess):
'''
@ -931,7 +1088,7 @@ class AdHocCommandAccess(BaseAccess):
return qs.all()
credential_ids = set(self.user.get_queryset(Credential).values_list('id', flat=True))
inventory_qs = Inventory.accessible_objects(self.user, 'adhoc_role')
inventory_qs = Inventory.accessible_objects(self.user, 'read_role')
return qs.filter(credential_id__in=credential_ids,
inventory__in=inventory_qs)
@ -962,8 +1119,9 @@ class AdHocCommandAccess(BaseAccess):
def can_change(self, obj, data):
return False
@check_superuser
def can_delete(self, obj):
return self.can_read(obj)
return obj.inventory is not None and self.user in obj.inventory.organization.admin_role
def can_start(self, obj):
return self.can_add({
@ -972,7 +1130,11 @@ class AdHocCommandAccess(BaseAccess):
})
def can_cancel(self, obj):
return self.can_read(obj) and obj.can_cancel
if not obj.can_cancel:
return False
if self.user == obj.created_by:
return True
return obj.inventory is not None and self.user in obj.inventory.admin_role
class AdHocCommandEventAccess(BaseAccess):
'''
@ -1228,14 +1390,15 @@ class NotificationTemplateAccess(BaseAccess):
@check_superuser
def can_change(self, obj, data):
if obj.organization is None:
# only superusers are allowed to edit orphan notification templates
return False
org_pk = get_pk_from_dict(data, 'organization')
if obj and org_pk and obj.organization.pk != org_pk:
org = get_object_or_400(Organization, pk=org_pk)
if self.user not in org.admin_role:
return False
if obj.organization is not None:
return self.user in obj.organization.admin_role
return False
return self.user in obj.organization.admin_role
def can_admin(self, obj, data):
return self.can_change(obj, data)
@ -1285,7 +1448,7 @@ class LabelAccess(BaseAccess):
org_pk = get_pk_from_dict(data, 'organization')
org = get_object_or_400(Organization, pk=org_pk)
return self.user in org.read_role
return self.user in org.member_role
@check_superuser
def can_change(self, obj, data):
@ -1329,7 +1492,8 @@ class ActivityStreamAccess(BaseAccess):
qs = qs.select_related('actor')
qs = qs.prefetch_related('organization', 'user', 'inventory', 'host', 'group', 'inventory_source',
'inventory_update', 'credential', 'team', 'project', 'project_update',
'permission', 'job_template', 'job')
'permission', 'job_template', 'job', 'ad_hoc_command',
'notification_template', 'notification', 'label', 'role')
if self.user.is_superuser:
return qs.all()
if self.user in Role.singleton('system_auditor'):
@ -1338,17 +1502,14 @@ class ActivityStreamAccess(BaseAccess):
inventory_set = Inventory.accessible_objects(self.user, 'read_role')
credential_set = Credential.accessible_objects(self.user, 'read_role')
organization_set = Organization.accessible_objects(self.user, 'read_role')
group_set = Group.accessible_objects(self.user, 'read_role')
admin_of_orgs = Organization.accessible_objects(self.user, 'admin_role')
group_set = Group.objects.filter(inventory__in=inventory_set)
project_set = Project.accessible_objects(self.user, 'read_role')
jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
team_set = Team.accessible_objects(self.user, 'read_role')
ad_hoc_results = qs.filter(
ad_hoc_command__inventory__in=inventory_set,
ad_hoc_command__credential__in=credential_set
)
global_results = qs.filter(
return qs.filter(
Q(ad_hoc_command__inventory__in=inventory_set) |
Q(user__in=organization_set.values('member_role__members')) |
Q(user=self.user) |
Q(organization__in=organization_set) |
@ -1363,13 +1524,11 @@ class ActivityStreamAccess(BaseAccess):
Q(project_update__project__in=project_set) |
Q(job_template__in=jt_set) |
Q(job__job_template__in=jt_set) |
Q(notification_template__organization__admin_role__members__in=[self.user]) |
Q(notification__notification_template__organization__admin_role__members__in=[self.user]) |
Q(notification_template__organization__in=admin_of_orgs) |
Q(notification__notification_template__organization__in=admin_of_orgs) |
Q(label__organization__in=organization_set) |
Q(role__in=Role.visible_roles(self.user))
)
return (ad_hoc_results | global_results).distinct()
).distinct()
def can_add(self, data):
return False
@ -1389,10 +1548,24 @@ class CustomInventoryScriptAccess(BaseAccess):
return self.model.objects.distinct().all()
return self.model.accessible_objects(self.user, 'read_role').all()
@check_superuser
def can_add(self, data):
org_pk = get_pk_from_dict(data, 'organization')
org = get_object_or_400(Organization, pk=org_pk)
return self.user in org.admin_role
@check_superuser
def can_admin(self, obj):
return self.user in obj.admin_role
@check_superuser
def can_change(self, obj, data):
return self.can_admin(obj)
@check_superuser
def can_delete(self, obj):
return self.can_admin(obj)
@check_superuser
def can_read(self, obj):
return self.user in obj.read_role
@ -1443,10 +1616,14 @@ class RoleAccess(BaseAccess):
def can_attach(self, obj, sub_obj, relationship, data,
skip_sub_obj_read_check=False):
return self.can_unattach(obj, sub_obj, relationship)
return self.can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check)
@check_superuser
def can_unattach(self, obj, sub_obj, relationship):
def can_unattach(self, obj, sub_obj, relationship, data=None, skip_sub_obj_read_check=False):
if not skip_sub_obj_read_check and relationship in ['members', 'member_role.parents']:
if not check_user_access(self.user, sub_obj.__class__, 'read', sub_obj):
return False
if obj.object_id and \
isinstance(obj.content_object, ResourceMixin) and \
self.user in obj.content_object.admin_role:

View File

@ -1,5 +1,5 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
CLOUD_PROVIDERS = ('azure', 'azure_rm', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'foreman', 'cloudforms')
CLOUD_PROVIDERS = ('azure', 'azure_rm', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'satellite6', 'cloudforms')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom',)

View File

@ -23,16 +23,17 @@ class Command(BaseCommand):
superuser = None
with impersonate(superuser):
o = Organization.objects.create(name='Default')
p = Project.objects.create(name='Demo Project',
scm_type='git',
scm_url='https://github.com/ansible/ansible-tower-samples',
scm_update_on_launch=True,
scm_update_cache_timeout=0,
organization=o)
p = Project(name='Demo Project',
scm_type='git',
scm_url='https://github.com/ansible/ansible-tower-samples',
scm_update_on_launch=True,
scm_update_cache_timeout=0,
organization=o)
p.save(skip_update=True)
c = Credential.objects.create(name='Demo Credential',
username=superuser.username,
created_by=superuser)
c.owner_role.members.add(superuser)
c.admin_role.members.add(superuser)
i = Inventory.objects.create(name='Demo Inventory',
organization=o,
created_by=superuser)

View File

@ -78,7 +78,7 @@ class MemObject(object):
v = yaml.safe_load(file(path, 'r').read())
if hasattr(v, 'items'): # is a dict
all_vars.update(v)
except yaml.YAMLError, e:
except yaml.YAMLError as e:
if hasattr(e, 'problem_mark'):
logger.error('Invalid YAML in %s:%s col %s', path,
e.problem_mark.line + 1,
@ -362,7 +362,19 @@ class ExecutableJsonLoader(BaseLoader):
raise RuntimeError("proot is not installed but is configured for use")
kwargs = {'proot_temp_dir': self.source_dir} # TODO: Remove proot dir
cmd = wrap_args_with_proot(cmd, self.source_dir, **kwargs)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Use the ansible venv if it's available and set up for use
env = dict(os.environ.items())
if settings.ANSIBLE_USE_VENV:
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
for python_ver in ["python2.7", "python2.6"]:
if os.path.isdir(os.path.join(venv_libdir, python_ver)):
env['PYTHONPATH'] = os.path.join(venv_libdir, python_ver, "site-packages") + ":"
break
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise RuntimeError('%r failed (rc=%d) with output: %s' % (cmd, proc.returncode, stderr))
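The environment juggling above is what lets a custom inventory script run inside Tower's bundled Ansible virtualenv. A small standalone way to sanity-check the same environment construction — the venv path below is an assumption, not necessarily where Tower installs it:

import os
import subprocess

venv = '/var/lib/awx/venv/ansible'        # hypothetical ANSIBLE_VENV_PATH
env = dict(os.environ)
env['VIRTUAL_ENV'] = venv
env['PATH'] = os.path.join(venv, 'bin') + ':' + env['PATH']

# With PATH rewritten, the child process resolves 'python' from the venv's bin
# directory first (assuming that venv exists), mirroring how the loader launches
# the inventory script above.
print(subprocess.check_output(['which', 'python'], env=env))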
@ -857,7 +869,7 @@ class Command(NoArgsCommand):
del_pks = del_host_pks[offset:(offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks):
group_host_count += 1
if db_host not in db_group.hosts:
if db_host not in db_group.hosts.all():
continue
db_group.hosts.remove(db_host)
self.logger.info('Host "%s" removed from group "%s"',
@ -1317,7 +1329,7 @@ class Command(NoArgsCommand):
self.logger.warning('Inventory import required %d queries '
'taking %0.3fs', len(queries_this_import),
sqltime)
except Exception, e:
except Exception as e:
if isinstance(e, KeyboardInterrupt):
status = 'canceled'
exc = e

View File

@ -173,7 +173,7 @@ class CallbackReceiver(object):
# If for any reason there's a problem, just use 0.
try:
verbose = Job.objects.get(id=data['job_id']).verbosity
except Exception, e:
except Exception as e:
verbose = 0
# Convert the datetime for the job event's creation appropriately,
@ -191,7 +191,7 @@ class CallbackReceiver(object):
# Print the data to stdout if we're in DEBUG mode.
if settings.DEBUG:
print data
print(data)
# Sanity check: Don't honor keys that we don't recognize.
for key in data.keys():
@ -234,7 +234,7 @@ class CallbackReceiver(object):
# If for any reason there's a problem, just use 0.
try:
verbose = AdHocCommand.objects.get(id=data['ad_hoc_command_id']).verbosity
except Exception, e:
except Exception as e:
verbose = 0
# Convert the datetime for the job event's creation appropriately,
@ -252,7 +252,7 @@ class CallbackReceiver(object):
# Print the data to stdout if we're in DEBUG mode.
if settings.DEBUG:
print data
print(data)
# Sanity check: Don't honor keys that we don't recognize.
for key in data.keys():
@ -288,7 +288,7 @@ class CallbackReceiver(object):
message = queue_actual.get(block=True, timeout=1)
except QueueEmpty:
continue
except Exception, e:
except Exception as e:
logger.error("Exception on listen socket, restarting: " + str(e))
break
self.process_job_event(message)

View File

@ -59,7 +59,7 @@ class FactCacheReceiver(object):
except Fact.MultipleObjectsReturned:
logger.warn('Database inconsistent. Multiple Hosts found for <hostname, inventory_id> <%s, %s>.' % (hostname, inventory_id))
return None
except Exception, e:
except Exception as e:
logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
return None

View File

@ -96,7 +96,7 @@ class SocketController(object):
if socket_session and socket_session.is_valid():
try:
socket.send_packet(packet)
except Exception, e:
except Exception as e:
logger.error("Error sending client packet to %s: %s" % (str(session_id), str(packet)))
logger.error("Error was: " + str(e))
@ -116,7 +116,7 @@ class SocketController(object):
if socket:
try:
socket.send_packet(packet)
except Exception, e:
except Exception as e:
logger.error("Error sending client packet to %s: %s" % (str(socket_session.session_id), str(packet)))
logger.error("Error was: " + str(e))
@ -129,18 +129,18 @@ socketController = SocketController(SocketSessionManager())
#
# Socket session is attached to self.session['socket_session']
# self.session and self.socket.session point to the same dict
#
#
class TowerBaseNamespace(BaseNamespace):
def get_allowed_methods(self):
return ['recv_disconnect']
def get_initial_acl(self):
request_token = self._get_request_token()
if request_token:
# (1) This is the first time the socket has been seen (first
# (1) This is the first time the socket has been seen (first
# namespace joined).
# (2) This socket has already been seen (already joined and maybe
# (2) This socket has already been seen (already joined and maybe
# left a namespace)
#
# Note: Assume that the user token is valid if the session is found
@ -168,7 +168,7 @@ class TowerBaseNamespace(BaseNamespace):
if k == "Token":
token_actual = urllib.unquote_plus(v).decode().replace("\"","")
return token_actual
except Exception, e:
except Exception as e:
logger.error("Exception validating user: " + str(e))
return False
return False

View File

@ -207,7 +207,15 @@ def rebuild_graph(message):
# Create and process dependencies for new tasks
for task in new_tasks:
logger.debug("Checking dependencies for: %s" % str(task))
task_dependencies = task.generate_dependencies(running_tasks + waiting_tasks) # TODO: other 'new' tasks? Need to investigate this scenario
try:
task_dependencies = task.generate_dependencies(running_tasks + waiting_tasks)
except Exception, e:
logger.error("Failed processing dependencies for {}: {}".format(task, e))
task.status = 'failed'
task.job_explanation += 'Task failed to generate dependencies: {}'.format(e)
task.save()
task.socketio_emit_status("failed")
continue
logger.debug("New dependencies: %s" % str(task_dependencies))
for dep in task_dependencies:
# We recalculate the created time for the moment to ensure the

View File

@ -86,7 +86,11 @@ class Migration(migrations.Migration):
name='credential',
unique_together=set([]),
),
migrations.AddField(
model_name='credential',
name='organization',
field=models.ForeignKey(related_name='credentials', default=None, blank=True, to='main.Organization', null=True),
),
#
# New RBAC models and fields
@ -139,18 +143,18 @@ class Migration(migrations.Migration):
),
migrations.AddField(
model_name='credential',
name='owner_role',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='credential',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'owner_role'], to='main.Role', null=b'True'),
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='credential',
name='read_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'use_role', b'owner_role'], to='main.Role', null=b'True'),
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'organization.auditor_role', b'use_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='custominventoryscript',
@ -162,31 +166,6 @@ class Migration(migrations.Migration):
name='read_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'organization.member_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='group',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'inventory.admin_role', b'parents.admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='group',
name='adhoc_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'inventory.adhoc_role', b'parents.adhoc_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='group',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'inventory.use_role', b'parents.use_role', b'adhoc_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='group',
name='update_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'inventory.update_role', b'parents.update_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='group',
name='read_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'inventory.read_role', b'parents.read_role', b'use_role', b'update_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AddField(
model_name='inventory',
name='admin_role',

View File

@ -31,7 +31,7 @@ def create_system_job_templates(apps, schema_editor):
),
)
if created:
sjt.schedules.create(
sched = Schedule(
name='Cleanup Job Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str,
description='Automatically Generated Schedule',
@ -40,6 +40,8 @@ def create_system_job_templates(apps, schema_editor):
created=now_dt,
modified=now_dt,
)
sched.unified_job_template = sjt
sched.save()
existing_cd_jobs = SystemJobTemplate.objects.filter(job_type='cleanup_deleted')
Schedule.objects.filter(unified_job_template__in=existing_cd_jobs).delete()
@ -56,7 +58,7 @@ def create_system_job_templates(apps, schema_editor):
),
)
if created:
sjt.schedules.create(
sched = Schedule(
name='Cleanup Activity Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=TU' % now_str,
description='Automatically Generated Schedule',
@ -65,6 +67,8 @@ def create_system_job_templates(apps, schema_editor):
created=now_dt,
modified=now_dt,
)
sched.unified_job_template = sjt
sched.save()
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_facts',
@ -77,7 +81,7 @@ def create_system_job_templates(apps, schema_editor):
),
)
if created and feature_enabled('system_tracking', bypass_database=True):
sjt.schedules.create(
sched = Schedule(
name='Cleanup Fact Schedule',
rrule='DTSTART:%s RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=1' % now_str,
description='Automatically Generated Schedule',
@ -86,7 +90,8 @@ def create_system_job_templates(apps, schema_editor):
created=now_dt,
modified=now_dt,
)
sched.unified_job_template = sjt
sched.save()
class Migration(migrations.Migration):

View File

@ -46,17 +46,17 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]),
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='team',

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _ask_for_variables as ask_for_variables
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations
@ -15,4 +16,5 @@ class Migration(migrations.Migration):
operations = [
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(ask_for_variables.migrate_credential),
migrations.RunPython(rbac.rebuild_role_hierarchy),
]

View File

@ -34,7 +34,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='host',
@ -44,12 +44,12 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Satellite 6'), (b'cloudforms', 'CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
]

View File

@ -19,16 +19,16 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='credential',
name='kind',
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
field=models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'openstack', 'OpenStack')]),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'foreman', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
),
]

View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0023_v300_activity_stream_ordering'),
]
operations = [
migrations.AddField(
model_name='jobtemplate',
name='allow_simultaneous',
field=models.BooleanField(default=False),
),
]

View File

@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import awx.main.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0024_v300_jobtemplate_allow_simul'),
]
operations = [
migrations.AlterField(
model_name='credential',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.admin_role', b'admin_role'], to='main.Role', null=b'True'),
),
migrations.AlterField(
model_name='team',
name='member_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=b'admin_role', to='main.Role', null=b'True'),
),
migrations.AlterField(
model_name='team',
name='read_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'organization.auditor_role', b'member_role'], to='main.Role', null=b'True'),
),
]

View File

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations
import awx.main.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0025_v300_update_rbac_parents'),
]
operations = [
migrations.AlterUniqueTogether(
name='credential',
unique_together=set([('organization', 'name', 'kind')]),
),
migrations.AlterField(
model_name='credential',
name='read_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_auditor', b'use_role', b'admin_role', b'organization.auditor_role'], to='main.Role', null=b'True'),
),
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(rbac.rebuild_role_hierarchy),
]

View File

@ -880,7 +880,6 @@ class JobTemplateAccess(BaseAccess):
team_ids = Team.objects.filter(deprecated_users__in=[self.user])
# TODO: I think the below queries can be combined
deploy_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team_id__in=team_ids),
permission_type__in=allowed_deploy,
@ -1094,7 +1093,6 @@ class JobAccess(BaseAccess):
allowed_check = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY, PERM_INVENTORY_CHECK]
team_ids = Team.objects.filter(deprecated_users__in=[self.user])
# TODO: I think the below queries can be combined
deploy_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids),
permission_type__in=allowed_deploy,

View File

@ -43,7 +43,6 @@ def create_roles(apps, schema_editor):
'Organization',
'Team',
'Inventory',
'Group',
'Project',
'Credential',
'CustomInventoryScript',
@ -123,7 +122,7 @@ def attrfunc(attr_path):
return attr
def _update_credential_parents(org, cred):
org.admin_role.children.add(cred.owner_role)
cred.organization = org
cred.save()
def _discover_credentials(instances, cred, orgfunc):
@ -162,15 +161,15 @@ def _discover_credentials(instances, cred, orgfunc):
else:
# Create a new credential
cred.pk = None
cred.organization = None
cred.save()
# Unlink the old information from the new credential
cred.owner_role, cred.use_role = None, None
cred.save()
cred.admin_role, cred.use_role = None, None
for i in orgs[org]:
i.credential = cred
i.save()
_update_credential_parents(org, cred)
@log_migration
@ -199,11 +198,11 @@ def migrate_credential(apps, schema_editor):
logger.info(smart_text(u"added Credential(name={}, kind={}, host={}) at organization level".format(cred.name, cred.kind, cred.host)))
if cred.deprecated_team is not None:
cred.deprecated_team.member_role.children.add(cred.owner_role)
cred.deprecated_team.member_role.children.add(cred.admin_role)
cred.save()
logger.info(smart_text(u"added Credential(name={}, kind={}, host={}) at user level".format(cred.name, cred.kind, cred.host)))
elif cred.deprecated_user is not None:
cred.owner_role.members.add(cred.deprecated_user)
cred.admin_role.members.add(cred.deprecated_user)
cred.save()
logger.info(smart_text(u"added Credential(name={}, kind={}, host={}) at user level".format(cred.name, cred.kind, cred.host, )))
else:
@ -215,7 +214,7 @@ def migrate_inventory(apps, schema_editor):
Inventory = apps.get_model('main', 'Inventory')
Permission = apps.get_model('main', 'Permission')
def role_from_permission():
def role_from_permission(perm):
if perm.permission_type == 'admin':
return inventory.admin_role
elif perm.permission_type == 'read':
@ -233,7 +232,7 @@ def migrate_inventory(apps, schema_editor):
role = None
execrole = None
role = role_from_permission()
role = role_from_permission(perm)
if role is None:
raise Exception(smart_text(u'Unhandled permission type for inventory: {}'.format( perm.permission_type)))
@ -292,10 +291,13 @@ def migrate_projects(apps, schema_editor):
else:
new_prj = Project.objects.create(
created = project.created,
modified = project.modified,
polymorphic_ctype_id = project.polymorphic_ctype_id,
description = project.description,
name = smart_text(u'{} - {}'.format(org.name, original_project_name)),
old_pk = project.old_pk,
created_by_id = project.created_by_id,
modified_by_id = project.modified_by_id,
scm_type = project.scm_type,
scm_url = project.scm_url,
scm_branch = project.scm_branch,
@ -307,11 +309,31 @@ def migrate_projects(apps, schema_editor):
credential = project.credential,
organization = org
)
if project.scm_type == "":
new_prj.local_path = project.local_path
new_prj.save()
for team in project.deprecated_teams.iterator():
new_prj.deprecated_teams.add(team)
logger.warning(smart_text(u'cloning Project({}) onto {} as Project({})'.format(original_project_name, org, new_prj)))
job_templates = JobTemplate.objects.filter(inventory__organization=org).all()
job_templates = JobTemplate.objects.filter(project=project, inventory__organization=org).all()
for jt in job_templates:
jt.project = new_prj
jt.save()
for perm in Permission.objects.filter(project=project):
Permission.objects.create(
created = perm.created,
modified = perm.modified,
created_by = perm.created_by,
modified_by = perm.modified_by,
description = perm.description,
name = perm.name,
user = perm.user,
team = perm.team,
project = new_prj,
inventory = perm.inventory,
permission_type = perm.permission_type,
run_ad_hoc_commands = perm.run_ad_hoc_commands,
)
# Migrate permissions
for project in Project.objects.iterator():
@ -320,24 +342,30 @@ def migrate_projects(apps, schema_editor):
logger.warn(smart_text(u'adding Project({}) admin: {}'.format(project.name, project.created_by.username)))
for team in project.deprecated_teams.all():
team.member_role.children.add(project.use_role)
team.member_role.children.add(project.read_role)
logger.info(smart_text(u'adding Team({}) access for Project({})'.format(team.name, project.name)))
if project.organization is not None:
for user in project.organization.deprecated_users.all():
project.use_role.members.add(user)
logger.info(smart_text(u'adding Organization({}) member access to Project({})'.format(project.organization.name, project.name)))
for perm in Permission.objects.filter(project=project):
# All perms at this level just imply a user or team can read
if perm.permission_type == 'create':
role = project.use_role
else:
role = project.read_role
if perm.team:
perm.team.member_role.children.add(project.use_role)
perm.team.member_role.children.add(role)
logger.info(smart_text(u'adding Team({}) access for Project({})'.format(perm.team.name, project.name)))
if perm.user:
project.use_role.members.add(perm.user)
role.members.add(perm.user)
logger.info(smart_text(u'adding User({}) access for Project({})'.format(perm.user.username, project.name)))
if project.organization is not None:
for user in project.organization.deprecated_users.all():
if not (project.use_role.members.filter(pk=user.id).exists() or project.admin_role.members.filter(pk=user.id).exists()):
project.read_role.members.add(user)
logger.info(smart_text(u'adding Organization({}) member access to Project({})'.format(project.organization.name, project.name)))
@log_migration
def migrate_job_templates(apps, schema_editor):
@ -403,7 +431,7 @@ def migrate_job_templates(apps, schema_editor):
team_create_permissions = set(
jt_permission_qs
.filter(permission_type__in=['create'] if jt.job_type == 'check' else ['create'])
.filter(permission_type__in=['create'])
.values_list('team__id', flat=True)
)
team_run_permissions = set(
@ -413,12 +441,12 @@ def migrate_job_templates(apps, schema_editor):
)
user_create_permissions = set(
jt_permission_qs
.filter(permission_type__in=['create'] if jt.job_type == 'check' else ['run'])
.filter(permission_type__in=['create'])
.values_list('user__id', flat=True)
)
user_run_permissions = set(
jt_permission_qs
.filter(permission_type__in=['check', 'run'] if jt.job_type == 'check' else ['create'])
.filter(permission_type__in=['check', 'run'] if jt.job_type == 'check' else ['run'])
.values_list('user__id', flat=True)
)
@ -446,17 +474,20 @@ def migrate_job_templates(apps, schema_editor):
logger.info(smart_text(u'transferring execute access on JobTemplate({}) to Team({})'.format(jt.name, team.name)))
for user in User.objects.filter(id__in=user_create_permissions).iterator():
cred = jt.credential or jt.cloud_credential
if (jt.inventory.id in user_inv_permissions[user.id] or
any([jt.inventory.id in team_inv_permissions[team.id] for team in user.deprecated_teams.all()])) and \
((not jt.credential and not jt.cloud_credential) or
Credential.objects.filter(Q(deprecated_user=user) | Q(deprecated_team__deprecated_users=user), jobtemplates=jt).exists()):
(not cred or cred.deprecated_user == user or
(cred.deprecated_team and cred.deprecated_team.deprecated_users.filter(pk=user.id).exists())):
jt.admin_role.members.add(user)
logger.info(smart_text(u'transferring admin access on JobTemplate({}) to User({})'.format(jt.name, user.username)))
for user in User.objects.filter(id__in=user_run_permissions).iterator():
cred = jt.credential or jt.cloud_credential
if (jt.inventory.id in user_inv_permissions[user.id] or
any([jt.inventory.id in team_inv_permissions[team.id] for team in user.deprecated_teams.all()])) and \
((not jt.credential and not jt.cloud_credential) or
Credential.objects.filter(Q(deprecated_user=user) | Q(deprecated_team__deprecated_users=user), jobtemplates=jt).exists()):
(not cred or cred.deprecated_user == user or
(cred.deprecated_team and cred.deprecated_team.deprecated_users.filter(pk=user.id).exists())):
jt.execute_role.members.add(user)
logger.info(smart_text(u'transferring execute access on JobTemplate({}) to User({})'.format(jt.name, user.username)))
@ -468,8 +499,6 @@ def rebuild_role_hierarchy(apps, schema_editor):
start = time()
roots = Role.objects \
.all() \
.exclude(pk__in=Role.parents.through.objects.all()
.values_list('from_role_id', flat=True).distinct()) \
.values_list('id', flat=True)
stop = time()
logger.info('Found %d roots in %f seconds, rebuilding ancestry map' % (len(roots), stop - start))

View File

@ -1,38 +1,52 @@
import logging
from django.utils.encoding import smart_text
from django.conf import settings
from awx.fact.models import FactVersion
from awx.fact.utils.dbtransform import KeyTransform
from mongoengine.connection import ConnectionError
from pymongo.errors import OperationFailure
from django.conf import settings
def drop_system_tracking_db():
try:
db = FactVersion._get_db()
db.connection.drop_database(settings.MONGO_DB)
except ConnectionError:
# TODO: Log this. Not a deal-breaker. Just let the user know they
# may need to manually drop/delete the database.
pass
except OperationFailure:
# TODO: This means the database was up but something happened when we tried to query it
pass
logger = logging.getLogger(__name__)
def log_migration(wrapped):
'''Set up the logging mechanism for each migration method as it runs.
Django resets the logging configuration, so we use a decorator
to re-add the handler for each method.
'''
handler = logging.FileHandler("/tmp/tower_system_tracking_migrations.log", mode="a", encoding="UTF-8")
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
def wrapper(*args, **kwargs):
logger.handlers = []
logger.addHandler(handler)
return wrapped(*args, **kwargs)
return wrapper
@log_migration
def migrate_facts(apps, schema_editor):
Fact = apps.get_model('main', "Fact")
Host = apps.get_model('main', "Host")
if (not hasattr(settings, 'MONGO_HOST')) or settings.MONGO_HOST == NotImplemented:
logger.info("failed to find MONGO_HOST in settings. Will NOT attempt to migrate system_tracking data from Mongo to Postgres.")
# If settings do not specify a mongo database, do not raise error or drop db
return (0, 0)
try:
n = FactVersion.objects.all().count()
except ConnectionError:
# TODO: Let the user know about the error. Likely this is
# Let the user know about the error. Likely this is
# a new install and we just don't need to do this
logger.info(smart_text(u"failed to connect to mongo database host {}. Will NOT attempt to migrate system_tracking data from Mongo to Postgres.".format(settings.MONGO_HOST)))
return (0, 0)
except OperationFailure:
# TODO: This means the database was up but something happened when we tried to query it
# The database was up but something happened when we tried to query it
logger.info(smart_text(u"failed to issue Mongo query on host {}. Will NOT attempt to migrate system_tracking data from Mongo to Postgres.".format(settings.MONGO_HOST)))
return (0, 0)
migrated_count = 0
@ -45,9 +59,11 @@ def migrate_facts(apps, schema_editor):
Fact.objects.create(host_id=host.id, timestamp=fact_obj.timestamp, module=fact_obj.module, facts=fact_obj.fact).save()
migrated_count += 1
except Host.DoesNotExist:
# TODO: Log this. No host was found to migrate the facts to.
# No host was found to migrate the facts to.
# This isn't a hard error. Just something the user would want to know.
logger.info(smart_text(u"unable to migrate fact {} <inventory, hostname> not found in Postgres <{}, {}>".format(factver.id, factver.host.inventory_id, factver.host.hostname)))
not_migrated_count += 1
drop_system_tracking_db()
logger.info(smart_text(u"successfully migrated {} records of system_tracking data from Mongo to Postgres. {} records not migrated due to corresponding <inventory, hostname> pairs not found in Postgres.".format(migrated_count, not_migrated_count)))
return (migrated_count, not_migrated_count)

View File

@ -48,12 +48,18 @@ User.add_to_class('admin_role', user_admin_role)
@property
def user_get_organizations(user):
return Organization.objects.filter(member_role__members=user)
@property
def user_get_admin_of_organizations(user):
return Organization.objects.filter(admin_role__members=user)
@property
def user_get_auditor_of_organizations(user):
return Organization.objects.filter(auditor_role__members=user)
User.add_to_class('organizations', user_get_organizations)
User.add_to_class('admin_of_organizations', user_get_admin_of_organizations)
User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
@property
def user_is_system_auditor(user):

View File

@ -156,6 +156,25 @@ class AdHocCommand(UnifiedJob):
h = hmac.new(settings.SECRET_KEY, self.created.isoformat())
return '%d-%s' % (self.pk, h.hexdigest())
@property
def notification_templates(self):
all_inventory_sources = set()
for h in self.hosts.all():
for invsrc in h.inventory_sources.all():
all_inventory_sources.add(invsrc)
active_templates = dict(error=set(),
success=set(),
any=set())
for invsrc in all_inventory_sources:
notifications_dict = invsrc.notification_templates
for notification_type in active_templates.keys():
for templ in notifications_dict[notification_type]:
active_templates[notification_type].add(templ)
active_templates['error'] = list(active_templates['error'])
active_templates['any'] = list(active_templates['any'])
active_templates['success'] = list(active_templates['success'])
return active_templates
def get_passwords_needed_to_start(self):
return self.passwords_needed_to_start

View File

@ -61,7 +61,7 @@ PERMISSION_TYPE_CHOICES = [
(PERM_JOBTEMPLATE_CREATE, _('Create a Job Template')),
]
CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'azure_rm', 'openstack', 'custom', 'foreman', 'cloudforms']
CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'azure_rm', 'openstack', 'custom', 'satellite6', 'cloudforms']
VERBOSITY_CHOICES = [
(0, '0 (Normal)'),
@ -137,7 +137,7 @@ class BaseModel(models.Model):
errors = {}
try:
super(BaseModel, self).clean_fields(exclude)
except ValidationError, e:
except ValidationError as e:
errors = e.update_error_dict(errors)
for f in self._meta.fields:
if f.name in exclude:
@ -145,7 +145,7 @@ class BaseModel(models.Model):
if hasattr(self, 'clean_%s' % f.name):
try:
setattr(self, f.name, getattr(self, 'clean_%s' % f.name)())
except ValidationError, e:
except ValidationError as e:
errors[f.name] = e.messages
if errors:
raise ValidationError(errors)

View File

@ -38,7 +38,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
('aws', _('Amazon Web Services')),
('rax', _('Rackspace')),
('vmware', _('VMware vCenter')),
('foreman', _('Red Hat Satellite 6')),
('satellite6', _('Red Hat Satellite 6')),
('cloudforms', _('Red Hat CloudForms')),
('gce', _('Google Compute Engine')),
('azure', _('Microsoft Azure Classic (deprecated)')),
@ -61,6 +61,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
class Meta:
app_label = 'main'
ordering = ('kind', 'name')
unique_together = (('organization', 'name', 'kind'),)
deprecated_user = models.ForeignKey(
'auth.User',
@ -78,6 +79,14 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
on_delete=models.CASCADE,
related_name='deprecated_credentials',
)
organization = models.ForeignKey(
'Organization',
null=True,
default=None,
blank=True,
on_delete=models.CASCADE,
related_name='credentials',
)
kind = models.CharField(
max_length=32,
choices=KIND_CHOICES,
@ -203,18 +212,22 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
default='',
help_text=_('Tenant identifier for this credential'),
)
owner_role = ImplicitRoleField(
admin_role = ImplicitRoleField(
parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
],
)
use_role = ImplicitRoleField(
parent_role=['owner_role']
parent_role=[
'organization.admin_role',
'admin_role',
]
)
read_role = ImplicitRoleField(parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
'organization.auditor_role',
'use_role',
'owner_role'
'admin_role',
])
@property

View File

@ -309,7 +309,8 @@ class Inventory(CommonModel, ResourceMixin):
else:
computed_fields.pop(field)
if computed_fields:
iobj.save(update_fields=computed_fields.keys())
if len(computed_fields) > 0:
iobj.save(update_fields=computed_fields.keys())
logger.debug("Finished updating inventory computed fields")
@property
@ -443,7 +444,7 @@ class Host(CommonModelNameNotUnique):
# Use .job_events.all() to get events affecting this host.
class Group(CommonModelNameNotUnique, ResourceMixin):
class Group(CommonModelNameNotUnique):
'''
A group containing managed hosts. A group or host may belong to multiple
groups.
@ -513,25 +514,6 @@ class Group(CommonModelNameNotUnique, ResourceMixin):
editable=False,
help_text=_('Inventory source(s) that created or modified this group.'),
)
admin_role = ImplicitRoleField(
parent_role=['inventory.admin_role', 'parents.admin_role'],
)
update_role = ImplicitRoleField(
parent_role=['inventory.update_role', 'parents.update_role', 'admin_role'],
)
adhoc_role = ImplicitRoleField(
parent_role=['inventory.adhoc_role', 'parents.adhoc_role', 'admin_role'],
)
use_role = ImplicitRoleField(
parent_role=['inventory.use_role', 'parents.use_role', 'adhoc_role'],
)
read_role = ImplicitRoleField(parent_role=[
'inventory.read_role',
'parents.read_role',
'use_role',
'update_role',
'admin_role'
])
def __unicode__(self):
return self.name
@ -543,7 +525,7 @@ class Group(CommonModelNameNotUnique, ResourceMixin):
def delete_recursive(self):
from awx.main.utils import ignore_inventory_computed_fields
from awx.main.tasks import update_inventory_computed_fields
from awx.main.signals import disable_activity_stream
from awx.main.signals import disable_activity_stream, activity_stream_delete
def mark_actual():
@ -601,7 +583,7 @@ class Group(CommonModelNameNotUnique, ResourceMixin):
with ignore_inventory_computed_fields():
with disable_activity_stream():
mark_actual()
activity_stream_delete(None, self)
def update_computed_fields(self):
'''
@ -728,7 +710,7 @@ class InventorySourceOptions(BaseModel):
('azure', _('Microsoft Azure Classic (deprecated)')),
('azure_rm', _('Microsoft Azure Resource Manager')),
('vmware', _('VMware vCenter')),
('foreman', _('Red Hat Satellite 6')),
('satellite6', _('Red Hat Satellite 6')),
('cloudforms', _('Red Hat CloudForms')),
('openstack', _('OpenStack')),
('custom', _('Custom Script')),
@ -964,7 +946,7 @@ class InventorySourceOptions(BaseModel):
return [('all', 'All')]
@classmethod
def get_foreman_region_choices(self):
def get_satellite6_region_choices(self):
"""Red Hat Satellite 6 region choices (not implemented)"""
return [('all', 'All')]
@ -1184,14 +1166,21 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions):
def notification_templates(self):
base_notification_templates = NotificationTemplate.objects
error_notification_templates = list(base_notification_templates
.filter(organization_notification_templates_for_errors=self.inventory.organization))
.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self]))
success_notification_templates = list(base_notification_templates
.filter(organization_notification_templates_for_success=self.inventory.organization))
.filter(unifiedjobtemplate_notification_templates_for_success__in=[self]))
any_notification_templates = list(base_notification_templates
.filter(organization_notification_templates_for_any=self.inventory.organization))
return dict(error=error_notification_templates,
success=success_notification_templates,
any=any_notification_templates)
.filter(unifiedjobtemplate_notification_templates_for_any__in=[self]))
if self.inventory.organization is not None:
error_notification_templates = set(error_notification_templates + list(base_notification_templates
.filter(organization_notification_templates_for_errors=self.inventory.organization)))
success_notification_templates = set(success_notification_templates + list(base_notification_templates
.filter(organization_notification_templates_for_success=self.inventory.organization)))
any_notification_templates = set(any_notification_templates + list(base_notification_templates
.filter(organization_notification_templates_for_any=self.inventory.organization)))
return dict(error=list(error_notification_templates),
success=list(success_notification_templates),
any=list(any_notification_templates))
def clean_source(self):
source = self.source

View File

@ -26,7 +26,7 @@ from awx.main.models.unified_jobs import * # noqa
from awx.main.models.notifications import NotificationTemplate
from awx.main.utils import decrypt_field, ignore_inventory_computed_fields
from awx.main.utils import emit_websocket_notification
from awx.main.redact import PlainTextCleaner
from awx.main.redact import PlainTextCleaner, REPLACE_STR
from awx.main.conf import tower_settings
from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin
@ -229,6 +229,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
read_role = ImplicitRoleField(
parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
)
allow_simultaneous = models.BooleanField(
default=False,
)
@classmethod
def _get_unified_job_class(cls):
@ -242,14 +246,37 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
'labels',]
def clean(self):
if self.job_type == 'scan' and (self.inventory is None or self.ask_inventory_on_launch):
raise ValidationError({"inventory": ["Scan jobs must be assigned a fixed inventory.",]})
if (not self.ask_inventory_on_launch) and self.inventory is None:
raise ValidationError({"inventory": ["Job Template must provide 'inventory' or allow prompting for it.",]})
if (not self.ask_credential_on_launch) and self.credential is None:
raise ValidationError({"credential": ["Job Template must provide 'credential' or allow prompting for it.",]})
return super(JobTemplate, self).clean()
def resource_validation_data(self):
'''
Process consistency errors and need-for-launch related fields.
'''
resources_needed_to_start = []
validation_errors = {}
# Inventory and Credential related checks
if self.inventory is None:
resources_needed_to_start.append('inventory')
if not self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Job Template must provide 'inventory' or allow prompting for it.",]
if self.credential is None:
resources_needed_to_start.append('credential')
if not self.ask_credential_on_launch:
validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",]
# Job type dependent checks
if self.job_type == 'scan':
if self.inventory is None or self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",]
elif self.project is None:
resources_needed_to_start.append('project')
validation_errors['project'] = ["Job types 'run' and 'check' must have assigned a project.",]
return (validation_errors, resources_needed_to_start)
@property
def resources_needed_to_start(self):
validation_errors, resources_needed_to_start = self.resource_validation_data()
return resources_needed_to_start
def create_job(self, **kwargs):
'''
@ -265,9 +292,13 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
Return whether job template can be used to start a new job without
requiring any user input.
'''
return bool(self.credential and not len(self.passwords_needed_to_start) and
not len(self.variables_needed_to_start) and
self.inventory)
prompting_needed = False
for value in self._ask_for_vars_dict().values():
if value:
prompting_needed = True
return (not prompting_needed and
not self.passwords_needed_to_start and
not self.variables_needed_to_start)
@property
def variables_needed_to_start(self):
@ -301,20 +332,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
errors.append("'%s' value missing" % survey_element['variable'])
elif survey_element['type'] in ["textarea", "text", "password"]:
if survey_element['variable'] in data:
if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < survey_element['min']:
errors.append("'%s' value %s is too small (must be at least %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > survey_element['max']:
if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < int(survey_element['min']):
errors.append("'%s' value %s is too small (length is %s must be at least %s)." %
(survey_element['variable'], data[survey_element['variable']], len(data[survey_element['variable']]), survey_element['min']))
if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > int(survey_element['max']):
errors.append("'%s' value %s is too large (must be no more than %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
elif survey_element['type'] == 'integer':
if survey_element['variable'] in data:
if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
data[survey_element['variable']] < survey_element['min']:
data[survey_element['variable']] < int(survey_element['min']):
errors.append("'%s' value %s is too small (must be at least %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
data[survey_element['variable']] > survey_element['max']:
data[survey_element['variable']] > int(survey_element['max']):
errors.append("'%s' value %s is too large (must be no more than %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
if type(data[survey_element['variable']]) != int:
@ -322,10 +353,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
survey_element['variable']))
elif survey_element['type'] == 'float':
if survey_element['variable'] in data:
if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < survey_element['min']:
if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < float(survey_element['min']):
errors.append("'%s' value %s is too small (must be at least %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > survey_element['max']:
if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > float(survey_element['max']):
errors.append("'%s' value %s is too large (must be no more than %s)." %
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
if type(data[survey_element['variable']]) not in (float, int):
@ -408,9 +439,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
if ask_for_vars_dict[field]:
prompted_fields[field] = kwargs[field]
else:
if field == 'extra_vars' and self.survey_enabled:
if field == 'extra_vars' and self.survey_enabled and self.survey_spec:
# Accept vars defined in the survey and no others
survey_vars = [question['variable'] for question in self.survey_spec['spec']]
survey_vars = [question['variable'] for question in self.survey_spec.get('spec', [])]
for key in kwargs[field]:
if key in survey_vars:
prompted_fields[field][key] = kwargs[field][key]
@ -551,6 +582,8 @@ class Job(UnifiedJob, JobOptions):
if obj.job_template is not None and obj.inventory is not None:
if obj.job_template == self.job_template and \
obj.inventory == self.inventory:
if self.job_template.allow_simultaneous:
return False
if obj.launch_type == 'callback' and self.launch_type == 'callback' and \
obj.limit != self.limit:
return False
@ -606,14 +639,12 @@ class Job(UnifiedJob, JobOptions):
def generate_dependencies(self, active_tasks):
from awx.main.models import InventoryUpdate, ProjectUpdate
if self.inventory is None or self.project is None:
return []
inventory_sources = self.inventory.inventory_sources.filter( update_on_launch=True)
inventory_sources = self.inventory.inventory_sources.filter(update_on_launch=True)
project_found = False
inventory_sources_found = []
dependencies = []
for obj in active_tasks:
if type(obj) == ProjectUpdate:
if type(obj) == ProjectUpdate and self.project is not None:
if obj.project == self.project:
project_found = True
if type(obj) == InventoryUpdate:
@ -631,7 +662,7 @@ class Job(UnifiedJob, JobOptions):
for source in inventory_sources.filter(pk__in=inventory_sources_already_updated):
if source not in inventory_sources_found:
inventory_sources_found.append(source)
if not project_found and self.project.needs_update_on_launch:
if not project_found and self.project is not None and self.project.needs_update_on_launch:
dependencies.append(self.project.create_project_update(launch_type='dependency'))
if inventory_sources.count(): # and not has_setup_failures? Probably handled as an error scenario in the task runner
for source in inventory_sources:
@ -643,7 +674,7 @@ class Job(UnifiedJob, JobOptions):
data = super(Job, self).notification_data()
all_hosts = {}
for h in self.job_host_summaries.all():
all_hosts[h.host.name] = dict(failed=h.failed,
all_hosts[h.host_name] = dict(failed=h.failed,
changed=h.changed,
dark=h.dark,
failures=h.failures,
@ -651,7 +682,7 @@ class Job(UnifiedJob, JobOptions):
processed=h.processed,
skipped=h.skipped)
data.update(dict(inventory=self.inventory.name,
project=self.project.name,
project=self.project.name if self.project else None,
playbook=self.playbook,
credential=self.credential.name,
limit=self.limit,
@ -670,12 +701,27 @@ class Job(UnifiedJob, JobOptions):
return
try:
extra_vars = json.loads(extra_data)
except Exception, e:
except Exception as e:
logger.warn("Exception deserializing extra vars: " + str(e))
evars = self.extra_vars_dict
evars.update(extra_vars)
self.update_fields(extra_vars=json.dumps(evars))
def display_extra_vars(self):
'''
Hides fields marked as passwords in survey.
'''
if self.extra_vars and self.job_template and self.job_template.survey_enabled:
try:
extra_vars = json.loads(self.extra_vars)
for key in self.job_template.survey_password_variables():
if key in extra_vars:
extra_vars[key] = REPLACE_STR
return json.dumps(extra_vars)
except ValueError:
pass
return self.extra_vars
def _survey_search_and_replace(self, content):
# Use job template survey spec to identify password fields.
# Then lookup password fields in extra_vars and save the values
@ -697,8 +743,10 @@ class Job(UnifiedJob, JobOptions):
def copy(self):
presets = {}
for kw in self.job_template._get_unified_job_field_names():
for kw in JobTemplate._get_unified_job_field_names():
presets[kw] = getattr(self, kw)
if not self.job_template:
self.job_template = JobTemplate(name='temporary')
return self.job_template.create_unified_job(**presets)
# Job Credential required
@ -1268,7 +1316,7 @@ class SystemJob(UnifiedJob, SystemJobOptions):
return
try:
extra_vars = json.loads(extra_data)
except Exception, e:
except Exception as e:
logger.warn("Exception deserializing extra vars: " + str(e))
evars = self.extra_vars_dict
evars.update(extra_vars)

View File

@ -104,9 +104,11 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
admin_role = ImplicitRoleField(
parent_role='organization.admin_role',
)
member_role = ImplicitRoleField()
member_role = ImplicitRoleField(
parent_role='admin_role',
)
read_role = ImplicitRoleField(
parent_role=['admin_role', 'organization.auditor_role', 'member_role'],
parent_role=['organization.auditor_role', 'member_role'],
)
def get_absolute_url(self):

View File

@ -115,7 +115,7 @@ class ProjectOptions(models.Model):
try:
scm_url = update_scm_url(self.scm_type, scm_url,
check_special_cases=False)
except ValueError, e:
except ValueError as e:
raise ValidationError((e.args or ('Invalid SCM URL.',))[0])
scm_url_parts = urlparse.urlsplit(scm_url)
if self.scm_type and not any(scm_url_parts):
@ -142,7 +142,7 @@ class ProjectOptions(models.Model):
try:
update_scm_url(self.scm_type, self.scm_url, scm_username,
scm_password)
except ValueError, e:
except ValueError as e:
raise ValidationError((e.args or ('Invalid credential.',))[0])
except ValueError:
pass
@ -256,6 +256,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
# If update_fields has been specified, add our field names to it,
# if it hasn't been specified, then we're just doing a normal save.
update_fields = kwargs.get('update_fields', [])
skip_update = bool(kwargs.pop('skip_update', False))
# Check if scm_type or scm_url changes.
if self.pk:
project_before = self.__class__.objects.get(pk=self.pk)
@ -279,7 +280,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
if update_fields:
self.save(update_fields=update_fields)
# If we just created a new project with SCM, start the initial update.
if new_instance and self.scm_type:
if new_instance and self.scm_type and not skip_update:
self.update()
def _get_current_status(self):

View File

@ -40,7 +40,6 @@ role_names = {
'auditor_role' : 'Auditor',
'execute_role' : 'Execute',
'member_role' : 'Member',
'owner_role' : 'Owner',
'read_role' : 'Read',
'update_role' : 'Update',
'use_role' : 'Use',
@ -49,12 +48,11 @@ role_names = {
role_descriptions = {
'system_administrator' : 'Can manage all aspects of the system',
'system_auditor' : 'Can view all settings on the system',
'adhoc_role' : 'May run ad hoc commands on an inventory or a group',
'adhoc_role' : 'May run ad hoc commands on an inventory',
'admin_role' : 'Can manage all aspects of the %s',
'auditor_role' : 'Can view all settings for the %s',
'execute_role' : 'May run the job template',
'member_role' : 'User is a member of the %s',
'owner_role' : 'Owns and can manage all aspects of this %s',
'read_role' : 'May view settings for the %s',
'update_role' : 'May update project or inventory or group using the configured source update system',
'use_role' : 'Can use the %s in a job template',
@ -63,6 +61,24 @@ role_descriptions = {
tls = threading.local() # thread local storage
def check_singleton(func):
'''
check_singleton is a decorator that checks if a user given
to a `visible_roles` method is in either of our singleton roles (Admin, Auditor)
and if so, returns their full list of roles without filtering.
'''
def wrapper(*args, **kwargs):
sys_admin = Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR)
sys_audit = Role.singleton(ROLE_SINGLETON_SYSTEM_AUDITOR)
user = args[0]
if user in sys_admin or user in sys_audit:
if len(args) == 2:
return args[1]
return Role.objects.all()
return func(*args, **kwargs)
return wrapper
@contextlib.contextmanager
def batch_role_ancestor_rebuilding(allow_nesting=False):
'''
@ -354,6 +370,7 @@ class Role(models.Model):
@staticmethod
@check_singleton
def visible_roles(user):
sql_params = {
'ancestors_table': Role.ancestors.through._meta.db_table,
@ -365,15 +382,17 @@ class Role(models.Model):
qs = Role.objects.extra(
where = ['''
%(roles_table)s.id IN (
SELECT descendent_id FROM %(ancestors_table)s WHERE ancestor_id IN (%(ids)s)
UNION
SELECT ancestor_id FROM %(ancestors_table)s WHERE descendent_id IN (%(ids)s)
SELECT DISTINCT visible_roles_t2.ancestor_id
FROM %(ancestors_table)s as visible_roles_t1
LEFT JOIN %(ancestors_table)s as visible_roles_t2 ON (visible_roles_t1.descendent_id = visible_roles_t2.descendent_id)
WHERE visible_roles_t1.ancestor_id IN (%(ids)s)
)
''' % sql_params]
)
return qs
@staticmethod
@check_singleton
def filter_visible_roles(user, roles_qs):
sql_params = {
'ancestors_table': Role.ancestors.through._meta.db_table,
@ -385,10 +404,11 @@ class Role(models.Model):
qs = roles_qs.extra(
where = ['''
EXISTS (
SELECT 1 FROM
%(ancestors_table)s
WHERE (descendent_id = %(roles_table)s.id AND ancestor_id IN (%(ids)s))
OR (ancestor_id = %(roles_table)s.id AND descendent_id IN (%(ids)s))
SELECT 1
FROM %(ancestors_table)s as visible_roles_t1
LEFT JOIN %(ancestors_table)s as visible_roles_t2 ON (visible_roles_t1.descendent_id = visible_roles_t2.descendent_id)
WHERE visible_roles_t1.ancestor_id = %(roles_table)s.id
AND visible_roles_t2.ancestor_id IN (%(ids)s)
) ''' % sql_params]
)
return qs

View File

@ -309,7 +309,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
kwargs.pop('%s_id' % parent_field_name, None)
create_kwargs = {}
m2m_fields = {}
create_kwargs[parent_field_name] = self
if self.pk:
create_kwargs[parent_field_name] = self
for field_name in self._get_unified_job_field_names():
# Foreign keys can be specified as field_name or field_name_id.
id_field_name = '%s_id' % field_name
@ -754,8 +755,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
name=self.name,
url=self.get_ui_url(),
created_by=smart_text(self.created_by),
started=self.started.isoformat(),
finished=self.finished.isoformat(),
started=self.started.isoformat() if self.started is not None else None,
finished=self.finished.isoformat() if self.finished is not None else None,
status=self.status,
traceback=self.result_traceback)

View File

@ -1,7 +1,7 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
@ -16,5 +16,5 @@ class TowerBaseEmailBackend(BaseEmailBackend):
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
body_actual += json.dumps(body, indent=4)
return body_actual

View File

@ -1,7 +1,7 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import pprint
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
@ -27,5 +27,5 @@ class CustomEmailBackend(EmailBackend):
body['id'],
body['status'],
body['url']))
body_actual += pprint.pformat(body, indent=4)
body_actual += json.dumps(body, indent=4)
return body_actual

View File

@ -42,6 +42,7 @@ class PagerDutyBackend(TowerBaseEmailBackend):
description=m.subject,
details=m.body,
client=m.from_email)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
if not self.fail_silently:

View File

@ -29,12 +29,10 @@ class UriCleaner(object):
username = o.username
password = o.password
# Given a python MatchObject, with respect to redactedtext, find and
# Given a python MatchObject, with respect to redactedtext, find and
# replace the first occurrence of username and the first and second
# occurrence of password
# TODO: Ideally, we would replace username and password using the index
# that they were found at.
uri_str = redactedtext[match.start():match.end()]
if username:
uri_str = uri_str.replace(username, UriCleaner.REPLACE_STR, 1)

View File

@ -164,10 +164,30 @@ def rbac_activity_stream(instance, sender, **kwargs):
if hasattr(instance, 'content_type'):
if instance.content_type in [None, user_type]:
return
role = instance
elif sender.__name__ == 'Role_parents':
role = kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).first()
# don't record implicit creation / parents
if role is not None and role.content_type is not None:
parent = role.content_type.name + "." + role.role_field
# Get the list of implicit parents that were defined at the class level.
# We have to take this list from the class property to avoid including parents
# that may have been added since the creation of the ImplicitRoleField
implicit_parents = getattr(instance.content_object.__class__, instance.role_field).field.parent_role
if type(implicit_parents) != list:
implicit_parents = [implicit_parents]
# Ignore any singleton parents we find. If the parent for the role
# matches any of the implicit parents we find, skip recording the activity stream.
for ip in implicit_parents:
if '.' not in ip and 'singleton:' not in ip:
ip = instance.content_type.name + "." + ip
if parent == ip:
return
else:
role = instance
instance = instance.content_object
else:
role = kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).first()
activity_stream_associate(sender, instance, role=role, **kwargs)
def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
@ -192,6 +212,7 @@ post_save.connect(emit_ad_hoc_command_event_detail, sender=AdHocCommandEvent)
m2m_changed.connect(rebuild_role_ancestor_list, Role.parents.through)
m2m_changed.connect(org_admin_edit_members, Role.members.through)
m2m_changed.connect(rbac_activity_stream, Role.members.through)
m2m_changed.connect(rbac_activity_stream, Role.parents.through)
post_save.connect(sync_superuser_status_to_rbac, sender=User)
post_save.connect(create_user_role, sender=User)
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJob)
@ -217,6 +238,8 @@ def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):
parents_pks = getattr(instance, '_saved_parents_pks', [])
hosts_pks = getattr(instance, '_saved_hosts_pks', [])
children_pks = getattr(instance, '_saved_children_pks', [])
is_updating = getattr(_inventory_updates, 'is_updating', False)
with ignore_inventory_group_removal():
with ignore_inventory_computed_fields():
if parents_pks:
@ -230,7 +253,7 @@ def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):
child_group, parent_group)
parent_group.children.add(child_group)
inventory_pk = getattr(instance, '_saved_inventory_pk', None)
if inventory_pk:
if inventory_pk and not is_updating:
try:
inventory = Inventory.objects.get(pk=inventory_pk)
inventory.update_computed_fields()
@ -316,12 +339,16 @@ def activity_stream_create(sender, instance, created, **kwargs):
# Skip recording any inventory source directly associated with a group.
if isinstance(instance, InventorySource) and instance.group:
return
# TODO: Rethink details of the new instance
object1 = camelcase_to_underscore(instance.__class__.__name__)
changes = model_to_dict(instance, model_serializer_mapping)
# Special case where Job survey password variables need to be hidden
if type(instance) == Job:
if 'extra_vars' in changes:
changes['extra_vars'] = instance.display_extra_vars()
activity_entry = ActivityStream(
operation='create',
object1=object1,
changes=json.dumps(model_to_dict(instance, model_serializer_mapping)))
changes=json.dumps(changes))
activity_entry.save()
#TODO: Weird situation where cascade SETNULL doesn't work
# it might actually be a good idea to remove all of these FK references since
@ -379,17 +406,30 @@ def activity_stream_associate(sender, instance, **kwargs):
obj1 = instance
object1=camelcase_to_underscore(obj1.__class__.__name__)
obj_rel = sender.__module__ + "." + sender.__name__
for entity_acted in kwargs['pk_set']:
obj2 = kwargs['model']
obj2_id = entity_acted
obj2_actual = obj2.objects.get(id=obj2_id)
object2 = camelcase_to_underscore(obj2.__name__)
if isinstance(obj2_actual, Role) and obj2_actual.content_object is not None:
obj2_actual = obj2_actual.content_object
object2 = camelcase_to_underscore(obj2_actual.__class__.__name__)
else:
object2 = camelcase_to_underscore(obj2.__name__)
# Skip recording any inventory source, or system job template changes here.
if isinstance(obj1, InventorySource) or isinstance(obj2_actual, InventorySource):
continue
if isinstance(obj1, SystemJobTemplate) or isinstance(obj2_actual, SystemJobTemplate):
continue
if isinstance(obj1, SystemJob) or isinstance(obj2_actual, SystemJob):
continue
activity_entry = ActivityStream(
changes=json.dumps(dict(object1=object1,
object1_pk=obj1.pk,
object2=object2,
object2_pk=obj2_id,
action=action,
relationship=obj_rel)),
operation=action,
object1=object1,
object2=object2,
@ -409,7 +449,7 @@ def activity_stream_associate(sender, instance, **kwargs):
# If the m2m is from the User side we need to
# set the content_object of the Role for our entry.
if type(instance) == User and role.content_object is not None:
getattr(activity_entry, role.content_type.name).add(role.content_object)
getattr(activity_entry, role.content_type.name.replace(' ', '_')).add(role.content_object)
activity_entry.role.add(role)
activity_entry.object_relationship_type = obj_rel

View File

@ -33,6 +33,7 @@ import pexpect
# Celery
from celery import Task, task
from celery.signals import celeryd_init
# Django
from django.conf import settings
@ -45,6 +46,7 @@ from django.contrib.auth.models import User
# AWX
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.models import * # noqa
from awx.main.models import UnifiedJob
from awx.main.models.label import Label
from awx.main.queue import FifoQueue
from awx.main.conf import tower_settings
@ -67,6 +69,18 @@ Try upgrading OpenSSH or providing your private key in an different format. \
logger = logging.getLogger('awx.main.tasks')
@celeryd_init.connect
def celery_startup(conf=None, **kwargs):
# Re-init all schedules
# NOTE: Rework this during the Rampart work
logger.info("Syncing Tower Schedules")
for sch in Schedule.objects.all():
try:
sch.update_computed_fields()
sch.save()
except Exception as e:
logger.error("Failed to rebuild schedule {}: {}".format(sch, e))
@task()
def send_notifications(notification_list, job_id=None):
if not isinstance(notification_list, list):
@ -129,8 +143,8 @@ def tower_periodic_scheduler(self):
try:
last_run = dateutil.parser.parse(fd.read())
return last_run
except Exception:
#TODO: LOG
except Exception as exc:
logger.error("get_last_run failed: {}".format(exc))
return None
def write_last_run(last_run):
@ -199,7 +213,7 @@ def handle_work_success(self, result, task_actual):
elif task_actual['type'] == 'ad_hoc_command':
instance = AdHocCommand.objects.get(id=task_actual['id'])
instance_name = instance.module_name
notification_templates = [] # TODO: Ad-hoc commands need to notify someone
notification_templates = instance.notification_templates
friendly_name = "AdHoc Command"
elif task_actual['type'] == 'system_job':
instance = SystemJob.objects.get(id=task_actual['id'])
@ -247,7 +261,7 @@ def handle_work_error(self, task_id, subtasks=None):
elif each_task['type'] == 'ad_hoc_command':
instance = AdHocCommand.objects.get(id=each_task['id'])
instance_name = instance.module_name
notification_templates = []
notification_templates = instance.notification_templates
friendly_name = "AdHoc Command"
elif each_task['type'] == 'system_job':
instance = SystemJob.objects.get(id=each_task['id'])
@ -256,7 +270,8 @@ def handle_work_error(self, task_id, subtasks=None):
friendly_name = "System Job"
else:
# Unknown task type
break
logger.warn("Unknown task type: {}".format(each_task['type']))
continue
if first_task is None:
first_task = instance
first_task_id = instance.id
@ -423,6 +438,24 @@ class BaseTask(Task):
'': '',
}
def add_ansible_venv(self, env):
if settings.ANSIBLE_USE_VENV:
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
for python_ver in ["python2.7", "python2.6"]:
if os.path.isdir(os.path.join(venv_libdir, python_ver)):
env['PYTHONPATH'] = os.path.join(venv_libdir, python_ver, "site-packages") + ":"
break
return env
def add_tower_venv(self, env):
if settings.TOWER_USE_VENV:
env['VIRTUAL_ENV'] = settings.TOWER_VENV_PATH
env['PATH'] = os.path.join(settings.TOWER_VENV_PATH, "bin") + ":" + env['PATH']
return env
def build_env(self, instance, **kwargs):
'''
Build environment dictionary for ansible-playbook.
@ -438,10 +471,8 @@ class BaseTask(Task):
# Set environment variables needed for inventory and job event
# callbacks to work.
# Update PYTHONPATH to use local site-packages.
if settings.ANSIBLE_USE_VENV:
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
env['PYTHONPATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "lib/python2.7/site-packages/") + ":"
# NOTE:
# Derived class should call add_ansible_venv() or add_tower_venv()
if self.should_use_proot(instance, **kwargs):
env['PROOT_TMP_DIR'] = tower_settings.AWX_PROOT_BASE_PATH
return env
@ -756,6 +787,7 @@ class RunJob(BaseTask):
plugin_dirs.append(tower_settings.AWX_ANSIBLE_CALLBACK_PLUGINS)
plugin_path = ':'.join(plugin_dirs)
env = super(RunJob, self).build_env(job, **kwargs)
env = self.add_ansible_venv(env)
# Set environment variables needed for inventory and job event
# callbacks to work.
env['JOB_ID'] = str(job.pk)
@ -790,6 +822,7 @@ class RunJob(BaseTask):
elif cloud_cred and cloud_cred.kind == 'rax':
env['RAX_USERNAME'] = cloud_cred.username
env['RAX_API_KEY'] = decrypt_field(cloud_cred, 'password')
env['CLOUD_VERIFY_SSL'] = str(False)
elif cloud_cred and cloud_cred.kind == 'gce':
env['GCE_EMAIL'] = cloud_cred.username
env['GCE_PROJECT'] = cloud_cred.project
@ -915,7 +948,10 @@ class RunJob(BaseTask):
'tower_user_name': job.created_by.username,
})
if job.extra_vars_dict:
extra_vars.update(job.extra_vars_dict)
if kwargs.get('display', False) and job.job_template and job.job_template.survey_enabled:
extra_vars.update(json.loads(job.display_extra_vars()))
else:
extra_vars.update(job.extra_vars_dict)
args.extend(['-e', json.dumps(extra_vars)])
# Add path to playbook (relative to project.local_path).
@ -925,6 +961,9 @@ class RunJob(BaseTask):
args.append(job.playbook)
return args
def build_safe_args(self, job, **kwargs):
return self.build_args(job, display=True, **kwargs)
def build_cwd(self, job, **kwargs):
if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
return self.get_path_to('..', 'playbooks')
@ -1026,6 +1065,7 @@ class RunProjectUpdate(BaseTask):
Build environment dictionary for ansible-playbook.
'''
env = super(RunProjectUpdate, self).build_env(project_update, **kwargs)
env = self.add_ansible_venv(env)
env['ANSIBLE_ASK_PASS'] = str(False)
env['ANSIBLE_ASK_SUDO_PASS'] = str(False)
env['DISPLAY'] = '' # Prevent stupid password popup when running tests.
@ -1250,7 +1290,7 @@ class RunInventoryUpdate(BaseTask):
for k,v in vmware_opts.items():
cp.set(section, k, unicode(v))
elif inventory_update.source == 'foreman':
elif inventory_update.source == 'satellite6':
section = 'foreman'
cp.add_section(section)
@ -1326,9 +1366,7 @@ class RunInventoryUpdate(BaseTask):
"""
env = super(RunInventoryUpdate, self).build_env(inventory_update,
**kwargs)
if settings.TOWER_USE_VENV:
env['VIRTUAL_ENV'] = settings.TOWER_VENV_PATH
env['PATH'] = os.path.join(settings.TOWER_VENV_PATH, "bin") + ":" + env['PATH']
env = self.add_tower_venv(env)
# Pass inventory source ID to inventory script.
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)
@ -1354,6 +1392,7 @@ class RunInventoryUpdate(BaseTask):
env['RAX_CREDS_FILE'] = cloud_credential
env['RAX_REGION'] = inventory_update.source_regions or 'all'
env['RAX_CACHE_MAX_AGE'] = "0"
env['CLOUD_VERIFY_SSL'] = str(False)
# Set this environment variable so the vendored package won't
# complain about not being able to determine its version number.
env['PBR_VERSION'] = '0.5.21'
@ -1383,7 +1422,7 @@ class RunInventoryUpdate(BaseTask):
env['GCE_ZONE'] = inventory_update.source_regions
elif inventory_update.source == 'openstack':
env['OS_CLIENT_CONFIG_FILE'] = cloud_credential
elif inventory_update.source == 'foreman':
elif inventory_update.source == 'satellite6':
env['FOREMAN_INI_PATH'] = cloud_credential
elif inventory_update.source == 'cloudforms':
env['CLOUDFORMS_INI_PATH'] = cloud_credential
@ -1531,6 +1570,7 @@ class RunAdHocCommand(BaseTask):
'''
plugin_dir = self.get_path_to('..', 'plugins', 'callback')
env = super(RunAdHocCommand, self).build_env(ad_hoc_command, **kwargs)
env = self.add_ansible_venv(env)
# Set environment variables needed for inventory and ad hoc event
# callbacks to work.
env['AD_HOC_COMMAND_ID'] = str(ad_hoc_command.pk)
@ -1683,9 +1723,15 @@ class RunSystemJob(BaseTask):
args.extend(['--older_than', str(json_vars['older_than'])])
if 'granularity' in json_vars:
args.extend(['--granularity', str(json_vars['granularity'])])
except Exception, e:
except Exception as e:
logger.error("Failed to parse system job: " + str(e))
return args
def build_env(self, instance, **kwargs):
env = super(RunSystemJob, self).build_env(instance,
**kwargs)
env = self.add_tower_venv(env)
return env
def build_cwd(self, instance, **kwargs):
return settings.BASE_DIR

View File

@ -387,7 +387,7 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
user = opts['user']
del opts['user']
cred = Credential.objects.create(**opts)
cred.owner_role.members.add(user)
cred.admin_role.members.add(user)
return cred
def setup_instances(self):

View File

@ -0,0 +1,41 @@
# Python
import pytest
from awx.main.tests.factories import (
create_organization,
create_job_template,
create_notification_template,
create_survey_spec,
)
@pytest.fixture
def job_template_factory():
return create_job_template
@pytest.fixture
def organization_factory():
return create_organization
@pytest.fixture
def notification_template_factory():
return create_notification_template
@pytest.fixture
def survey_spec_factory():
return create_survey_spec
@pytest.fixture
def job_with_secret_key_factory(job_template_factory):
def rf(persisted):
"Returns job with linked JT survey with password survey questions"
objects = job_template_factory('jt', organization='org1', survey=[
{'variable': 'submitter_email', 'type': 'text', 'default': 'foobar@redhat.com'},
{'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'},
{'variable': 'SSN', 'type': 'password'}], jobs=[1], persisted=persisted)
return objects.jobs[1]
return rf
@pytest.fixture
def job_with_secret_key_unit(job_with_secret_key_factory):
return job_with_secret_key_factory(persisted=False)

File diff suppressed because it is too large

View File

@ -0,0 +1,65 @@
factories
=========
This is a module for defining stand-alone factories and fixtures. Ideally a fixture will implement a single item.
DO NOT decorate fixtures in this module with @pytest.fixture. These fixtures are to be combined
with fixture factories and composition using the `conftest.py` convention. Those composed fixtures
will be decorated for usage and discovery.
Use the fixtures directly in factory methods to build up the desired set of components and relationships.
Each fixture should create exactly one object and should support the option for that object to be persisted
or not.
A factory should create at a minimum a single object for that factory type. The creation of any
associated objects should be explicit. For example, the `create_organization` factory when given only
a `name` parameter will create an Organization but it will not implicitly create any other objects.
teams
-----
There is some special handling for users when adding teams. There is a shorthand that allows you to
assign a user to the member\_role of a team using the string notation of `team_name:user_name`. There is
no shortcut for adding a user to the admin\_role of a team. See the roles section for more information
about how to do that.
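As a minimal sketch (the team and user names here are hypothetical, and it assumes the factory accepts
`teams` and `users` keyword arguments in the same way the examples below accept `projects`):

    objects = create_organization('test-org', teams=['team1'], users=['team1:joe'])
    # 'team1:joe' creates the user joe and adds it to team1's member_role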
roles
-----
The roles helper allows you to pass roles in to a factory. These role assignments will happen after
the objects are created. Using the roles parameter requires that persisted=True (the default).
You can use a string notation of `object_name.role_name:user` OR `object_name.role_name:object_name.child_role`
obj.parent_role:user # This will make the user a member of parent_role
obj1.role:obj2.role # This will make obj2 a child role of obj1
team1.admin_role:joe
team1.admin_role:project1.admin_role
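As a sketch of combining both notations in one factory call (the object names are hypothetical, and it
assumes the factory accepts `teams`, `users`, `projects`, and `roles` keyword arguments):

    objects = create_organization('test-org', teams=['team1'], users=['joe'], projects=['project1'], roles=['team1.admin_role:joe', 'team1.admin_role:project1.admin_role'])
    # joe becomes a member of team1's admin_role, and project1's admin_role becomes a child of team1's admin_role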
examples
--------
objects = create_organization('test-org')
assert objects.organization.name == 'test-org'
objects = create_organization('test-org', projects=['test-proj'])
assert objects.projects.test_proj.organization == objects.organization
objects = create_organization('test-org', persisted=False)
assert not objects.organization.pk
patterns
--------
`mk` functions are single object fixtures. They should create only a single object with the minimum deps.
They should also accept a `persisted` flag; if they must be persisted to work, they should raise an error when persisted=False
`generate` and `apply` functions are helpers that build up the various parts of a `create` function's objects. These
should be useful for more than one create function to use and should explicitly accept all of the values needed
to execute. These functions should also be robust and have very specific error reporting about constraints and/or
bad values.
`create` functions compose many of the `mk` and `generate` functions to make different object
factories. These functions, when given the minimum set of arguments, should only produce a
single artifact (or the minimum needed for that object). These should be wrapped by discoverable
fixtures in various conftest.py files.
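A minimal sketch of how the layers fit together, following the pattern above. The `Widget` model and the
`mk_widget`/`create_widget_bundle` names are hypothetical and only illustrate the shape of each layer;
`mk_organization` and `generate_objects` are the real helpers from this module.

    # fixtures.py: an mk_ function creates exactly one object and honors persisted
    def mk_widget(name, organization=None, persisted=True):
        widget = Widget(name=name)  # Widget is a hypothetical model
        if organization is not None:
            widget.organization = organization
        if persisted:
            widget.save()
        return widget

    # tower.py: a create_ function composes mk_ helpers and returns a namedtuple of artifacts
    def create_widget_bundle(name, persisted=True, **kwargs):
        org = mk_organization('{}-org'.format(name), persisted=persisted)
        widget = mk_widget(name, organization=org, persisted=persisted)
        Objects = generate_objects(['organization', 'widget'], kwargs)
        return Objects(organization=org, widget=widget)

    # conftest.py: only here is the factory wrapped in a discoverable pytest fixture
    import pytest

    @pytest.fixture
    def widget_bundle_factory():
        return create_widget_bundle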

View File

@ -0,0 +1,18 @@
from .tower import (
create_organization,
create_job_template,
create_notification_template,
create_survey_spec,
)
from .exc import (
NotUnique,
)
__all__ = [
'create_organization',
'create_job_template',
'create_notification_template',
'create_survey_spec',
'NotUnique',
]

View File

@ -0,0 +1,5 @@
class NotUnique(Exception):
def __init__(self, name, objects):
msg = '{} is not a unique key, found {}={}'.format(name, name, objects[name])
super(Exception, self).__init__(msg)

View File

@ -0,0 +1,154 @@
import json
from django.contrib.auth.models import User
from awx.main.models import (
Organization,
Project,
Team,
Instance,
JobTemplate,
Job,
NotificationTemplate,
Credential,
Inventory,
Label,
)
# mk methods should create only a single object of a single type.
# they should also have the option of being persisted or not.
# if the object must be persisted an error should be raised when
# persisted=False
#
def mk_instance(persisted=True):
if not persisted:
raise RuntimeError('creating an Instance requires persisted=True')
from django.conf import settings
return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, primary=True, hostname="instance.example.org")
def mk_organization(name, description=None, persisted=True):
description = description or '{}-description'.format(name)
org = Organization(name=name, description=description)
if persisted:
mk_instance(persisted)
org.save()
return org
def mk_label(name, organization=None, description=None, persisted=True):
description = description or '{}-description'.format(name)
label = Label(name=name, description=description)
if organization is not None:
label.organization = organization
if persisted:
label.save()
return label
def mk_team(name, organization=None, persisted=True):
team = Team(name=name)
if organization is not None:
team.organization = organization
if persisted:
mk_instance(persisted)
team.save()
return team
def mk_user(name, is_superuser=False, organization=None, team=None, persisted=True):
user = User(username=name, is_superuser=is_superuser)
if persisted:
user.save()
if organization is not None:
organization.member_role.members.add(user)
if team is not None:
team.member_role.members.add(user)
return user
def mk_project(name, organization=None, description=None, persisted=True):
description = description or '{}-description'.format(name)
project = Project(name=name, description=description)
if organization is not None:
project.organization = organization
if persisted:
project.save()
return project
def mk_credential(name, cloud=False, kind='ssh', persisted=True):
cred = Credential(name=name, cloud=cloud, kind=kind)
if persisted:
cred.save()
return cred
def mk_notification_template(name, notification_type='webhook', configuration=None, organization=None, persisted=True):
nt = NotificationTemplate(name=name)
nt.notification_type = notification_type
nt.notification_configuration = configuration or dict(url="http://localhost", headers={"Test": "Header"})
if organization is not None:
nt.organization = organization
if persisted:
nt.save()
return nt
def mk_inventory(name, organization=None, persisted=True):
inv = Inventory(name=name)
if organization is not None:
inv.organization = organization
if persisted:
inv.save()
return inv
def mk_job(job_type='run', status='new', job_template=None, inventory=None,
credential=None, project=None, extra_vars={},
persisted=True):
job = Job(job_type=job_type, status=status, extra_vars=json.dumps(extra_vars))
job.job_template = job_template
job.inventory = inventory
job.credential = credential
job.project = project
if persisted:
job.save()
return job
def mk_job_template(name, job_type='run',
organization=None, inventory=None,
credential=None, network_credential=None,
cloud_credential=None, persisted=True, extra_vars='',
project=None, spec=None):
if extra_vars:
extra_vars = json.dumps(extra_vars)
jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars,
playbook='mocked')
jt.inventory = inventory
if jt.inventory is None:
jt.ask_inventory_on_launch = True
jt.credential = credential
if jt.credential is None:
jt.ask_credential_on_launch = True
jt.network_credential = network_credential
jt.cloud_credential = cloud_credential
jt.project = project
jt.survey_spec = spec
if jt.survey_spec is not None:
jt.survey_enabled = True
if persisted:
jt.save()
return jt

View File

@ -0,0 +1,59 @@
from collections import namedtuple
from .exc import NotUnique
def generate_objects(artifacts, kwargs):
'''generate_objects takes a list of artifacts that are supported by
a create function and compares it to the kwargs passed in to the create
function. If a kwarg is found that is not in the artifacts list a RuntimeError
is raised.
'''
for k in kwargs.keys():
if k not in artifacts:
raise RuntimeError('{} is not a valid argument'.format(k))
return namedtuple("Objects", ",".join(artifacts))
def generate_role_objects(objects):
'''generate_role_objects assembles a dictionary of all possible objects by name.
It will raise an exception if any of the objects share a name, because it is meant
to be used with apply_roles, which expects unique object names.
Roles share common field names (e.g. admin_role, member_role), so unique object names ensure that the
role shorthand used for mapping Roles and Users in apply_roles will function as desired.
'''
combined_objects = {}
for o in objects:
if type(o) is dict:
for k,v in o.iteritems():
if combined_objects.get(k) is not None:
raise NotUnique(k, combined_objects)
combined_objects[k] = v
elif hasattr(o, 'name'):
if combined_objects.get(o.name) is not None:
raise NotUnique(o.name, combined_objects)
combined_objects[o.name] = o
else:
if o is not None:
raise RuntimeError('expected a list of dict or list of list, got a type {}'.format(type(o)))
return combined_objects
class _Mapped(object):
'''_Mapped is a helper class that replaces spaces and dashes
in the name of an object and assigns the object as an attribute
input: {'my org': Organization}
output: instance.my_org = Organization
'''
def __init__(self, d):
self.d = d
for k,v in d.items():
k = k.replace(' ', '_')
k = k.replace('-', '_')
setattr(self, k.replace(' ','_'), v)
def all(self):
return self.d.values()

View File

@ -0,0 +1,345 @@
from django.contrib.auth.models import User
from awx.main.models import (
Organization,
Project,
Team,
NotificationTemplate,
Credential,
Inventory,
Job,
Label,
)
from .objects import (
generate_objects,
generate_role_objects,
_Mapped,
)
from .fixtures import (
mk_organization,
mk_team,
mk_user,
mk_job_template,
mk_job,
mk_credential,
mk_inventory,
mk_project,
mk_label,
mk_notification_template,
)
def apply_roles(roles, objects, persisted):
'''apply_roles evaluates a list of Role relationships represented as strings.
The format of this string is 'role:[user|role]'. When a user is provided, they will be
made a member of the role on the LHS. When a role is provided that role will be added to
the children of the role on the LHS.
This function assumes that objects is a dictionary that contains a unique set of key to value
mappings for all possible "Role objects". See the example below:
Mapping Users
-------------
roles = ['org1.admin_role:user1', 'team1.admin_role:user1']
objects = {'org1': Organization, 'team1': Team, 'user1': User}
Mapping Roles
-------------
roles = ['org1.admin_role:team1.admin_role']
objects = {'org1': Organization, 'team1': Team}
Invalid Mapping
---------------
roles = ['org1.admin_role:team1.admin_role']
objects = {'org1': Organization, 'user1': User} # Exception, no team1 entry
'''
if roles is None:
return None
if not persisted:
raise RuntimeError('roles can not be used when persisted=False')
for role in roles:
obj_role, sep, member_role = role.partition(':')
if not member_role:
raise RuntimeError('you must provide an assignment role, got None')
obj_str, o_role_str = obj_role.split('.')
member_str, m_sep, m_role_str = member_role.partition('.')
obj = objects[obj_str]
obj_role = getattr(obj, o_role_str)
member = objects[member_str]
if m_role_str:
if hasattr(member, m_role_str):
member_role = getattr(member, m_role_str)
obj_role.children.add(member_role)
else:
raise RuntimeError('unable to find {} role for {}'.format(m_role_str, member_str))
else:
if type(member) is User:
obj_role.members.add(member)
else:
raise RuntimeError('unable to add non-user {} for members list of {}'.format(member_str, obj_str))
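# Editor's sketch (not part of the diff) of the role-string format accepted by
# apply_roles. The names below are invented; role_objects is assumed to be the
# dict produced by generate_role_objects, and the call itself needs a database,
# so it is shown commented out.
example_roles = [
    'org1.admin_role:alice',             # make user 'alice' a member of org1.admin_role
    'org1.admin_role:team1.admin_role',  # make team1.admin_role a child of org1.admin_role
]
# apply_roles(example_roles, role_objects, persisted=True)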
def generate_users(organization, teams, superuser, persisted, **kwargs):
'''generate_users evaluates a mixed list of User objects and strings.
If a string is encountered a user with that username is created and added to the lookup dict.
If a User object is encountered the User.username is used as a key for the lookup dict.
A shorthand for assigning a user to a team is available in the following format: "team_name:username".
If a string in that format is encountered, an attempt is made to look up the team by the key team_name from the teams
argument; a KeyError will be thrown if the team does not exist in the dict. The teams argument should
be a dict of {Team.name:Team}
'''
users = {}
key = 'superusers' if superuser else 'users'
if key in kwargs and kwargs.get(key) is not None:
for u in kwargs[key]:
if type(u) is User:
users[u.username] = u
else:
p1, sep, p2 = u.partition(':')
if p2:
t = teams[p1]
users[p2] = mk_user(p2, organization=organization, team=t, is_superuser=superuser, persisted=persisted)
else:
users[p1] = mk_user(p1, organization=organization, team=None, is_superuser=superuser, persisted=persisted)
return users
def generate_teams(organization, persisted, **kwargs):
'''generate_teams evaluates a mixed list of Team objects and strings.
If a string is encountered a team with that string name is created and added to the lookup dict.
If a Team object is encountered the Team.name is used as a key for the lookup dict.
'''
teams = {}
if 'teams' in kwargs and kwargs.get('teams') is not None:
for t in kwargs['teams']:
if type(t) is Team:
teams[t.name] = t
else:
teams[t] = mk_team(t, organization=organization, persisted=persisted)
return teams
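# Editor's sketch (not part of the diff): the 'team_name:username' shorthand
# accepted by generate_users. Names are invented; mk_team and mk_user come from
# .fixtures (not shown here) and are assumed to tolerate organization=None when
# persisted=False, so nothing touches the database.
example_teams = generate_teams(None, persisted=False, teams=['eng'])
example_users = generate_users(None, example_teams, False, persisted=False,
                               users=['plain-user', 'eng:team-member'])
# example_users == {'plain-user': <unsaved User>, 'team-member': <unsaved User>}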
def create_survey_spec(variables=None, default_type='integer', required=True):
'''
Returns a valid survey spec for a job template, based on the input
argument specifying variable name(s)
'''
if isinstance(variables, list):
name = "%s survey" % variables[0]
description = "A survey that starts with %s." % variables[0]
vars_list = variables
else:
name = "%s survey" % variables
description = "A survey about %s." % variables
vars_list = [variables]
spec = []
index = 0
for var in vars_list:
spec_item = {}
spec_item['index'] = index
index += 1
spec_item['required'] = required
spec_item['choices'] = ''
spec_item['type'] = default_type
if isinstance(var, dict):
spec_item.update(var)
var_name = spec_item.get('variable', 'variable')
else:
var_name = var
spec_item.setdefault('variable', var_name)
spec_item.setdefault('question_name', "Enter a value for %s." % var_name)
spec_item.setdefault('question_description', "A question about %s." % var_name)
if spec_item['type'] == 'integer':
spec_item.setdefault('default', 0)
spec_item.setdefault('max', spec_item['default'] + 100)
spec_item.setdefault('min', spec_item['default'] - 100)
else:
spec_item.setdefault('default', '')
spec.append(spec_item)
survey_spec = {}
survey_spec['spec'] = spec
survey_spec['name'] = name
survey_spec['description'] = description
return survey_spec
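# Editor's sketch (not part of the diff): the shape create_survey_spec returns.
# 'favorite_color' is an invented variable name; no database is involved.
example_spec = create_survey_spec(['favorite_color'], default_type='text', required=False)
assert example_spec['name'] == 'favorite_color survey'
assert example_spec['spec'][0]['variable'] == 'favorite_color'
assert example_spec['spec'][0]['type'] == 'text'
assert example_spec['spec'][0]['default'] == ''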
# create methods are intended to be called directly as needed
# or encapsulated by specific factory fixtures in a conftest
#
def create_job_template(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["job_template", "jobs",
"organization",
"inventory",
"project",
"credential", "cloud_credential", "network_credential",
"job_type",
"survey",], kwargs)
org = None
proj = None
inv = None
cred = None
cloud_cred = None
net_cred = None
spec = None
jobs = {}
job_type = kwargs.get('job_type', 'run')
extra_vars = kwargs.get('extra_vars', '')
if 'organization' in kwargs:
org = kwargs['organization']
if type(org) is not Organization:
org = mk_organization(org, '{}-desc'.format(org), persisted=persisted)
if 'credential' in kwargs:
cred = kwargs['credential']
if type(cred) is not Credential:
cred = mk_credential(cred, persisted=persisted)
if 'cloud_credential' in kwargs:
cloud_cred = kwargs['cloud_credential']
if type(cloud_cred) is not Credential:
cloud_cred = mk_credential(cloud_cred, kind='aws', persisted=persisted)
if 'network_credential' in kwargs:
net_cred = kwargs['network_credential']
if type(net_cred) is not Credential:
net_cred = mk_credential(net_cred, kind='net', persisted=persisted)
if 'project' in kwargs:
proj = kwargs['project']
if type(proj) is not Project:
proj = mk_project(proj, organization=org, persisted=persisted)
if 'inventory' in kwargs:
inv = kwargs['inventory']
if type(inv) is not Inventory:
inv = mk_inventory(inv, organization=org, persisted=persisted)
if 'survey' in kwargs:
spec = create_survey_spec(kwargs['survey'])
jt = mk_job_template(name, project=proj,
inventory=inv, credential=cred,
network_credential=net_cred, cloud_credential=cloud_cred,
job_type=job_type, spec=spec, extra_vars=extra_vars,
persisted=persisted)
if 'jobs' in kwargs:
for i in kwargs['jobs']:
if type(i) is Job:
jobs[i.pk] = i
else:
# Fill in default survey answers
job_extra_vars = {}
for question in spec['spec']:
job_extra_vars[question['variable']] = question['default']
jobs[i] = mk_job(job_template=jt, project=proj, inventory=inv, credential=cred,
extra_vars=job_extra_vars,
job_type=job_type, persisted=persisted)
role_objects = generate_role_objects([org, proj, inv, cred])
apply_roles(roles, role_objects, persisted)
return Objects(job_template=jt,
jobs=jobs,
project=proj,
inventory=inv,
credential=cred, cloud_credential=cloud_cred, network_credential=net_cred,
job_type=job_type,
organization=org,
survey=spec,)
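# Editor's sketch (not part of the diff): a minimal create_job_template call.
# All names are invented; persisted=False builds unsaved models, assuming the
# mk_* helpers imported from .fixtures accept these arguments.
example_objs = create_job_template('example-jt',
                                   organization='example-org',
                                   project='example-proj',
                                   inventory='example-inv',
                                   credential='example-cred',
                                   persisted=False)
# example_objs.job_template, example_objs.inventory, example_objs.credential, ...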
def create_organization(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["organization",
"teams", "users",
"superusers",
"projects",
"labels",
"notification_templates",
"inventories",], kwargs)
projects = {}
inventories = {}
labels = {}
notification_templates = {}
org = mk_organization(name, '{}-desc'.format(name), persisted=persisted)
if 'inventories' in kwargs:
for i in kwargs['inventories']:
if type(i) is Inventory:
inventories[i.name] = i
else:
inventories[i] = mk_inventory(i, organization=org, persisted=persisted)
if 'projects' in kwargs:
for p in kwargs['projects']:
if type(p) is Project:
projects[p.name] = p
else:
projects[p] = mk_project(p, organization=org, persisted=persisted)
teams = generate_teams(org, persisted, teams=kwargs.get('teams'))
superusers = generate_users(org, teams, True, persisted, superusers=kwargs.get('superusers'))
users = generate_users(org, teams, False, persisted, users=kwargs.get('users'))
if 'labels' in kwargs:
for l in kwargs['labels']:
if type(l) is Label:
labels[l.name] = l
else:
labels[l] = mk_label(l, organization=org, persisted=persisted)
if 'notification_templates' in kwargs:
for nt in kwargs['notification_templates']:
if type(nt) is NotificationTemplate:
notification_templates[nt.name] = nt
else:
notification_templates[nt] = mk_notification_template(nt, organization=org, persisted=persisted)
role_objects = generate_role_objects([org, superusers, users, teams, projects, labels, notification_templates])
apply_roles(roles, role_objects, persisted)
return Objects(organization=org,
superusers=_Mapped(superusers),
users=_Mapped(users),
teams=_Mapped(teams),
projects=_Mapped(projects),
labels=_Mapped(labels),
notification_templates=_Mapped(notification_templates),
inventories=_Mapped(inventories))
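# Editor's sketch (not part of the diff): create_organization with the user and
# team shorthand; the dotted access mirrors how the tests later in this diff use
# the organization_factory fixture. Names are invented, and persisted=False
# avoids the database (roles cannot be used without persistence).
example_org_objs = create_organization('example-org',
                                       teams=['example-team'],
                                       users=['example-user', 'example-team:example-member'],
                                       inventories=['example-inv'],
                                       persisted=False)
# example_org_objs.users.example_user, example_org_objs.teams.example_team,
# example_org_objs.inventories.example_inv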
def create_notification_template(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["notification_template",
"organization",
"users",
"superusers",
"teams",], kwargs)
organization = None
if 'organization' in kwargs:
org = kwargs['organization']
organization = mk_organization(org, '{}-desc'.format(org), persisted=persisted)
notification_template = mk_notification_template(name, organization=organization, persisted=persisted)
teams = generate_teams(organization, persisted, teams=kwargs.get('teams'))
superusers = generate_users(organization, teams, True, persisted, superusers=kwargs.get('superusers'))
users = generate_users(organization, teams, False, persisted, users=kwargs.get('users'))
role_objects = generate_role_objects([organization, notification_template])
apply_roles(roles, role_objects, persisted)
return Objects(notification_template=notification_template,
organization=organization,
users=_Mapped(users),
superusers=_Mapped(superusers),
teams=teams)
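# Editor's sketch (not part of the diff): how the create_* functions above are
# typically wrapped as conftest fixtures. 'organization_factory' matches the
# fixture name used by the tests later in this diff, but this exact wrapper is
# an assumption, not the project's actual conftest code.
import pytest


@pytest.fixture
def organization_factory():
    def factory(name, **kwargs):
        return create_organization(name, **kwargs)
    return factory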

View File

@@ -13,7 +13,7 @@ def mock_feature_enabled(feature, bypass_database=None):
@pytest.fixture
def activity_stream_entry(organization, org_admin):
return ActivityStream.objects.filter(organization__pk=organization.pk, operation='associate').first()
return ActivityStream.objects.filter(organization__pk=organization.pk, user=org_admin, operation='associate').first()
@pytest.mark.skipif(not getattr(settings, 'ACTIVITY_STREAM_ENABLED', True), reason="Activity stream not enabled")
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@@ -131,3 +131,24 @@ def test_stream_queryset_hides_shows_items(
assert queryset.filter(host__pk=host.pk, operation='create').count() == 1
assert queryset.filter(team__pk=team.pk, operation='create').count() == 1
assert queryset.filter(notification_template__pk=notification_template.pk, operation='create').count() == 1
@pytest.mark.django_db
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
def test_stream_user_direct_role_updates(get, post, organization_factory):
objects = organization_factory('test_org',
superusers=['admin'],
users=['test'],
inventories=['inv1'])
url = reverse('api:user_roles_list', args=(objects.users.test.pk,))
post(url, dict(id=objects.inventories.inv1.read_role.pk), objects.superusers.admin)
activity_stream = ActivityStream.objects.filter(
inventory__pk=objects.inventories.inv1.pk,
user__pk=objects.users.test.pk,
role__pk=objects.inventories.inv1.read_role.pk).first()
url = reverse('api:activity_stream_detail', args=(activity_stream.pk,))
response = get(url, objects.users.test)
assert response.data['object1'] == 'user'
assert response.data['object2'] == 'inventory'

View File

@@ -0,0 +1,148 @@
import mock # noqa
import pytest
from django.core.urlresolvers import reverse
"""
def run_test_ad_hoc_command(self, **kwargs):
# Post to list to start a new ad hoc command.
expect = kwargs.pop('expect', 201)
url = kwargs.pop('url', reverse('api:ad_hoc_command_list'))
data = {
'inventory': self.inventory.pk,
'credential': self.credential.pk,
'module_name': 'command',
'module_args': 'uptime',
}
data.update(kwargs)
for k,v in data.items():
if v is None:
del data[k]
return self.post(url, data, expect=expect)
"""
@pytest.fixture
def post_adhoc(post, inventory, machine_credential):
def f(url, data, user, expect=201):
if not url:
url = reverse('api:ad_hoc_command_list')
if 'module_name' not in data:
data['module_name'] = 'command'
if 'module_args' not in data:
data['module_args'] = 'uptime'
if 'inventory' not in data:
data['inventory'] = inventory.id
if 'credential' not in data:
data['credential'] = machine_credential.id
for k,v in data.items():
if v is None:
del data[k]
return post(url, data, user, expect=expect)
return f
@pytest.mark.django_db
def test_admin_post_ad_hoc_command_list(admin, post_adhoc, inventory, machine_credential):
res = post_adhoc(reverse('api:ad_hoc_command_list'), {}, admin, expect=201)
assert res.data['job_type'] == 'run'
assert res.data['inventory'], inventory.id
assert res.data['credential'] == machine_credential.id
assert res.data['module_name'] == 'command'
assert res.data['module_args'] == 'uptime'
assert res.data['limit'] == ''
assert res.data['forks'] == 0
assert res.data['verbosity'] == 0
assert res.data['become_enabled'] is False
@pytest.mark.django_db
def test_empty_post_403(admin, post):
post(reverse('api:ad_hoc_command_list'), {}, admin, expect=400)
@pytest.mark.django_db
def test_empty_put_405(admin, put):
put(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)
@pytest.mark.django_db
def test_empty_patch_405(admin, patch):
patch(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)
@pytest.mark.django_db
def test_empty_delete_405(admin, delete):
delete(reverse('api:ad_hoc_command_list'), admin, expect=405)
@pytest.mark.django_db
def test_user_post_ad_hoc_command_list(alice, post_adhoc, inventory, machine_credential):
inventory.adhoc_role.members.add(alice)
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=201)
@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_xfail(alice, post_adhoc, inventory, machine_credential):
inventory.read_role.members.add(alice) # just read access? no dice.
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_without_creds(alice, post_adhoc, inventory, machine_credential):
inventory.adhoc_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_without_inventory(alice, post_adhoc, inventory, machine_credential):
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
@pytest.mark.django_db
def test_admin_post_inventory_ad_hoc_command_list(admin, post_adhoc, inventory):
post_adhoc(reverse('api:inventory_ad_hoc_commands_list', args=(inventory.id,)), {'inventory': None}, admin, expect=201)
post_adhoc(reverse('api:inventory_ad_hoc_commands_list', args=(inventory.id,)), {}, admin, expect=201)
@pytest.mark.django_db
def test_get_inventory_ad_hoc_command_list(admin, alice, post_adhoc, get, inventory_factory, machine_credential):
inv1 = inventory_factory('inv1')
inv2 = inventory_factory('inv2')
post_adhoc(reverse('api:ad_hoc_command_list'), {'inventory': inv1.id}, admin, expect=201)
post_adhoc(reverse('api:ad_hoc_command_list'), {'inventory': inv2.id}, admin, expect=201)
res = get(reverse('api:ad_hoc_command_list'), admin, expect=200)
assert res.data['count'] == 2
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), admin, expect=200)
assert res.data['count'] == 1
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv2.id,)), admin, expect=200)
assert res.data['count'] == 1
inv1.adhoc_role.members.add(alice)
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), alice, expect=200)
assert res.data['count'] == 0
machine_credential.use_role.members.add(alice)
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), alice, expect=200)
assert res.data['count'] == 1
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv2.id,)), alice, expect=403)
@pytest.mark.django_db
def test_bad_data1(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'module_name': 'command', 'module_args': None}, admin, expect=400)
@pytest.mark.django_db
def test_bad_data2(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'job_type': 'baddata'}, admin, expect=400)
@pytest.mark.django_db
def test_bad_data3(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'verbosity': -1}, admin, expect=400)
@pytest.mark.django_db
def test_bad_data4(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'forks': -1}, admin, expect=400)

View File

@@ -0,0 +1,45 @@
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_user_role_view_access(rando, inventory, mocker, post):
"Assure correct access method is called when assigning users new roles"
role_pk = inventory.admin_role.pk
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
post(url=reverse('api:user_roles_list', args=(rando.pk,)),
data=data, user=rando, expect=403)
mock_access.can_attach.assert_called_once_with(
inventory.admin_role, rando, 'members', data,
skip_sub_obj_read_check=False)
@pytest.mark.django_db
def test_team_role_view_access(rando, team, inventory, mocker, post):
"Assure correct access method is called when assigning teams new roles"
team.admin_role.members.add(rando)
role_pk = inventory.admin_role.pk
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
post(url=reverse('api:team_roles_list', args=(team.pk,)),
data=data, user=rando, expect=403)
mock_access.can_attach.assert_called_once_with(
inventory.admin_role, team, 'member_role.parents', data,
skip_sub_obj_read_check=False)
@pytest.mark.django_db
def test_role_team_view_access(rando, team, inventory, mocker, post):
"""Assure that /role/N/teams/ enforces the same permission restrictions
that /teams/N/roles/ does when assigning teams new roles"""
role_pk = inventory.admin_role.pk
data = {"id": team.pk}
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
post(url=reverse('api:role_teams_list', args=(role_pk,)),
data=data, user=rando, expect=403)
mock_access.assert_called_once_with(
inventory.admin_role, team, 'member_role.parents', data,
skip_sub_obj_read_check=False)

View File

@@ -147,8 +147,7 @@ def test_credential_detail(post, get, organization, org_admin):
response = get(reverse('api:credential_detail', args=(response.data['id'],)), org_admin)
assert response.status_code == 200
summary_fields = response.data['summary_fields']
assert 'owners' in summary_fields
assert summary_fields['owners'][0]['id'] == organization.id
assert 'organization' in summary_fields
related_fields = response.data['related']
assert 'organization' in related_fields
@@ -217,13 +216,16 @@ def test_openstack_create_fail_required_fields(post, organization, admin):
#
@pytest.mark.django_db
def test_create_credential_xfails(post, organization, team, admin):
def test_create_credential_missing_user_team_org_xfail(post, admin):
# Must specify one of user, team, or organization
response = post(reverse('api:credential_list'), {
'name': 'Some name',
'username': 'someusername',
}, admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_create_credential_with_user_and_org_xfail(post, organization, admin):
# Can only specify one of user, team, or organization
response = post(reverse('api:credential_list'), {
'name': 'Some name',
@@ -232,6 +234,9 @@ def test_create_credential_xfails(post, organization, team, admin):
'organization': organization.id,
}, admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_create_credential_with_team_and_org_xfail(post, organization, team, admin):
response = post(reverse('api:credential_list'), {
'name': 'Some name',
'username': 'someusername',
@@ -239,6 +244,9 @@ def test_create_credential_xfails(post, organization, team, admin):
'team': team.id,
}, admin)
assert response.status_code == 400
@pytest.mark.django_db
def test_create_credential_with_user_and_team_xfail(post, team, admin):
response = post(reverse('api:credential_list'), {
'name': 'Some name',
'username': 'someusername',
@@ -246,7 +254,3 @@ def test_create_credential_xfails(post, organization, team, admin):
'team': team.id,
}, admin)
assert response.status_code == 400

View File

@@ -0,0 +1,163 @@
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_inventory_source_notification_on_cloud_only(get, post, group_factory, user, notification_template):
u = user('admin', True)
g_cloud = group_factory('cloud')
g_not = group_factory('not_cloud')
cloud_is = g_cloud.inventory_source
not_is = g_not.inventory_source
cloud_is.source = 'ec2'
cloud_is.save()
url = reverse('api:inventory_source_notification_templates_any_list', args=(cloud_is.id,))
response = post(url, dict(id=notification_template.id), u)
assert response.status_code == 204
url = reverse('api:inventory_source_notification_templates_success_list', args=(not_is.id,))
response = post(url, dict(id=notification_template.id), u)
assert response.status_code == 400
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 200),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_edit_inventory(put, inventory, alice, role_field, expected_status_code):
data = { 'organization': inventory.organization.id, 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(inventory, role_field).members.add(alice)
put(reverse('api:inventory_detail', args=(inventory.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 201),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_create_inventory_group(post, inventory, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(inventory, role_field).members.add(alice)
post(reverse('api:inventory_groups_list', args=(inventory.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 201),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_create_inventory_group_child(post, group, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
post(reverse('api:group_children_list', args=(group.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 200),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_edit_inventory_group(put, group, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
put(reverse('api:group_detail', args=(group.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 204),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_delete_inventory_group(delete, group, alice, role_field, expected_status_code):
if role_field:
getattr(group.inventory, role_field).members.add(alice)
delete(reverse('api:group_detail', args=(group.id,)), alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 201),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_create_inventory_host(post, inventory, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(inventory, role_field).members.add(alice)
post(reverse('api:inventory_hosts_list', args=(inventory.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 201),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_create_inventory_group_host(post, group, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(group.inventory, role_field).members.add(alice)
post(reverse('api:group_hosts_list', args=(group.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 200),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_edit_inventory_host(put, host, alice, role_field, expected_status_code):
data = { 'name': 'New name', 'description': 'Hello world', }
if role_field:
getattr(host.inventory, role_field).members.add(alice)
put(reverse('api:host_detail', args=(host.id,)), data, alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 204),
('update_role', 403),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_delete_inventory_host(delete, host, alice, role_field, expected_status_code):
if role_field:
getattr(host.inventory, role_field).members.add(alice)
delete(reverse('api:host_detail', args=(host.id,)), alice, expect=expected_status_code)
@pytest.mark.parametrize("role_field,expected_status_code", [
(None, 403),
('admin_role', 202),
('update_role', 202),
('adhoc_role', 403),
('use_role', 403)
])
@pytest.mark.django_db
def test_inventory_source_update(post, inventory_source, alice, role_field, expected_status_code):
if role_field:
getattr(inventory_source.group.inventory, role_field).members.add(alice)
post(reverse('api:inventory_source_update_view', args=(inventory_source.id,)), {}, alice, expect=expected_status_code)

View File

@@ -70,7 +70,7 @@ def bad_scan_JT(job_template_prompts):
# End of setup, tests start here
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, user, mocker):
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
job_template = job_template_prompts(False)
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
@@ -78,8 +78,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, us
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, user('admin', True))
assert response.status_code == 201
runtime_data, admin_user, expect=201)
# Check that job is serialized correctly
job_id = response.data['job']
@@ -99,7 +98,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, us
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, user, mocker):
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
job_template = job_template_prompts(True)
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
@@ -107,9 +106,8 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, user
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, user('admin', True))
runtime_data, admin_user, expect=201)
assert response.status_code == 201
job_id = response.data['job']
assert job_id == 968
@@ -134,51 +132,47 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, rando)
runtime_data, rando, expect=201)
assert response.status_code == 201
job_id = response.data['job']
assert job_id == 968
mock_job.signal_start.assert_called_once_with(**runtime_data)
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, user):
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, admin_user):
job_template = job_template_prompts(True)
response = post(
reverse('api:job_template_launch', args=[job_template.pk]),
dict(job_type='foobicate', # foobicate is not a valid job type
inventory=87865, credential=48474), user('admin', True))
inventory=87865, credential=48474), admin_user, expect=400)
assert response.status_code == 400
assert response.data['job_type'] == [u'"foobicate" is not a valid choice.']
assert response.data['inventory'] == [u'Invalid pk "87865" - object does not exist.']
assert response.data['credential'] == [u'Invalid pk "48474" - object does not exist.']
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, user):
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, admin_user):
job_template = job_template_prompts(True)
response = post(
reverse('api:job_template_launch', args=[job_template.pk]),
dict(extra_vars='{"unbalanced brackets":'), user('admin', True))
dict(extra_vars='{"unbalanced brackets":'), admin_user, expect=400)
assert response.status_code == 400
assert response.data['extra_vars'] == ['Must be a valid JSON or YAML dictionary.']
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, user):
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user):
deploy_jobtemplate.inventory = None
deploy_jobtemplate.save()
response = post(reverse('api:job_template_launch',
args=[deploy_jobtemplate.pk]), {}, user('admin', True))
args=[deploy_jobtemplate.pk]), {}, admin_user, expect=400)
assert response.status_code == 400
assert response.data['inventory'] == ['Job Template Inventory is missing or undefined.']
assert response.data['inventory'] == ["Job Template 'inventory' is missing or undefined."]
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
@@ -188,9 +182,8 @@ def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime
# Assure that giving an inventory without access to the inventory blocks the launch
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
dict(inventory=runtime_data['inventory']), rando)
dict(inventory=runtime_data['inventory']), rando, expect=403)
assert response.status_code == 403
assert response.data['detail'] == u'You do not have permission to perform this action.'
@pytest.mark.django_db
@@ -201,9 +194,8 @@ def test_job_launch_fails_without_credential_access(job_template_prompts, runtim
# Assure that giving a credential without access blocks the launch
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
dict(credential=runtime_data['credential']), rando)
dict(credential=runtime_data['credential']), rando, expect=403)
assert response.status_code == 403
assert response.data['detail'] == u'You do not have permission to perform this action.'
@pytest.mark.django_db
@@ -213,20 +205,19 @@ def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
# Assure that changing the type of a scan job blocks the launch
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
dict(job_type='scan'), admin_user)
dict(job_type='scan'), admin_user, expect=400)
assert response.status_code == 400
assert 'job_type' in response.data
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post, admin_user):
# Assure that giving a new inventory for a scan job blocks the launch
with mocker.patch('awx.main.access.BaseAccess.check_license', return_value=True):
with mocker.patch('awx.main.access.BaseAccess.check_license'):
response = post(reverse('api:job_template_launch', args=[bad_scan_JT.pk]),
dict(inventory=runtime_data['inventory']), admin_user)
dict(inventory=runtime_data['inventory']), admin_user,
expect=400)
assert response.status_code == 400
assert 'inventory' in response.data
@pytest.mark.django_db
@@ -286,41 +277,23 @@ def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_launch_unprompted_vars_with_survey(mocker, job_template_prompts, post, user):
with mocker.patch('awx.main.access.BaseAccess.check_license', return_value=False):
job_template = job_template_prompts(False)
job_template.survey_enabled = True
job_template.survey_spec = {
"spec": [
{
"index": 0,
"question_name": "survey_var",
"min": 0,
"default": "",
"max": 100,
"question_description": "A survey question",
"required": True,
"variable": "survey_var",
"choices": "",
"type": "integer"
}
],
"description": "",
"name": ""
}
job_template.save()
def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user):
job_template = job_template_prompts(False)
job_template.survey_enabled = True
job_template.survey_spec = survey_spec_factory('survey_var')
job_template.save()
with mocker.patch('awx.main.access.BaseAccess.check_license'):
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
response = post(
reverse('api:job_template_launch', args=[job_template.pk]),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
user('admin', True))
assert response.status_code == 201
admin_user, expect=201)
job_id = response.data['job']
assert job_id == 968
job_id = response.data['job']
assert job_id == 968
# Check that the survey variable is accepted and the job variable isn't
mock_job.signal_start.assert_called_once_with(extra_vars={"survey_var": 4})
# Check that the survey variable is accepted and the job variable isn't
mock_job.signal_start.assert_called_once_with(extra_vars={"survey_var": 4})

View File

@@ -0,0 +1,337 @@
import pytest
import mock
# AWX
from awx.api.serializers import JobTemplateSerializer, JobLaunchSerializer
from awx.main.models.jobs import JobTemplate
from awx.main.models.projects import ProjectOptions
# Django
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
@property
def project_playbooks(self):
return ['mocked', 'mocked.yml', 'alt-mocked.yml']
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
@pytest.mark.parametrize(
"grant_project, grant_credential, grant_inventory, expect", [
(True, True, True, 201),
(True, True, False, 403),
(True, False, True, 403),
(False, True, True, 403),
]
)
def test_create(post, project, machine_credential, inventory, alice, grant_project, grant_credential, grant_inventory, expect):
if grant_project:
project.use_role.members.add(alice)
if grant_credential:
machine_credential.use_role.members.add(alice)
if grant_inventory:
inventory.use_role.members.add(alice)
post(reverse('api:job_template_list'), {
'name': 'Some name',
'project': project.id,
'credential': machine_credential.id,
'inventory': inventory.id,
'playbook': 'mocked.yml',
}, alice, expect=expect)
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
@pytest.mark.parametrize(
"grant_project, grant_credential, grant_inventory, expect", [
(True, True, True, 200),
(True, True, False, 403),
(True, False, True, 403),
(False, True, True, 403),
]
)
def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project, grant_credential, grant_inventory, expect):
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
objs.job_template.admin_role.members.add(alice)
if grant_project:
objs.project.use_role.members.add(alice)
if grant_credential:
objs.credential.use_role.members.add(alice)
if grant_inventory:
objs.inventory.use_role.members.add(alice)
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
'name': 'Some name',
'project': objs.project.id,
'credential': objs.credential.id,
'inventory': objs.inventory.id,
'playbook': 'alt-mocked.yml',
}, alice, expect=expect)
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
def test_edit_playbook(patch, job_template_factory, alice):
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
objs.job_template.admin_role.members.add(alice)
objs.project.use_role.members.add(alice)
objs.credential.use_role.members.add(alice)
objs.inventory.use_role.members.add(alice)
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
'playbook': 'alt-mocked.yml',
}, alice, expect=200)
objs.inventory.use_role.members.remove(alice)
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
'playbook': 'mocked.yml',
}, alice, expect=403)
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
def test_edit_nonsenstive(patch, job_template_factory, alice):
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
jt = objs.job_template
jt.admin_role.members.add(alice)
res = patch(reverse('api:job_template_detail', args=(jt.id,)), {
'name': 'updated',
'description': 'bar',
'forks': 14,
'limit': 'something',
'verbosity': 5,
'extra_vars': '--',
'job_tags': 'sometags',
'force_handlers': True,
'skip_tags': True,
'ask_variables_on_launch':True,
'ask_tags_on_launch':True,
'ask_job_type_on_launch':True,
'ask_inventory_on_launch':True,
'ask_credential_on_launch': True,
}, alice, expect=200)
print(res.data)
assert res.data['name'] == 'updated'
@pytest.fixture
def jt_copy_edit(job_template_factory, project):
objects = job_template_factory(
'copy-edit-job-template',
project=project)
return objects.job_template
@property
def project_playbooks(self):
return ['mocked', 'mocked.yml', 'alt-mocked.yml']
@pytest.mark.django_db
def test_job_template_role_user(post, organization_factory, job_template_factory):
objects = organization_factory("org",
superusers=['admin'],
users=['test'])
jt_objects = job_template_factory("jt",
organization=objects.organization,
inventory='test_inv',
project='test_proj')
url = reverse('api:user_roles_list', args=(objects.users.test.pk,))
response = post(url, dict(id=jt_objects.job_template.execute_role.pk), objects.superusers.admin)
assert response.status_code == 204
# Test protection against limited set of validation problems
@pytest.mark.django_db
def test_bad_data_copy_edit(admin_user, project):
"""
If a required resource (inventory here) was deleted, copying is not allowed
because doing so would cause a validation error
"""
jt_res = JobTemplate.objects.create(
job_type='run',
project=project,
inventory=None, ask_inventory_on_launch=False, # not allowed
credential=None, ask_credential_on_launch=True,
name='deploy-job-template'
)
serializer = JobTemplateSerializer(jt_res)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = admin_user
serializer.context['request'] = request
response = serializer.to_representation(jt_res)
assert not response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
# Tests for correspondence between view info and actual access
@pytest.mark.django_db
def test_admin_copy_edit(jt_copy_edit, admin_user):
"Absent a validation error, system admins can do everything"
# Serializer can_copy/can_edit fields
serializer = JobTemplateSerializer(jt_copy_edit)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = admin_user
serializer.context['request'] = request
response = serializer.to_representation(jt_copy_edit)
assert response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
@pytest.mark.django_db
def test_org_admin_copy_edit(jt_copy_edit, org_admin):
"Organization admins SHOULD be able to copy a JT firmly in their org"
# Serializer can_copy/can_edit fields
serializer = JobTemplateSerializer(jt_copy_edit)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = org_admin
serializer.context['request'] = request
response = serializer.to_representation(jt_copy_edit)
assert response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
@pytest.mark.django_db
def test_org_admin_foreign_cred_no_copy_edit(jt_copy_edit, org_admin, machine_credential):
"""
Organization admins without access to the 3 related resources:
SHOULD NOT be able to copy JT
SHOULD be able to edit that job template, for nonsensitive changes
"""
# Attach credential to JT that org admin can not use
jt_copy_edit.credential = machine_credential
jt_copy_edit.save()
# Serializer can_copy/can_edit fields
serializer = JobTemplateSerializer(jt_copy_edit)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = org_admin
serializer.context['request'] = request
response = serializer.to_representation(jt_copy_edit)
assert not response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
@pytest.mark.django_db
def test_jt_admin_copy_edit(jt_copy_edit, rando):
"""
JT admins without access to associated resources SHOULD NOT be able to copy,
but SHOULD be able to make nonsensitive changes"""
# random user given JT admin access only
jt_copy_edit.admin_role.members.add(rando)
jt_copy_edit.save()
# Serializer can_copy/can_edit fields
serializer = JobTemplateSerializer(jt_copy_edit)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = rando
serializer.context['request'] = request
response = serializer.to_representation(jt_copy_edit)
assert not response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
@pytest.mark.django_db
def test_proj_jt_admin_copy_edit(jt_copy_edit, rando):
"JT admins with access to associated resources SHOULD be able to copy"
# random user given JT and project admin abilities
jt_copy_edit.admin_role.members.add(rando)
jt_copy_edit.save()
jt_copy_edit.project.admin_role.members.add(rando)
jt_copy_edit.project.save()
# Serializer can_copy/can_edit fields
serializer = JobTemplateSerializer(jt_copy_edit)
request = RequestFactory().get('/api/v1/job_templates/12/')
request.user = rando
serializer.context['request'] = request
response = serializer.to_representation(jt_copy_edit)
assert response['summary_fields']['can_copy']
assert response['summary_fields']['can_edit']
# Functional tests - create new JT with all returned fields, as the UI does
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
def test_org_admin_copy_edit_functional(jt_copy_edit, org_admin, get, post):
get_response = get(reverse('api:job_template_detail', args=[jt_copy_edit.pk]), user=org_admin)
assert get_response.status_code == 200
assert get_response.data['summary_fields']['can_copy']
post_data = get_response.data
post_data['name'] = '%s @ 12:19:47 pm' % post_data['name']
post_response = post(reverse('api:job_template_list', args=[]), user=org_admin, data=post_data)
assert post_response.status_code == 201
assert post_response.data['name'] == 'copy-edit-job-template @ 12:19:47 pm'
@pytest.mark.django_db
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
# Grant random user JT admin access only
jt_copy_edit.admin_role.members.add(rando)
jt_copy_edit.save()
get_response = get(reverse('api:job_template_detail', args=[jt_copy_edit.pk]), user=rando)
assert get_response.status_code == 200
assert not get_response.data['summary_fields']['can_copy']
post_data = get_response.data
post_data['name'] = '%s @ 12:19:47 pm' % post_data['name']
post_response = post(reverse('api:job_template_list', args=[]), user=rando, data=post_data)
assert post_response.status_code == 403
@pytest.mark.django_db
def test_scan_jt_no_inventory(job_template_factory):
# A user should be able to create a scan job without a project, but an inventory is required
objects = job_template_factory('jt',
credential='c',
job_type="scan",
project='p',
inventory='i',
organization='o')
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": objects.inventory.pk})
assert serializer.is_valid()
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": None})
assert not serializer.is_valid()
assert "inventory" in serializer.errors
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": None,
"ask_inventory_on_launch": True})
assert not serializer.is_valid()
assert "inventory" in serializer.errors
# A user shouldn't be able to launch a scan job template which is missing an inventory
obj_jt = objects.job_template
obj_jt.inventory = None
serializer = JobLaunchSerializer(instance=obj_jt,
context={'obj': obj_jt,
"data": {}},
data={})
assert not serializer.is_valid()
assert 'inventory' in serializer.errors
@pytest.mark.django_db
def test_scan_jt_surveys(inventory):
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": inventory.pk,
"survey_enabled": True})
assert not serializer.is_valid()
assert "survey_enabled" in serializer.errors
@pytest.mark.django_db
def test_jt_without_project(inventory):
data = dict(name="Test", job_type="run",
inventory=inventory.pk, project=None)
serializer = JobTemplateSerializer(data=data)
assert not serializer.is_valid()
assert "project" in serializer.errors
data["job_type"] = "check"
serializer = JobTemplateSerializer(data=data)
assert not serializer.is_valid()
assert "project" in serializer.errors
data["job_type"] = "scan"
serializer = JobTemplateSerializer(data=data)
assert serializer.is_valid()

View File

@@ -0,0 +1,200 @@
import mock
import pytest
import json
from django.core.urlresolvers import reverse
from awx.main.models.jobs import JobTemplate, Job
from awx.main.models.activity_stream import ActivityStream
from awx.api.license import LicenseForbids
from awx.main.access import JobTemplateAccess
def mock_no_surveys(self, add_host=False, feature=None, check_expiration=True):
if feature == 'surveys':
raise LicenseForbids("Feature %s is not enabled in the active license." % feature)
else:
pass
@pytest.fixture
def job_template_with_survey(job_template_factory):
objects = job_template_factory('jt', project='prj', survey='submitted_email')
return objects.job_template
# Survey license-based denial tests
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_view_denied(job_template_with_survey, get, admin_user):
# TODO: Test non-enterprise license
response = get(reverse('api:job_template_survey_spec',
args=(job_template_with_survey.id,)), admin_user, expect=402)
assert response.data['detail'] == 'Your license does not allow adding surveys.'
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_deny_enabling_survey(deploy_jobtemplate, patch, admin_user):
response = patch(url=deploy_jobtemplate.get_absolute_url(),
data=dict(survey_enabled=True), user=admin_user, expect=402)
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_job_start_blocked_without_survey_license(job_template_with_survey, admin_user):
"""Check that user can't start a job with surveys without a survey license."""
access = JobTemplateAccess(admin_user)
with pytest.raises(LicenseForbids):
access.can_start(job_template_with_survey)
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_deny_creating_with_survey(project, post, admin_user):
response = post(
url=reverse('api:job_template_list'),
data=dict(
name = 'JT with survey',
job_type = 'run',
project = project.pk,
playbook = 'helloworld.yml',
ask_credential_on_launch = True,
ask_inventory_on_launch = True,
survey_enabled = True),
user=admin_user, expect=402)
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
# Test normal operations with survey license work
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_view_allowed(deploy_jobtemplate, get, admin_user):
get(reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
admin_user, expect=200)
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_sucessful_creation(survey_spec_factory, job_template, post, admin_user):
survey_input_data = survey_spec_factory('new_question')
post(url=reverse('api:job_template_survey_spec', args=(job_template.id,)),
data=survey_input_data, user=admin_user, expect=200)
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
assert updated_jt.survey_spec == survey_input_data
# Tests related to survey content validation
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_non_dict_error(deploy_jobtemplate, post, admin_user):
"""When a question doesn't follow the standard format, verify error thrown."""
response = post(
url=reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
data={"description": "Email of the submitter",
"spec": ["What is your email?"], "name": "Email survey"},
user=admin_user, expect=400)
assert response.data['error'] == "Survey question 0 is not a json object."
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_dual_names_error(survey_spec_factory, deploy_jobtemplate, post, user):
response = post(
url=reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
data=survey_spec_factory(['submitter_email', 'submitter_email']),
user=user('admin', True), expect=400)
assert response.data['error'] == "'variable' 'submitter_email' duplicated in survey question 1."
# Test actions that should be allowed with non-survey license
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_disable_survey_access_without_license(job_template_with_survey, admin_user):
"""Assure that user can disable a JT survey after downgrading license."""
access = JobTemplateAccess(admin_user)
assert access.can_change(job_template_with_survey, dict(survey_enabled=False))
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_delete_survey_access_without_license(job_template_with_survey, admin_user):
"""Assure that access.py allows deleting surveys after downgrading license."""
access = JobTemplateAccess(admin_user)
assert access.can_change(job_template_with_survey, dict(survey_spec=None))
assert access.can_change(job_template_with_survey, dict(survey_spec={}))
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_job_start_allowed_with_survey_spec(job_template_factory, admin_user):
"""After user downgrades survey license and disables survey on the JT,
check that jobs still launch even if the survey_spec data persists."""
objects = job_template_factory('jt', project='prj', survey='submitter_email')
obj = objects.job_template
obj.survey_enabled = False
obj.save()
access = JobTemplateAccess(admin_user)
assert access.can_start(obj)
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_job_template_delete_access_with_survey(job_template_with_survey, admin_user):
"""The survey_spec view relies on JT `can_delete` to determine permission
to delete the survey. This checks that system admins can delete the survey on a JT."""
access = JobTemplateAccess(admin_user)
assert access.can_delete(job_template_with_survey)
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@pytest.mark.django_db
@pytest.mark.survey
def test_delete_survey_spec_without_license(job_template_with_survey, delete, admin_user):
"""Functional delete test through the survey_spec view."""
delete(reverse('api:job_template_survey_spec', args=[job_template_with_survey.pk]),
admin_user, expect=200)
new_jt = JobTemplate.objects.get(pk=job_template_with_survey.pk)
assert new_jt.survey_spec == {}
@mock.patch('awx.main.access.BaseAccess.check_license', lambda self, **kwargs: True)
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
lambda self, extra_vars: mock.MagicMock(spec=Job, id=968))
@mock.patch('awx.api.serializers.JobSerializer.to_representation', lambda self, obj: {})
@pytest.mark.django_db
@pytest.mark.survey
def test_launch_survey_enabled_but_no_survey_spec(job_template_factory, post, admin_user):
"""False-ish values for survey_spec are interpreted as a survey with 0 questions."""
objects = job_template_factory('jt', organization='org1', project='prj',
inventory='inv', credential='cred')
obj = objects.job_template
obj.survey_enabled = True
obj.save()
response = post(reverse('api:job_template_launch', args=[obj.pk]),
dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)
assert 'survey_var' in response.data['ignored_fields']['extra_vars']
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
lambda self: mock.MagicMock(spec=Job, id=968))
@mock.patch('awx.api.serializers.JobSerializer.to_representation', lambda self, obj: {})
@pytest.mark.django_db
@pytest.mark.survey
def test_launch_with_non_empty_survey_spec_no_license(job_template_factory, post, admin_user):
"""Assure jobs can still be launched from JTs with a survey_spec
when the survey is disabled."""
objects = job_template_factory('jt', organization='org1', project='prj',
inventory='inv', credential='cred',
survey='survey_var')
obj = objects.job_template
obj.survey_enabled = False
obj.save()
post(reverse('api:job_template_launch', args=[obj.pk]), {}, admin_user, expect=201)
@pytest.mark.django_db
@pytest.mark.survey
def test_redact_survey_passwords_in_activity_stream(job_with_secret_key):
AS_record = ActivityStream.objects.filter(object1='job').all()[0]
changes_dict = json.loads(AS_record.changes)
extra_vars = json.loads(changes_dict['extra_vars'])
assert extra_vars['secret_key'] == '$encrypted$'

View File

@@ -1,135 +0,0 @@
import mock
import pytest
from django.core.urlresolvers import reverse
from awx.main.models.jobs import JobTemplate
from awx.api.license import LicenseForbids
def mock_feature_enabled(feature, bypass_database=None):
return True
def mock_feature_disabled(feature, bypass_database=None):
return False
def mock_check_license(self, add_host=False, feature=None, check_expiration=True):
raise LicenseForbids("Feature %s is not enabled in the active license." % feature)
@pytest.fixture
def survey_jobtemplate(project, inventory, credential):
return JobTemplate.objects.create(
job_type='run',
project=project,
inventory=inventory,
credential=credential,
name='deploy-job-template'
)
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_view_denied(deploy_jobtemplate, get, user):
# TODO: Test non-enterprise license
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
response = get(spec_url, user('admin', True))
assert response.status_code == 402
assert response.data['detail'] == 'Your license does not allow adding surveys.'
@mock.patch('awx.main.access.BaseAccess.check_license', mock_check_license)
@pytest.mark.django_db
@pytest.mark.survey
def test_deny_enabling_survey(deploy_jobtemplate, patch, user):
JT_url = reverse('api:job_template_detail', args=(deploy_jobtemplate.id,))
response = patch(url=JT_url, data=dict(survey_enabled=True), user=user('admin', True))
assert response.status_code == 402
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
@mock.patch('awx.main.access.BaseAccess.check_license', mock_check_license)
@pytest.mark.django_db
@pytest.mark.survey
def test_deny_creating_with_survey(machine_credential, project, inventory, post, user):
JT_url = reverse('api:job_template_list')
JT_data = dict(
name = 'JT with survey',
job_type = 'run',
inventory = inventory.pk,
project = project.pk,
playbook = 'hiworld.yml',
credential = machine_credential.pk,
survey_enabled = True,
)
response = post(url=JT_url, data=JT_data, user=user('admin', True))
assert response.status_code == 402
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_view_allowed(deploy_jobtemplate, get, user):
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
response = get(spec_url, user('admin', True))
assert response.status_code == 200
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_successful_creation(deploy_jobtemplate, post, user):
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
response = post(
url=spec_url,
data={
"description": "Email of the submitter",
"spec": [{
"variable": "submitter_email",
"question_name": "Enter your email",
"type": "text",
"required": False
}],
"name": "Email survey"
},
user=user('admin', True))
assert response.status_code == 200
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_non_dict_error(deploy_jobtemplate, post, user):
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
response = post(
url=spec_url,
data={"description": "Email of the submitter",
"spec": ["What is your email?"], "name": "Email survey"},
user=user('admin', True))
assert response.status_code == 400
assert response.data['error'] == "Survey question 0 is not a json object."
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
@pytest.mark.survey
def test_survey_spec_dual_names_error(deploy_jobtemplate, post, user):
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
response = post(
url=spec_url,
data={
"description": "Email of the submitter",
"spec": [{
"variable": "submitter_email",
"question_name": "Enter your email",
"type": "text",
"required": False
}, {
"variable": "submitter_email",
"question_name": "Same variable as last question",
"type": "integer",
"required": False
}],
"name": "Email survey"
},
user=user('admin', True))
assert response.status_code == 400
assert response.data['error'] == "'variable' 'submitter_email' duplicated in survey question 1."
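# Editorial note (not part of the commit): together, the two tests above cover
# the survey_spec validation errors exercised in this file -- a question that
# is not a JSON object and a 'variable' name reused across questions both
# return HTTP 400 with the error strings asserted above.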

View File

@ -29,6 +29,8 @@ from awx.main.models.jobs import JobTemplate
from awx.main.models.inventory import (
Group,
Inventory,
InventoryUpdate,
InventorySource
)
from awx.main.models.organization import (
Organization,
@ -147,18 +149,6 @@ def instance(settings):
def organization(instance):
return Organization.objects.create(name="test-org", description="test-org-desc")
@pytest.fixture
def organization_factory(instance):
def factory(name):
try:
org = Organization.objects.get(name=name)
except Organization.DoesNotExist:
org = Organization.objects.create(name=name,
description="description for " + name,
)
return org
return factory
@pytest.fixture
def credential():
return Credential.objects.create(kind='aws', name='test-cred')
@ -168,9 +158,8 @@ def machine_credential():
return Credential.objects.create(name='machine-cred', kind='ssh', username='test_user', password='pas4word')
@pytest.fixture
def org_credential(organization, credential):
credential.owner_role.parents.add(organization.admin_role)
return credential
def org_credential(organization):
return Credential.objects.create(kind='aws', name='test-cred', organization=organization)
@pytest.fixture
def inventory(organization):
@ -197,6 +186,11 @@ def notification_template(organization):
notification_type="webhook",
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"}))
@pytest.fixture
def job_with_secret_key(job_with_secret_key_factory):
return job_with_secret_key_factory(persisted=True)
@pytest.fixture
def admin(user):
return user('admin', True)
@ -221,6 +215,13 @@ def org_admin(user, organization):
organization.member_role.members.add(ret)
return ret
@pytest.fixture
def org_auditor(user, organization):
ret = user('org-auditor', False)
organization.auditor_role.members.add(ret)
organization.member_role.members.add(ret)
return ret
@pytest.fixture
def org_member(user, organization):
ret = user('org-member', False)
@ -265,6 +266,15 @@ def hosts(group_factory):
def group(inventory):
return inventory.groups.create(name='single-group')
@pytest.fixture
def inventory_source(group, inventory):
return InventorySource.objects.create(name=group.name, group=group,
inventory=inventory, source='gce')
@pytest.fixture
def inventory_update(inventory_source):
return InventoryUpdate.objects.create(inventory_source=inventory_source)
@pytest.fixture
def host(group, inventory):
return group.hosts.create(name='single-host', inventory=inventory)
@ -282,24 +292,9 @@ def permissions():
'update':False, 'delete':False, 'scm_update':False, 'execute':False, 'use':True,},
}
@pytest.fixture
def notification_template_factory(organization):
def n(name="test-notification_template"):
try:
notification_template = NotificationTemplate.objects.get(name=name)
except NotificationTemplate.DoesNotExist:
notification_template = NotificationTemplate(name=name,
organization=organization,
notification_type="webhook",
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"}))
notification_template.save()
return notification_template
return n
@pytest.fixture
def post():
def rf(url, data, user=None, middleware=None, **kwargs):
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -311,12 +306,16 @@ def post():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def get():
def rf(url, user=None, middleware=None, **kwargs):
def rf(url, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -328,12 +327,16 @@ def get():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def put():
def rf(url, data, user=None, middleware=None, **kwargs):
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -345,12 +348,16 @@ def put():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def patch():
def rf(url, data, user=None, middleware=None, **kwargs):
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -362,12 +369,16 @@ def patch():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def delete():
def rf(url, user=None, middleware=None, **kwargs):
def rf(url, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -379,12 +390,16 @@ def delete():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def head():
def rf(url, user=None, middleware=None, **kwargs):
def rf(url, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -396,12 +411,16 @@ def head():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
@pytest.fixture
def options():
def rf(url, data, user=None, middleware=None, **kwargs):
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
view, view_args, view_kwargs = resolve(urlparse(url)[2])
if 'format' not in kwargs:
kwargs['format'] = 'json'
@ -413,6 +432,10 @@ def options():
response = view(request, *view_args, **view_kwargs)
if middleware:
middleware.process_response(request, response)
if expect:
if response.status_code != expect:
print(response.data)
assert response.status_code == expect
return response
return rf
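# Usage sketch (editorial, not part of the diff): the new ``expect`` keyword on
# these request fixtures lets tests assert the response status inline; on a
# mismatch the fixture prints response.data before failing. Example calls taken
# from tests in this commit:
#
#   delete(reverse('api:job_template_survey_spec', args=[job_template_with_survey.pk]),
#          admin_user, expect=200)
#   post(reverse('api:job_template_launch', args=[obj.pk]),
#        dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)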
@ -474,3 +497,4 @@ def job_template_labels(organization, job_template):
job_template.labels.create(name="label-2", organization=organization)
return job_template

View File

@ -9,9 +9,6 @@ from awx.main.models.fact import Fact
from awx.main.migrations import _system_tracking as system_tracking
from awx.fact.models.fact import Fact as FactMongo
from awx.fact.models.fact import FactVersion, FactHost
def micro_to_milli(micro):
return micro - (((int)(micro / 1000)) * 1000)
@ -64,20 +61,3 @@ def test_migrate_facts_hostname_does_not_exist(inventories, hosts, hosts_mongo,
assert len(fact) == 1
assert fact[0] is not None
@pytest.mark.skipif(not getattr(settings, 'MONGO_DB', None), reason="MongoDB not configured")
@pytest.mark.django_db
@pytest.mark.mongo_db
def test_drop_system_tracking_db(inventories, hosts, hosts_mongo, fact_scans):
inventory_objs = inventories(1)
hosts_mongo(1, inventory_objs)
fact_scans(1, inventory_objs)
assert FactMongo.objects.all().count() > 0
assert FactVersion.objects.all().count() > 0
assert FactHost.objects.all().count() > 0
system_tracking.drop_system_tracking_db()
assert FactMongo.objects.all().count() == 0
assert FactVersion.objects.all().count() == 0
assert FactHost.objects.all().count() == 0

View File

@ -0,0 +1,15 @@
import pytest
from django.db import IntegrityError
from awx.main.models import Credential
@pytest.mark.django_db
def test_cred_unique_org_name_kind(organization_factory):
objects = organization_factory("test")
cred = Credential(name="test", kind="net", organization=objects.organization)
cred.save()
with pytest.raises(IntegrityError):
cred = Credential(name="test", kind="net", organization=objects.organization)
cred.save()

View File

@ -0,0 +1,110 @@
import pytest
from awx.main.tests.factories import NotUnique
def test_roles_exc_not_persisted(organization_factory):
with pytest.raises(RuntimeError) as exc:
organization_factory('test-org', roles=['test-org.admin_role:user1'], persisted=False)
assert 'persisted=False' in str(exc.value)
@pytest.mark.django_db
def test_roles_exc_bad_object(organization_factory):
with pytest.raises(KeyError):
organization_factory('test-org', roles=['test-project.admin_role:user'])
@pytest.mark.django_db
def test_roles_exc_not_unique(organization_factory):
with pytest.raises(NotUnique) as exc:
organization_factory('test-org', projects=['foo'], teams=['foo'], roles=['foo.admin_role:user'])
assert 'not a unique key' in str(exc.value)
@pytest.mark.django_db
def test_roles_exc_not_assignment(organization_factory):
with pytest.raises(RuntimeError) as exc:
organization_factory('test-org', projects=['foo'], roles=['foo.admin_role'])
assert 'provide an assignment' in str(exc.value)
@pytest.mark.django_db
def test_roles_exc_not_found(organization_factory):
with pytest.raises(RuntimeError) as exc:
organization_factory('test-org', users=['user'], projects=['foo'], roles=['foo.admin_role:user.bad_role'])
assert 'unable to find' in str(exc.value)
@pytest.mark.django_db
def test_roles_exc_not_user(organization_factory):
with pytest.raises(RuntimeError) as exc:
organization_factory('test-org', projects=['foo'], roles=['foo.admin_role:foo'])
assert 'unable to add non-user' in str(exc.value)
@pytest.mark.django_db
def test_org_factory_roles(organization_factory):
objects = organization_factory('org_roles_test',
teams=['team1', 'team2'],
users=['team1:foo', 'bar'],
projects=['baz', 'bang'],
roles=['team2.member_role:foo',
'team1.admin_role:bar',
'team1.admin_role:team2.admin_role',
'baz.admin_role:foo'])
assert objects.users.bar in objects.teams.team2.admin_role
assert objects.users.foo in objects.projects.baz.admin_role
assert objects.users.foo in objects.teams.team1.member_role
assert objects.teams.team2.admin_role in objects.teams.team1.admin_role.children.all()
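# Editorial note (inferred from the assertions above, not part of the diff):
# each ``roles`` entry is an assignment string of the form
# '<object>.<role>:<target>', where <target> is either a username created via
# ``users`` (e.g. 'team2.member_role:foo') or another '<object>.<role>'
# (e.g. 'team1.admin_role:team2.admin_role'), in which case the target role is
# added as a child of the named role.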
@pytest.mark.django_db
def test_org_factory(organization_factory):
objects = organization_factory('organization1',
teams=['team1'],
superusers=['superuser'],
users=['admin', 'alice', 'team1:bob'],
projects=['proj1'])
assert hasattr(objects.users, 'admin')
assert hasattr(objects.users, 'alice')
assert hasattr(objects.superusers, 'superuser')
assert objects.users.bob in objects.teams.team1.member_role.members.all()
assert objects.projects.proj1.organization == objects.organization
@pytest.mark.django_db
def test_job_template_factory(job_template_factory):
jt_objects = job_template_factory('testJT', organization='org1',
project='proj1', inventory='inventory1',
credential='cred1', survey='test-survey',
cloud_credential='aws1',
network_credential='juniper1',
jobs=[1])
assert jt_objects.job_template.name == 'testJT'
assert jt_objects.project.name == 'proj1'
assert jt_objects.inventory.name == 'inventory1'
assert jt_objects.credential.name == 'cred1'
assert jt_objects.cloud_credential.name == 'aws1'
assert jt_objects.network_credential.name == 'juniper1'
assert jt_objects.inventory.organization.name == 'org1'
assert jt_objects.job_template.survey_enabled is True
assert jt_objects.job_template.survey_spec is not None
assert 'test-survey' in jt_objects.jobs[1].extra_vars
def test_survey_spec_generator_simple(survey_spec_factory):
survey_spec = survey_spec_factory('survey_variable')
assert 'name' in survey_spec
assert 'spec' in survey_spec
assert type(survey_spec['spec']) is list
assert type(survey_spec['spec'][0]) is dict
assert survey_spec['spec'][0]['type'] == 'integer'
def test_survey_spec_generator_mixed(survey_spec_factory):
survey_spec = survey_spec_factory(
[{'variable': 'question1', 'type': 'integer', 'max': 87},
{'variable': 'question2', 'type': 'str'},
'some_variable'])
assert len(survey_spec['spec']) == 3
assert [spec_item['type'] for spec_item in survey_spec['spec']] == ['integer', 'str', 'integer']
assert survey_spec['spec'][0]['max'] == 87

View File

@ -1,19 +0,0 @@
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_inventory_source_notification_on_cloud_only(get, post, group_factory, user, notification_template):
u = user('admin', True)
g_cloud = group_factory('cloud')
g_not = group_factory('not_cloud')
cloud_is = g_cloud.inventory_source
not_is = g_not.inventory_source
cloud_is.source = 'ec2'
cloud_is.save()
url = reverse('api:inventory_source_notification_templates_any_list', args=(cloud_is.id,))
response = post(url, dict(id=notification_template.id), u)
assert response.status_code == 204
url = reverse('api:inventory_source_notification_templates_success_list', args=(not_is.id,))
response = post(url, dict(id=notification_template.id), u)
assert response.status_code == 400

View File

@ -22,3 +22,24 @@ def test_job_blocking(get, post, job_template, inventory, inventory_factory):
assert j_callback_1.is_blocked_by(j_callback_2)
j_callback_2.limit = 'b'
assert not j_callback_1.is_blocked_by(j_callback_2)
@pytest.mark.django_db
def test_job_blocking_allow_simul(get, post, job_template, inventory):
job_template.allow_simultaneous = True
j1 = Job.objects.create(job_template=job_template,
inventory=inventory)
j2 = Job.objects.create(job_template=job_template,
inventory=inventory)
assert not j1.is_blocked_by(j2)
assert not j2.is_blocked_by(j1)
job_template.allow_simultaneous = False
assert j1.is_blocked_by(j2)
assert j2.is_blocked_by(j1)
@pytest.mark.django_db
def test_orphan_unified_job_creation(instance, inventory):
job = Job.objects.create(job_template=None, inventory=inventory, name='hi world')
job2 = job.copy()
assert job2.job_template is None
assert job2.inventory == inventory
assert job2.name == 'hi world'

View File

@ -1,7 +1,7 @@
import mock
import pytest
from awx.main.models.notifications import NotificationTemplate
from awx.main.models.notifications import NotificationTemplate, Notification
from awx.main.models.inventory import Inventory, Group
from awx.main.models.jobs import JobTemplate
@ -104,3 +104,21 @@ def test_notification_template_merging(get, post, user, organization, project, n
organization.notification_templates_any.add(notification_template)
project.notification_templates_any.add(notification_template)
assert len(project.notification_templates['any']) == 1
@pytest.mark.django_db
def test_notification_template_simple_patch(patch, notification_template, admin):
patch(reverse('api:notification_template_detail', args=(notification_template.id,)), { 'name': 'foo'}, admin, expect=200)
@pytest.mark.django_db
def test_notification_template_invalid_notification_type(patch, notification_template, admin):
patch(reverse('api:notification_template_detail', args=(notification_template.id,)), { 'notification_type': 'invalid'}, admin, expect=400)
@pytest.mark.django_db
def test_disallow_delete_when_notifications_pending(delete, user, notification_template):
u = user('superuser', True)
url = reverse('api:notification_template_detail', args=(notification_template.id,))
Notification.objects.create(notification_template=notification_template,
status='pending')
response = delete(url, user=u)
assert response.status_code == 405

View File

@ -1,7 +1,6 @@
import mock # noqa
import pytest
from django.db import transaction
from django.core.urlresolvers import reverse
from awx.main.models import Project
@ -9,62 +8,55 @@ from awx.main.models import Project
#
# Project listing and visibility tests
#
@pytest.fixture
def team_project_list(organization_factory):
objects = organization_factory('org-test',
superusers=['admin'],
users=['team1:alice', 'team2:bob'],
teams=['team1', 'team2'],
projects=['pteam1', 'pteam2', 'pshared'],
roles=['team1.member_role:pteam1.admin_role',
'team2.member_role:pteam2.admin_role',
'team1.member_role:pshared.admin_role',
'team2.member_role:pshared.admin_role'])
return objects
@pytest.mark.django_db
def test_user_project_list(get, project_factory, organization, admin, alice, bob):
def test_user_project_list(get, organization_factory):
'List of projects a user has access to, filtered by projects you can also see'
organization.member_role.members.add(alice, bob)
objects = organization_factory('org1',
projects=['alice project', 'bob project', 'shared project'],
superusers=['admin'],
users=['alice', 'bob'],
roles=['alice project.admin_role:alice',
'bob project.admin_role:bob',
'shared project.admin_role:bob',
'shared project.admin_role:alice'])
alice_project = project_factory('alice project')
alice_project.admin_role.members.add(alice)
bob_project = project_factory('bob project')
bob_project.admin_role.members.add(bob)
shared_project = project_factory('shared project')
shared_project.admin_role.members.add(alice)
shared_project.admin_role.members.add(bob)
# admins can see all projects
assert get(reverse('api:user_projects_list', args=(admin.pk,)), admin).data['count'] == 3
assert get(reverse('api:user_projects_list', args=(objects.superusers.admin.pk,)), objects.superusers.admin).data['count'] == 3
# admins can see everyone's projects
assert get(reverse('api:user_projects_list', args=(alice.pk,)), admin).data['count'] == 2
assert get(reverse('api:user_projects_list', args=(bob.pk,)), admin).data['count'] == 2
assert get(reverse('api:user_projects_list', args=(objects.users.alice.pk,)), objects.superusers.admin).data['count'] == 2
assert get(reverse('api:user_projects_list', args=(objects.users.bob.pk,)), objects.superusers.admin).data['count'] == 2
# users can see their own projects
assert get(reverse('api:user_projects_list', args=(alice.pk,)), alice).data['count'] == 2
assert get(reverse('api:user_projects_list', args=(objects.users.alice.pk,)), objects.users.alice).data['count'] == 2
# alice should only be able to see the shared project when looking at bob's projects
assert get(reverse('api:user_projects_list', args=(bob.pk,)), alice).data['count'] == 1
assert get(reverse('api:user_projects_list', args=(objects.users.bob.pk,)), objects.users.alice).data['count'] == 1
# when viewing an admin's projects, alice should only see the ones she can also see
assert get(reverse('api:user_projects_list', args=(admin.pk,)), alice).data['count'] == 2
assert get(reverse('api:user_projects_list', args=(objects.superusers.admin.pk,)), objects.users.alice).data['count'] == 2
def setup_test_team_project_list(project_factory, team_factory, admin, alice, bob):
team1 = team_factory('team1')
team2 = team_factory('team2')
team1_project = project_factory('team1 project')
team1_project.admin_role.parents.add(team1.member_role)
team2_project = project_factory('team2 project')
team2_project.admin_role.parents.add(team2.member_role)
shared_project = project_factory('shared project')
shared_project.admin_role.parents.add(team1.member_role)
shared_project.admin_role.parents.add(team2.member_role)
team1.member_role.members.add(alice)
team2.member_role.members.add(bob)
return team1, team2
@pytest.mark.django_db
def test_team_project_list(get, project_factory, team_factory, admin, alice, bob):
'List of projects a team has access to, filtered by projects you can also see'
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
def test_team_project_list(get, team_project_list):
objects = team_project_list
team1, team2 = objects.teams.team1, objects.teams.team2
alice, bob, admin = objects.users.alice, objects.users.bob, objects.superusers.admin
# admins can see all projects on a team
assert get(reverse('api:team_projects_list', args=(team1.pk,)), admin).data['count'] == 2
@ -78,12 +70,6 @@ def test_team_project_list(get, project_factory, team_factory, admin, alice, bob
assert get(reverse('api:team_projects_list', args=(team2.pk,)), alice).data['count'] == 1
team2.read_role.members.remove(alice)
# Test user endpoints first, very similar tests to test_user_project_list
# but permissions are being derived from team membership instead.
with transaction.atomic():
res = get(reverse('api:user_projects_list', args=(bob.pk,)), alice)
assert res.status_code == 403
# admins can see all projects
assert get(reverse('api:user_projects_list', args=(admin.pk,)), admin).data['count'] == 3
@ -98,17 +84,11 @@ def test_team_project_list(get, project_factory, team_factory, admin, alice, bob
assert get(reverse('api:user_projects_list', args=(admin.pk,)), alice).data['count'] == 2
@pytest.mark.django_db
def test_team_project_list_fail1(get, project_factory, team_factory, admin, alice, bob):
# alice should not be able to see team2 projects because she doesn't have access to team2
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
res = get(reverse('api:team_projects_list', args=(team2.pk,)), alice)
def test_team_project_list_fail1(get, team_project_list):
objects = team_project_list
res = get(reverse('api:team_projects_list', args=(objects.teams.team2.pk,)), objects.users.alice)
assert res.status_code == 403
@pytest.mark.django_db
def test_team_project_list_fail2(get, project_factory, team_factory, admin, alice, bob):
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
# alice should not be able to see bob
@pytest.mark.parametrize("u,expected_status_code", [
('rando', 403),
('org_member', 403),

View File

@ -57,6 +57,28 @@ def test_get_roles_list_user(organization, inventory, team, get, user):
assert inventory.admin_role.id not in role_hash
assert team.member_role.id not in role_hash
@pytest.mark.django_db
def test_roles_visibility(get, organization, project, admin, alice, bob):
Role.singleton('system_auditor').members.add(alice)
assert get(reverse('api:role_list') + '?id=%d' % project.update_role.id, user=admin).data['count'] == 1
assert get(reverse('api:role_list') + '?id=%d' % project.update_role.id, user=alice).data['count'] == 1
assert get(reverse('api:role_list') + '?id=%d' % project.update_role.id, user=bob).data['count'] == 0
organization.auditor_role.members.add(bob)
assert get(reverse('api:role_list') + '?id=%d' % project.update_role.id, user=bob).data['count'] == 1
@pytest.mark.django_db
def test_roles_filter_visibility(get, organization, project, admin, alice, bob):
Role.singleton('system_auditor').members.add(alice)
project.update_role.members.add(admin)
assert get(reverse('api:user_roles_list', args=(admin.id,)) + '?id=%d' % project.update_role.id, user=admin).data['count'] == 1
assert get(reverse('api:user_roles_list', args=(admin.id,)) + '?id=%d' % project.update_role.id, user=alice).data['count'] == 1
assert get(reverse('api:user_roles_list', args=(admin.id,)) + '?id=%d' % project.update_role.id, user=bob).data['count'] == 0
organization.auditor_role.members.add(bob)
assert get(reverse('api:user_roles_list', args=(admin.id,)) + '?id=%d' % project.update_role.id, user=bob).data['count'] == 1
organization.auditor_role.members.remove(bob)
project.use_role.members.add(bob) # sibling role should still grant visibility
assert get(reverse('api:user_roles_list', args=(admin.id,)) + '?id=%d' % project.update_role.id, user=bob).data['count'] == 1
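# Editorial summary (not part of the diff): the two tests above pin down role
# visibility -- system auditors see every role, organization auditors see the
# roles of resources in their organization, and holding any sibling role on the
# same resource (e.g. project.use_role) is enough to see its other roles.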
@pytest.mark.django_db
def test_cant_create_role(post, admin):
@ -183,7 +205,7 @@ def test_get_teams_roles_list(get, team, organization, admin):
assert response.status_code == 200
roles = response.data
assert roles['count'] == 2
assert roles['count'] == 1
assert roles['results'][0]['id'] == organization.admin_role.id or roles['results'][1]['id'] == organization.admin_role.id
@ -406,9 +428,9 @@ def test_ensure_rbac_fields_are_present(organization, get, admin):
org = response.data
assert 'summary_fields' in org
assert 'roles' in org['summary_fields']
assert 'object_roles' in org['summary_fields']
role_pk = org['summary_fields']['roles']['admin_role']['id']
role_pk = org['summary_fields']['object_roles']['admin_role']['id']
role_url = reverse('api:role_detail', args=(role_pk,))
org_role_response = get(role_url, admin)
@ -416,17 +438,6 @@ def test_ensure_rbac_fields_are_present(organization, get, admin):
role = org_role_response.data
assert role['related']['organization'] == url
@pytest.mark.django_db
def test_ensure_permissions_is_present(organization, get, user):
url = reverse('api:organization_detail', args=(organization.id,))
response = get(url, user('admin', True))
assert response.status_code == 200
org = response.data
assert 'summary_fields' in org
assert 'active_roles' in org['summary_fields']
assert 'read_role' in org['summary_fields']['active_roles']
@pytest.mark.django_db
def test_ensure_role_summary_is_present(organization, get, user):
url = reverse('api:organization_detail', args=(organization.id,))
@ -435,5 +446,5 @@ def test_ensure_role_summary_is_present(organization, get, user):
org = response.data
assert 'summary_fields' in org
assert 'roles' in org['summary_fields']
assert org['summary_fields']['roles']['admin_role']['id'] > 0
assert 'object_roles' in org['summary_fields']
assert org['summary_fields']['object_roles']['admin_role']['id'] > 0

View File

@ -78,25 +78,6 @@ def test_team_symantics(organization, team, alice):
team.member_role.members.remove(alice)
assert alice not in organization.auditor_role
@pytest.mark.django_db
def test_auto_m2m_adjustments(organization, inventory, group_factory, alice):
'Ensures the auto role reparenting is working correctly through m2m maps'
g1 = group_factory(name='g1')
g1.admin_role.members.add(alice)
assert alice in g1.admin_role
g2 = group_factory(name='g2')
assert alice not in g2.admin_role
g2.parents.add(g1)
assert alice in g2.admin_role
g2.parents.remove(g1)
assert alice not in g2.admin_role
g1.children.add(g2)
assert alice in g2.admin_role
g1.children.remove(g2)
assert alice not in g2.admin_role
@pytest.mark.django_db
def test_auto_field_adjustments(organization, inventory, team, alice):

View File

@ -16,7 +16,20 @@ def test_credential_migration_user(credential, user, permissions):
rbac.migrate_credential(apps, None)
assert u in credential.owner_role
assert u in credential.admin_role
@pytest.mark.django_db
def test_two_teams_same_cred_name(organization_factory):
objects = organization_factory("test",
teams=["team1", "team2"])
cred1 = Credential.objects.create(name="test", kind="net", deprecated_team=objects.teams.team1)
cred2 = Credential.objects.create(name="test", kind="net", deprecated_team=objects.teams.team2)
rbac.migrate_credential(apps, None)
assert objects.teams.team1.member_role in cred1.admin_role.parents.all()
assert objects.teams.team2.member_role in cred2.admin_role.parents.all()
@pytest.mark.django_db
def test_credential_use_role(credential, user, permissions):
@ -33,14 +46,14 @@ def test_credential_migration_team_member(credential, team, user, permissions):
# No permissions pre-migration (this happens automatically so we patch this)
team.admin_role.children.remove(credential.owner_role)
team.admin_role.children.remove(credential.admin_role)
team.member_role.children.remove(credential.use_role)
assert u not in credential.owner_role
assert u not in credential.admin_role
rbac.migrate_credential(apps, None)
# Admin permissions post migration
assert u in credential.owner_role
assert u in credential.admin_role
@pytest.mark.django_db
def test_credential_migration_team_admin(credential, team, user, permissions):
@ -64,6 +77,17 @@ def test_credential_access_superuser():
assert access.can_change(credential, None)
assert access.can_delete(credential)
@pytest.mark.django_db
def test_credential_access_auditor(credential, organization_factory):
objects = organization_factory("org_cred_auditor",
users=["user1"],
roles=['org_cred_auditor.auditor_role:user1'])
credential.organization = objects.organization
credential.save()
access = CredentialAccess(objects.users.user1)
assert access.can_read(credential)
@pytest.mark.django_db
def test_credential_access_admin(user, team, credential):
u = user('org-admin', False)
@ -80,7 +104,7 @@ def test_credential_access_admin(user, team, credential):
# credential is now part of a team
# that is part of an organization
# that I am an admin for
credential.owner_role.parents.add(team.admin_role)
credential.admin_role.parents.add(team.admin_role)
credential.save()
cred = Credential.objects.create(kind='aws', name='test-cred')
@ -88,7 +112,47 @@ def test_credential_access_admin(user, team, credential):
cred.save()
# should have can_change access as org-admin
assert access.can_change(credential, {'user': u.pk})
assert access.can_change(credential, {'description': 'New description.'})
@pytest.mark.django_db
def test_org_credential_access_member(alice, org_credential, credential):
org_credential.admin_role.members.add(alice)
credential.admin_role.members.add(alice)
access = CredentialAccess(alice)
# Alice should be able to PATCH if organization is not changed
assert access.can_change(org_credential, {
'description': 'New description.',
'organization': org_credential.organization.pk})
assert access.can_change(org_credential, {
'description': 'New description.'})
assert access.can_change(credential, {
'description': 'New description.',
'organization': None})
@pytest.mark.django_db
def test_credential_access_org_permissions(
org_admin, org_member, organization, org_credential, credential):
credential.admin_role.members.add(org_admin)
credential.admin_role.members.add(org_member)
org_credential.admin_role.members.add(org_member)
access = CredentialAccess(org_admin)
member_access = CredentialAccess(org_member)
# Org admin can move their own credential into their org
assert access.can_change(credential, {'organization': organization.pk})
# Org member can not
assert not member_access.can_change(credential, {
'organization': organization.pk})
# Org admin can remove a credential from their org
assert access.can_change(org_credential, {'organization': None})
# Org member can not
assert not member_access.can_change(org_credential, {'organization': None})
assert not member_access.can_change(org_credential, {
'user': org_member.pk, 'organization': None})
@pytest.mark.django_db
def test_cred_job_template_xfail(user, deploy_jobtemplate):
@ -118,6 +182,9 @@ def test_cred_job_template(user, team, deploy_jobtemplate):
access = CredentialAccess(a)
rbac.migrate_credential(apps, None)
cred.refresh_from_db()
assert access.can_change(cred, {'organization': org.pk})
org.admin_role.members.remove(a)
@ -135,6 +202,8 @@ def test_cred_multi_job_template_single_org_xfail(user, deploy_jobtemplate):
access = CredentialAccess(a)
rbac.migrate_credential(apps, None)
cred.refresh_from_db()
assert not access.can_change(cred, {'organization': org.pk})
@pytest.mark.django_db
@ -149,6 +218,8 @@ def test_cred_multi_job_template_single_org(user, team, deploy_jobtemplate):
access = CredentialAccess(a)
rbac.migrate_credential(apps, None)
cred.refresh_from_db()
assert access.can_change(cred, {'organization': org.pk})
org.admin_role.members.remove(a)
@ -180,6 +251,7 @@ def test_single_cred_multi_job_template_multi_org(user, organizations, credentia
for jt in jts:
jt.refresh_from_db()
credential.refresh_from_db()
assert jts[0].credential != jts[1].credential
assert access.can_change(jts[0].credential, {'organization': org.pk})

View File

@ -6,12 +6,17 @@ from awx.main.models import (
Host,
CustomInventoryScript,
)
from awx.main.access import InventoryAccess, HostAccess
from awx.main.access import (
InventoryAccess,
HostAccess,
InventoryUpdateAccess
)
from django.apps import apps
@pytest.mark.django_db
def test_custom_inv_script_access(organization, user):
u = user('user', False)
ou = user('oadm', False)
custom_inv = CustomInventoryScript.objects.create(name='test', script='test', description='test')
custom_inv.organization = organization
@ -21,6 +26,9 @@ def test_custom_inv_script_access(organization, user):
organization.member_role.members.add(u)
assert u in custom_inv.read_role
organization.admin_role.members.add(ou)
assert ou in custom_inv.admin_role
@pytest.mark.django_db
def test_inventory_admin_user(inventory, permissions, user):
u = user('admin', False)
@ -126,6 +134,7 @@ def test_inventory_auditor(inventory, permissions, user, team):
assert u in inventory.read_role
assert u not in inventory.admin_role
@pytest.mark.django_db
def test_inventory_updater(inventory, permissions, user, team):
u = user('updater', False)
@ -169,29 +178,6 @@ def test_inventory_executor(inventory, permissions, user, team):
assert team.member_role.is_ancestor_of(inventory.update_role) is False
assert team.member_role.is_ancestor_of(inventory.use_role)
@pytest.mark.django_db
def test_group_parent_admin(group_factory, permissions, user):
u = user('admin', False)
parent1 = group_factory('parent-1')
parent2 = group_factory('parent-2')
childA = group_factory('child-1')
parent1.admin_role.members.add(u)
assert u in parent1.admin_role
assert u not in parent2.admin_role
assert u not in childA.admin_role
childA.parents.add(parent1)
assert u in childA.admin_role
childA.parents.remove(parent1)
assert u not in childA.admin_role
parent2.children.add(childA)
assert u not in childA.admin_role
parent2.admin_role.members.add(u)
assert u in childA.admin_role
@pytest.mark.django_db
def test_access_admin(organization, inventory, user):
@ -210,6 +196,7 @@ def test_access_admin(organization, inventory, user):
assert access.can_delete(inventory)
assert access.can_run_ad_hoc_commands(inventory)
@pytest.mark.django_db
def test_access_auditor(organization, inventory, user):
u = user('admin', False)
@ -227,45 +214,36 @@ def test_access_auditor(organization, inventory, user):
assert not access.can_delete(inventory)
assert not access.can_run_ad_hoc_commands(inventory)
@pytest.mark.django_db
def test_inventory_update_org_admin(inventory_update, org_admin):
access = InventoryUpdateAccess(org_admin)
assert access.can_delete(inventory_update)
@pytest.mark.django_db
def test_host_access(organization, inventory, user, group_factory):
def test_host_access(organization, inventory, group, user, group_factory):
other_inventory = organization.inventories.create(name='other-inventory')
inventory_admin = user('inventory_admin', False)
my_group = group_factory('my-group')
not_my_group = group_factory('not-my-group')
group_admin = user('group_admin', False)
inventory_admin_access = HostAccess(inventory_admin)
group_admin_access = HostAccess(group_admin)
h1 = Host.objects.create(inventory=inventory, name='host1')
h2 = Host.objects.create(inventory=inventory, name='host2')
h1.groups.add(my_group)
h2.groups.add(not_my_group)
host = Host.objects.create(inventory=inventory, name='host1')
host.groups.add(group)
assert inventory_admin_access.can_read(h1) is False
assert group_admin_access.can_read(h1) is False
assert inventory_admin_access.can_read(host) is False
inventory.admin_role.members.add(inventory_admin)
my_group.admin_role.members.add(group_admin)
assert inventory_admin_access.can_read(h1)
assert inventory_admin_access.can_read(h2)
assert group_admin_access.can_read(h1)
assert group_admin_access.can_read(h2) is False
assert inventory_admin_access.can_read(host)
my_group.hosts.remove(h1)
group.hosts.remove(host)
assert inventory_admin_access.can_read(h1)
assert group_admin_access.can_read(h1) is False
assert inventory_admin_access.can_read(host)
h1.inventory = other_inventory
h1.save()
host.inventory = other_inventory
host.save()
assert inventory_admin_access.can_read(h1) is False
assert group_admin_access.can_read(h1) is False
assert inventory_admin_access.can_read(host) is False

View File

@ -0,0 +1,161 @@
import pytest
from awx.main.access import (
JobAccess,
AdHocCommandAccess,
InventoryUpdateAccess,
ProjectUpdateAccess
)
from awx.main.models import (
Job,
AdHocCommand,
InventoryUpdate,
InventorySource,
ProjectUpdate
)
@pytest.fixture
def normal_job(deploy_jobtemplate):
return Job.objects.create(
job_template=deploy_jobtemplate,
project=deploy_jobtemplate.project,
inventory=deploy_jobtemplate.inventory
)
@pytest.fixture
def jt_user(deploy_jobtemplate, rando):
deploy_jobtemplate.execute_role.members.add(rando)
return rando
@pytest.fixture
def inv_updater(inventory, rando):
inventory.update_role.members.add(rando)
return rando
@pytest.fixture
def host_adhoc(host, machine_credential, rando):
host.inventory.adhoc_role.members.add(rando)
machine_credential.use_role.members.add(rando)
return rando
@pytest.fixture
def proj_updater(project, rando):
project.update_role.members.add(rando)
return rando
# Read permissions testing
@pytest.mark.django_db
def test_superuser_sees_orphans(normal_job, admin_user):
normal_job.job_template = None
access = JobAccess(admin_user)
assert access.can_read(normal_job)
@pytest.mark.django_db
def test_org_member_does_not_see_orphans(normal_job, org_member, project):
normal_job.job_template = None
# Check that privileged access to the project still does not grant access
project.admin_role.members.add(org_member)
access = JobAccess(org_member)
assert not access.can_read(normal_job)
@pytest.mark.django_db
def test_org_admin_sees_orphans(normal_job, org_admin):
normal_job.job_template = None
access = JobAccess(org_admin)
assert access.can_read(normal_job)
@pytest.mark.django_db
def test_org_auditor_sees_orphans(normal_job, org_auditor):
normal_job.job_template = None
access = JobAccess(org_auditor)
assert access.can_read(normal_job)
# Delete permissions testing
@pytest.mark.django_db
def test_JT_admin_delete_denied(normal_job, rando):
normal_job.job_template.admin_role.members.add(rando)
access = JobAccess(rando)
assert not access.can_delete(normal_job)
@pytest.mark.django_db
def test_inventory_admin_delete_denied(normal_job, rando):
normal_job.job_template.inventory.admin_role.members.add(rando)
access = JobAccess(rando)
assert not access.can_delete(normal_job)
@pytest.mark.django_db
def test_null_related_delete_denied(normal_job, rando):
normal_job.project = None
normal_job.inventory = None
access = JobAccess(rando)
assert not access.can_delete(normal_job)
@pytest.mark.django_db
def test_inventory_org_admin_delete_allowed(normal_job, org_admin):
normal_job.project = None # do this so we test job->inventory->org->admin connection
access = JobAccess(org_admin)
assert access.can_delete(normal_job)
@pytest.mark.django_db
def test_project_org_admin_delete_allowed(normal_job, org_admin):
normal_job.inventory = None # do this so we test job->project->org->admin connection
access = JobAccess(org_admin)
assert access.can_delete(normal_job)
@pytest.mark.django_db
class TestJobAndUpdateCancels:
# used in view: job_template_launch
def test_jt_self_cancel(self, deploy_jobtemplate, jt_user):
job = Job(job_template=deploy_jobtemplate, created_by=jt_user)
access = JobAccess(jt_user)
assert access.can_cancel(job)
def test_jt_friend_cancel(self, deploy_jobtemplate, admin_user, jt_user):
job = Job(job_template=deploy_jobtemplate, created_by=admin_user)
access = JobAccess(jt_user)
assert not access.can_cancel(job)
def test_jt_org_admin_cancel(self, deploy_jobtemplate, org_admin, jt_user):
job = Job(job_template=deploy_jobtemplate, created_by=jt_user)
access = JobAccess(org_admin)
assert access.can_cancel(job)
# used in view: host_ad_hoc_commands_list
def test_host_self_cancel(self, host, host_adhoc):
adhoc_command = AdHocCommand(inventory=host.inventory, created_by=host_adhoc)
access = AdHocCommandAccess(host_adhoc)
assert access.can_cancel(adhoc_command)
def test_host_friend_cancel(self, host, admin_user, host_adhoc):
adhoc_command = AdHocCommand(inventory=host.inventory, created_by=admin_user)
access = AdHocCommandAccess(host_adhoc)
assert not access.can_cancel(adhoc_command)
# used in view: inventory_source_update_view
def test_inventory_self_cancel(self, inventory, inv_updater):
inventory_update = InventoryUpdate(inventory_source=InventorySource(
name=inventory.name, inventory=inventory, source='gce'
), created_by=inv_updater)
access = InventoryUpdateAccess(inv_updater)
assert access.can_cancel(inventory_update)
def test_inventory_friend_cancel(self, inventory, admin_user, inv_updater):
inventory_update = InventoryUpdate(inventory_source=InventorySource(
name=inventory.name, inventory=inventory, source='gce'
), created_by=admin_user)
access = InventoryUpdateAccess(inv_updater)
assert not access.can_cancel(inventory_update)
# used in view: project_update_view
def test_project_self_cancel(self, project, proj_updater):
project_update = ProjectUpdate(project=project, created_by=proj_updater)
access = ProjectUpdateAccess(proj_updater)
assert access.can_cancel(project_update)
def test_project_friend_cancel(self, project, admin_user, proj_updater):
project_update = ProjectUpdate(project=project, created_by=admin_user)
access = ProjectUpdateAccess(proj_updater)
assert not access.can_cancel(project_update)

View File

@ -7,8 +7,18 @@ from awx.main.access import (
)
from awx.main.migrations import _rbac as rbac
from awx.main.models import Permission
from awx.main.models.jobs import JobTemplate
from django.apps import apps
from django.core.urlresolvers import reverse
@pytest.fixture
def jt_objects(job_template_factory):
objects = job_template_factory(
'testJT', organization='org1', project='proj1', inventory='inventory1',
credential='cred1', cloud_credential='aws1', network_credential='juniper1')
return objects
@pytest.mark.django_db
def test_job_template_migration_check(credential, deploy_jobtemplate, check_jobtemplate, user):
@ -155,3 +165,78 @@ def test_job_template_access_superuser(check_license, user, deploy_jobtemplate):
# THEN all access checks should pass
assert access.can_read(deploy_jobtemplate)
assert access.can_add({})
@pytest.mark.django_db
def test_job_template_access_read_level(jt_objects, rando):
access = JobTemplateAccess(rando)
jt_objects.project.read_role.members.add(rando)
jt_objects.inventory.read_role.members.add(rando)
jt_objects.credential.read_role.members.add(rando)
jt_objects.cloud_credential.read_role.members.add(rando)
jt_objects.network_credential.read_role.members.add(rando)
proj_pk = jt_objects.project.pk
assert not access.can_add(dict(inventory=jt_objects.inventory.pk, project=proj_pk))
assert not access.can_add(dict(credential=jt_objects.credential.pk, project=proj_pk))
assert not access.can_add(dict(cloud_credential=jt_objects.cloud_credential.pk, project=proj_pk))
assert not access.can_add(dict(network_credential=jt_objects.network_credential.pk, project=proj_pk))
@pytest.mark.django_db
def test_job_template_access_use_level(jt_objects, rando):
access = JobTemplateAccess(rando)
jt_objects.project.use_role.members.add(rando)
jt_objects.inventory.use_role.members.add(rando)
jt_objects.credential.use_role.members.add(rando)
jt_objects.cloud_credential.use_role.members.add(rando)
jt_objects.network_credential.use_role.members.add(rando)
proj_pk = jt_objects.project.pk
assert access.can_add(dict(inventory=jt_objects.inventory.pk, project=proj_pk))
assert access.can_add(dict(credential=jt_objects.credential.pk, project=proj_pk))
assert access.can_add(dict(cloud_credential=jt_objects.cloud_credential.pk, project=proj_pk))
assert access.can_add(dict(network_credential=jt_objects.network_credential.pk, project=proj_pk))
@pytest.mark.django_db
def test_job_template_access_org_admin(jt_objects, rando):
access = JobTemplateAccess(rando)
# Appoint this user as admin of the organization
jt_objects.inventory.organization.admin_role.members.add(rando)
# Assign organization permission in the same way the create view does
organization = jt_objects.inventory.organization
jt_objects.credential.admin_role.parents.add(organization.admin_role)
jt_objects.cloud_credential.admin_role.parents.add(organization.admin_role)
jt_objects.network_credential.admin_role.parents.add(organization.admin_role)
proj_pk = jt_objects.project.pk
assert access.can_add(dict(inventory=jt_objects.inventory.pk, project=proj_pk))
assert access.can_add(dict(credential=jt_objects.credential.pk, project=proj_pk))
assert access.can_add(dict(cloud_credential=jt_objects.cloud_credential.pk, project=proj_pk))
assert access.can_add(dict(network_credential=jt_objects.network_credential.pk, project=proj_pk))
assert access.can_read(jt_objects.job_template)
assert access.can_delete(jt_objects.job_template)
@pytest.mark.django_db
@pytest.mark.job_permissions
def test_job_template_creator_access(project, rando, post):
project.admin_role.members.add(rando)
with mock.patch(
'awx.main.models.projects.ProjectOptions.playbooks',
new_callable=mock.PropertyMock(return_value=['helloworld.yml'])):
response = post(reverse('api:job_template_list', args=[]), dict(
name='newly-created-jt',
job_type='run',
ask_inventory_on_launch=True,
ask_credential_on_launch=True,
project=project.pk,
playbook='helloworld.yml'
), rando)
assert response.status_code == 201
jt_pk = response.data['id']
jt_obj = JobTemplate.objects.get(pk=jt_pk)
# Creating a JT should place the creator in the admin role
assert rando in jt_obj.admin_role

View File

@ -31,20 +31,22 @@ def test_label_access_superuser(label, user):
assert access.can_delete(label)
@pytest.mark.django_db
def test_label_access_admin(label, user, organization_factory):
def test_label_access_admin(organization_factory):
'''can_change because I am an admin of that org'''
a = user('admin', False)
org_no_members = organization_factory("no_members")
org_members = organization_factory("has_members")
no_members = organization_factory("no_members")
members = organization_factory("has_members",
users=['admin'],
labels=['test'])
label.organization.admin_role.members.add(a)
org_members.admin_role.members.add(a)
label = members.labels.test
admin = members.users.admin
members.organization.admin_role.members.add(admin)
access = LabelAccess(user('admin', False))
assert not access.can_change(label, {'organization': org_no_members.id})
access = LabelAccess(admin)
assert not access.can_change(label, {'organization': no_members.organization.id})
assert access.can_read(label)
assert access.can_change(label, None)
assert access.can_change(label, {'organization': org_members.id})
assert access.can_change(label, {'organization': members.organization.id})
assert access.can_delete(label)
@pytest.mark.django_db

View File

@ -25,35 +25,44 @@ def test_notification_template_get_queryset_orgadmin(notification_template, user
assert access.get_queryset().count() == 1
@pytest.mark.django_db
def test_notification_template_access_superuser(notification_template, user, notification_template_factory):
access = NotificationTemplateAccess(user('admin', True))
assert access.can_read(notification_template)
assert access.can_change(notification_template, None)
assert access.can_delete(notification_template)
nf = notification_template_factory("test-orphaned")
def test_notification_template_access_superuser(notification_template_factory):
nf_objects = notification_template_factory('test-orphaned', organization='test', superusers=['admin'])
admin = nf_objects.superusers.admin
nf = nf_objects.notification_template
access = NotificationTemplateAccess(admin)
assert access.can_read(nf)
assert access.can_change(nf, None)
assert access.can_delete(nf)
nf.organization = None
nf.save()
assert access.can_read(nf)
assert access.can_change(nf, None)
assert access.can_delete(nf)
@pytest.mark.django_db
def test_notification_template_access_admin(notification_template, user, organization_factory, notification_template_factory):
adm = user('admin', False)
other_org = organization_factory('other')
present_org = organization_factory('present')
notification_template.organization.admin_role.members.add(adm)
present_org.admin_role.members.add(adm)
def test_notification_template_access_admin(organization_factory, notification_template_factory):
other_objects = organization_factory('other')
present_objects = organization_factory('present',
users=['admin'],
notification_templates=['test-notification'],
roles=['present.admin_role:admin'])
access = NotificationTemplateAccess(user('admin', False))
notification_template = present_objects.notification_templates.test_notification
other_org = other_objects.organization
present_org = present_objects.organization
admin = present_objects.users.admin
access = NotificationTemplateAccess(admin)
assert not access.can_change(notification_template, {'organization': other_org.id})
assert access.can_read(notification_template)
assert access.can_change(notification_template, None)
assert access.can_change(notification_template, {'organization': present_org.id})
assert access.can_delete(notification_template)
nf = notification_template_factory("test-orphaned")
nf.organization = None
nf.save()
assert not access.can_read(nf)
assert not access.can_change(nf, None)
assert not access.can_delete(nf)
@ -66,3 +75,9 @@ def test_notification_template_access_org_user(notification_template, user):
assert not access.can_read(notification_template)
assert not access.can_change(notification_template, None)
assert not access.can_delete(notification_template)
@pytest.mark.django_db
def test_notification_template_orphan_access_org_admin(notification_template, organization, org_admin):
notification_template.organization = None
access = NotificationTemplateAccess(org_admin)
assert not access.can_change(notification_template, {'organization': organization.id})

View File

@ -2,6 +2,7 @@ import pytest
from awx.main.migrations import _rbac as rbac
from awx.main.models import Role, Permission, Project, Organization, Credential, JobTemplate, Inventory
from awx.main.access import ProjectAccess
from django.apps import apps
from awx.main.migrations import _old_access as old_access
@ -209,3 +210,10 @@ def test_project_explicit_permission(user, team, project, organization):
rbac.migrate_projects(apps, None)
assert u in project.read_role
@pytest.mark.django_db
def test_create_project_foreign_org_admin(org_admin, organization, organization_factory):
"""Org admins can only create projects in their own org."""
other_org = organization_factory('not-my-org').organization
access = ProjectAccess(org_admin)
assert not access.can_add({'organization': other_org.pk, 'name': 'new-project'})

View File

@ -0,0 +1,32 @@
import pytest
from awx.main.access import (
RoleAccess,
UserAccess,
TeamAccess)
@pytest.mark.django_db
def test_team_access_attach(rando, team, inventory):
# rando is admin of the team
team.admin_role.members.add(rando)
inventory.read_role.members.add(rando)
# team has read_role for the inventory
team.member_role.children.add(inventory.read_role)
access = TeamAccess(rando)
data = {'id': inventory.admin_role.pk}
assert not access.can_attach(team, inventory.admin_role, 'member_role.children', data, False)
@pytest.mark.django_db
def test_user_access_attach(rando, inventory):
inventory.read_role.members.add(rando)
access = UserAccess(rando)
data = {'id': inventory.admin_role.pk}
assert not access.can_attach(rando, inventory.admin_role, 'roles', data, False)
@pytest.mark.django_db
def test_role_access_attach(rando, inventory):
inventory.read_role.members.add(rando)
access = RoleAccess(rando)
assert not access.can_attach(inventory.admin_role, rando, 'members', None)

View File

@ -90,3 +90,23 @@ def test_team_accessible_objects(team, user, project):
team.member_role.members.add(u)
assert len(Project.accessible_objects(u, 'read_role')) == 1
@pytest.mark.django_db
def test_team_admin_member_access(team, user, project):
u = user('team_admin', False)
team.member_role.children.add(project.use_role)
team.admin_role.members.add(u)
assert len(Project.accessible_objects(u, 'use_role')) == 1
@pytest.mark.django_db
def test_org_admin_team_access(organization, team, user, project):
u = user('team_admin', False)
organization.admin_role.members.add(u)
team.organization = organization
team.save()
team.member_role.children.add(project.use_role)
assert len(Project.accessible_objects(u, 'use_role')) == 1

View File

@ -3,8 +3,12 @@ import pytest
@pytest.mark.django_db()
def test_admin_not_member(team):
"Test to ensure we don't add admin_role as a parent to team.member_role, as "
"this creates a cycle with organization administration, which we've decided "
"to remove support for"
"""Test to ensure we don't add admin_role as a parent to team.member_role, as
this creates a cycle with organization administration, which we've decided
to remove support for
assert team.admin_role.is_ancestor_of(team.member_role) is False
(2016-06-16) I think this might have been resolved. I'm asserting
this to be true in the meantime.
"""
assert team.admin_role.is_ancestor_of(team.member_role) is True

View File

@ -269,14 +269,14 @@ class BaseJobTestMixin(BaseTestMixin):
password=TEST_SSH_KEY_DATA,
created_by=self.user_sue,
)
self.cred_sue.owner_role.members.add(self.user_sue)
self.cred_sue.admin_role.members.add(self.user_sue)
self.cred_sue_ask = Credential.objects.create(
username='sue',
password='ASK',
created_by=self.user_sue,
)
self.cred_sue_ask.owner_role.members.add(self.user_sue)
self.cred_sue_ask.admin_role.members.add(self.user_sue)
self.cred_sue_ask_many = Credential.objects.create(
username='sue',
@ -288,7 +288,7 @@ class BaseJobTestMixin(BaseTestMixin):
ssh_key_unlock='ASK',
created_by=self.user_sue,
)
self.cred_sue_ask_many.owner_role.members.add(self.user_sue)
self.cred_sue_ask_many.admin_role.members.add(self.user_sue)
self.cred_bob = Credential.objects.create(
username='bob',
@ -384,7 +384,7 @@ class BaseJobTestMixin(BaseTestMixin):
password='Heading0',
created_by = self.user_sue,
)
self.team_ops_north.member_role.children.add(self.cred_ops_north.owner_role)
self.team_ops_north.member_role.children.add(self.cred_ops_north.admin_role)
self.cred_ops_test = Credential.objects.create(
username='testers',

View File

@ -404,164 +404,6 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
del data[k]
return self.post(url, data, expect=expect)
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
def test_ad_hoc_command_list(self, ignore):
url = reverse('api:ad_hoc_command_list')
# Retrieve the empty list of ad hoc commands.
qs = AdHocCommand.objects.none()
self.check_get_list(url, 'admin', qs)
self.check_get_list(url, 'normal', qs)
self.check_get_list(url, 'other', qs)
self.check_get_list(url, 'nobody', qs)
self.check_get_list(url, None, qs, expect=401)
# Start a new ad hoc command. Only admin and normal user (org admin)
# can run commands by default.
with self.current_user('admin'):
response = self.run_test_ad_hoc_command()
self.assertEqual(response['job_type'], 'run')
self.assertEqual(response['inventory'], self.inventory.pk)
self.assertEqual(response['credential'], self.credential.pk)
self.assertEqual(response['module_name'], 'command')
self.assertEqual(response['module_args'], 'uptime')
self.assertEqual(response['limit'], '')
self.assertEqual(response['forks'], 0)
self.assertEqual(response['verbosity'], 0)
self.assertEqual(response['become_enabled'], False)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user('normal'):
self.run_test_ad_hoc_command()
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user('other'):
self.run_test_ad_hoc_command(expect=403)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user('nobody'):
self.run_test_ad_hoc_command(expect=403)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user(None):
self.run_test_ad_hoc_command(expect=401)
self.put(url, {}, expect=401)
self.patch(url, {}, expect=401)
self.delete(url, expect=401)
# Retrieve the list of ad hoc commands (only admin/normal can see by default).
qs = AdHocCommand.objects.all()
self.assertEqual(qs.count(), 2)
self.check_get_list(url, 'admin', qs)
self.check_get_list(url, 'normal', qs)
qs = AdHocCommand.objects.none()
self.check_get_list(url, 'other', qs)
self.check_get_list(url, 'nobody', qs)
self.check_get_list(url, None, qs, expect=401)
# Explicitly give other user updater permission on the inventory (still
# not allowed to run ad hoc commands).
user_roles_list_url = reverse('api:user_roles_list', args=(self.other_django_user.pk,))
with self.current_user('admin'):
response = self.post(user_roles_list_url, {"id": self.inventory.update_role.id}, expect=204)
with self.current_user('other'):
self.run_test_ad_hoc_command(expect=403)
self.check_get_list(url, 'other', qs)
# Add executor role permissions to other. Fails
# when other user can't read credential.
with self.current_user('admin'):
response = self.post(user_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
with self.current_user('other'):
self.run_test_ad_hoc_command(expect=403)
# Succeeds once other user has a readable credential. Other user can
# only see his own ad hoc command (because of credential permissions).
other_cred = self.create_test_credential(user=self.other_django_user)
with self.current_user('other'):
self.run_test_ad_hoc_command(credential=other_cred.pk)
qs = AdHocCommand.objects.filter(created_by=self.other_django_user)
self.assertEqual(qs.count(), 1)
self.check_get_list(url, 'other', qs)
# Explicitly give nobody user read permission on the inventory.
nobody_roles_list_url = reverse('api:user_roles_list', args=(self.nobody_django_user.pk,))
with self.current_user('admin'):
response = self.post(nobody_roles_list_url, {"id": self.inventory.read_role.id}, expect=204)
with self.current_user('nobody'):
self.run_test_ad_hoc_command(credential=other_cred.pk, expect=403)
self.check_get_list(url, 'other', qs)
# Create a cred for the nobody user, run an ad hoc command as the admin
# user with that cred. Nobody user can still not see the ad hoc command
# without the run_ad_hoc_commands permission flag.
nobody_cred = self.create_test_credential(user=self.nobody_django_user)
with self.current_user('admin'):
self.run_test_ad_hoc_command(credential=nobody_cred.pk)
qs = AdHocCommand.objects.none()
self.check_get_list(url, 'nobody', qs)
# Give the nobody user the run_ad_hoc_commands flag; the user can now see
# the one ad hoc command previously run.
with self.current_user('admin'):
response = self.post(nobody_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
qs = AdHocCommand.objects.filter(credential_id=nobody_cred.pk)
self.assertEqual(qs.count(), 1)
self.check_get_list(url, 'nobody', qs)
# Post without inventory (should fail).
with self.current_user('admin'):
self.run_test_ad_hoc_command(inventory=None, expect=400)
# Post without credential (should fail).
with self.current_user('admin'):
self.run_test_ad_hoc_command(credential=None, expect=400)
# Post with empty or unsupported module name (empty defaults to command).
with self.current_user('admin'):
response = self.run_test_ad_hoc_command(module_name=None)
self.assertEqual(response['module_name'], 'command')
with self.current_user('admin'):
response = self.run_test_ad_hoc_command(module_name='')
self.assertEqual(response['module_name'], 'command')
with self.current_user('admin'):
self.run_test_ad_hoc_command(module_name='transcombobulator', expect=400)
# Post with empty module args for shell/command modules (should fail),
# empty args for other modules ok.
with self.current_user('admin'):
self.run_test_ad_hoc_command(module_args=None, expect=400)
with self.current_user('admin'):
self.run_test_ad_hoc_command(module_name='shell', module_args=None, expect=400)
with self.current_user('admin'):
self.run_test_ad_hoc_command(module_name='shell', module_args='', expect=400)
with self.current_user('admin'):
self.run_test_ad_hoc_command(module_name='ping', module_args=None)
# Post with invalid values for other parameters.
with self.current_user('admin'):
self.run_test_ad_hoc_command(job_type='something', expect=400)
with self.current_user('admin'):
response = self.run_test_ad_hoc_command(job_type='check')
self.assertEqual(response['job_type'], 'check')
with self.current_user('admin'):
self.run_test_ad_hoc_command(verbosity=-1, expect=400)
with self.current_user('admin'):
self.run_test_ad_hoc_command(forks=-1, expect=400)
with self.current_user('admin'):
response = self.run_test_ad_hoc_command(become_enabled=True)
self.assertEqual(response['become_enabled'], True)
# Try to run with expired license.
self.create_expired_license_file()
with self.current_user('admin'):
self.run_test_ad_hoc_command(expect=403)
with self.current_user('normal'):
self.run_test_ad_hoc_command(expect=403)
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
def test_ad_hoc_command_detail(self, ignore):
@ -953,98 +795,6 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
self.patch(url, {}, expect=401)
self.delete(url, expect=401)
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
def test_inventory_ad_hoc_commands_list(self, ignore):
with self.current_user('admin'):
response = self.run_test_ad_hoc_command()
response = self.run_test_ad_hoc_command(inventory=self.inventory2.pk)
# Test the ad hoc commands list for an inventory. Should only return
# the ad hoc command(s) run against that inventory. Posting should
# start a new ad hoc command and always set the inventory from the URL.
url = reverse('api:inventory_ad_hoc_commands_list', args=(self.inventory.pk,))
inventory_url = reverse('api:inventory_detail', args=(self.inventory.pk,))
with self.current_user('admin'):
response = self.get(url, expect=200)
self.assertEqual(response['count'], 1)
response = self.run_test_ad_hoc_command(url=url, inventory=None, expect=201)
self.assertEqual(response['inventory'], self.inventory.pk)
response = self.run_test_ad_hoc_command(url=url, inventory=self.inventory2.pk, expect=201)
self.assertEqual(response['inventory'], self.inventory.pk)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
response = self.get(inventory_url, expect=200)
self.assertTrue(response['can_run_ad_hoc_commands'])
with self.current_user('normal'):
response = self.get(url, expect=200)
self.assertEqual(response['count'], 3)
response = self.run_test_ad_hoc_command(url=url, inventory=None, expect=201)
self.assertEqual(response['inventory'], self.inventory.pk)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
response = self.get(inventory_url, expect=200)
self.assertTrue(response['can_run_ad_hoc_commands'])
with self.current_user('other'):
self.get(url, expect=403)
self.post(url, {}, expect=403)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user('nobody'):
self.get(url, expect=403)
self.post(url, {}, expect=403)
self.put(url, {}, expect=405)
self.patch(url, {}, expect=405)
self.delete(url, expect=405)
with self.current_user(None):
self.get(url, expect=401)
self.post(url, {}, expect=401)
self.put(url, {}, expect=401)
self.patch(url, {}, expect=401)
self.delete(url, expect=401)
# Create another unrelated inventory permission with run_ad_hoc_commands
# set; this tests an edge case in the RBAC query where we'll return
# can_run_ad_hoc_commands = True when we shouldn't.
nobody_roles_list_url = reverse('api:user_roles_list', args=(self.nobody_django_user.pk,))
with self.current_user('admin'):
response = self.post(nobody_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
# Create a credential for the other user and explicitly give other
# user admin permission on the inventory (still not allowed to run ad
# hoc commands; can get the list but can't see any items).
other_cred = self.create_test_credential(user=self.other_django_user)
user_roles_list_url = reverse('api:user_roles_list', args=(self.other_django_user.pk,))
with self.current_user('admin'):
response = self.post(user_roles_list_url, {"id": self.inventory.update_role.id}, expect=204)
with self.current_user('other'):
response = self.get(url, expect=200)
self.assertEqual(response['count'], 0)
response = self.get(inventory_url, expect=200)
self.assertFalse(response['can_run_ad_hoc_commands'])
self.run_test_ad_hoc_command(url=url, inventory=None, credential=other_cred.pk, expect=403)
# Update permission to allow other user to run ad hoc commands. Can
# only see his own ad hoc commands (because of credential permission).
with self.current_user('admin'):
response = self.post(user_roles_list_url, {"id": self.inventory.adhoc_role.id}, expect=204)
with self.current_user('other'):
response = self.get(url, expect=200)
self.assertEqual(response['count'], 0)
self.run_test_ad_hoc_command(url=url, inventory=None, credential=other_cred.pk, expect=201)
response = self.get(url, expect=200)
self.assertEqual(response['count'], 1)
response = self.get(inventory_url, expect=200)
self.assertTrue(response['can_run_ad_hoc_commands'])
# Try to run with expired license.
self.create_expired_license_file()
with self.current_user('admin'):
self.run_test_ad_hoc_command(url=url, expect=403)
with self.current_user('normal'):
self.run_test_ad_hoc_command(url=url, expect=403)
def test_host_ad_hoc_commands_list(self):
# TODO: Figure out why this test needs pexpect

View File

@ -938,31 +938,6 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
self.assertNotEqual(new_inv.total_groups, 0)
self.assertElapsedLessThan(60)
@unittest.skipIf(True,
'This test is deprecated and being removed from '
'integration and unit tests in favor of writing '
'an explicit unit test around what the original '
'problem was')
def test_splunk_inventory(self):
new_inv = self.organizations[0].inventories.create(name='splunk')
self.assertEqual(new_inv.hosts.count(), 0)
self.assertEqual(new_inv.groups.count(), 0)
inv_file = os.path.join(os.path.dirname(__file__), '..', '..', 'data',
'splunk_inventory.py')
result, stdout, stderr = self.run_command('inventory_import',
inventory_id=new_inv.pk,
source=inv_file, verbosity=0)
self.assertEqual(result, None, stdout + stderr)
# Check that inventory is populated as expected within a reasonable
# amount of time. Computed fields should also be updated.
new_inv = Inventory.objects.get(pk=new_inv.pk)
self.assertNotEqual(new_inv.hosts.count(), 0)
self.assertNotEqual(new_inv.groups.count(), 0)
self.assertNotEqual(new_inv.total_hosts, 0)
self.assertNotEqual(new_inv.total_groups, 0)
self.assertElapsedLessThan(600)
def _get_ngroups_for_nhosts(self, n):
if n > 0:
return min(n, 10) + ((n - 1) / 10 + 1) + ((n - 1) / 100 + 1) + ((n - 1) / 1000 + 1)

View File

@ -285,10 +285,10 @@ class InventoryTest(BaseTest):
got = self.get(inventory_scripts, expect=200, auth=self.get_super_credentials())
self.assertEquals(got['count'], 1)
new_failed_script = dict(name="Shouldfail", description="This test should fail", script=TEST_SIMPLE_INVENTORY_SCRIPT, organization=self.organizations[0].id)
self.post(inventory_scripts, data=new_failed_script, expect=403, auth=self.get_normal_credentials())
new_failed_script = dict(name="Should not fail", description="This test should not fail", script=TEST_SIMPLE_INVENTORY_SCRIPT, organization=self.organizations[0].id)
self.post(inventory_scripts, data=new_failed_script, expect=201, auth=self.get_normal_credentials())
failed_no_shebang = dict(name="ShouldAlsoFail", descript="This test should also fail", script=TEST_SIMPLE_INVENTORY_SCRIPT_WITHOUT_HASHBANG,
failed_no_shebang = dict(name="ShouldFail", descript="This test should fail", script=TEST_SIMPLE_INVENTORY_SCRIPT_WITHOUT_HASHBANG,
organization=self.organizations[0].id)
self.post(inventory_scripts, data=failed_no_shebang, expect=400, auth=self.get_super_credentials())
@ -1424,80 +1424,6 @@ class InventoryUpdatesTest(BaseTransactionTest):
response = self.put(inv_src_url2, inv_src_data, expect=200)
self.assertEqual(response['source_regions'], 'ORD,IAD')
def test_post_inventory_source_update(self):
creds_url = reverse('api:credential_list')
inv_src_url = reverse('api:inventory_source_detail',
args=(self.group.inventory_source.pk,))
inv_src_update_url = reverse('api:inventory_source_update_view',
args=(self.group.inventory_source.pk,))
# Create a credential to use for this inventory source.
aws_cred_data = {
'name': 'AWS key that does not need to have valid info because we '
'do not care if the update actually succeeds',
'kind': 'aws',
'user': self.super_django_user.pk,
'username': 'aws access key id goes here',
'password': 'aws secret access key goes here',
}
with self.current_user(self.super_django_user):
aws_cred_response = self.post(creds_url, aws_cred_data, expect=201)
aws_cred_id = aws_cred_response['id']
# Update the inventory source to use EC2.
inv_src_data = {
'source': 'ec2',
'credential': aws_cred_id,
}
with self.current_user(self.super_django_user):
self.put(inv_src_url, inv_src_data, expect=200)
# Read the inventory source, verify the update URL returns can_update.
with self.current_user(self.super_django_user):
self.get(inv_src_url, expect=200)
response = self.get(inv_src_update_url, expect=200)
self.assertTrue(response['can_update'])
# Now do the update.
with self.current_user(self.super_django_user):
self.post(inv_src_update_url, {}, expect=202)
# Normal user should be allowed as an org admin.
with self.current_user(self.normal_django_user):
self.get(inv_src_url, expect=200)
response = self.get(inv_src_update_url, expect=200)
self.assertTrue(response['can_update'])
with self.current_user(self.normal_django_user):
self.post(inv_src_update_url, {}, expect=202)
# Other user should be denied as only an org user.
with self.current_user(self.other_django_user):
self.get(inv_src_url, expect=403)
response = self.get(inv_src_update_url, expect=403)
with self.current_user(self.other_django_user):
self.post(inv_src_update_url, {}, expect=403)
# If given read permission to the inventory, other user should be able
# to see the inventory source and update view, but not start an update.
user_roles_list_url = reverse('api:user_roles_list', args=(self.other_django_user.pk,))
with self.current_user(self.super_django_user):
self.post(user_roles_list_url, {"id": self.inventory.read_role.id}, expect=204)
with self.current_user(self.other_django_user):
self.get(inv_src_url, expect=200)
response = self.get(inv_src_update_url, expect=200)
with self.current_user(self.other_django_user):
self.post(inv_src_update_url, {}, expect=403)
# Once given write permission, the normal user is able to update the
# inventory source.
with self.current_user(self.super_django_user):
self.post(user_roles_list_url, {"id": self.inventory.admin_role.id}, expect=204)
with self.current_user(self.other_django_user):
self.get(inv_src_url, expect=200)
response = self.get(inv_src_update_url, expect=200)
# FIXME: This is misleading, as an update would fail...
self.assertTrue(response['can_update'])
with self.current_user(self.other_django_user):
self.post(inv_src_update_url, {}, expect=202)
# Nobody user should be denied as well.
with self.current_user(self.nobody_django_user):
self.get(inv_src_url, expect=403)
response = self.get(inv_src_update_url, expect=403)
with self.current_user(self.nobody_django_user):
self.post(inv_src_update_url, {}, expect=403)
def test_update_from_ec2(self):
source_username = getattr(settings, 'TEST_AWS_ACCESS_KEY_ID', '')
source_password = getattr(settings, 'TEST_AWS_SECRET_ACCESS_KEY', '')
@ -1508,7 +1434,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
credential = Credential.objects.create(kind='aws',
username=source_username,
password=source_password)
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
# Set parent group name to one that might be created by the sync.
group = self.group
group.name = 'ec2'
@ -1595,7 +1521,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
username=source_username,
password=source_password,
security_token=source_token)
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
# Set parent group name to one that might be created by the sync.
group = self.group
group.name = 'ec2'
@ -1617,7 +1543,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
username=source_username,
password=source_password,
security_token="BADTOKEN")
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
# Set parent group name to one that might be created by the sync.
group = self.group
@ -1652,7 +1578,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
credential = Credential.objects.create(kind='aws',
username=source_username,
password=source_password)
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
group = self.group
group.name = 'AWS Inventory'
group.save()
@ -1770,6 +1696,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.assertFalse(inventory_update.name.endswith(inventory_update.inventory_source.name), inventory_update.name)
def test_update_from_rax(self):
self.skipTest('Skipping until we can resolve the CERTIFICATE_VERIFY_FAILED issue: #1706')
source_username = getattr(settings, 'TEST_RACKSPACE_USERNAME', '')
source_password = getattr(settings, 'TEST_RACKSPACE_API_KEY', '')
source_regions = getattr(settings, 'TEST_RACKSPACE_REGIONS', '')
@ -1779,7 +1706,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
credential = Credential.objects.create(kind='rax',
username=source_username,
password=source_password)
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
# Set parent group name to one that might be created by the sync.
group = self.group
group.name = 'DFW'
@ -1832,7 +1759,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
username=source_username,
password=source_password,
host=source_host)
credential.owner_role.members.add(self.super_django_user)
credential.admin_role.members.add(self.super_django_user)
inventory_source = self.update_inventory_source(self.group,
source='vmware', credential=credential)
# Check first without instance_id set (to import by name only).

View File

@ -961,7 +961,7 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
self.assertEqual(jobs_qs.count(), 7)
job = jobs_qs[0]
self.assertEqual(job.launch_type, 'callback')
self.assertEqual(job.limit, ':&'.join([job_template.limit, host.name]))
self.assertEqual(job.limit, host.name)
self.assertEqual(job.hosts.count(), 1)
self.assertEqual(job.hosts.all()[0], host)
@ -1058,7 +1058,7 @@ class JobTransactionTest(BaseJobTestMixin, django.test.LiveServerTestCase):
data = json.loads(response.content)
if data.get('status', '') not in ('new', 'pending', 'running'):
break
except Exception, e:
except Exception as e:
errors.append(e)
break

View File

@ -506,7 +506,7 @@ class ProjectUpdatesTest(BaseTransactionTest):
u = kw['user']
del kw['user']
credential = Credential.objects.create(**kw)
credential.owner_role.members.add(u)
credential.admin_role.members.add(u)
kwargs['credential'] = credential
project = Project.objects.create(**kwargs)
project_path = project.get_project_path(check_if_exists=False)
@ -1418,7 +1418,7 @@ class ProjectUpdatesTest(BaseTransactionTest):
inventory=self.inventory)
self.group.hosts.add(self.host)
self.credential = Credential.objects.create(name='test-creds')
self.credential.owner_role.members.add(self.super_django_user)
self.credential.admin_role.members.add(self.super_django_user)
self.project = self.create_project(
name='my public git project over https',
scm_type='git',
@ -1454,7 +1454,7 @@ class ProjectUpdatesTest(BaseTransactionTest):
inventory=self.inventory)
self.group.hosts.add(self.host)
self.credential = Credential.objects.create(name='test-creds')
self.credential.owner_role.members.add(self.super_django_user)
self.credential.admin_role.members.add(self.super_django_user)
self.project = self.create_project(
name='my private git project over https',
scm_type='git',

View File

@ -62,7 +62,7 @@ class ScheduleTest(BaseTest):
self.organizations[1].member_role.members.add(self.diff_org_user)
self.cloud_source = Credential.objects.create(kind='awx', username='Dummy', password='Dummy')
self.cloud_source.owner_role.members.add(self.super_django_user)
self.cloud_source.admin_role.members.add(self.super_django_user)
self.first_inventory = Inventory.objects.create(name='test_inventory', description='for org 0', organization=self.organizations[0])
self.first_inventory.hosts.create(name='host_1')

View File

@ -283,7 +283,7 @@ class RunJobTest(BaseJobExecutionTest):
user = opts['user']
del opts['user']
self.cloud_credential = Credential.objects.create(**opts)
self.cloud_credential.owner_role.members.add(user)
self.cloud_credential.admin_role.members.add(user)
return self.cloud_credential
def create_test_project(self, playbook_content, role_playbooks=None):

View File

@ -123,28 +123,30 @@ class TestSubListCreateAttachDetachAPIView:
view.unattach_by_id.assert_not_called()
class TestDeleteLastUnattachLabelMixin:
@mock.patch('awx.api.generics.super')
@mock.patch('__builtin__.super')
def test_unattach_ok(self, super, mocker):
mock_request = mocker.MagicMock()
mock_sub_id = mocker.MagicMock()
super.return_value = super
super.unattach_validate = mocker.MagicMock(return_value=(mock_sub_id, None))
super.unattach_by_id = mocker.MagicMock()
mock_label = mocker.patch('awx.api.generics.Label')
mock_label.objects.get.return_value = mock_label
mock_label.is_detached.return_value = True
mock_model = mocker.MagicMock()
mock_model.objects.get.return_value = mock_model
mock_model.is_detached.return_value = True
view = DeleteLastUnattachLabelMixin()
view.model = mock_model
view.unattach(mock_request, None, None)
super.unattach_validate.assert_called_with(mock_request, None, None)
super.unattach_validate.assert_called_with(mock_request)
super.unattach_by_id.assert_called_with(mock_request, mock_sub_id)
mock_label.is_detached.assert_called_with()
mock_label.objects.get.assert_called_with(id=mock_sub_id)
mock_label.delete.assert_called_with()
mock_model.is_detached.assert_called_with()
mock_model.objects.get.assert_called_with(id=mock_sub_id)
mock_model.delete.assert_called_with()
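Swapping the hard-coded Label mock for the view's own model makes the mixin's contract easier to read: validate the unattach request, perform it, then delete the related object once nothing references it any more. A hedged, self-contained sketch of that pattern (not the real DeleteLastUnattachLabelMixin):

# The pattern under test, in miniature: after detaching, clean up the object
# if it is no longer attached anywhere. All names here are illustrative.
class InMemoryAttachments(object):
    def __init__(self):
        self.owners_by_obj = {}  # object id -> set of owners still referencing it

    def unattach(self, obj_id, owner):
        owners = self.owners_by_obj.get(obj_id, set())
        owners.discard(owner)
        if not owners:                        # "is_detached": no references remain
            self.owners_by_obj.pop(obj_id, None)
            return 'deleted'
        self.owners_by_obj[obj_id] = owners
        return 'unattached'

store = InMemoryAttachments()
store.owners_by_obj[42] = {'jt-1', 'jt-2'}
assert store.unattach(42, 'jt-1') == 'unattached'  # still used by jt-2
assert store.unattach(42, 'jt-2') == 'deleted'     # last reference gone -> delete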
@mock.patch('awx.api.generics.super')
@mock.patch('__builtin__.super')
def test_unattach_fail(self, super, mocker):
mock_request = mocker.MagicMock()
mock_response = mocker.MagicMock()
@ -154,7 +156,7 @@ class TestDeleteLastUnattachLabelMixin:
res = view.unattach(mock_request, None, None)
super.unattach_validate.assert_called_with(mock_request, None, None)
super.unattach_validate.assert_called_with(mock_request)
assert mock_response == res
class TestParentMixin:

View File

@ -1,6 +1,7 @@
# Python
import pytest
import mock
import json
# AWX
from awx.api.serializers import JobTemplateSerializer, JobSerializer, JobOptionsSerializer
@ -9,9 +10,14 @@ from awx.main.models import Label, Job
#DRF
from rest_framework import serializers
def mock_JT_resource_data():
return ({}, [])
@pytest.fixture
def job_template(mocker):
return mocker.MagicMock(pk=5)
mock_jt = mocker.MagicMock(pk=5)
mock_jt.resource_validation_data = mock_JT_resource_data
return mock_jt
@pytest.fixture
def job(mocker, job_template):
@ -140,6 +146,21 @@ class TestJobSerializerGetRelated(GetRelatedMixin):
assert 'job_template' in related
assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk)
@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: {
'extra_vars': obj.extra_vars})
class TestJobSerializerSubstitution():
def test_survey_password_hide(self, mocker):
job = mocker.MagicMock(**{
'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'})
serializer = JobSerializer(job)
rep = serializer.to_representation(job)
extra_vars = json.loads(rep['extra_vars'])
assert extra_vars['secret_key'] == '$encrypted$'
job.display_extra_vars.assert_called_once_with()
assert 'my_password' not in extra_vars
@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {})
class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin):
def test__summary_field_labels_10_max(self, mocker, job_template, labels):

View File

@ -1,9 +1,21 @@
import mock
import pytest
from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
from django.contrib.contenttypes.models import ContentType
from awx.api.views import (
ApiV1RootView,
TeamRolesList,
JobTemplateLabelList,
)
from awx.main.models import (
User,
Role,
)
@pytest.fixture
def mock_response_new(mocker):
@ -11,7 +23,6 @@ def mock_response_new(mocker):
m.return_value = m
return m
class TestApiV1RootView:
def test_get_endpoints(self, mocker, mock_response_new):
endpoints = [
@ -52,3 +63,35 @@ class TestApiV1RootView:
for endpoint in endpoints:
assert endpoint in data_arg
class TestJobTemplateLabelList:
def test_inherited_mixin_unattach(self):
with mock.patch('awx.api.generics.DeleteLastUnattachLabelMixin.unattach') as mixin_unattach:
view = JobTemplateLabelList()
mock_request = mock.MagicMock()
super(JobTemplateLabelList, view).unattach(mock_request, None, None)
assert mixin_unattach.called_with(mock_request, None, None)
@pytest.mark.parametrize("url", ["/team/1/roles", "/role/1/teams"])
def test_team_roles_list_post_org_roles(url):
with mock.patch('awx.api.views.Role.objects.get') as role_get, \
mock.patch('awx.api.views.ContentType.objects.get_for_model') as ct_get:
role_mock = mock.MagicMock(spec=Role)
content_type_mock = mock.MagicMock(spec=ContentType)
role_mock.content_type = content_type_mock
role_get.return_value = role_mock
ct_get.return_value = content_type_mock
factory = APIRequestFactory()
view = TeamRolesList.as_view()
request = factory.post(url, {'id':1}, format="json")
force_authenticate(request, User(username="root", is_superuser=True))
response = view(request)
response.render()
assert response.status_code == 400
assert 'cannot assign' in response.content

View File

@ -0,0 +1,40 @@
import pytest
def test_missing_project_error(job_template_factory):
objects = job_template_factory(
'missing-project-jt',
organization='org1',
inventory='inventory1',
credential='cred1',
persisted=False)
obj = objects.job_template
assert 'project' in obj.resources_needed_to_start
validation_errors, resources_needed_to_start = obj.resource_validation_data()
assert 'project' in validation_errors
def test_inventory_credential_need_to_start(job_template_factory):
objects = job_template_factory(
'job-template-few-resources',
project='project1',
persisted=False)
obj = objects.job_template
assert 'inventory' in obj.resources_needed_to_start
assert 'credential' in obj.resources_needed_to_start
def test_inventory_credential_contradictions(job_template_factory):
objects = job_template_factory(
'job-template-paradox',
project='project1',
persisted=False)
obj = objects.job_template
obj.ask_inventory_on_launch = False
obj.ask_credential_on_launch = False
validation_errors, resources_needed_to_start = obj.resource_validation_data()
assert 'inventory' in validation_errors
assert 'credential' in validation_errors
@pytest.mark.survey
def test_survey_password_list(job_with_secret_key_unit):
"""Verify that survey_password_variables method gives a list of survey passwords"""
assert job_with_secret_key_unit.job_template.survey_password_variables() == ['secret_key', 'SSN']
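These tests exercise two related job-template helpers: resources_needed_to_start (what is missing outright) and resource_validation_data (what actually blocks a launch once ask-at-launch flags are considered). A rough sketch of how such a helper could be shaped, using a plain dict and entirely hypothetical field names rather than the real JobTemplate model:

# Hypothetical shape of a resource-validation helper for a job template.
def resource_validation_data(jt):
    """Return (validation_errors, resources_needed_to_start) for a dict-like template."""
    needed = [f for f in ('project', 'inventory', 'credential') if not jt.get(f)]
    errors = {}
    for f in needed:
        # project can never be prompted for at launch; the others only block
        # startup when the corresponding ask_*_on_launch flag is off.
        if f == 'project' or not jt.get('ask_%s_on_launch' % f, False):
            errors[f] = ['job template %s is missing or undefined' % f]
    return errors, needed

errors, needed = resource_validation_data({'project': 'project1'})
assert set(needed) == {'inventory', 'credential'}
assert 'inventory' in errors and 'credential' in errors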

View File

@ -0,0 +1,39 @@
import pytest
import json
from awx.main.tasks import RunJob
@pytest.fixture
def job(mocker):
return mocker.MagicMock(**{
'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
'extra_vars_dict': {"secret_key": "my_password"},
'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
'created_by.pk': 1, 'created_by.username': 'admin',
'launch_type': 'manual'})
@pytest.mark.survey
def test_job_redacted_extra_vars(job_with_secret_key_unit):
"""Verify that this method redacts vars marked as passwords in a survey"""
assert json.loads(job_with_secret_key_unit.display_extra_vars()) == {
'submitter_email': 'foobar@redhat.com',
'secret_key': '$encrypted$',
'SSN': '$encrypted$'}
def test_job_safe_args_redacted_passwords(job):
"""Verify that safe_args hides passwords in the job extra_vars"""
kwargs = {'ansible_version': '2.1'}
run_job = RunJob()
safe_args = run_job.build_safe_args(job, **kwargs)
ev_index = safe_args.index('-e') + 1
extra_vars = json.loads(safe_args[ev_index])
assert extra_vars['secret_key'] == '$encrypted$'
def test_job_args_unredacted_passwords(job):
kwargs = {'ansible_version': '2.1'}
run_job = RunJob()
args = run_job.build_args(job, **kwargs)
ev_index = args.index('-e') + 1
extra_vars = json.loads(args[ev_index])
assert extra_vars['secret_key'] == 'my_password'
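Both of the new password tests pin the same contract: the loggable argument list (safe_args) must carry redacted extra_vars, while the real argument list keeps the plaintext values. A small standalone sketch of that split, assuming only the '-e <json>' convention visible in the assertions; RunJob's actual build_args/build_safe_args are more involved.

import json

REDACTED = '$encrypted$'

def build_args_sketch(extra_vars):
    # Real invocation: pass plaintext extra vars to ansible-playbook via -e.
    return ['ansible-playbook', '-e', json.dumps(extra_vars), 'site.yml']

def build_safe_args_sketch(extra_vars, survey_password_keys):
    # Loggable variant: survey password answers are replaced before serializing.
    redacted = dict(extra_vars)
    for key in survey_password_keys:
        if key in redacted:
            redacted[key] = REDACTED
    return ['ansible-playbook', '-e', json.dumps(redacted), 'site.yml']

ev = {'secret_key': 'my_password', 'submitter_email': 'foobar@redhat.com'}
safe = build_safe_args_sketch(ev, ['secret_key'])
assert json.loads(safe[safe.index('-e') + 1])['secret_key'] == REDACTED
assert json.loads(build_args_sketch(ev)[2])['secret_key'] == 'my_password'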

View File

@ -1,10 +1,36 @@
import pytest
import mock
from django.contrib.auth.models import User
from django.forms.models import model_to_dict
from awx.main.access import (
BaseAccess,
check_superuser,
JobTemplateAccess,
)
from awx.main.models import Credential, Inventory, Project, Role, Organization
@pytest.fixture
def job_template_with_ids(job_template_factory):
# Create non-persisted objects with IDs to send to job_template_factory
credential = Credential(id=1, pk=1, name='testcred', kind='ssh')
net_cred = Credential(id=2, pk=2, name='testnetcred', kind='net')
cloud_cred = Credential(id=3, pk=3, name='testcloudcred', kind='aws')
inv = Inventory(id=11, pk=11, name='testinv')
proj = Project(id=14, pk=14, name='testproj')
jt_objects = job_template_factory(
'testJT', project=proj, inventory=inv, credential=credential,
cloud_credential=cloud_cred, network_credential=net_cred,
persisted=False)
return jt_objects.job_template
@pytest.fixture
def user_unit():
return User(username='rando', password='raginrando', email='rando@redhat.com')
def test_superuser(mocker):
user = mocker.MagicMock(spec=User, id=1, is_superuser=True)
access = BaseAccess(user)
@ -19,3 +45,68 @@ def test_not_superuser(mocker):
can_add = check_superuser(BaseAccess.can_add)
assert can_add(access, None) is False
def test_jt_existing_values_are_nonsensitive(job_template_with_ids, user_unit):
"""Assure that permission checks are not required if submitted data is
identical to what the job template already has."""
data = model_to_dict(job_template_with_ids)
access = JobTemplateAccess(user_unit)
assert access.changes_are_non_sensitive(job_template_with_ids, data)
def test_change_jt_sensitive_data(job_template_with_ids, mocker, user_unit):
"""Assure that can_add is called with all ForeignKeys."""
job_template_with_ids.admin_role = Role()
data = {'inventory': job_template_with_ids.inventory.id + 1}
access = JobTemplateAccess(user_unit)
mock_add = mock.MagicMock(return_value=False)
with mock.patch('awx.main.models.rbac.Role.__contains__', return_value=True):
with mocker.patch('awx.main.access.JobTemplateAccess.can_add', mock_add):
with mocker.patch('awx.main.access.JobTemplateAccess.can_read', return_value=True):
assert not access.can_change(job_template_with_ids, data)
mock_add.assert_called_once_with({
'inventory': data['inventory'],
'project': job_template_with_ids.project.id,
'credential': job_template_with_ids.credential.id,
'cloud_credential': job_template_with_ids.cloud_credential.id,
'network_credential': job_template_with_ids.network_credential.id
})
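The idea behind changes_are_non_sensitive, as these tests read it: if the submitted payload matches what the template already stores, no further permission checks are needed; once a sensitive foreign key changes, can_add is re-run with the full set of related IDs. A toy version of the first half of that check (the field list is only an example):

# Toy "did anything sensitive actually change?" check.
SENSITIVE_FIELDS = ('inventory', 'project', 'credential',
                    'cloud_credential', 'network_credential')

def changes_are_non_sensitive(existing, submitted):
    """True when every submitted sensitive field equals the stored value."""
    for field in SENSITIVE_FIELDS:
        if field in submitted and submitted[field] != existing.get(field):
            return False
    return True

existing = {'inventory': 11, 'project': 14, 'credential': 1}
assert changes_are_non_sensitive(existing, dict(existing))         # identical data
assert not changes_are_non_sensitive(existing, {'inventory': 12})  # FK changed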
def test_jt_add_scan_job_check(job_template_with_ids, user_unit):
"Assure that permissions to add scan jobs work correctly"
access = JobTemplateAccess(user_unit)
project = job_template_with_ids.project
inventory = job_template_with_ids.inventory
project.use_role = Role()
inventory.use_role = Role()
organization = Organization(name='test-org')
inventory.organization = organization
organization.admin_role = Role()
def mock_get_object(Class, **kwargs):
if Class == Project:
return project
elif Class == Inventory:
return inventory
else:
raise Exception('Item requested has not been mocked')
with mock.patch.object(JobTemplateAccess, 'check_license', return_value=None):
with mock.patch('awx.main.models.rbac.Role.__contains__', return_value=True):
with mock.patch('awx.main.access.get_object_or_400', mock_get_object):
assert access.can_add({
'project': project.pk,
'inventory': inventory.pk,
'job_type': 'scan'
})
def test_jt_can_add_bad_data(user_unit):
"Assure that no server errors are returned if we call JT can_add with bad data"
access = JobTemplateAccess(user_unit)
assert not access.can_add({'asdf': 'asdf'})

View File

@ -1,4 +1,25 @@
from awx.main.tasks import run_label_cleanup
import pytest
from contextlib import contextmanager
from awx.main.models import (
UnifiedJob,
Notification,
)
from awx.main.tasks import (
run_label_cleanup,
send_notifications,
run_administrative_checks,
)
from awx.main.task_engine import TaskSerializer
@contextmanager
def apply_patches(_patches):
[p.start() for p in _patches]
yield
[p.stop() for p in _patches]
def test_run_label_cleanup(mocker):
qs = mocker.Mock(**{'count.return_value': 3, 'delete.return_value': None})
@ -10,3 +31,51 @@ def test_run_label_cleanup(mocker):
qs.delete.assert_called_with()
assert 3 == ret
def test_send_notifications_not_list():
with pytest.raises(TypeError):
send_notifications(None)
def test_send_notifications_job_id(mocker):
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
def test_send_notifications_list(mocker):
patches = list()
mock_job = mocker.MagicMock(spec=UnifiedJob)
patches.append(mocker.patch('awx.main.models.UnifiedJob.objects.get', return_value=mock_job))
mock_notification = mocker.MagicMock(spec=Notification, subject="test")
patches.append(mocker.patch('awx.main.models.Notification.objects.get', return_value=mock_notification))
with apply_patches(patches):
send_notifications([1,2], job_id=1)
assert Notification.objects.get.call_count == 2
assert mock_notification.status == "successful"
assert mock_notification.save.called
assert mock_job.notifications.add.called
assert mock_job.notifications.add.called_with(mock_notification)
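Read together, these assertions describe the task's contract: reject anything that is not a list, deliver each notification, record the resulting status, and, when a job is given, associate each notification with that job. A hedged sketch of that flow using stand-in objects instead of the ORM lookups the real task performs:

# Stand-in sketch of the notification fan-out contract the tests describe.
def send_notifications_sketch(notifications, job=None):
    if not isinstance(notifications, list):
        raise TypeError("notifications must be a list")
    for notification in notifications:
        try:
            notification.send()                  # deliver via the backend
            notification.status = 'successful'
        except Exception:
            notification.status = 'failed'
        notification.save()
        if job is not None:
            job.notifications.add(notification)  # link back to the triggering job

class _DummyNotification(object):
    status = None
    def send(self):
        pass
    def save(self):
        pass

n = _DummyNotification()
send_notifications_sketch([n])
assert n.status == 'successful'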
@pytest.mark.parametrize("current_instances,call_count", [(91, 2), (89,1)])
def test_run_admin_checks_usage(mocker, current_instances, call_count):
patches = list()
patches.append(mocker.patch('awx.main.tasks.tower_settings'))
patches.append(mocker.patch('awx.main.tasks.User'))
mock_ts = mocker.Mock(spec=TaskSerializer)
mock_ts.from_database.return_value = {'instance_count': 100, 'current_instances': current_instances}
patches.append(mocker.patch('awx.main.tasks.TaskSerializer', return_value=mock_ts))
mock_sm = mocker.Mock()
patches.append(mocker.patch('awx.main.tasks.send_mail', wraps=mock_sm))
with apply_patches(patches):
run_administrative_checks()
assert mock_sm.called
if call_count == 2:
assert '90%' in mock_sm.call_args_list[0][0][0]
else:
assert 'expire' in mock_sm.call_args_list[0][0][0]
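The parametrization encodes the threshold being tested: with a license for 100 instances, 91 managed hosts trips an extra over-capacity mail (two sends), while 89 produces only the expiration reminder (one send). A minimal sketch of that 90% check; the expiry condition and message wording are assumptions, not the real run_administrative_checks logic.

# Hypothetical capacity/expiry check mirroring the 90% threshold asserted above.
def administrative_check_messages(instance_count, current_instances, days_left):
    messages = []
    if instance_count and float(current_instances) / instance_count >= 0.9:
        messages.append("Tower is using %d of %d licensed instances (over 90%% of capacity)"
                        % (current_instances, instance_count))
    if days_left <= 14:
        messages.append("Your Tower license will expire in %d days" % days_left)
    return messages

assert len(administrative_check_messages(100, 91, days_left=10)) == 2
assert '90%' in administrative_check_messages(100, 91, days_left=10)[0]
assert 'expire' in administrative_check_messages(100, 89, days_left=10)[0]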

Some files were not shown because too many files have changed in this diff.