Merge branch 'rbac' of github.com:ansible/ansible-tower into rbac

This commit is contained in:
Akita Noek
2016-03-15 11:53:07 -04:00
45 changed files with 527 additions and 903 deletions

View File

@@ -26,19 +26,6 @@ class MongoFilterBackend(BaseFilterBackend):
def filter_queryset(self, request, queryset, view): def filter_queryset(self, request, queryset, view):
return queryset return queryset
class ActiveOnlyBackend(BaseFilterBackend):
'''
Filter to show only objects where is_active/active is True.
'''
def filter_queryset(self, request, queryset, view):
for field in queryset.model._meta.fields:
if field.name == 'is_active':
queryset = queryset.filter(is_active=True)
elif field.name == 'active':
queryset = queryset.filter(active=True)
return queryset
class TypeFilterBackend(BaseFilterBackend): class TypeFilterBackend(BaseFilterBackend):
''' '''
Filter on type field now returned with all objects. Filter on type field now returned with all objects.
@@ -166,12 +153,12 @@ class FieldLookupBackend(BaseFilterBackend):
for key, values in request.query_params.lists(): for key, values in request.query_params.lists():
if key in self.RESERVED_NAMES: if key in self.RESERVED_NAMES:
continue continue
# HACK: Make job event filtering by host name mostly work even # HACK: Make job event filtering by host name mostly work even
# when not capturing job event hosts M2M. # when not capturing job event hosts M2M.
if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'): if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
key = key.replace('hosts__name', 'or__host__name') key = key.replace('hosts__name', 'or__host__name')
or_filters.append((False, 'host__name__isnull', True)) or_filters.append((False, 'host__name__isnull', True))
# Custom __int filter suffix (internal use only). # Custom __int filter suffix (internal use only).
q_int = False q_int = False

View File

@@ -7,7 +7,6 @@ import logging
import time import time
# Django # Django
from django.http import Http404
from django.conf import settings from django.conf import settings
from django.db import connection from django.db import connection
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
@@ -415,9 +414,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
raise PermissionDenied() raise PermissionDenied()
if parent_key: if parent_key:
# sub object has a ForeignKey to the parent, so we can't remove it sub.delete()
# from the set, only mark it as inactive.
sub.mark_inactive()
else: else:
relationship.remove(sub) relationship.remove(sub)
@@ -457,17 +454,9 @@ class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
# somewhat lame that delete has to call it's own permissions check # somewhat lame that delete has to call it's own permissions check
obj = self.get_object() obj = self.get_object()
# FIXME: Why isn't the active check being caught earlier by RBAC?
if not getattr(obj, 'active', True):
raise Http404()
if not getattr(obj, 'is_active', True):
raise Http404()
if not request.user.can_access(self.model, 'delete', obj): if not request.user.can_access(self.model, 'delete', obj):
raise PermissionDenied() raise PermissionDenied()
if hasattr(obj, 'mark_inactive'): obj.delete()
obj.mark_inactive()
else:
raise NotImplementedError('destroy() not implemented yet for %s' % obj)
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView): class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):

View File

@@ -103,11 +103,7 @@ class ModelAccessPermission(permissions.BasePermission):
if not request.user or request.user.is_anonymous(): if not request.user or request.user.is_anonymous():
return False return False
# Don't allow inactive users (and respond with a 403). # Always allow superusers
if not request.user.is_active:
raise PermissionDenied('your account is inactive')
# Always allow superusers (as long as they are active).
if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser: if getattr(view, 'always_allow_superuser', True) and request.user.is_superuser:
return True return True
@@ -161,8 +157,6 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
raise PermissionDenied() raise PermissionDenied()
elif not host_config_key: elif not host_config_key:
raise PermissionDenied() raise PermissionDenied()
elif obj and not obj.active:
raise PermissionDenied()
elif obj and obj.host_config_key != host_config_key: elif obj and obj.host_config_key != host_config_key:
raise PermissionDenied() raise PermissionDenied()
else: else:
@@ -182,7 +176,7 @@ class TaskPermission(ModelAccessPermission):
# Verify that the ID present in the auth token is for a valid, active # Verify that the ID present in the auth token is for a valid, active
# unified job. # unified job.
try: try:
unified_job = UnifiedJob.objects.get(active=True, status='running', unified_job = UnifiedJob.objects.get(status='running',
pk=int(request.auth.split('-')[0])) pk=int(request.auth.split('-')[0]))
except (UnifiedJob.DoesNotExist, TypeError): except (UnifiedJob.DoesNotExist, TypeError):
return False return False

View File

@@ -252,7 +252,6 @@ class BaseSerializer(serializers.ModelSerializer):
# make certain fields read only # make certain fields read only
created = serializers.SerializerMethodField() created = serializers.SerializerMethodField()
modified = serializers.SerializerMethodField() modified = serializers.SerializerMethodField()
active = serializers.SerializerMethodField()
def get_type(self, obj): def get_type(self, obj):
@@ -288,9 +287,9 @@ class BaseSerializer(serializers.ModelSerializer):
def get_related(self, obj): def get_related(self, obj):
res = OrderedDict() res = OrderedDict()
if getattr(obj, 'created_by', None) and obj.created_by.is_active: if getattr(obj, 'created_by', None):
res['created_by'] = reverse('api:user_detail', args=(obj.created_by.pk,)) res['created_by'] = reverse('api:user_detail', args=(obj.created_by.pk,))
if getattr(obj, 'modified_by', None) and obj.modified_by.is_active: if getattr(obj, 'modified_by', None):
res['modified_by'] = reverse('api:user_detail', args=(obj.modified_by.pk,)) res['modified_by'] = reverse('api:user_detail', args=(obj.modified_by.pk,))
return res return res
@@ -315,10 +314,6 @@ class BaseSerializer(serializers.ModelSerializer):
continue continue
if fkval == obj: if fkval == obj:
continue continue
if hasattr(fkval, 'active') and not fkval.active:
continue
if hasattr(fkval, 'is_active') and not fkval.is_active:
continue
summary_fields[fk] = OrderedDict() summary_fields[fk] = OrderedDict()
for field in related_fields: for field in related_fields:
fval = getattr(fkval, field, None) fval = getattr(fkval, field, None)
@@ -334,11 +329,11 @@ class BaseSerializer(serializers.ModelSerializer):
# Can be raised by the reverse accessor for a OneToOneField. # Can be raised by the reverse accessor for a OneToOneField.
except ObjectDoesNotExist: except ObjectDoesNotExist:
pass pass
if getattr(obj, 'created_by', None) and obj.created_by.is_active: if getattr(obj, 'created_by', None):
summary_fields['created_by'] = OrderedDict() summary_fields['created_by'] = OrderedDict()
for field in SUMMARIZABLE_FK_FIELDS['user']: for field in SUMMARIZABLE_FK_FIELDS['user']:
summary_fields['created_by'][field] = getattr(obj.created_by, field) summary_fields['created_by'][field] = getattr(obj.created_by, field)
if getattr(obj, 'modified_by', None) and obj.modified_by.is_active: if getattr(obj, 'modified_by', None):
summary_fields['modified_by'] = OrderedDict() summary_fields['modified_by'] = OrderedDict()
for field in SUMMARIZABLE_FK_FIELDS['user']: for field in SUMMARIZABLE_FK_FIELDS['user']:
summary_fields['modified_by'][field] = getattr(obj.modified_by, field) summary_fields['modified_by'][field] = getattr(obj.modified_by, field)
@@ -378,14 +373,6 @@ class BaseSerializer(serializers.ModelSerializer):
else: else:
return obj.modified return obj.modified
def get_active(self, obj):
if obj is None:
return False
elif isinstance(obj, User):
return obj.is_active
else:
return obj.active
def build_standard_field(self, field_name, model_field): def build_standard_field(self, field_name, model_field):
# DRF 3.3 serializers.py::build_standard_field() -> utils/field_mapping.py::get_field_kwargs() short circuits # DRF 3.3 serializers.py::build_standard_field() -> utils/field_mapping.py::get_field_kwargs() short circuits
@@ -564,11 +551,11 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(UnifiedJobTemplateSerializer, self).get_related(obj) res = super(UnifiedJobTemplateSerializer, self).get_related(obj)
if obj.current_job and obj.current_job.active: if obj.current_job:
res['current_job'] = obj.current_job.get_absolute_url() res['current_job'] = obj.current_job.get_absolute_url()
if obj.last_job and obj.last_job.active: if obj.last_job:
res['last_job'] = obj.last_job.get_absolute_url() res['last_job'] = obj.last_job.get_absolute_url()
if obj.next_schedule and obj.next_schedule.active: if obj.next_schedule:
res['next_schedule'] = obj.next_schedule.get_absolute_url() res['next_schedule'] = obj.next_schedule.get_absolute_url()
return res return res
@@ -623,9 +610,9 @@ class UnifiedJobSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(UnifiedJobSerializer, self).get_related(obj) res = super(UnifiedJobSerializer, self).get_related(obj)
if obj.unified_job_template and obj.unified_job_template.active: if obj.unified_job_template:
res['unified_job_template'] = obj.unified_job_template.get_absolute_url() res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
if obj.schedule and obj.schedule.active: if obj.schedule:
res['schedule'] = obj.schedule.get_absolute_url() res['schedule'] = obj.schedule.get_absolute_url()
if isinstance(obj, ProjectUpdate): if isinstance(obj, ProjectUpdate):
res['stdout'] = reverse('api:project_update_stdout', args=(obj.pk,)) res['stdout'] = reverse('api:project_update_stdout', args=(obj.pk,))
@@ -874,7 +861,7 @@ class ProjectOptionsSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(ProjectOptionsSerializer, self).get_related(obj) res = super(ProjectOptionsSerializer, self).get_related(obj)
if obj.credential and obj.credential.active: if obj.credential:
res['credential'] = reverse('api:credential_detail', res['credential'] = reverse('api:credential_detail',
args=(obj.credential.pk,)) args=(obj.credential.pk,))
return res return res
@@ -903,7 +890,7 @@ class ProjectOptionsSerializer(BaseSerializer):
def to_representation(self, obj): def to_representation(self, obj):
ret = super(ProjectOptionsSerializer, self).to_representation(obj) ret = super(ProjectOptionsSerializer, self).to_representation(obj)
if obj is not None and 'credential' in ret and (not obj.credential or not obj.credential.active): if obj is not None and 'credential' in ret and not obj.credential:
ret['credential'] = None ret['credential'] = None
return ret return ret
@@ -1039,13 +1026,13 @@ class InventorySerializer(BaseSerializerWithVariables):
access_list = reverse('api:inventory_access_list', args=(obj.pk,)), access_list = reverse('api:inventory_access_list', args=(obj.pk,)),
#single_fact = reverse('api:inventory_single_fact_view', args=(obj.pk,)), #single_fact = reverse('api:inventory_single_fact_view', args=(obj.pk,)),
)) ))
if obj.organization and obj.organization.active: if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res return res
def to_representation(self, obj): def to_representation(self, obj):
ret = super(InventorySerializer, self).to_representation(obj) ret = super(InventorySerializer, self).to_representation(obj)
if obj is not None and 'organization' in ret and (not obj.organization or not obj.organization.active): if obj is not None and 'organization' in ret and not obj.organization:
ret['organization'] = None ret['organization'] = None
return ret return ret
@@ -1100,11 +1087,11 @@ class HostSerializer(BaseSerializerWithVariables):
fact_versions = reverse('api:host_fact_versions_list', args=(obj.pk,)), fact_versions = reverse('api:host_fact_versions_list', args=(obj.pk,)),
#single_fact = reverse('api:host_single_fact_view', args=(obj.pk,)), #single_fact = reverse('api:host_single_fact_view', args=(obj.pk,)),
)) ))
if obj.inventory and obj.inventory.active: if obj.inventory:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
if obj.last_job and obj.last_job.active: if obj.last_job:
res['last_job'] = reverse('api:job_detail', args=(obj.last_job.pk,)) res['last_job'] = reverse('api:job_detail', args=(obj.last_job.pk,))
if obj.last_job_host_summary and obj.last_job_host_summary.job.active: if obj.last_job_host_summary:
res['last_job_host_summary'] = reverse('api:job_host_summary_detail', args=(obj.last_job_host_summary.pk,)) res['last_job_host_summary'] = reverse('api:job_host_summary_detail', args=(obj.last_job_host_summary.pk,))
return res return res
@@ -1120,7 +1107,7 @@ class HostSerializer(BaseSerializerWithVariables):
'name': j.job.job_template.name if j.job.job_template is not None else "", 'name': j.job.job_template.name if j.job.job_template is not None else "",
'status': j.job.status, 'status': j.job.status,
'finished': j.job.finished, 'finished': j.job.finished,
} for j in obj.job_host_summaries.filter(job__active=True).select_related('job__job_template').order_by('-created')[:5]]}) } for j in obj.job_host_summaries.select_related('job__job_template').order_by('-created')[:5]]})
return d return d
def _get_host_port_from_name(self, name): def _get_host_port_from_name(self, name):
@@ -1169,11 +1156,11 @@ class HostSerializer(BaseSerializerWithVariables):
ret = super(HostSerializer, self).to_representation(obj) ret = super(HostSerializer, self).to_representation(obj)
if not obj: if not obj:
return ret return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active): if 'inventory' in ret and not obj.inventory:
ret['inventory'] = None ret['inventory'] = None
if 'last_job' in ret and (not obj.last_job or not obj.last_job.active): if 'last_job' in ret and not obj.last_job:
ret['last_job'] = None ret['last_job'] = None
if 'last_job_host_summary' in ret and (not obj.last_job_host_summary or not obj.last_job_host_summary.job.active): if 'last_job_host_summary' in ret and not obj.last_job_host_summary:
ret['last_job_host_summary'] = None ret['last_job_host_summary'] = None
return ret return ret
@@ -1210,7 +1197,7 @@ class GroupSerializer(BaseSerializerWithVariables):
access_list = reverse('api:group_access_list', args=(obj.pk,)), access_list = reverse('api:group_access_list', args=(obj.pk,)),
#single_fact = reverse('api:group_single_fact_view', args=(obj.pk,)), #single_fact = reverse('api:group_single_fact_view', args=(obj.pk,)),
)) ))
if obj.inventory and obj.inventory.active: if obj.inventory:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
if obj.inventory_source: if obj.inventory_source:
res['inventory_source'] = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,)) res['inventory_source'] = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,))
@@ -1223,7 +1210,7 @@ class GroupSerializer(BaseSerializerWithVariables):
def to_representation(self, obj): def to_representation(self, obj):
ret = super(GroupSerializer, self).to_representation(obj) ret = super(GroupSerializer, self).to_representation(obj)
if obj is not None and 'inventory' in ret and (not obj.inventory or not obj.inventory.active): if obj is not None and 'inventory' in ret and not obj.inventory:
ret['inventory'] = None ret['inventory'] = None
return ret return ret
@@ -1239,7 +1226,7 @@ class GroupTreeSerializer(GroupSerializer):
def get_children(self, obj): def get_children(self, obj):
if obj is None: if obj is None:
return {} return {}
children_qs = obj.children.filter(active=True) children_qs = obj.children
children_qs = children_qs.select_related('inventory') children_qs = children_qs.select_related('inventory')
children_qs = children_qs.prefetch_related('inventory_source') children_qs = children_qs.prefetch_related('inventory_source')
return GroupTreeSerializer(children_qs, many=True).data return GroupTreeSerializer(children_qs, many=True).data
@@ -1304,7 +1291,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(CustomInventoryScriptSerializer, self).get_related(obj) res = super(CustomInventoryScriptSerializer, self).get_related(obj)
if obj.organization and obj.organization.active: if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res return res
@@ -1317,10 +1304,10 @@ class InventorySourceOptionsSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(InventorySourceOptionsSerializer, self).get_related(obj) res = super(InventorySourceOptionsSerializer, self).get_related(obj)
if obj.credential and obj.credential.active: if obj.credential:
res['credential'] = reverse('api:credential_detail', res['credential'] = reverse('api:credential_detail',
args=(obj.credential.pk,)) args=(obj.credential.pk,))
if obj.source_script and obj.source_script.active: if obj.source_script:
res['source_script'] = reverse('api:inventory_script_detail', args=(obj.source_script.pk,)) res['source_script'] = reverse('api:inventory_script_detail', args=(obj.source_script.pk,))
return res return res
@@ -1365,7 +1352,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
ret = super(InventorySourceOptionsSerializer, self).to_representation(obj) ret = super(InventorySourceOptionsSerializer, self).to_representation(obj)
if obj is None: if obj is None:
return ret return ret
if 'credential' in ret and (not obj.credential or not obj.credential.active): if 'credential' in ret and not obj.credential:
ret['credential'] = None ret['credential'] = None
return ret return ret
@@ -1396,9 +1383,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)), notifiers_success = reverse('api:inventory_source_notifiers_success_list', args=(obj.pk,)),
notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)), notifiers_error = reverse('api:inventory_source_notifiers_error_list', args=(obj.pk,)),
)) ))
if obj.inventory and obj.inventory.active: if obj.inventory:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
if obj.group and obj.group.active: if obj.group:
res['group'] = reverse('api:group_detail', args=(obj.group.pk,)) res['group'] = reverse('api:group_detail', args=(obj.group.pk,))
# Backwards compatibility. # Backwards compatibility.
if obj.current_update: if obj.current_update:
@@ -1413,9 +1400,9 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
ret = super(InventorySourceSerializer, self).to_representation(obj) ret = super(InventorySourceSerializer, self).to_representation(obj)
if obj is None: if obj is None:
return ret return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active): if 'inventory' in ret and not obj.inventory:
ret['inventory'] = None ret['inventory'] = None
if 'group' in ret and (not obj.group or not obj.group.active): if 'group' in ret and not obj.group:
ret['group'] = None ret['group'] = None
return ret return ret
@@ -1473,13 +1460,13 @@ class TeamSerializer(BaseSerializer):
activity_stream = reverse('api:team_activity_stream_list', args=(obj.pk,)), activity_stream = reverse('api:team_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:team_access_list', args=(obj.pk,)), access_list = reverse('api:team_access_list', args=(obj.pk,)),
)) ))
if obj.organization and obj.organization.active: if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res return res
def to_representation(self, obj): def to_representation(self, obj):
ret = super(TeamSerializer, self).to_representation(obj) ret = super(TeamSerializer, self).to_representation(obj)
if obj is not None and 'organization' in ret and (not obj.organization or not obj.organization.active): if obj is not None and 'organization' in ret and not obj.organization:
ret['organization'] = None ret['organization'] = None
return ret return ret
@@ -1563,9 +1550,9 @@ class CredentialSerializer(BaseSerializer):
def to_representation(self, obj): def to_representation(self, obj):
ret = super(CredentialSerializer, self).to_representation(obj) ret = super(CredentialSerializer, self).to_representation(obj)
if obj is not None and 'user' in ret and (not obj.user or not obj.user.is_active): if obj is not None and 'user' in ret and not obj.user:
ret['user'] = None ret['user'] = None
if obj is not None and 'team' in ret and (not obj.team or not obj.team.active): if obj is not None and 'team' in ret and not obj.team:
ret['team'] = None ret['team'] = None
return ret return ret
@@ -1604,13 +1591,13 @@ class JobOptionsSerializer(BaseSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(JobOptionsSerializer, self).get_related(obj) res = super(JobOptionsSerializer, self).get_related(obj)
if obj.inventory and obj.inventory.active: if obj.inventory:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
if obj.project and obj.project.active: if obj.project:
res['project'] = reverse('api:project_detail', args=(obj.project.pk,)) res['project'] = reverse('api:project_detail', args=(obj.project.pk,))
if obj.credential and obj.credential.active: if obj.credential:
res['credential'] = reverse('api:credential_detail', args=(obj.credential.pk,)) res['credential'] = reverse('api:credential_detail', args=(obj.credential.pk,))
if obj.cloud_credential and obj.cloud_credential.active: if obj.cloud_credential:
res['cloud_credential'] = reverse('api:credential_detail', res['cloud_credential'] = reverse('api:credential_detail',
args=(obj.cloud_credential.pk,)) args=(obj.cloud_credential.pk,))
return res return res
@@ -1619,15 +1606,15 @@ class JobOptionsSerializer(BaseSerializer):
ret = super(JobOptionsSerializer, self).to_representation(obj) ret = super(JobOptionsSerializer, self).to_representation(obj)
if obj is None: if obj is None:
return ret return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active): if 'inventory' in ret and not obj.inventory:
ret['inventory'] = None ret['inventory'] = None
if 'project' in ret and (not obj.project or not obj.project.active): if 'project' in ret and not obj.project:
ret['project'] = None ret['project'] = None
if 'playbook' in ret: if 'playbook' in ret:
ret['playbook'] = '' ret['playbook'] = ''
if 'credential' in ret and (not obj.credential or not obj.credential.active): if 'credential' in ret and not obj.credential:
ret['credential'] = None ret['credential'] = None
if 'cloud_credential' in ret and (not obj.cloud_credential or not obj.cloud_credential.active): if 'cloud_credential' in ret and not obj.cloud_credential:
ret['cloud_credential'] = None ret['cloud_credential'] = None
return ret return ret
@@ -1690,7 +1677,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
else: else:
d['can_copy'] = False d['can_copy'] = False
d['can_edit'] = False d['can_edit'] = False
d['recent_jobs'] = [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.filter(active=True).order_by('-created')[:10]] d['recent_jobs'] = [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.order_by('-created')[:10]]
return d return d
def validate(self, attrs): def validate(self, attrs):
@@ -1721,7 +1708,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)), activity_stream = reverse('api:job_activity_stream_list', args=(obj.pk,)),
notifications = reverse('api:job_notifications_list', args=(obj.pk,)), notifications = reverse('api:job_notifications_list', args=(obj.pk,)),
)) ))
if obj.job_template and obj.job_template.active: if obj.job_template:
res['job_template'] = reverse('api:job_template_detail', res['job_template'] = reverse('api:job_template_detail',
args=(obj.job_template.pk,)) args=(obj.job_template.pk,))
if obj.can_start or True: if obj.can_start or True:
@@ -1766,7 +1753,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
ret = super(JobSerializer, self).to_representation(obj) ret = super(JobSerializer, self).to_representation(obj)
if obj is None: if obj is None:
return ret return ret
if 'job_template' in ret and (not obj.job_template or not obj.job_template.active): if 'job_template' in ret and not obj.job_template:
ret['job_template'] = None ret['job_template'] = None
if obj.job_template and obj.job_template.survey_enabled: if obj.job_template and obj.job_template.survey_enabled:
@@ -1830,11 +1817,11 @@ class JobRelaunchSerializer(JobSerializer):
def validate(self, attrs): def validate(self, attrs):
obj = self.context.get('obj') obj = self.context.get('obj')
if not obj.credential or obj.credential.active is False: if not obj.credential:
raise serializers.ValidationError(dict(credential=["Credential not found or deleted."])) raise serializers.ValidationError(dict(credential=["Credential not found or deleted."]))
if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None or not obj.project.active): if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None:
raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined"])) raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined"]))
if obj.inventory is None or not obj.inventory.active: if obj.inventory is None:
raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined"])) raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined"]))
attrs = super(JobRelaunchSerializer, self).validate(attrs) attrs = super(JobRelaunchSerializer, self).validate(attrs)
return attrs return attrs
@@ -1874,9 +1861,9 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(AdHocCommandSerializer, self).get_related(obj) res = super(AdHocCommandSerializer, self).get_related(obj)
if obj.inventory and obj.inventory.active: if obj.inventory:
res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,)) res['inventory'] = reverse('api:inventory_detail', args=(obj.inventory.pk,))
if obj.credential and obj.credential.active: if obj.credential:
res['credential'] = reverse('api:credential_detail', args=(obj.credential.pk,)) res['credential'] = reverse('api:credential_detail', args=(obj.credential.pk,))
res.update(dict( res.update(dict(
events = reverse('api:ad_hoc_command_ad_hoc_command_events_list', args=(obj.pk,)), events = reverse('api:ad_hoc_command_ad_hoc_command_events_list', args=(obj.pk,)),
@@ -1888,9 +1875,9 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
def to_representation(self, obj): def to_representation(self, obj):
ret = super(AdHocCommandSerializer, self).to_representation(obj) ret = super(AdHocCommandSerializer, self).to_representation(obj)
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active): if 'inventory' in ret and not obj.inventory:
ret['inventory'] = None ret['inventory'] = None
if 'credential' in ret and (not obj.credential or not obj.credential.active): if 'credential' in ret and not obj.credential:
ret['credential'] = None ret['credential'] = None
# For the UI, only module_name is returned for name, instead of the # For the UI, only module_name is returned for name, instead of the
# longer module name + module_args format. # longer module name + module_args format.
@@ -1942,7 +1929,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
def get_related(self, obj): def get_related(self, obj):
res = super(SystemJobSerializer, self).get_related(obj) res = super(SystemJobSerializer, self).get_related(obj)
if obj.system_job_template and obj.system_job_template.active: if obj.system_job_template:
res['system_job_template'] = reverse('api:system_job_template_detail', res['system_job_template'] = reverse('api:system_job_template_detail',
args=(obj.system_job_template.pk,)) args=(obj.system_job_template.pk,))
if obj.can_cancel or True: if obj.can_cancel or True:
@@ -2080,7 +2067,7 @@ class JobLaunchSerializer(BaseSerializer):
} }
def get_credential_needed_to_start(self, obj): def get_credential_needed_to_start(self, obj):
return not (obj and obj.credential and obj.credential.active) return not (obj and obj.credential)
def get_survey_enabled(self, obj): def get_survey_enabled(self, obj):
if obj: if obj:
@@ -2093,7 +2080,7 @@ class JobLaunchSerializer(BaseSerializer):
data = self.context.get('data') data = self.context.get('data')
credential = attrs.get('credential', obj and obj.credential or None) credential = attrs.get('credential', obj and obj.credential or None)
if not credential or not credential.active: if not credential:
errors['credential'] = 'Credential not provided' errors['credential'] = 'Credential not provided'
# fill passwords dict with request data passwords # fill passwords dict with request data passwords
@@ -2124,9 +2111,9 @@ class JobLaunchSerializer(BaseSerializer):
if validation_errors: if validation_errors:
errors['variables_needed_to_start'] = validation_errors errors['variables_needed_to_start'] = validation_errors
if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None or not obj.project.active): if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None):
errors['project'] = 'Job Template Project is missing or undefined' errors['project'] = 'Job Template Project is missing or undefined'
if obj.inventory is None or not obj.inventory.active: if obj.inventory is None:
errors['inventory'] = 'Job Template Inventory is missing or undefined' errors['inventory'] = 'Job Template Inventory is missing or undefined'
if errors: if errors:
@@ -2162,7 +2149,7 @@ class NotifierSerializer(BaseSerializer):
test = reverse('api:notifier_test', args=(obj.pk,)), test = reverse('api:notifier_test', args=(obj.pk,)),
notifications = reverse('api:notifier_notification_list', args=(obj.pk,)), notifications = reverse('api:notifier_notification_list', args=(obj.pk,)),
)) ))
if obj.organization and obj.organization.active: if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,)) res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res return res
@@ -2220,7 +2207,7 @@ class ScheduleSerializer(BaseSerializer):
res.update(dict( res.update(dict(
unified_jobs = reverse('api:schedule_unified_jobs_list', args=(obj.pk,)), unified_jobs = reverse('api:schedule_unified_jobs_list', args=(obj.pk,)),
)) ))
if obj.unified_job_template and obj.unified_job_template.active: if obj.unified_job_template:
res['unified_job_template'] = obj.unified_job_template.get_absolute_url() res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
return res return res
@@ -2447,8 +2434,6 @@ class AuthTokenSerializer(serializers.Serializer):
if username and password: if username and password:
user = authenticate(username=username, password=password) user = authenticate(username=username, password=password)
if user: if user:
if not user.is_active:
raise serializers.ValidationError('User account is disabled.')
attrs['user'] = user attrs['user'] = user
return attrs return attrs
else: else:

View File

@@ -214,7 +214,7 @@ class ApiV1ConfigView(APIView):
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys()) user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
data['user_ldap_fields'] = user_ldap_fields data['user_ldap_fields'] = user_ldap_fields
if request.user.is_superuser or request.user.admin_of_organizations.filter(active=True).count(): if request.user.is_superuser or request.user.admin_of_organizations.count():
data.update(dict( data.update(dict(
project_base_dir = settings.PROJECTS_ROOT, project_base_dir = settings.PROJECTS_ROOT,
project_local_paths = Project.get_local_path_choices(), project_local_paths = Project.get_local_path_choices(),
@@ -609,7 +609,7 @@ class OrganizationList(ListCreateAPIView):
# by the license, then we are only willing to create this organization # by the license, then we are only willing to create this organization
# if no organizations exist in the system. # if no organizations exist in the system.
if (not feature_enabled('multiple_organizations') and if (not feature_enabled('multiple_organizations') and
self.model.objects.filter(active=True).count() > 0): self.model.objects.count() > 0):
raise LicenseForbids('Your Tower license only permits a single ' raise LicenseForbids('Your Tower license only permits a single '
'organization to exist.') 'organization to exist.')
@@ -804,7 +804,7 @@ class ProjectList(ListCreateAPIView):
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
# Not optimal, but make sure the project status and last_updated fields # Not optimal, but make sure the project status and last_updated fields
# are up to date here... # are up to date here...
projects_qs = Project.objects.filter(active=True) projects_qs = Project.objects
projects_qs = projects_qs.select_related('current_job', 'last_job') projects_qs = projects_qs.select_related('current_job', 'last_job')
for project in projects_qs: for project in projects_qs:
project._set_status_and_last_job_run() project._set_status_and_last_job_run()
@@ -1093,8 +1093,6 @@ class UserDetail(RetrieveUpdateDestroyAPIView):
can_delete = request.user.can_access(User, 'delete', obj) can_delete = request.user.can_access(User, 'delete', obj)
if not can_delete: if not can_delete:
raise PermissionDenied('Cannot delete user') raise PermissionDenied('Cannot delete user')
for own_credential in Credential.objects.filter(user=obj):
own_credential.mark_inactive()
return super(UserDetail, self).destroy(request, *args, **kwargs) return super(UserDetail, self).destroy(request, *args, **kwargs)
class UserAccessList(ResourceAccessList): class UserAccessList(ResourceAccessList):
@@ -1400,7 +1398,7 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
if sub_id is not None: if sub_id is not None:
return super(GroupChildrenList, self).unattach(request, *args, **kwargs) return super(GroupChildrenList, self).unattach(request, *args, **kwargs)
parent = self.get_parent_object() parent = self.get_parent_object()
parent.mark_inactive() parent.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def _unattach(self, request, *args, **kwargs): # FIXME: Disabled for now for UI support. def _unattach(self, request, *args, **kwargs): # FIXME: Disabled for now for UI support.
@@ -1423,8 +1421,8 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
sub, self.relationship): sub, self.relationship):
raise PermissionDenied() raise PermissionDenied()
if sub.parents.filter(active=True).exclude(pk=parent.pk).count() == 0: if sub.parents.exclude(pk=parent.pk).count() == 0:
sub.mark_inactive() sub.delete()
else: else:
relationship.remove(sub) relationship.remove(sub)
@@ -1526,15 +1524,9 @@ class GroupDetail(RetrieveUpdateDestroyAPIView):
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
obj = self.get_object() obj = self.get_object()
# FIXME: Why isn't the active check being caught earlier by RBAC?
if not getattr(obj, 'active', True):
raise Http404()
if not getattr(obj, 'is_active', True):
raise Http404()
if not request.user.can_access(self.model, 'delete', obj): if not request.user.can_access(self.model, 'delete', obj):
raise PermissionDenied() raise PermissionDenied()
if hasattr(obj, 'mark_inactive'): obj.delete_recursive()
obj.mark_inactive_recursive()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
class GroupAccessList(ResourceAccessList): class GroupAccessList(ResourceAccessList):
@@ -1601,9 +1593,9 @@ class InventoryScriptView(RetrieveAPIView):
hostvars = bool(request.query_params.get('hostvars', '')) hostvars = bool(request.query_params.get('hostvars', ''))
show_all = bool(request.query_params.get('all', '')) show_all = bool(request.query_params.get('all', ''))
if show_all: if show_all:
hosts_q = dict(active=True) hosts_q = dict()
else: else:
hosts_q = dict(active=True, enabled=True) hosts_q = dict(enabled=True)
if hostname: if hostname:
host = get_object_or_404(obj.hosts, name=hostname, **hosts_q) host = get_object_or_404(obj.hosts, name=hostname, **hosts_q)
data = host.variables_dict data = host.variables_dict
@@ -1621,8 +1613,7 @@ class InventoryScriptView(RetrieveAPIView):
all_group['hosts'] = groupless_hosts all_group['hosts'] = groupless_hosts
# Build in-memory mapping of groups and their hosts. # Build in-memory mapping of groups and their hosts.
group_hosts_kw = dict(group__inventory_id=obj.id, group__active=True, group_hosts_kw = dict(group__inventory_id=obj.id, host__inventory_id=obj.id)
host__inventory_id=obj.id, host__active=True)
if 'enabled' in hosts_q: if 'enabled' in hosts_q:
group_hosts_kw['host__enabled'] = hosts_q['enabled'] group_hosts_kw['host__enabled'] = hosts_q['enabled']
group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw) group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
@@ -1635,8 +1626,8 @@ class InventoryScriptView(RetrieveAPIView):
# Build in-memory mapping of groups and their children. # Build in-memory mapping of groups and their children.
group_parents_qs = Group.parents.through.objects.filter( group_parents_qs = Group.parents.through.objects.filter(
from_group__inventory_id=obj.id, from_group__active=True, from_group__inventory_id=obj.id,
to_group__inventory_id=obj.id, to_group__active=True, to_group__inventory_id=obj.id,
) )
group_parents_qs = group_parents_qs.order_by('from_group__name') group_parents_qs = group_parents_qs.order_by('from_group__name')
group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name', 'to_group_id') group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name', 'to_group_id')
@@ -1646,7 +1637,7 @@ class InventoryScriptView(RetrieveAPIView):
group_children.append(from_group_name) group_children.append(from_group_name)
# Now use in-memory maps to build up group info. # Now use in-memory maps to build up group info.
for group in obj.groups.filter(active=True): for group in obj.groups.all():
group_info = OrderedDict() group_info = OrderedDict()
group_info['hosts'] = group_hosts_map.get(group.id, []) group_info['hosts'] = group_hosts_map.get(group.id, [])
group_info['children'] = group_children_map.get(group.id, []) group_info['children'] = group_children_map.get(group.id, [])
@@ -1692,9 +1683,9 @@ class InventoryTreeView(RetrieveAPIView):
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
inventory = self.get_object() inventory = self.get_object()
group_children_map = inventory.get_group_children_map(active=True) group_children_map = inventory.get_group_children_map()
root_group_pks = inventory.root_groups.filter(active=True).order_by('name').values_list('pk', flat=True) root_group_pks = inventory.root_groups.order_by('name').values_list('pk', flat=True)
groups_qs = inventory.groups.filter(active=True) groups_qs = inventory.groups
groups_qs = groups_qs.select_related('inventory') groups_qs = groups_qs.select_related('inventory')
groups_qs = groups_qs.prefetch_related('inventory_source') groups_qs = groups_qs.prefetch_related('inventory_source')
all_group_data = GroupSerializer(groups_qs, many=True).data all_group_data = GroupSerializer(groups_qs, many=True).data
@@ -1898,7 +1889,7 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
if obj: if obj:
for p in obj.passwords_needed_to_start: for p in obj.passwords_needed_to_start:
data[p] = u'' data[p] = u''
if obj.credential and obj.credential.active: if obj.credential:
data.pop('credential', None) data.pop('credential', None)
else: else:
data['credential'] = None data['credential'] = None
@@ -2095,7 +2086,7 @@ class JobTemplateCallback(GenericAPIView):
return set() return set()
# Find the host objects to search for a match. # Find the host objects to search for a match.
obj = self.get_object() obj = self.get_object()
qs = obj.inventory.hosts.filter(active=True) qs = obj.inventory.hosts
# First try for an exact match on the name. # First try for an exact match on the name.
try: try:
return set([qs.get(name__in=remote_hosts)]) return set([qs.get(name__in=remote_hosts)])
@@ -2155,7 +2146,7 @@ class JobTemplateCallback(GenericAPIView):
# match again. # match again.
inventory_sources_already_updated = [] inventory_sources_already_updated = []
if len(matching_hosts) != 1: if len(matching_hosts) != 1:
inventory_sources = job_template.inventory.inventory_sources.filter(active=True, update_on_launch=True) inventory_sources = job_template.inventory.inventory_sources.filter( update_on_launch=True)
inventory_update_pks = set() inventory_update_pks = set()
for inventory_source in inventory_sources: for inventory_source in inventory_sources:
if inventory_source.needs_update_on_launch: if inventory_source.needs_update_on_launch:

View File

@@ -245,7 +245,7 @@ class UserAccess(BaseAccess):
return False return False
if self.user.is_superuser: if self.user.is_superuser:
return True return True
return Organization.accessible_objects(self.user, ALL_PERMISSIONS).filter(active=True).exists() return Organization.accessible_objects(self.user, ALL_PERMISSIONS).exists()
def can_change(self, obj, data): def can_change(self, obj, data):
if data is not None and 'is_superuser' in data: if data is not None and 'is_superuser' in data:
@@ -266,7 +266,7 @@ class UserAccess(BaseAccess):
if obj == self.user: if obj == self.user:
# cannot delete yourself # cannot delete yourself
return False return False
super_users = User.objects.filter(is_active=True, is_superuser=True) super_users = User.objects.filter(is_superuser=True)
if obj.is_superuser and super_users.count() == 1: if obj.is_superuser and super_users.count() == 1:
# cannot delete the last active superuser # cannot delete the last active superuser
return False return False
@@ -527,7 +527,7 @@ class InventoryUpdateAccess(BaseAccess):
model = InventoryUpdate model = InventoryUpdate
def get_queryset(self): def get_queryset(self):
qs = InventoryUpdate.objects.filter(active=True).distinct() qs = InventoryUpdate.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory_source__group', qs = qs.select_related('created_by', 'modified_by', 'inventory_source__group',
'inventory_source__inventory') 'inventory_source__inventory')
inventory_sources_qs = self.user.get_queryset(InventorySource) inventory_sources_qs = self.user.get_queryset(InventorySource)
@@ -677,7 +677,7 @@ class ProjectUpdateAccess(BaseAccess):
model = ProjectUpdate model = ProjectUpdate
def get_queryset(self): def get_queryset(self):
qs = ProjectUpdate.objects.filter(active=True).distinct() qs = ProjectUpdate.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'project') qs = qs.select_related('created_by', 'modified_by', 'project')
project_ids = set(self.user.get_queryset(Project).values_list('id', flat=True)) project_ids = set(self.user.get_queryset(Project).values_list('id', flat=True))
return qs.filter(project_id__in=project_ids) return qs.filter(project_id__in=project_ids)
@@ -821,7 +821,7 @@ class JobAccess(BaseAccess):
model = Job model = Job
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'job_template', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'job_template', 'inventory',
'project', 'credential', 'cloud_credential', 'job_template') 'project', 'credential', 'cloud_credential', 'job_template')
qs = qs.prefetch_related('unified_job_template') qs = qs.prefetch_related('unified_job_template')
@@ -843,12 +843,10 @@ class JobAccess(BaseAccess):
# TODO: I think the below queries can be combined # TODO: I think the below queries can be combined
deploy_permissions_ids = Permission.objects.filter( deploy_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=allowed_deploy, permission_type__in=allowed_deploy,
) )
check_permissions_ids = Permission.objects.filter( check_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=allowed_check, permission_type__in=allowed_check,
) )
@@ -947,18 +945,17 @@ class AdHocCommandAccess(BaseAccess):
model = AdHocCommand model = AdHocCommand
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'inventory',
'credential') 'credential')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
credential_ids = set(self.user.get_queryset(Credential).values_list('id', flat=True)) credential_ids = set(self.user.get_queryset(Credential).values_list('id', flat=True))
team_ids = set(Team.objects.filter(active=True, users__in=[self.user]).values_list('id', flat=True)) team_ids = set(Team.objects.filter( users__in=[self.user]).values_list('id', flat=True))
permission_ids = set(Permission.objects.filter( permission_ids = set(Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=PERMISSION_TYPES_ALLOWING_INVENTORY_READ, permission_type__in=PERMISSION_TYPES_ALLOWING_INVENTORY_READ,
run_ad_hoc_commands=True, run_ad_hoc_commands=True,
).values_list('id', flat=True)) ).values_list('id', flat=True))
@@ -982,7 +979,7 @@ class AdHocCommandAccess(BaseAccess):
# If a credential is provided, the user should have read access to it. # If a credential is provided, the user should have read access to it.
credential_pk = get_pk_from_dict(data, 'credential') credential_pk = get_pk_from_dict(data, 'credential')
if credential_pk: if credential_pk:
credential = get_object_or_400(Credential, pk=credential_pk, active=True) credential = get_object_or_400(Credential, pk=credential_pk)
if not credential.accessible_by(self.user, {'read':True}): if not credential.accessible_by(self.user, {'read':True}):
return False return False
@@ -990,7 +987,7 @@ class AdHocCommandAccess(BaseAccess):
# given inventory. # given inventory.
inventory_pk = get_pk_from_dict(data, 'inventory') inventory_pk = get_pk_from_dict(data, 'inventory')
if inventory_pk: if inventory_pk:
inventory = get_object_or_400(Inventory, pk=inventory_pk, active=True) inventory = get_object_or_400(Inventory, pk=inventory_pk)
if not inventory.accessible_by(self.user, {'execute': True}): if not inventory.accessible_by(self.user, {'execute': True}):
return False return False

View File

@@ -111,8 +111,6 @@ class Command(BaseCommand):
n_deleted_items = 0 n_deleted_items = 0
n_deleted_items += self.cleanup_model(User) n_deleted_items += self.cleanup_model(User)
for model in self.get_models(PrimordialModel):
n_deleted_items += self.cleanup_model(model)
if not self.dry_run: if not self.dry_run:
self.logger.log(99, "Removed %d items", n_deleted_items) self.logger.log(99, "Removed %d items", n_deleted_items)

View File

@@ -19,7 +19,7 @@ class Command(BaseCommand):
# Create a default organization as the first superuser found. # Create a default organization as the first superuser found.
try: try:
superuser = User.objects.filter(is_superuser=True, is_active=True).order_by('pk')[0] superuser = User.objects.filter(is_superuser=True).order_by('pk')[0]
except IndexError: except IndexError:
superuser = None superuser = None
with impersonate(superuser): with impersonate(superuser):

View File

@@ -53,13 +53,13 @@ class MemObject(object):
''' '''
Common code shared between in-memory groups and hosts. Common code shared between in-memory groups and hosts.
''' '''
def __init__(self, name, source_dir): def __init__(self, name, source_dir):
assert name, 'no name' assert name, 'no name'
assert source_dir, 'no source dir' assert source_dir, 'no source dir'
self.name = name self.name = name
self.source_dir = source_dir self.source_dir = source_dir
def load_vars(self, base_path): def load_vars(self, base_path):
all_vars = {} all_vars = {}
files_found = 0 files_found = 0
@@ -107,7 +107,7 @@ class MemGroup(MemObject):
group_vars = os.path.join(source_dir, 'group_vars', self.name) group_vars = os.path.join(source_dir, 'group_vars', self.name)
self.variables = self.load_vars(group_vars) self.variables = self.load_vars(group_vars)
logger.debug('Loaded group: %s', self.name) logger.debug('Loaded group: %s', self.name)
def child_group_by_name(self, name, loader): def child_group_by_name(self, name, loader):
if name == 'all': if name == 'all':
return return
@@ -266,7 +266,7 @@ class BaseLoader(object):
logger.debug('Filtering group %s', name) logger.debug('Filtering group %s', name)
return None return None
if name not in self.all_group.all_groups: if name not in self.all_group.all_groups:
group = MemGroup(name, self.source_dir) group = MemGroup(name, self.source_dir)
if not child: if not child:
all_group.add_child_group(group) all_group.add_child_group(group)
self.all_group.all_groups[name] = group self.all_group.all_groups[name] = group
@@ -315,7 +315,7 @@ class IniLoader(BaseLoader):
for t in tokens[1:]: for t in tokens[1:]:
k,v = t.split('=', 1) k,v = t.split('=', 1)
host.variables[k] = v host.variables[k] = v
group.add_host(host) group.add_host(host)
elif input_mode == 'children': elif input_mode == 'children':
group.child_group_by_name(line, self) group.child_group_by_name(line, self)
elif input_mode == 'vars': elif input_mode == 'vars':
@@ -328,7 +328,7 @@ class IniLoader(BaseLoader):
# from API documentation: # from API documentation:
# #
# if called with --list, inventory outputs like so: # if called with --list, inventory outputs like so:
# #
# { # {
# "databases" : { # "databases" : {
# "hosts" : [ "host1.example.com", "host2.example.com" ], # "hosts" : [ "host1.example.com", "host2.example.com" ],
@@ -581,7 +581,7 @@ class Command(NoArgsCommand):
def _get_instance_id(self, from_dict, default=''): def _get_instance_id(self, from_dict, default=''):
''' '''
Retrieve the instance ID from the given dict of host variables. Retrieve the instance ID from the given dict of host variables.
The instance ID variable may be specified as 'foo.bar', in which case The instance ID variable may be specified as 'foo.bar', in which case
the lookup will traverse into nested dicts, equivalent to: the lookup will traverse into nested dicts, equivalent to:
@@ -633,7 +633,7 @@ class Command(NoArgsCommand):
else: else:
q = dict(name=self.inventory_name) q = dict(name=self.inventory_name)
try: try:
self.inventory = Inventory.objects.filter(active=True).get(**q) self.inventory = Inventory.objects.get(**q)
except Inventory.DoesNotExist: except Inventory.DoesNotExist:
raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0]) raise CommandError('Inventory with %s = %s cannot be found' % q.items()[0])
except Inventory.MultipleObjectsReturned: except Inventory.MultipleObjectsReturned:
@@ -648,8 +648,7 @@ class Command(NoArgsCommand):
if inventory_source_id: if inventory_source_id:
try: try:
self.inventory_source = InventorySource.objects.get(pk=inventory_source_id, self.inventory_source = InventorySource.objects.get(pk=inventory_source_id,
inventory=self.inventory, inventory=self.inventory)
active=True)
except InventorySource.DoesNotExist: except InventorySource.DoesNotExist:
raise CommandError('Inventory source with id=%s not found' % raise CommandError('Inventory source with id=%s not found' %
inventory_source_id) inventory_source_id)
@@ -669,7 +668,6 @@ class Command(NoArgsCommand):
source_path=os.path.abspath(self.source), source_path=os.path.abspath(self.source),
overwrite=self.overwrite, overwrite=self.overwrite,
overwrite_vars=self.overwrite_vars, overwrite_vars=self.overwrite_vars,
active=True,
) )
self.inventory_update = self.inventory_source.create_inventory_update( self.inventory_update = self.inventory_source.create_inventory_update(
job_args=json.dumps(sys.argv), job_args=json.dumps(sys.argv),
@@ -703,7 +701,7 @@ class Command(NoArgsCommand):
host_qs = self.inventory_source.group.all_hosts host_qs = self.inventory_source.group.all_hosts
else: else:
host_qs = self.inventory.hosts.all() host_qs = self.inventory.hosts.all()
host_qs = host_qs.filter(active=True, instance_id='', host_qs = host_qs.filter(instance_id='',
variables__contains=self.instance_id_var.split('.')[0]) variables__contains=self.instance_id_var.split('.')[0])
for host in host_qs: for host in host_qs:
instance_id = self._get_instance_id(host.variables_dict) instance_id = self._get_instance_id(host.variables_dict)
@@ -740,7 +738,7 @@ class Command(NoArgsCommand):
hosts_qs = self.inventory_source.group.all_hosts hosts_qs = self.inventory_source.group.all_hosts
# FIXME: Also include hosts from inventory_source.managed_hosts? # FIXME: Also include hosts from inventory_source.managed_hosts?
else: else:
hosts_qs = self.inventory.hosts.filter(active=True) hosts_qs = self.inventory.hosts
# Build list of all host pks, remove all that should not be deleted. # Build list of all host pks, remove all that should not be deleted.
del_host_pks = set(hosts_qs.values_list('pk', flat=True)) del_host_pks = set(hosts_qs.values_list('pk', flat=True))
if self.instance_id_var: if self.instance_id_var:
@@ -765,7 +763,7 @@ class Command(NoArgsCommand):
del_pks = all_del_pks[offset:(offset + self._batch_size)] del_pks = all_del_pks[offset:(offset + self._batch_size)]
for host in hosts_qs.filter(pk__in=del_pks): for host in hosts_qs.filter(pk__in=del_pks):
host_name = host.name host_name = host.name
host.mark_inactive() host.delete()
self.logger.info('Deleted host "%s"', host_name) self.logger.info('Deleted host "%s"', host_name)
if settings.SQL_DEBUG: if settings.SQL_DEBUG:
self.logger.warning('host deletions took %d queries for %d hosts', self.logger.warning('host deletions took %d queries for %d hosts',
@@ -785,7 +783,7 @@ class Command(NoArgsCommand):
groups_qs = self.inventory_source.group.all_children groups_qs = self.inventory_source.group.all_children
# FIXME: Also include groups from inventory_source.managed_groups? # FIXME: Also include groups from inventory_source.managed_groups?
else: else:
groups_qs = self.inventory.groups.filter(active=True) groups_qs = self.inventory.groups
# Build list of all group pks, remove those that should not be deleted. # Build list of all group pks, remove those that should not be deleted.
del_group_pks = set(groups_qs.values_list('pk', flat=True)) del_group_pks = set(groups_qs.values_list('pk', flat=True))
all_group_names = self.all_group.all_groups.keys() all_group_names = self.all_group.all_groups.keys()
@@ -799,7 +797,8 @@ class Command(NoArgsCommand):
del_pks = all_del_pks[offset:(offset + self._batch_size)] del_pks = all_del_pks[offset:(offset + self._batch_size)]
for group in groups_qs.filter(pk__in=del_pks): for group in groups_qs.filter(pk__in=del_pks):
group_name = group.name group_name = group.name
group.mark_inactive(recompute=False) with ignore_inventory_computed_fields():
group.delete()
self.logger.info('Group "%s" deleted', group_name) self.logger.info('Group "%s" deleted', group_name)
if settings.SQL_DEBUG: if settings.SQL_DEBUG:
self.logger.warning('group deletions took %d queries for %d groups', self.logger.warning('group deletions took %d queries for %d groups',
@@ -821,10 +820,10 @@ class Command(NoArgsCommand):
if self.inventory_source.group: if self.inventory_source.group:
db_groups = self.inventory_source.group.all_children db_groups = self.inventory_source.group.all_children
else: else:
db_groups = self.inventory.groups.filter(active=True) db_groups = self.inventory.groups
for db_group in db_groups: for db_group in db_groups:
# Delete child group relationships not present in imported data. # Delete child group relationships not present in imported data.
db_children = db_group.children.filter(active=True) db_children = db_group.children
db_children_name_pk_map = dict(db_children.values_list('name', 'pk')) db_children_name_pk_map = dict(db_children.values_list('name', 'pk'))
mem_children = self.all_group.all_groups[db_group.name].children mem_children = self.all_group.all_groups[db_group.name].children
for mem_group in mem_children: for mem_group in mem_children:
@@ -839,7 +838,7 @@ class Command(NoArgsCommand):
db_child.name, db_group.name) db_child.name, db_group.name)
# FIXME: Inventory source group relationships # FIXME: Inventory source group relationships
# Delete group/host relationships not present in imported data. # Delete group/host relationships not present in imported data.
db_hosts = db_group.hosts.filter(active=True) db_hosts = db_group.hosts
del_host_pks = set(db_hosts.values_list('pk', flat=True)) del_host_pks = set(db_hosts.values_list('pk', flat=True))
mem_hosts = self.all_group.all_groups[db_group.name].hosts mem_hosts = self.all_group.all_groups[db_group.name].hosts
all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id] all_mem_host_names = [h.name for h in mem_hosts if not h.instance_id]
@@ -860,7 +859,7 @@ class Command(NoArgsCommand):
del_pks = del_host_pks[offset:(offset + self._batch_size)] del_pks = del_host_pks[offset:(offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks): for db_host in db_hosts.filter(pk__in=del_pks):
group_host_count += 1 group_host_count += 1
if db_host not in db_group.hosts.filter(active=True): if db_host not in db_group.hosts:
continue continue
db_group.hosts.remove(db_host) db_group.hosts.remove(db_host)
self.logger.info('Host "%s" removed from group "%s"', self.logger.info('Host "%s" removed from group "%s"',
@@ -1036,7 +1035,7 @@ class Command(NoArgsCommand):
all_host_pks = sorted(mem_host_pk_map.keys()) all_host_pks = sorted(mem_host_pk_map.keys())
for offset in xrange(0, len(all_host_pks), self._batch_size): for offset in xrange(0, len(all_host_pks), self._batch_size):
host_pks = all_host_pks[offset:(offset + self._batch_size)] host_pks = all_host_pks[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter(active=True, pk__in=host_pks): for db_host in self.inventory.hosts.filter( pk__in=host_pks):
if db_host.pk in host_pks_updated: if db_host.pk in host_pks_updated:
continue continue
mem_host = mem_host_pk_map[db_host.pk] mem_host = mem_host_pk_map[db_host.pk]
@@ -1048,7 +1047,7 @@ class Command(NoArgsCommand):
all_instance_ids = sorted(mem_host_instance_id_map.keys()) all_instance_ids = sorted(mem_host_instance_id_map.keys())
for offset in xrange(0, len(all_instance_ids), self._batch_size): for offset in xrange(0, len(all_instance_ids), self._batch_size):
instance_ids = all_instance_ids[offset:(offset + self._batch_size)] instance_ids = all_instance_ids[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter(active=True, instance_id__in=instance_ids): for db_host in self.inventory.hosts.filter( instance_id__in=instance_ids):
if db_host.pk in host_pks_updated: if db_host.pk in host_pks_updated:
continue continue
mem_host = mem_host_instance_id_map[db_host.instance_id] mem_host = mem_host_instance_id_map[db_host.instance_id]
@@ -1060,7 +1059,7 @@ class Command(NoArgsCommand):
all_host_names = sorted(mem_host_name_map.keys()) all_host_names = sorted(mem_host_name_map.keys())
for offset in xrange(0, len(all_host_names), self._batch_size): for offset in xrange(0, len(all_host_names), self._batch_size):
host_names = all_host_names[offset:(offset + self._batch_size)] host_names = all_host_names[offset:(offset + self._batch_size)]
for db_host in self.inventory.hosts.filter(active=True, name__in=host_names): for db_host in self.inventory.hosts.filter( name__in=host_names):
if db_host.pk in host_pks_updated: if db_host.pk in host_pks_updated:
continue continue
mem_host = mem_host_name_map[db_host.name] mem_host = mem_host_name_map[db_host.name]
@@ -1297,7 +1296,7 @@ class Command(NoArgsCommand):
except CommandError as e: except CommandError as e:
self.mark_license_failure(save=True) self.mark_license_failure(save=True)
raise e raise e
if self.inventory_source.group: if self.inventory_source.group:
inv_name = 'group "%s"' % (self.inventory_source.group.name) inv_name = 'group "%s"' % (self.inventory_source.group.name)
else: else:
@@ -1336,7 +1335,7 @@ class Command(NoArgsCommand):
self.inventory_update.result_traceback = tb self.inventory_update.result_traceback = tb
self.inventory_update.status = status self.inventory_update.status = status
self.inventory_update.save(update_fields=['status', 'result_traceback']) self.inventory_update.save(update_fields=['status', 'result_traceback'])
if exc and isinstance(exc, CommandError): if exc and isinstance(exc, CommandError):
sys.exit(1) sys.exit(1)
elif exc: elif exc:

View File

@@ -13,9 +13,9 @@ class HostManager(models.Manager):
def active_count(self): def active_count(self):
"""Return count of active, unique hosts for licensing.""" """Return count of active, unique hosts for licensing."""
try: try:
return self.filter(active=True, inventory__active=True).order_by('name').distinct('name').count() return self.order_by('name').distinct('name').count()
except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name') except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
return len(set(self.filter(active=True, inventory__active=True).values_list('name', flat=True))) return len(set(self.values_list('name', flat=True)))
class InstanceManager(models.Manager): class InstanceManager(models.Manager):
"""A custom manager class for the Instance model. """A custom manager class for the Instance model.

View File

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _cleanup_deleted as cleanup_deleted
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0004_v300_changes'),
]
operations = [
migrations.RunPython(cleanup_deleted.cleanup_deleted),
migrations.RemoveField(
model_name='credential',
name='active',
),
migrations.RemoveField(
model_name='custominventoryscript',
name='active',
),
migrations.RemoveField(
model_name='group',
name='active',
),
migrations.RemoveField(
model_name='host',
name='active',
),
migrations.RemoveField(
model_name='inventory',
name='active',
),
migrations.RemoveField(
model_name='notifier',
name='active',
),
migrations.RemoveField(
model_name='organization',
name='active',
),
migrations.RemoveField(
model_name='permission',
name='active',
),
migrations.RemoveField(
model_name='schedule',
name='active',
),
migrations.RemoveField(
model_name='team',
name='active',
),
migrations.RemoveField(
model_name='unifiedjob',
name='active',
),
migrations.RemoveField(
model_name='unifiedjobtemplate',
name='active',
),
]

View File

@@ -1,16 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from awx.main.migrations import _rbac as rbac
from django.db import migrations
class Migration(migrations.Migration):
    """No-op migration kept so later migrations' dependency chain resolves."""

    dependencies = [
        ('main', '0005_v300_changes'),
    ]

    operations = [
        # This is a placeholder for our future active flag removal work
    ]

View File

@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
('taggit', '0002_auto_20150616_2121'), ('taggit', '0002_auto_20150616_2121'),
('contenttypes', '0002_remove_content_type_name'), ('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL), migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0006_v300_active_flag_removal'), ('main', '0005_v300_active_flag_removal'),
] ]
operations = [ operations = [
@@ -41,7 +41,6 @@ class Migration(migrations.Migration):
('created', models.DateTimeField(default=None, editable=False)), ('created', models.DateTimeField(default=None, editable=False)),
('modified', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)),
('description', models.TextField(default=b'', blank=True)), ('description', models.TextField(default=b'', blank=True)),
('active', models.BooleanField(default=True, editable=False)),
('name', models.CharField(max_length=512)), ('name', models.CharField(max_length=512)),
('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)), ('singleton_name', models.TextField(default=None, unique=True, null=True, db_index=True)),
('object_id', models.PositiveIntegerField(default=None, null=True)), ('object_id', models.PositiveIntegerField(default=None, null=True)),

View File

@@ -8,7 +8,7 @@ from django.db import migrations
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('main', '0007_v300_rbac_changes'), ('main', '0006_v300_rbac_changes'),
] ]
operations = [ operations = [

View File

@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from __future__ import unicode_literals from __future__ import unicode_literals
from django.db import migrations, models from django.db import migrations
from django.utils.timezone import now from django.utils.timezone import now
from awx.api.license import feature_enabled from awx.api.license import feature_enabled
@@ -107,7 +107,7 @@ def create_system_job_templates(apps, schema_editor):
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('main', '0004_v300_changes'), ('main', '0007_v300_rbac_migrations'),
] ]
operations = [ operations = [

View File

@@ -0,0 +1,85 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
# Python
import logging
# Django
from django.db import transaction
from django.utils.dateparse import parse_datetime
def cleanup_deleted(apps, schema_editor):
    """Permanently remove rows that were soft-deleted under the old
    active-flag scheme.

    Intended for use as a RunPython data migration. For each model that
    still carries an ``active``/``is_active`` column, rows flagged inactive
    whose name carries the soft-delete prefix are deleted for real. The
    entire sweep runs in a single transaction.

    :param apps: historical app registry supplied by RunPython.
    :param schema_editor: unused, supplied by RunPython.
    """
    def cleanup_model(model):
        '''
        Presume the '_deleted_' string to be in the 'name' field unless considering the User model.
        When considering the User model, presume the '_d_' string to be in the 'username' field.
        Returns the number of rows removed for this model.
        '''
        logger.debug('cleaning up model %s', model)

        name_field = 'name'
        name_prefix = '_deleted_'
        active_field = None
        n_deleted_items = 0
        for field in model._meta.fields:
            if field.name in ('is_active', 'active'):
                active_field = field.name
            if field.name == 'is_active':  # is User model
                name_field = 'username'
                name_prefix = '_d_'
        if not active_field:
            # Model has already lost its flag column (or never had one);
            # nothing to sweep.
            logger.warning('skipping model %s, no active field', model)
            return n_deleted_items

        qs = model.objects.filter(**{
            active_field: False,
            '%s__startswith' % name_field: name_prefix,
        })
        pks_to_delete = set()
        for instance in qs.iterator():
            # Soft-delete names look like '_deleted_<timestamp>_<name>';
            # warn (and keep the row) when the timestamp cannot be parsed.
            dt = parse_datetime(getattr(instance, name_field).split('_')[2])
            if not dt:
                logger.warning('unable to find deleted timestamp in %s field', name_field)
            else:
                action_text = 'deleting'
                logger.info('%s %s', action_text, instance)
                n_deleted_items += 1
                # Collect the pk instead of deleting immediately so rows can
                # be removed in bulk below (the set was previously never
                # populated, leaving the batch logic dead).
                pks_to_delete.add(instance.pk)
            # Cleanup objects in batches instead of deleting each one individually.
            if len(pks_to_delete) >= 50:
                model.objects.filter(pk__in=pks_to_delete).delete()
                pks_to_delete.clear()
        if len(pks_to_delete):
            model.objects.filter(pk__in=pks_to_delete).delete()
        return n_deleted_items

    # NOTE(review): logger name mirrors the management command this was
    # copied from — confirm whether it should be
    # 'awx.main.migrations.cleanup_deleted' instead.
    logger = logging.getLogger('awx.main.commands.cleanup_deleted')
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(message)s'))
    logger.addHandler(handler)
    logger.propagate = False

    with transaction.atomic():
        n_deleted_items = 0
        models = [
            apps.get_model('auth', "User"),
            apps.get_model('main', 'Credential'),
            apps.get_model('main', 'CustomInventoryScript'),
            apps.get_model('main', 'Group'),
            apps.get_model('main', 'Host'),
            apps.get_model('main', 'Inventory'),
            apps.get_model('main', 'Notifier'),
            apps.get_model('main', 'Organization'),
            apps.get_model('main', 'Permission'),
            apps.get_model('main', 'Schedule'),
            apps.get_model('main', 'Team'),
            apps.get_model('main', 'UnifiedJob'),
            apps.get_model('main', 'UnifiedJobTemplate'),
        ]
        for model in models:
            n_deleted_items += cleanup_model(model)
        # Level 99 guarantees the summary prints regardless of configured level.
        logger.log(99, "Removed %d items", n_deleted_items)

View File

@@ -199,16 +199,16 @@ class UserAccess(BaseAccess):
model = User model = User
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(is_active=True).distinct() qs = self.model.objects.distinct()
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
if tower_settings.ORG_ADMINS_CAN_SEE_ALL_USERS and self.user.admin_of_organizations.filter(active=True).exists(): if tower_settings.ORG_ADMINS_CAN_SEE_ALL_USERS and self.user.admin_of_organizations.all().exists():
return qs return qs
return qs.filter( return qs.filter(
Q(pk=self.user.pk) | Q(pk=self.user.pk) |
Q(organizations__in=self.user.admin_of_organizations.filter(active=True)) | Q(organizations__in=self.user.admin_of_organizations) |
Q(organizations__in=self.user.organizations.filter(active=True)) | Q(organizations__in=self.user.organizations) |
Q(teams__in=self.user.teams.filter(active=True)) Q(teams__in=self.user.teams)
).distinct() ).distinct()
def can_add(self, data): def can_add(self, data):
@@ -216,7 +216,7 @@ class UserAccess(BaseAccess):
if to_python_boolean(data['is_superuser'], allow_none=True) and not self.user.is_superuser: if to_python_boolean(data['is_superuser'], allow_none=True) and not self.user.is_superuser:
return False return False
return bool(self.user.is_superuser or return bool(self.user.is_superuser or
self.user.admin_of_organizations.filter(active=True).exists()) self.user.admin_of_organizations.exists())
def can_change(self, obj, data): def can_change(self, obj, data):
if data is not None and 'is_superuser' in data: if data is not None and 'is_superuser' in data:
@@ -231,18 +231,18 @@ class UserAccess(BaseAccess):
# Admin implies changing all user fields. # Admin implies changing all user fields.
if self.user.is_superuser: if self.user.is_superuser:
return True return True
return bool(obj.organizations.filter(active=True, deprecated_admins__in=[self.user]).exists()) return bool(obj.organizations.filter(deprecated_admins__in=[self.user]).exists())
def can_delete(self, obj): def can_delete(self, obj):
if obj == self.user: if obj == self.user:
# cannot delete yourself # cannot delete yourself
return False return False
super_users = User.objects.filter(is_active=True, is_superuser=True) super_users = User.objects.filter(is_superuser=True)
if obj.is_superuser and super_users.count() == 1: if obj.is_superuser and super_users.count() == 1:
# cannot delete the last active superuser # cannot delete the last active superuser
return False return False
return bool(self.user.is_superuser or return bool(self.user.is_superuser or
obj.organizations.filter(active=True, deprecated_admins__in=[self.user]).exists()) obj.organizations.filter(deprecated_admins__in=[self.user]).exists())
class OrganizationAccess(BaseAccess): class OrganizationAccess(BaseAccess):
''' '''
@@ -257,7 +257,7 @@ class OrganizationAccess(BaseAccess):
model = Organization model = Organization
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by') qs = qs.select_related('created_by', 'modified_by')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
@@ -295,25 +295,21 @@ class InventoryAccess(BaseAccess):
def get_queryset(self, allowed=None, ad_hoc=None): def get_queryset(self, allowed=None, ad_hoc=None):
allowed = allowed or PERMISSION_TYPES_ALLOWING_INVENTORY_READ allowed = allowed or PERMISSION_TYPES_ALLOWING_INVENTORY_READ
qs = Inventory.objects.filter(active=True).distinct() qs = Inventory.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'organization') qs = qs.select_related('created_by', 'modified_by', 'organization')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
qs = qs.filter(organization__active=True)
admin_of = qs.filter(organization__deprecated_admins__in=[self.user]).distinct() admin_of = qs.filter(organization__deprecated_admins__in=[self.user]).distinct()
has_user_kw = dict( has_user_kw = dict(
permissions__user__in=[self.user], permissions__user__in=[self.user],
permissions__permission_type__in=allowed, permissions__permission_type__in=allowed,
permissions__active=True,
) )
if ad_hoc is not None: if ad_hoc is not None:
has_user_kw['permissions__run_ad_hoc_commands'] = ad_hoc has_user_kw['permissions__run_ad_hoc_commands'] = ad_hoc
has_user_perms = qs.filter(**has_user_kw).distinct() has_user_perms = qs.filter(**has_user_kw).distinct()
has_team_kw = dict( has_team_kw = dict(
permissions__team__deprecated_users__in=[self.user], permissions__team__deprecated_users__in=[self.user],
permissions__team__active=True,
permissions__permission_type__in=allowed, permissions__permission_type__in=allowed,
permissions__active=True,
) )
if ad_hoc is not None: if ad_hoc is not None:
has_team_kw['permissions__run_ad_hoc_commands'] = ad_hoc has_team_kw['permissions__run_ad_hoc_commands'] = ad_hoc
@@ -330,7 +326,7 @@ class InventoryAccess(BaseAccess):
# If no data is specified, just checking for generic add permission? # If no data is specified, just checking for generic add permission?
if not data: if not data:
return bool(self.user.is_superuser or return bool(self.user.is_superuser or
self.user.admin_of_organizations.filter(active=True).exists()) self.user.admin_of_organizations.exists())
# Otherwise, verify that the user has access to change the parent # Otherwise, verify that the user has access to change the parent
# organization of this inventory. # organization of this inventory.
if self.user.is_superuser: if self.user.is_superuser:
@@ -379,7 +375,7 @@ class HostAccess(BaseAccess):
model = Host model = Host
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'inventory',
'last_job__job_template', 'last_job__job_template',
'last_job_host_summary__job') 'last_job_host_summary__job')
@@ -435,7 +431,7 @@ class GroupAccess(BaseAccess):
model = Group model = Group
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory') qs = qs.select_related('created_by', 'modified_by', 'inventory')
qs = qs.prefetch_related('parents', 'children', 'inventory_source') qs = qs.prefetch_related('parents', 'children', 'inventory_source')
inventory_ids = set(self.user.get_queryset(Inventory).values_list('id', flat=True)) inventory_ids = set(self.user.get_queryset(Inventory).values_list('id', flat=True))
@@ -466,9 +462,6 @@ class GroupAccess(BaseAccess):
if not super(GroupAccess, self).can_attach(obj, sub_obj, relationship, if not super(GroupAccess, self).can_attach(obj, sub_obj, relationship,
data, skip_sub_obj_read_check): data, skip_sub_obj_read_check):
return False return False
# Don't allow attaching if the sub obj is not active
if not obj.active:
return False
# Prevent assignments between different inventories. # Prevent assignments between different inventories.
if obj.inventory != sub_obj.inventory: if obj.inventory != sub_obj.inventory:
raise ParseError('Cannot associate two items from different inventories') raise ParseError('Cannot associate two items from different inventories')
@@ -495,7 +488,7 @@ class InventorySourceAccess(BaseAccess):
model = InventorySource model = InventorySource
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'group', 'inventory') qs = qs.select_related('created_by', 'modified_by', 'group', 'inventory')
inventory_ids = set(self.user.get_queryset(Inventory).values_list('id', flat=True)) inventory_ids = set(self.user.get_queryset(Inventory).values_list('id', flat=True))
return qs.filter(Q(inventory_id__in=inventory_ids) | return qs.filter(Q(inventory_id__in=inventory_ids) |
@@ -535,7 +528,7 @@ class InventoryUpdateAccess(BaseAccess):
model = InventoryUpdate model = InventoryUpdate
def get_queryset(self): def get_queryset(self):
qs = InventoryUpdate.objects.filter(active=True).distinct() qs = InventoryUpdate.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory_source__group', qs = qs.select_related('created_by', 'modified_by', 'inventory_source__group',
'inventory_source__inventory') 'inventory_source__inventory')
inventory_sources_qs = self.user.get_queryset(InventorySource) inventory_sources_qs = self.user.get_queryset(InventorySource)
@@ -569,19 +562,19 @@ class CredentialAccess(BaseAccess):
# Create a base queryset. # Create a base queryset.
# If the user is a superuser, and therefore can see everything, this # If the user is a superuser, and therefore can see everything, this
# is also sufficient, and we are done. # is also sufficient, and we are done.
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'user', 'team') qs = qs.select_related('created_by', 'modified_by', 'user', 'team')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
# Get the list of organizations for which the user is an admin # Get the list of organizations for which the user is an admin
orgs_as_admin_ids = set(self.user.admin_of_organizations.filter(active=True).values_list('id', flat=True)) orgs_as_admin_ids = set(self.user.admin_of_organizations.values_list('id', flat=True))
return qs.filter( return qs.filter(
Q(user=self.user) | Q(user=self.user) |
Q(user__organizations__id__in=orgs_as_admin_ids) | Q(user__organizations__id__in=orgs_as_admin_ids) |
Q(user__admin_of_organizations__id__in=orgs_as_admin_ids) | Q(user__admin_of_organizations__id__in=orgs_as_admin_ids) |
Q(team__organization__id__in=orgs_as_admin_ids, team__active=True) | Q(team__organization__id__in=orgs_as_admin_ids) |
Q(team__deprecated_users__in=[self.user], team__active=True) Q(team__deprecated_users__in=[self.user])
) )
def can_add(self, data): def can_add(self, data):
@@ -607,12 +600,12 @@ class CredentialAccess(BaseAccess):
if obj.user: if obj.user:
if self.user == obj.user: if self.user == obj.user:
return True return True
if obj.user.organizations.filter(active=True, deprecated_admins__in=[self.user]).exists(): if obj.user.organizations.filter(deprecated_admins__in=[self.user]).exists():
return True return True
if obj.user.admin_of_organizations.filter(active=True, deprecated_admins__in=[self.user]).exists(): if obj.user.admin_of_organizations.filter(deprecated_admins__in=[self.user]).exists():
return True return True
if obj.team: if obj.team:
if self.user in obj.team.organization.deprecated_admins.filter(is_active=True): if self.user in obj.team.organization.deprecated_admins.all():
return True return True
return False return False
@@ -637,12 +630,12 @@ class TeamAccess(BaseAccess):
model = Team model = Team
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'organization') qs = qs.select_related('created_by', 'modified_by', 'organization')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
return qs.filter( return qs.filter(
Q(organization__deprecated_admins__in=[self.user], organization__active=True) | Q(organization__deprecated_admins__in=[self.user]) |
Q(deprecated_users__in=[self.user]) Q(deprecated_users__in=[self.user])
) )
@@ -689,26 +682,24 @@ class ProjectAccess(BaseAccess):
model = Project model = Project
def get_queryset(self): def get_queryset(self):
qs = Project.objects.filter(active=True).distinct() qs = Project.objects.distinct()
qs = qs.select_related('modified_by', 'credential', 'current_job', 'last_job') qs = qs.select_related('modified_by', 'credential', 'current_job', 'last_job')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
team_ids = set(Team.objects.filter(deprecated_users__in=[self.user]).values_list('id', flat=True)) team_ids = set(Team.objects.filter(deprecated_users__in=[self.user]).values_list('id', flat=True))
qs = qs.filter(Q(created_by=self.user, deprecated_organizations__isnull=True) | qs = qs.filter(Q(created_by=self.user, deprecated_organizations__isnull=True) |
Q(deprecated_organizations__deprecated_admins__in=[self.user], deprecated_organizations__active=True) | Q(deprecated_organizations__deprecated_admins__in=[self.user]) |
Q(deprecated_organizations__deprecated_users__in=[self.user], deprecated_organizations__active=True) | Q(deprecated_organizations__deprecated_users__in=[self.user]) |
Q(teams__in=team_ids)) Q(teams__in=team_ids))
allowed_deploy = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY] allowed_deploy = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY]
allowed_check = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY, PERM_INVENTORY_CHECK] allowed_check = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY, PERM_INVENTORY_CHECK]
deploy_permissions_ids = set(Permission.objects.filter( deploy_permissions_ids = set(Permission.objects.filter(
Q(user=self.user) | Q(team_id__in=team_ids), Q(user=self.user) | Q(team_id__in=team_ids),
active=True,
permission_type__in=allowed_deploy, permission_type__in=allowed_deploy,
).values_list('id', flat=True)) ).values_list('id', flat=True))
check_permissions_ids = set(Permission.objects.filter( check_permissions_ids = set(Permission.objects.filter(
Q(user=self.user) | Q(team_id__in=team_ids), Q(user=self.user) | Q(team_id__in=team_ids),
active=True,
permission_type__in=allowed_check, permission_type__in=allowed_check,
).values_list('id', flat=True)) ).values_list('id', flat=True))
@@ -719,16 +710,16 @@ class ProjectAccess(BaseAccess):
def can_add(self, data): def can_add(self, data):
if self.user.is_superuser: if self.user.is_superuser:
return True return True
if self.user.admin_of_organizations.filter(active=True).exists(): if self.user.admin_of_organizations.exists():
return True return True
return False return False
def can_change(self, obj, data): def can_change(self, obj, data):
if self.user.is_superuser: if self.user.is_superuser:
return True return True
if obj.created_by == self.user and not obj.deprecated_organizations.filter(active=True).count(): if obj.created_by == self.user and not obj.deprecated_organizations.count():
return True return True
if obj.deprecated_organizations.filter(active=True, deprecated_admins__in=[self.user]).exists(): if obj.deprecated_organizations.filter(deprecated_admins__in=[self.user]).exists():
return True return True
return False return False
@@ -748,7 +739,7 @@ class ProjectUpdateAccess(BaseAccess):
model = ProjectUpdate model = ProjectUpdate
def get_queryset(self): def get_queryset(self):
qs = ProjectUpdate.objects.filter(active=True).distinct() qs = ProjectUpdate.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'project') qs = qs.select_related('created_by', 'modified_by', 'project')
project_ids = set(self.user.get_queryset(Project).values_list('id', flat=True)) project_ids = set(self.user.get_queryset(Project).values_list('id', flat=True))
return qs.filter(project_id__in=project_ids) return qs.filter(project_id__in=project_ids)
@@ -776,18 +767,18 @@ class PermissionAccess(BaseAccess):
model = Permission model = Permission
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'user', 'team', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'user', 'team', 'inventory',
'project') 'project')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
orgs_as_admin_ids = set(self.user.admin_of_organizations.filter(active=True).values_list('id', flat=True)) orgs_as_admin_ids = set(self.user.admin_of_organizations.values_list('id', flat=True))
return qs.filter( return qs.filter(
Q(user__organizations__in=orgs_as_admin_ids) | Q(user__organizations__in=orgs_as_admin_ids) |
Q(user__admin_of_organizations__in=orgs_as_admin_ids) | Q(user__admin_of_organizations__in=orgs_as_admin_ids) |
Q(team__organization__in=orgs_as_admin_ids, team__active=True) | Q(team__organization__in=orgs_as_admin_ids) |
Q(user=self.user) | Q(user=self.user) |
Q(team__deprecated_users__in=[self.user], team__active=True) Q(team__deprecated_users__in=[self.user])
) )
def can_add(self, data): def can_add(self, data):
@@ -868,7 +859,7 @@ class JobTemplateAccess(BaseAccess):
model = JobTemplate model = JobTemplate
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory', 'project', qs = qs.select_related('created_by', 'modified_by', 'inventory', 'project',
'credential', 'cloud_credential', 'next_schedule') 'credential', 'cloud_credential', 'next_schedule')
if self.user.is_superuser: if self.user.is_superuser:
@@ -892,12 +883,10 @@ class JobTemplateAccess(BaseAccess):
# TODO: I think the below queries can be combined # TODO: I think the below queries can be combined
deploy_permissions_ids = Permission.objects.filter( deploy_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team_id__in=team_ids), Q(user=self.user) | Q(team_id__in=team_ids),
active=True,
permission_type__in=allowed_deploy, permission_type__in=allowed_deploy,
) )
check_permissions_ids = Permission.objects.filter( check_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team_id__in=team_ids), Q(user=self.user) | Q(team_id__in=team_ids),
active=True,
permission_type__in=allowed_check, permission_type__in=allowed_check,
) )
@@ -986,7 +975,6 @@ class JobTemplateAccess(BaseAccess):
Q(user=self.user) | Q(team__deprecated_users__in=[self.user]), Q(user=self.user) | Q(team__deprecated_users__in=[self.user]),
inventory=inventory, inventory=inventory,
project=project, project=project,
active=True,
#permission_type__in=[PERM_INVENTORY_CHECK, PERM_INVENTORY_DEPLOY], #permission_type__in=[PERM_INVENTORY_CHECK, PERM_INVENTORY_DEPLOY],
permission_type=PERM_JOBTEMPLATE_CREATE, permission_type=PERM_JOBTEMPLATE_CREATE,
) )
@@ -1044,7 +1032,6 @@ class JobTemplateAccess(BaseAccess):
Q(user=self.user) | Q(team__deprecated_users__in=[self.user]), Q(user=self.user) | Q(team__deprecated_users__in=[self.user]),
inventory=obj.inventory, inventory=obj.inventory,
project=obj.project, project=obj.project,
active=True,
permission_type__in=[PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_CHECK, PERM_INVENTORY_DEPLOY], permission_type__in=[PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_CHECK, PERM_INVENTORY_DEPLOY],
) )
@@ -1086,7 +1073,7 @@ class JobAccess(BaseAccess):
model = Job model = Job
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'job_template', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'job_template', 'inventory',
'project', 'credential', 'cloud_credential', 'job_template') 'project', 'credential', 'cloud_credential', 'job_template')
qs = qs.prefetch_related('unified_job_template') qs = qs.prefetch_related('unified_job_template')
@@ -1108,12 +1095,10 @@ class JobAccess(BaseAccess):
# TODO: I think the below queries can be combined # TODO: I think the below queries can be combined
deploy_permissions_ids = Permission.objects.filter( deploy_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=allowed_deploy, permission_type__in=allowed_deploy,
) )
check_permissions_ids = Permission.objects.filter( check_permissions_ids = Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=allowed_check, permission_type__in=allowed_check,
) )
@@ -1212,18 +1197,17 @@ class AdHocCommandAccess(BaseAccess):
model = AdHocCommand model = AdHocCommand
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by', 'inventory', qs = qs.select_related('created_by', 'modified_by', 'inventory',
'credential') 'credential')
if self.user.is_superuser: if self.user.is_superuser:
return qs return qs
credential_ids = set(self.user.get_queryset(Credential).values_list('id', flat=True)) credential_ids = set(self.user.get_queryset(Credential).values_list('id', flat=True))
team_ids = set(Team.objects.filter(active=True, deprecated_users__in=[self.user]).values_list('id', flat=True)) team_ids = set(Team.objects.filter(deprecated_users__in=[self.user]).values_list('id', flat=True))
permission_ids = set(Permission.objects.filter( permission_ids = set(Permission.objects.filter(
Q(user=self.user) | Q(team__in=team_ids), Q(user=self.user) | Q(team__in=team_ids),
active=True,
permission_type__in=PERMISSION_TYPES_ALLOWING_INVENTORY_READ, permission_type__in=PERMISSION_TYPES_ALLOWING_INVENTORY_READ,
run_ad_hoc_commands=True, run_ad_hoc_commands=True,
).values_list('id', flat=True)) ).values_list('id', flat=True))
@@ -1247,7 +1231,7 @@ class AdHocCommandAccess(BaseAccess):
# If a credential is provided, the user should have read access to it. # If a credential is provided, the user should have read access to it.
credential_pk = get_pk_from_dict(data, 'credential') credential_pk = get_pk_from_dict(data, 'credential')
if credential_pk: if credential_pk:
credential = get_object_or_400(Credential, pk=credential_pk, active=True) credential = get_object_or_400(Credential, pk=credential_pk)
if not check_user_access(self.user, Credential, 'read', credential): if not check_user_access(self.user, Credential, 'read', credential):
return False return False
@@ -1255,7 +1239,7 @@ class AdHocCommandAccess(BaseAccess):
# given inventory. # given inventory.
inventory_pk = get_pk_from_dict(data, 'inventory') inventory_pk = get_pk_from_dict(data, 'inventory')
if inventory_pk: if inventory_pk:
inventory = get_object_or_400(Inventory, pk=inventory_pk, active=True) inventory = get_object_or_400(Inventory, pk=inventory_pk)
if not check_user_access(self.user, Inventory, 'run_ad_hoc_commands', inventory): if not check_user_access(self.user, Inventory, 'run_ad_hoc_commands', inventory):
return False return False
@@ -1375,7 +1359,7 @@ class UnifiedJobTemplateAccess(BaseAccess):
model = UnifiedJobTemplate model = UnifiedJobTemplate
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
project_qs = self.user.get_queryset(Project).filter(scm_type__in=[s[0] for s in Project.SCM_TYPE_CHOICES]) project_qs = self.user.get_queryset(Project).filter(scm_type__in=[s[0] for s in Project.SCM_TYPE_CHOICES])
inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES) inventory_source_qs = self.user.get_queryset(InventorySource).filter(source__in=CLOUD_INVENTORY_SOURCES)
job_template_qs = self.user.get_queryset(JobTemplate) job_template_qs = self.user.get_queryset(JobTemplate)
@@ -1405,7 +1389,7 @@ class UnifiedJobAccess(BaseAccess):
model = UnifiedJob model = UnifiedJob
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
project_update_qs = self.user.get_queryset(ProjectUpdate) project_update_qs = self.user.get_queryset(ProjectUpdate)
inventory_update_qs = self.user.get_queryset(InventoryUpdate).filter(source__in=CLOUD_INVENTORY_SOURCES) inventory_update_qs = self.user.get_queryset(InventoryUpdate).filter(source__in=CLOUD_INVENTORY_SOURCES)
job_qs = self.user.get_queryset(Job) job_qs = self.user.get_queryset(Job)
@@ -1442,7 +1426,7 @@ class ScheduleAccess(BaseAccess):
model = Schedule model = Schedule
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
qs = qs.select_related('created_by', 'modified_by') qs = qs.select_related('created_by', 'modified_by')
qs = qs.prefetch_related('unified_job_template') qs = qs.prefetch_related('unified_job_template')
if self.user.is_superuser: if self.user.is_superuser:
@@ -1614,7 +1598,7 @@ class CustomInventoryScriptAccess(BaseAccess):
model = CustomInventoryScript model = CustomInventoryScript
def get_queryset(self): def get_queryset(self):
qs = self.model.objects.filter(active=True).distinct() qs = self.model.objects.distinct()
if not self.user.is_superuser: if not self.user.is_superuser:
qs = qs.filter(Q(organization__deprecated_admins__in=[self.user]) | Q(organization__deprecated_users__in=[self.user])) qs = qs.filter(Q(organization__deprecated_admins__in=[self.user]) | Q(organization__deprecated_users__in=[self.user]))
return qs return qs
@@ -1622,8 +1606,6 @@ class CustomInventoryScriptAccess(BaseAccess):
def can_read(self, obj): def can_read(self, obj):
if self.user.is_superuser: if self.user.is_superuser:
return True return True
if not obj.active:
return False
return bool(obj.organization in self.user.organizations.all() or obj.organization in self.user.admin_of_organizations.all()) return bool(obj.organization in self.user.organizations.all() or obj.organization in self.user.admin_of_organizations.all())
def can_add(self, data): def can_add(self, data):

View File

@@ -73,7 +73,7 @@ def migrate_inventory(apps, schema_editor):
for inventory in Inventory.objects.all(): for inventory in Inventory.objects.all():
teams, users = [], [] teams, users = [], []
for perm in Permission.objects.filter(inventory=inventory, active=True): for perm in Permission.objects.filter(inventory=inventory):
role = None role = None
execrole = None execrole = None
if perm.permission_type == 'admin': if perm.permission_type == 'admin':
@@ -186,7 +186,7 @@ def migrate_projects(apps, schema_editor):
project.member_role.members.add(user) project.member_role.members.add(user)
migrations[project.name]['users'].add(user) migrations[project.name]['users'].add(user)
for perm in Permission.objects.filter(project=project, active=True): for perm in Permission.objects.filter(project=project):
# All perms at this level just imply a user or team can read # All perms at this level just imply a user or team can read
if perm.team: if perm.team:
perm.team.member_role.children.add(project.member_role) perm.team.member_role.children.add(project.member_role)
@@ -253,7 +253,6 @@ def migrate_job_templates(apps, schema_editor):
permission = Permission.objects.filter( permission = Permission.objects.filter(
inventory=jt.inventory, inventory=jt.inventory,
project=jt.project, project=jt.project,
active=True,
permission_type__in=['create', 'check', 'run'] if jt.job_type == 'check' else ['create', 'run'], permission_type__in=['create', 'check', 'run'] if jt.job_type == 'check' else ['create', 'run'],
) )

View File

@@ -87,7 +87,7 @@ class AdHocCommand(UnifiedJob):
def clean_inventory(self): def clean_inventory(self):
inv = self.inventory inv = self.inventory
if not inv or not inv.active: if not inv:
raise ValidationError('Inventory is no longer available.') raise ValidationError('Inventory is no longer available.')
return inv return inv
@@ -123,7 +123,7 @@ class AdHocCommand(UnifiedJob):
@property @property
def passwords_needed_to_start(self): def passwords_needed_to_start(self):
'''Return list of password field names needed to start the job.''' '''Return list of password field names needed to start the job.'''
if self.credential and self.credential.active: if self.credential:
return self.credential.passwords_needed return self.credential.passwords_needed
else: else:
return [] return []
@@ -164,14 +164,14 @@ class AdHocCommand(UnifiedJob):
def task_impact(self): def task_impact(self):
# NOTE: We sorta have to assume the host count matches and that forks default to 5 # NOTE: We sorta have to assume the host count matches and that forks default to 5
from awx.main.models.inventory import Host from awx.main.models.inventory import Host
count_hosts = Host.objects.filter(active=True, enabled=True, inventory__ad_hoc_commands__pk=self.pk).count() count_hosts = Host.objects.filter( enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10 return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10
def generate_dependencies(self, active_tasks): def generate_dependencies(self, active_tasks):
from awx.main.models import InventoryUpdate from awx.main.models import InventoryUpdate
if not self.inventory: if not self.inventory:
return [] return []
inventory_sources = self.inventory.inventory_sources.filter(active=True, update_on_launch=True) inventory_sources = self.inventory.inventory_sources.filter( update_on_launch=True)
inventory_sources_found = [] inventory_sources_found = []
dependencies = [] dependencies = []
for obj in active_tasks: for obj in active_tasks:

View File

@@ -203,15 +203,6 @@ class PasswordFieldsModel(BaseModel):
def _password_field_allows_ask(self, field): def _password_field_allows_ask(self, field):
return False # Override in subclasses if needed. return False # Override in subclasses if needed.
def mark_inactive(self, save=True):
'''
When marking a password model inactive we'll clear sensitive fields
'''
for sensitive_field in self.PASSWORD_FIELDS:
setattr(self, sensitive_field, "")
self.save()
super(PasswordFieldsModel, self).mark_inactive(save=save)
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
new_instance = not bool(self.pk) new_instance = not bool(self.pk)
# If update_fields has been specified, add our field names to it, # If update_fields has been specified, add our field names to it,
@@ -273,29 +264,9 @@ class PrimordialModel(CreatedModifiedModel):
editable=False, editable=False,
on_delete=models.SET_NULL, on_delete=models.SET_NULL,
) )
active = models.BooleanField(
default=True,
editable=False,
)
tags = TaggableManager(blank=True) tags = TaggableManager(blank=True)
def mark_inactive(self, save=True, update_fields=None, skip_active_check=False):
'''Use instead of delete to rename and mark inactive.'''
update_fields = update_fields or []
if skip_active_check or self.active:
dtnow = now()
if 'name' in self._meta.get_all_field_names():
self.name = "_deleted_%s_%s" % (dtnow.isoformat(), self.name)
if 'name' not in update_fields:
update_fields.append('name')
self.active = False
if 'active' not in update_fields:
update_fields.append('active')
if save:
self.save(update_fields=update_fields)
return update_fields
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', []) update_fields = kwargs.get('update_fields', [])
user = get_current_user() user = get_current_user()

View File

@@ -26,7 +26,7 @@ from awx.main.models.jobs import Job
from awx.main.models.unified_jobs import * # noqa from awx.main.models.unified_jobs import * # noqa
from awx.main.models.mixins import ResourceMixin from awx.main.models.mixins import ResourceMixin
from awx.main.models.notifications import Notifier from awx.main.models.notifications import Notifier
from awx.main.utils import ignore_inventory_computed_fields, _inventory_updates from awx.main.utils import _inventory_updates
from awx.main.conf import tower_settings from awx.main.conf import tower_settings
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript'] __all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'CustomInventoryScript']
@@ -120,30 +120,15 @@ class Inventory(CommonModel, ResourceMixin):
def get_absolute_url(self): def get_absolute_url(self):
return reverse('api:inventory_detail', args=(self.pk,)) return reverse('api:inventory_detail', args=(self.pk,))
def mark_inactive(self, save=True):
'''
When marking inventory inactive, also mark hosts and groups inactive.
'''
with ignore_inventory_computed_fields():
for host in self.hosts.filter(active=True):
host.mark_inactive()
for group in self.groups.filter(active=True):
group.mark_inactive(recompute=False)
for inventory_source in self.inventory_sources.filter(active=True):
inventory_source.mark_inactive()
super(Inventory, self).mark_inactive(save=save)
variables_dict = VarsDictProperty('variables') variables_dict = VarsDictProperty('variables')
def get_group_hosts_map(self, active=None): def get_group_hosts_map(self):
''' '''
Return dictionary mapping group_id to set of child host_id's. Return dictionary mapping group_id to set of child host_id's.
''' '''
# FIXME: Cache this mapping? # FIXME: Cache this mapping?
group_hosts_kw = dict(group__inventory_id=self.pk, host__inventory_id=self.pk) group_hosts_kw = dict(group__inventory_id=self.pk, host__inventory_id=self.pk)
if active is not None:
group_hosts_kw['group__active'] = active
group_hosts_kw['host__active'] = active
group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw) group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id') group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id')
group_hosts_map = {} group_hosts_map = {}
@@ -152,15 +137,12 @@ class Inventory(CommonModel, ResourceMixin):
group_host_ids.add(host_id) group_host_ids.add(host_id)
return group_hosts_map return group_hosts_map
def get_group_parents_map(self, active=None): def get_group_parents_map(self):
''' '''
Return dictionary mapping group_id to set of parent group_id's. Return dictionary mapping group_id to set of parent group_id's.
''' '''
# FIXME: Cache this mapping? # FIXME: Cache this mapping?
group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk) group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
if active is not None:
group_parents_kw['from_group__active'] = active
group_parents_kw['to_group__active'] = active
group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw) group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id') group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id')
group_parents_map = {} group_parents_map = {}
@@ -169,15 +151,12 @@ class Inventory(CommonModel, ResourceMixin):
group_parents.add(to_group_id) group_parents.add(to_group_id)
return group_parents_map return group_parents_map
def get_group_children_map(self, active=None): def get_group_children_map(self):
''' '''
Return dictionary mapping group_id to set of child group_id's. Return dictionary mapping group_id to set of child group_id's.
''' '''
# FIXME: Cache this mapping? # FIXME: Cache this mapping?
group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk) group_parents_kw = dict(from_group__inventory_id=self.pk, to_group__inventory_id=self.pk)
if active is not None:
group_parents_kw['from_group__active'] = active
group_parents_kw['to_group__active'] = active
group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw) group_parents_qs = Group.parents.through.objects.filter(**group_parents_kw)
group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id') group_parents_qs = group_parents_qs.values_list('from_group_id', 'to_group_id')
group_children_map = {} group_children_map = {}
@@ -188,12 +167,12 @@ class Inventory(CommonModel, ResourceMixin):
def update_host_computed_fields(self): def update_host_computed_fields(self):
''' '''
Update computed fields for all active hosts in this inventory. Update computed fields for all hosts in this inventory.
''' '''
hosts_to_update = {} hosts_to_update = {}
hosts_qs = self.hosts.filter(active=True) hosts_qs = self.hosts
# Define queryset of all hosts with active failures. # Define queryset of all hosts with active failures.
hosts_with_active_failures = hosts_qs.filter(last_job_host_summary__isnull=False, last_job_host_summary__job__active=True, last_job_host_summary__failed=True).values_list('pk', flat=True) hosts_with_active_failures = hosts_qs.filter(last_job_host_summary__isnull=False, last_job_host_summary__failed=True).values_list('pk', flat=True)
# Find all hosts that need the has_active_failures flag set. # Find all hosts that need the has_active_failures flag set.
hosts_to_set = hosts_qs.filter(has_active_failures=False, pk__in=hosts_with_active_failures) hosts_to_set = hosts_qs.filter(has_active_failures=False, pk__in=hosts_with_active_failures)
for host_pk in hosts_to_set.values_list('pk', flat=True): for host_pk in hosts_to_set.values_list('pk', flat=True):
@@ -205,7 +184,7 @@ class Inventory(CommonModel, ResourceMixin):
host_updates = hosts_to_update.setdefault(host_pk, {}) host_updates = hosts_to_update.setdefault(host_pk, {})
host_updates['has_active_failures'] = False host_updates['has_active_failures'] = False
# Define queryset of all hosts with cloud inventory sources. # Define queryset of all hosts with cloud inventory sources.
hosts_with_cloud_inventory = hosts_qs.filter(inventory_sources__active=True, inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True) hosts_with_cloud_inventory = hosts_qs.filter(inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True)
# Find all hosts that need the has_inventory_sources flag set. # Find all hosts that need the has_inventory_sources flag set.
hosts_to_set = hosts_qs.filter(has_inventory_sources=False, pk__in=hosts_with_cloud_inventory) hosts_to_set = hosts_qs.filter(has_inventory_sources=False, pk__in=hosts_with_cloud_inventory)
for host_pk in hosts_to_set.values_list('pk', flat=True): for host_pk in hosts_to_set.values_list('pk', flat=True):
@@ -230,13 +209,13 @@ class Inventory(CommonModel, ResourceMixin):
''' '''
Update computed fields for all active groups in this inventory. Update computed fields for all active groups in this inventory.
''' '''
group_children_map = self.get_group_children_map(active=True) group_children_map = self.get_group_children_map()
group_hosts_map = self.get_group_hosts_map(active=True) group_hosts_map = self.get_group_hosts_map()
active_host_pks = set(self.hosts.filter(active=True).values_list('pk', flat=True)) active_host_pks = set(self.hosts.values_list('pk', flat=True))
failed_host_pks = set(self.hosts.filter(active=True, last_job_host_summary__job__active=True, last_job_host_summary__failed=True).values_list('pk', flat=True)) failed_host_pks = set(self.hosts.filter(last_job_host_summary__failed=True).values_list('pk', flat=True))
# active_group_pks = set(self.groups.filter(active=True).values_list('pk', flat=True)) # active_group_pks = set(self.groups.values_list('pk', flat=True))
failed_group_pks = set() # Update below as we check each group. failed_group_pks = set() # Update below as we check each group.
groups_with_cloud_pks = set(self.groups.filter(active=True, inventory_sources__active=True, inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True)) groups_with_cloud_pks = set(self.groups.filter(inventory_sources__source__in=CLOUD_INVENTORY_SOURCES).values_list('pk', flat=True))
groups_to_update = {} groups_to_update = {}
# Build list of group pks to check, starting with the groups at the # Build list of group pks to check, starting with the groups at the
@@ -308,11 +287,11 @@ class Inventory(CommonModel, ResourceMixin):
self.update_host_computed_fields() self.update_host_computed_fields()
if update_groups: if update_groups:
self.update_group_computed_fields() self.update_group_computed_fields()
active_hosts = self.hosts.filter(active=True) active_hosts = self.hosts
failed_hosts = active_hosts.filter(has_active_failures=True) failed_hosts = active_hosts.filter(has_active_failures=True)
active_groups = self.groups.filter(active=True) active_groups = self.groups
failed_groups = active_groups.filter(has_active_failures=True) failed_groups = active_groups.filter(has_active_failures=True)
active_inventory_sources = self.inventory_sources.filter(active=True, source__in=CLOUD_INVENTORY_SOURCES) active_inventory_sources = self.inventory_sources.filter( source__in=CLOUD_INVENTORY_SOURCES)
failed_inventory_sources = active_inventory_sources.filter(last_job_failed=True) failed_inventory_sources = active_inventory_sources.filter(last_job_failed=True)
computed_fields = { computed_fields = {
'has_active_failures': bool(failed_hosts.count()), 'has_active_failures': bool(failed_hosts.count()),
@@ -412,24 +391,13 @@ class Host(CommonModelNameNotUnique, ResourceMixin):
def get_absolute_url(self): def get_absolute_url(self):
return reverse('api:host_detail', args=(self.pk,)) return reverse('api:host_detail', args=(self.pk,))
def mark_inactive(self, save=True, from_inventory_import=False, skip_active_check=False):
'''
When marking hosts inactive, remove all associations to related
inventory sources.
'''
super(Host, self).mark_inactive(save=save, skip_active_check=skip_active_check)
if not from_inventory_import:
self.inventory_sources.clear()
def update_computed_fields(self, update_inventory=True, update_groups=True): def update_computed_fields(self, update_inventory=True, update_groups=True):
''' '''
Update model fields that are computed from database relationships. Update model fields that are computed from database relationships.
''' '''
has_active_failures = bool(self.last_job_host_summary and has_active_failures = bool(self.last_job_host_summary and
self.last_job_host_summary.job.active and
self.last_job_host_summary.failed) self.last_job_host_summary.failed)
active_inventory_sources = self.inventory_sources.filter(active=True, active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
source__in=CLOUD_INVENTORY_SOURCES)
computed_fields = { computed_fields = {
'has_active_failures': has_active_failures, 'has_active_failures': has_active_failures,
'has_inventory_sources': bool(active_inventory_sources.count()), 'has_inventory_sources': bool(active_inventory_sources.count()),
@@ -445,7 +413,7 @@ class Host(CommonModelNameNotUnique, ResourceMixin):
# change. # change.
# NOTE: I think this is no longer needed # NOTE: I think this is no longer needed
# if update_groups: # if update_groups:
# for group in self.all_groups.filter(active=True): # for group in self.all_groups:
# group.update_computed_fields() # group.update_computed_fields()
# if update_inventory: # if update_inventory:
# self.inventory.update_computed_fields(update_groups=False, # self.inventory.update_computed_fields(update_groups=False,
@@ -575,11 +543,11 @@ class Group(CommonModelNameNotUnique, ResourceMixin):
return reverse('api:group_detail', args=(self.pk,)) return reverse('api:group_detail', args=(self.pk,))
@transaction.atomic @transaction.atomic
def mark_inactive_recursive(self): def delete_recursive(self):
from awx.main.tasks import bulk_inventory_element_delete
from awx.main.utils import ignore_inventory_computed_fields from awx.main.utils import ignore_inventory_computed_fields
from awx.main.signals import disable_activity_stream from awx.main.signals import disable_activity_stream
def mark_actual(): def mark_actual():
all_group_hosts = Group.hosts.through.objects.select_related("host", "group").filter(group__inventory=self.inventory) all_group_hosts = Group.hosts.through.objects.select_related("host", "group").filter(group__inventory=self.inventory)
group_hosts = {'groups': {}, 'hosts': {}} group_hosts = {'groups': {}, 'hosts': {}}
@@ -629,51 +597,24 @@ class Group(CommonModelNameNotUnique, ResourceMixin):
for direct_child in group_children[group]: for direct_child in group_children[group]:
linked_children.append((group, direct_child)) linked_children.append((group, direct_child))
marked_groups.append(group) marked_groups.append(group)
Group.objects.filter(id__in=marked_groups).update(active=False) Group.objects.filter(id__in=marked_groups).delete()
Host.objects.filter(id__in=marked_hosts).update(active=False) Host.objects.filter(id__in=marked_hosts).delete()
Group.parents.through.objects.filter(to_group__id__in=marked_groups) update_inventory_computed_fields.delay(self.inventory.id)
Group.hosts.through.objects.filter(group__id__in=marked_groups)
Group.inventory_sources.through.objects.filter(group__id__in=marked_groups).delete()
bulk_inventory_element_delete.delay(self.inventory.id, groups=marked_groups, hosts=marked_hosts)
with ignore_inventory_computed_fields(): with ignore_inventory_computed_fields():
with disable_activity_stream(): with disable_activity_stream():
mark_actual() mark_actual()
def mark_inactive(self, save=True, recompute=True, from_inventory_import=False, skip_active_check=False):
'''
When marking groups inactive, remove all associations to related
groups/hosts/inventory_sources.
'''
def mark_actual():
super(Group, self).mark_inactive(save=save, skip_active_check=skip_active_check)
self.inventory_source.mark_inactive(save=save)
self.inventory_sources.clear()
self.parents.clear()
self.children.clear()
self.hosts.clear()
i = self.inventory
if from_inventory_import:
super(Group, self).mark_inactive(save=save, skip_active_check=skip_active_check)
elif recompute:
with ignore_inventory_computed_fields():
mark_actual()
i.update_computed_fields()
else:
mark_actual()
def update_computed_fields(self): def update_computed_fields(self):
''' '''
Update model fields that are computed from database relationships. Update model fields that are computed from database relationships.
''' '''
active_hosts = self.all_hosts.filter(active=True) active_hosts = self.all_hosts
failed_hosts = active_hosts.filter(last_job_host_summary__job__active=True, failed_hosts = active_hosts.filter(last_job_host_summary__failed=True)
last_job_host_summary__failed=True) active_groups = self.all_children
active_groups = self.all_children.filter(active=True)
# FIXME: May not be accurate unless we always update groups depth-first. # FIXME: May not be accurate unless we always update groups depth-first.
failed_groups = active_groups.filter(has_active_failures=True) failed_groups = active_groups.filter(has_active_failures=True)
active_inventory_sources = self.inventory_sources.filter(active=True, active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
source__in=CLOUD_INVENTORY_SOURCES)
computed_fields = { computed_fields = {
'total_hosts': active_hosts.count(), 'total_hosts': active_hosts.count(),
'has_active_failures': bool(failed_hosts.count()), 'has_active_failures': bool(failed_hosts.count()),
@@ -1200,7 +1141,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
def _can_update(self): def _can_update(self):
if self.source == 'custom': if self.source == 'custom':
return bool(self.source_script and self.source_script.active) return bool(self.source_script)
else: else:
return bool(self.source in CLOUD_INVENTORY_SOURCES) return bool(self.source in CLOUD_INVENTORY_SOURCES)
@@ -1217,7 +1158,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
@property @property
def needs_update_on_launch(self): def needs_update_on_launch(self):
if self.active and self.source and self.update_on_launch: if self.source and self.update_on_launch:
if not self.last_job_run: if not self.last_job_run:
return True return True
if (self.last_job_run + datetime.timedelta(seconds=self.update_cache_timeout)) <= now(): if (self.last_job_run + datetime.timedelta(seconds=self.update_cache_timeout)) <= now():
@@ -1226,7 +1167,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
@property @property
def notifiers(self): def notifiers(self):
base_notifiers = Notifier.objects.filter(active=True) base_notifiers = Notifier.objects
error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization)) error_notifiers = list(base_notifiers.filter(organization_notifiers_for_errors=self.inventory.organization))
success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization)) success_notifiers = list(base_notifiers.filter(organization_notifiers_for_success=self.inventory.organization))
any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization)) any_notifiers = list(base_notifiers.filter(organization_notifiers_for_any=self.inventory.organization))
@@ -1235,7 +1176,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, ResourceMixin)
def clean_source(self): def clean_source(self):
source = self.source source = self.source
if source and self.group: if source and self.group:
qs = self.group.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES, active=True, group__active=True) qs = self.group.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
existing_sources = qs.exclude(pk=self.pk) existing_sources = qs.exclude(pk=self.pk)
if existing_sources.count(): if existing_sources.count():
s = u', '.join([x.group.name for x in existing_sources]) s = u', '.join([x.group.name for x in existing_sources])
@@ -1279,7 +1220,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions):
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', []) update_fields = kwargs.get('update_fields', [])
inventory_source = self.inventory_source inventory_source = self.inventory_source
if self.active and inventory_source.inventory and self.name == inventory_source.name: if inventory_source.inventory and self.name == inventory_source.name:
if inventory_source.group: if inventory_source.group:
self.name = '%s (%s)' % (inventory_source.group.name, inventory_source.inventory.name) self.name = '%s (%s)' % (inventory_source.group.name, inventory_source.inventory.name)
else: else:
@@ -1315,7 +1256,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions):
return False return False
if (self.source not in ('custom', 'ec2') and if (self.source not in ('custom', 'ec2') and
not (self.credential and self.credential.active)): not (self.credential)):
return False return False
return True return True

View File

@@ -149,7 +149,7 @@ class JobOptions(BaseModel):
@property @property
def passwords_needed_to_start(self): def passwords_needed_to_start(self):
'''Return list of password field names needed to start the job.''' '''Return list of password field names needed to start the job.'''
if self.credential and self.credential.active: if self.credential:
return self.credential.passwords_needed return self.credential.passwords_needed
else: else:
return [] return []
@@ -357,7 +357,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
# Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type # Return all notifiers defined on the Job Template, on the Project, and on the Organization for each trigger type
# TODO: Currently there is no org fk on project so this will need to be added once that is # TODO: Currently there is no org fk on project so this will need to be added once that is
# available after the rbac pr # available after the rbac pr
base_notifiers = Notifier.objects.filter(active=True) base_notifiers = Notifier.objects
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project])) error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors__in=[self, self.project]))
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project])) success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success__in=[self, self.project]))
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project])) any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any__in=[self, self.project]))
@@ -493,7 +493,7 @@ class Job(UnifiedJob, JobOptions):
from awx.main.models import InventoryUpdate, ProjectUpdate from awx.main.models import InventoryUpdate, ProjectUpdate
if self.inventory is None or self.project is None: if self.inventory is None or self.project is None:
return [] return []
inventory_sources = self.inventory.inventory_sources.filter(active=True, update_on_launch=True) inventory_sources = self.inventory.inventory_sources.filter( update_on_launch=True)
project_found = False project_found = False
inventory_sources_found = [] inventory_sources_found = []
dependencies = [] dependencies = []
@@ -592,7 +592,7 @@ class Job(UnifiedJob, JobOptions):
if not super(Job, self).can_start: if not super(Job, self).can_start:
return False return False
if not (self.credential and self.credential.active): if not (self.credential):
return False return False
return True return True

View File

@@ -79,11 +79,6 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin):
def __unicode__(self): def __unicode__(self):
return self.name return self.name
def mark_inactive(self, save=True):
for script in self.custom_inventory_scripts.all():
script.organization = None
script.save()
super(Organization, self).mark_inactive(save=save)
class Team(CommonModelNameNotUnique, ResourceMixin): class Team(CommonModelNameNotUnique, ResourceMixin):
@@ -135,14 +130,6 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
def get_absolute_url(self): def get_absolute_url(self):
return reverse('api:team_detail', args=(self.pk,)) return reverse('api:team_detail', args=(self.pk,))
def mark_inactive(self, save=True):
'''
When marking a team inactive we'll wipe out its credentials also
'''
for cred in self.credentials.all():
cred.mark_inactive()
super(Team, self).mark_inactive(save=save)
class Permission(CommonModelNameNotUnique): class Permission(CommonModelNameNotUnique):
''' '''
@@ -351,22 +338,6 @@ class AuthToken(BaseModel):
return self.key return self.key
# Add mark_inactive method to User model.
def user_mark_inactive(user, save=True):
'''Use instead of delete to rename and mark users inactive.'''
if user.is_active:
# Set timestamp to datetime.isoformat() but without the time zone
# offset to stay withint the 30 character username limit.
dtnow = tz_now()
deleted_ts = dtnow.strftime('%Y-%m-%dT%H:%M:%S.%f')
user.username = '_d_%s' % deleted_ts
user.is_active = False
if save:
user.save()
User.add_to_class('mark_inactive', user_mark_inactive)
# Add get_absolute_url method to User model if not present. # Add get_absolute_url method to User model if not present.
if not hasattr(User, 'get_absolute_url'): if not hasattr(User, 'get_absolute_url'):
def user_get_absolute_url(user): def user_get_absolute_url(user):

View File

@@ -53,7 +53,7 @@ class ProjectOptions(models.Model):
paths = [x.decode('utf-8') for x in os.listdir(settings.PROJECTS_ROOT) paths = [x.decode('utf-8') for x in os.listdir(settings.PROJECTS_ROOT)
if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and if (os.path.isdir(os.path.join(settings.PROJECTS_ROOT, x)) and
not x.startswith('.') and not x.startswith('_'))] not x.startswith('.') and not x.startswith('_'))]
qs = Project.objects.filter(active=True) qs = Project.objects
used_paths = qs.values_list('local_path', flat=True) used_paths = qs.values_list('local_path', flat=True)
return [x for x in paths if x not in used_paths] return [x for x in paths if x not in used_paths]
else: else:
@@ -336,7 +336,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
@property @property
def needs_update_on_launch(self): def needs_update_on_launch(self):
if self.active and self.scm_type and self.scm_update_on_launch: if self.scm_type and self.scm_update_on_launch:
if not self.last_job_run: if not self.last_job_run:
return True return True
if (self.last_job_run + datetime.timedelta(seconds=self.scm_update_cache_timeout)) <= now(): if (self.last_job_run + datetime.timedelta(seconds=self.scm_update_cache_timeout)) <= now():
@@ -345,7 +345,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
@property @property
def notifiers(self): def notifiers(self):
base_notifiers = Notifier.objects.filter(active=True) base_notifiers = Notifier.objects
error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self)) error_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_errors=self))
success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self)) success_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_success=self))
any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self)) any_notifiers = list(base_notifiers.filter(unifiedjobtemplate_notifiers_for_any=self))

View File

@@ -27,7 +27,7 @@ __all__ = ['Schedule']
class ScheduleFilterMethods(object): class ScheduleFilterMethods(object):
def enabled(self, enabled=True): def enabled(self, enabled=True):
return self.filter(enabled=enabled, active=enabled) return self.filter(enabled=enabled)
def before(self, dt): def before(self, dt):
return self.filter(next_run__lt=dt) return self.filter(next_run__lt=dt)

View File

@@ -210,17 +210,6 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
self.next_job_run = related_schedules[0].next_run self.next_job_run = related_schedules[0].next_run
self.save(update_fields=['next_schedule', 'next_job_run']) self.save(update_fields=['next_schedule', 'next_job_run'])
def mark_inactive(self, save=True):
'''
When marking a unified job template inactive, also mark its schedules
inactive.
'''
for schedule in self.schedules.filter(active=True):
schedule.mark_inactive()
schedule.enabled = False
schedule.save()
super(UnifiedJobTemplate, self).mark_inactive(save=save)
def save(self, *args, **kwargs): def save(self, *args, **kwargs):
# If update_fields has been specified, add our field names to it, # If update_fields has been specified, add our field names to it,
# if it hasn't been specified, then we're just doing a normal save. # if it hasn't been specified, then we're just doing a normal save.

View File

@@ -8,7 +8,7 @@ import threading
import json import json
# Django # Django
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed from django.db.models.signals import post_save, pre_delete, post_delete, m2m_changed
from django.dispatch import receiver from django.dispatch import receiver
# Django-CRUM # Django-CRUM
@@ -27,9 +27,8 @@ __all__ = []
logger = logging.getLogger('awx.main.signals') logger = logging.getLogger('awx.main.signals')
# Update has_active_failures for inventory/groups when a Host/Group is deleted # Update has_active_failures for inventory/groups when a Host/Group is deleted,
# or marked inactive, when a Host-Group or Group-Group relationship is updated, # when a Host-Group or Group-Group relationship is updated, or when a Job is deleted
# or when a Job is deleted or marked inactive.
def emit_job_event_detail(sender, **kwargs): def emit_job_event_detail(sender, **kwargs):
instance = kwargs['instance'] instance = kwargs['instance']
@@ -69,7 +68,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
else: else:
sender_name = unicode(sender._meta.verbose_name) sender_name = unicode(sender._meta.verbose_name)
if kwargs['signal'] == post_save: if kwargs['signal'] == post_save:
if sender == Job and instance.active: if sender == Job:
return return
sender_action = 'saved' sender_action = 'saved'
elif kwargs['signal'] == post_delete: elif kwargs['signal'] == post_delete:
@@ -92,7 +91,6 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
return return
instance = kwargs['instance'] instance = kwargs['instance']
if ('created' in kwargs and kwargs['created']) or \ if ('created' in kwargs and kwargs['created']) or \
(hasattr(instance, '_saved_active_state') and instance._saved_active_state != instance.active) or \
kwargs['signal'] == post_delete: kwargs['signal'] == post_delete:
pass pass
else: else:
@@ -108,13 +106,6 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
if inventory is not None: if inventory is not None:
update_inventory_computed_fields.delay(inventory.id, True) update_inventory_computed_fields.delay(inventory.id, True)
def store_initial_active_state(sender, **kwargs):
instance = kwargs['instance']
if instance.id is not None:
instance._saved_active_state = sender.objects.get(id=instance.id).active
else:
instance._saved_active_state = True
def rebuild_role_ancestor_list(reverse, model, instance, pk_set, **kwargs): def rebuild_role_ancestor_list(reverse, model, instance, pk_set, **kwargs):
'When a role parent is added or removed, update our role hierarchy list' 'When a role parent is added or removed, update our role hierarchy list'
if reverse: if reverse:
@@ -161,20 +152,16 @@ def org_admin_edit_members(instance, action, model, reverse, pk_set, **kwargs):
if action == 'pre_remove': if action == 'pre_remove':
instance.content_object.admin_role.children.remove(user.admin_role) instance.content_object.admin_role.children.remove(user.admin_role)
pre_save.connect(store_initial_active_state, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host) post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host) post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
pre_save.connect(store_initial_active_state, sender=Group)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group) post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Group) post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.hosts.through) m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.parents.through) m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through) m2m_changed.connect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through) m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
pre_save.connect(store_initial_active_state, sender=InventorySource)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource) post_save.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource) post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
pre_save.connect(store_initial_active_state, sender=Job)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Job) post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Job) post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
post_save.connect(emit_job_event_detail, sender=JobEvent) post_save.connect(emit_job_event_detail, sender=JobEvent)
@@ -184,8 +171,8 @@ m2m_changed.connect(org_admin_edit_members, Role.members.through)
post_save.connect(sync_superuser_status_to_rbac, sender=User) post_save.connect(sync_superuser_status_to_rbac, sender=User)
post_save.connect(create_user_role, sender=User) post_save.connect(create_user_role, sender=User)
# Migrate hosts, groups to parent group(s) whenever a group is deleted or
# marked as inactive. # Migrate hosts, groups to parent group(s) whenever a group is deleted
@receiver(pre_delete, sender=Group) @receiver(pre_delete, sender=Group)
def save_related_pks_before_group_delete(sender, **kwargs): def save_related_pks_before_group_delete(sender, **kwargs):
@@ -208,80 +195,28 @@ def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):
with ignore_inventory_group_removal(): with ignore_inventory_group_removal():
with ignore_inventory_computed_fields(): with ignore_inventory_computed_fields():
if parents_pks: if parents_pks:
for parent_group in Group.objects.filter(pk__in=parents_pks, active=True): for parent_group in Group.objects.filter(pk__in=parents_pks):
for child_host in Host.objects.filter(pk__in=hosts_pks, active=True): for child_host in Host.objects.filter(pk__in=hosts_pks):
logger.debug('adding host %s to parent %s after group deletion', logger.debug('adding host %s to parent %s after group deletion',
child_host, parent_group) child_host, parent_group)
parent_group.hosts.add(child_host) parent_group.hosts.add(child_host)
for child_group in Group.objects.filter(pk__in=children_pks, active=True): for child_group in Group.objects.filter(pk__in=children_pks):
logger.debug('adding group %s to parent %s after group deletion', logger.debug('adding group %s to parent %s after group deletion',
child_group, parent_group) child_group, parent_group)
parent_group.children.add(child_group) parent_group.children.add(child_group)
inventory_pk = getattr(instance, '_saved_inventory_pk', None) inventory_pk = getattr(instance, '_saved_inventory_pk', None)
if inventory_pk: if inventory_pk:
try: try:
inventory = Inventory.objects.get(pk=inventory_pk, active=True) inventory = Inventory.objects.get(pk=inventory_pk)
inventory.update_computed_fields() inventory.update_computed_fields()
except Inventory.DoesNotExist: except Inventory.DoesNotExist:
pass pass
@receiver(pre_save, sender=Group)
def save_related_pks_before_group_marked_inactive(sender, **kwargs):
if getattr(_inventory_updates, 'is_removing', False):
return
instance = kwargs['instance']
if not instance.pk or instance.active:
return
instance._saved_inventory_pk = instance.inventory.pk
instance._saved_parents_pks = set(instance.parents.values_list('pk', flat=True))
instance._saved_hosts_pks = set(instance.hosts.values_list('pk', flat=True))
instance._saved_children_pks = set(instance.children.values_list('pk', flat=True))
instance._saved_inventory_source_pk = instance.inventory_source.pk
@receiver(post_save, sender=Group) # Update host pointers to last_job and last_job_host_summary when a job is deleted
def migrate_children_from_inactive_group_to_parent_groups(sender, **kwargs):
if getattr(_inventory_updates, 'is_removing', False):
return
instance = kwargs['instance']
if instance.active:
return
parents_pks = getattr(instance, '_saved_parents_pks', [])
hosts_pks = getattr(instance, '_saved_hosts_pks', [])
children_pks = getattr(instance, '_saved_children_pks', [])
with ignore_inventory_group_removal():
with ignore_inventory_computed_fields():
if parents_pks:
for parent_group in Group.objects.filter(pk__in=parents_pks, active=True):
for child_host in Host.objects.filter(pk__in=hosts_pks, active=True):
logger.debug('moving host %s to parent %s after marking group %s inactive',
child_host, parent_group, instance)
parent_group.hosts.add(child_host)
for child_group in Group.objects.filter(pk__in=children_pks, active=True):
logger.debug('moving group %s to parent %s after marking group %s inactive',
child_group, parent_group, instance)
parent_group.children.add(child_group)
parent_group.children.remove(instance)
inventory_source_pk = getattr(instance, '_saved_inventory_source_pk', None)
if inventory_source_pk:
try:
inventory_source = InventorySource.objects.get(pk=inventory_source_pk, active=True)
inventory_source.mark_inactive()
except InventorySource.DoesNotExist:
pass
inventory_pk = getattr(instance, '_saved_inventory_pk', None)
if not getattr(_inventory_updates, 'is_updating', False):
if inventory_pk:
try:
inventory = Inventory.objects.get(pk=inventory_pk, active=True)
inventory.update_computed_fields()
except Inventory.DoesNotExist:
pass
# Update host pointers to last_job and last_job_host_summary when a job is
# marked inactive or deleted.
def _update_host_last_jhs(host): def _update_host_last_jhs(host):
jhs_qs = JobHostSummary.objects.filter(job__active=True, host__pk=host.pk) jhs_qs = JobHostSummary.objects.filter(host__pk=host.pk)
try: try:
jhs = jhs_qs.order_by('-job__pk')[0] jhs = jhs_qs.order_by('-job__pk')[0]
except IndexError: except IndexError:
@@ -297,19 +232,10 @@ def _update_host_last_jhs(host):
if update_fields: if update_fields:
host.save(update_fields=update_fields) host.save(update_fields=update_fields)
@receiver(post_save, sender=Job)
def update_host_last_job_when_job_marked_inactive(sender, **kwargs):
instance = kwargs['instance']
if instance.active:
return
hosts_qs = Host.objects.filter(active=True, last_job__pk=instance.pk)
for host in hosts_qs:
_update_host_last_jhs(host)
@receiver(pre_delete, sender=Job) @receiver(pre_delete, sender=Job)
def save_host_pks_before_job_delete(sender, **kwargs): def save_host_pks_before_job_delete(sender, **kwargs):
instance = kwargs['instance'] instance = kwargs['instance']
hosts_qs = Host.objects.filter(active=True, last_job__pk=instance.pk) hosts_qs = Host.objects.filter( last_job__pk=instance.pk)
instance._saved_hosts_pks = set(hosts_qs.values_list('pk', flat=True)) instance._saved_hosts_pks = set(hosts_qs.values_list('pk', flat=True))
@receiver(post_delete, sender=Job) @receiver(post_delete, sender=Job)
@@ -388,11 +314,6 @@ def activity_stream_update(sender, instance, **kwargs):
except sender.DoesNotExist: except sender.DoesNotExist:
return return
# Handle the AWX mark-inactive for delete event
if hasattr(instance, 'active') and not instance.active:
activity_stream_delete(sender, instance, **kwargs)
return
new = instance new = instance
changes = model_instance_diff(old, new, model_serializer_mapping) changes = model_instance_diff(old, new, model_serializer_mapping)
if changes is None: if changes is None:

View File

@@ -13,7 +13,7 @@ class Migration(DataMigration):
# and orm['appname.ModelName'] for models in other applications. # and orm['appname.ModelName'] for models in other applications.
# Refresh has_active_failures for all hosts. # Refresh has_active_failures for all hosts.
for host in orm.Host.objects.filter(active=True): for host in orm.Host.objects:
has_active_failures = bool(host.last_job_host_summary and has_active_failures = bool(host.last_job_host_summary and
host.last_job_host_summary.job.active and host.last_job_host_summary.job.active and
host.last_job_host_summary.failed) host.last_job_host_summary.failed)
@@ -30,9 +30,9 @@ class Migration(DataMigration):
for subgroup in group.children.exclude(pk__in=except_group_pks): for subgroup in group.children.exclude(pk__in=except_group_pks):
qs = qs | get_all_hosts_for_group(subgroup, except_group_pks) qs = qs | get_all_hosts_for_group(subgroup, except_group_pks)
return qs return qs
for group in orm.Group.objects.filter(active=True): for group in orm.Group.objects:
all_hosts = get_all_hosts_for_group(group) all_hosts = get_all_hosts_for_group(group)
failed_hosts = all_hosts.filter(active=True, failed_hosts = all_hosts.filter(
last_job_host_summary__job__active=True, last_job_host_summary__job__active=True,
last_job_host_summary__failed=True) last_job_host_summary__failed=True)
hosts_with_active_failures = failed_hosts.count() hosts_with_active_failures = failed_hosts.count()
@@ -49,8 +49,8 @@ class Migration(DataMigration):
# Now update has_active_failures and hosts_with_active_failures for all # Now update has_active_failures and hosts_with_active_failures for all
# inventories. # inventories.
for inventory in orm.Inventory.objects.filter(active=True): for inventory in orm.Inventory.objects:
failed_hosts = inventory.hosts.filter(active=True, has_active_failures=True) failed_hosts = inventory.hosts.filter( has_active_failures=True)
hosts_with_active_failures = failed_hosts.count() hosts_with_active_failures = failed_hosts.count()
has_active_failures = bool(hosts_with_active_failures) has_active_failures = bool(hosts_with_active_failures)
changed = False changed = False

View File

@@ -8,7 +8,7 @@ from django.db import models
class Migration(DataMigration): class Migration(DataMigration):
def forwards(self, orm): def forwards(self, orm):
for iu in orm.InventoryUpdate.objects.filter(active=True): for iu in orm.InventoryUpdate.objects:
if iu.inventory_source is None or iu.inventory_source.group is None or iu.inventory_source.inventory is None: if iu.inventory_source is None or iu.inventory_source.group is None or iu.inventory_source.inventory is None:
continue continue
iu.name = "%s (%s)" % (iu.inventory_source.group.name, iu.inventory_source.inventory.name) iu.name = "%s (%s)" % (iu.inventory_source.group.name, iu.inventory_source.inventory.name)

View File

@@ -12,7 +12,7 @@ from django.conf import settings
class Migration(DataMigration): class Migration(DataMigration):
def forwards(self, orm): def forwards(self, orm):
for j in orm.UnifiedJob.objects.filter(active=True): for j in orm.UnifiedJob.objects:
cur = connection.cursor() cur = connection.cursor()
stdout_filename = os.path.join(settings.JOBOUTPUT_ROOT, "%d-%s.out" % (j.pk, str(uuid.uuid1()))) stdout_filename = os.path.join(settings.JOBOUTPUT_ROOT, "%d-%s.out" % (j.pk, str(uuid.uuid1())))
fd = open(stdout_filename, 'w') fd = open(stdout_filename, 'w')

View File

@@ -51,7 +51,7 @@ from awx.main.queue import FifoQueue
from awx.main.conf import tower_settings from awx.main.conf import tower_settings
from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL from awx.main.task_engine import TaskSerializer, TASK_TIMEOUT_INTERVAL
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url, from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
ignore_inventory_computed_fields, emit_websocket_notification, emit_websocket_notification,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot) check_proot_installed, build_proot_temp_dir, wrap_args_with_proot)
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
@@ -110,17 +110,6 @@ def run_administrative_checks(self):
tower_admin_emails, tower_admin_emails,
fail_silently=True) fail_silently=True)
@task()
def bulk_inventory_element_delete(inventory, hosts=[], groups=[]):
from awx.main.signals import disable_activity_stream
with ignore_inventory_computed_fields():
with disable_activity_stream():
for group in groups:
Group.objects.get(id=group).mark_inactive(skip_active_check=True)
for host in hosts:
Host.objects.get(id=host).mark_inactive(skip_active_check=True)
update_inventory_computed_fields(inventory)
@task(bind=True) @task(bind=True)
def tower_periodic_scheduler(self): def tower_periodic_scheduler(self):
def get_last_run(): def get_last_run():
@@ -894,12 +883,12 @@ class RunJob(BaseTask):
'tower_job_id': job.pk, 'tower_job_id': job.pk,
'tower_job_launch_type': job.launch_type, 'tower_job_launch_type': job.launch_type,
} }
if job.job_template and job.job_template.active: if job.job_template:
extra_vars.update({ extra_vars.update({
'tower_job_template_id': job.job_template.pk, 'tower_job_template_id': job.job_template.pk,
'tower_job_template_name': job.job_template.name, 'tower_job_template_name': job.job_template.name,
}) })
if job.created_by and job.created_by.is_active: if job.created_by:
extra_vars.update({ extra_vars.update({
'tower_user_id': job.created_by.pk, 'tower_user_id': job.created_by.pk,
'tower_user_name': job.created_by.username, 'tower_user_name': job.created_by.username,
@@ -1392,7 +1381,7 @@ class RunInventoryUpdate(BaseTask):
runpath = tempfile.mkdtemp(prefix='ansible_tower_launch_') runpath = tempfile.mkdtemp(prefix='ansible_tower_launch_')
handle, path = tempfile.mkstemp(dir=runpath) handle, path = tempfile.mkstemp(dir=runpath)
f = os.fdopen(handle, 'w') f = os.fdopen(handle, 'w')
if inventory_update.source_script is None or not inventory_update.source_script.active: if inventory_update.source_script is None:
raise RuntimeError('Inventory Script does not exist') raise RuntimeError('Inventory Script does not exist')
f.write(inventory_update.source_script.script.encode('utf-8')) f.write(inventory_update.source_script.script.encode('utf-8'))
f.close() f.close()

View File

@@ -4,7 +4,6 @@ from awx.main.models import (
Role, Role,
RolePermission, RolePermission,
Organization, Organization,
Group,
) )

View File

@@ -229,13 +229,18 @@ class BaseJobTestMixin(BaseTestMixin):
self.team_ops_west.users.add(self.user_iris) self.team_ops_west.users.add(self.user_iris)
# The south team is no longer active having been folded into the east team # The south team is no longer active having been folded into the east team
self.team_ops_south = self.org_ops.teams.create( # FIXME: This code can be removed (probably)
name='southerners', # - this case has been removed as we've gotten rid of the active flag, keeping
created_by=self.user_sue, # code around in case this has ramifications on some test failures.. if
active=False, # you find this message and all tests are passing, then feel free to remove this
) # - anoek 2016-03-10
self.team_ops_south.projects.add(self.proj_prod) #self.team_ops_south = self.org_ops.teams.create(
self.team_ops_south.users.add(self.user_greg) # name='southerners',
# created_by=self.user_sue,
# active=False,
#)
#self.team_ops_south.projects.add(self.proj_prod)
#self.team_ops_south.users.add(self.user_greg)
# The north team is going to be deleted # The north team is going to be deleted
self.team_ops_north = self.org_ops.teams.create( self.team_ops_north = self.org_ops.teams.create(
@@ -337,11 +342,18 @@ class BaseJobTestMixin(BaseTestMixin):
password='Heading270', password='Heading270',
created_by = self.user_sue, created_by = self.user_sue,
) )
self.cred_ops_south = self.team_ops_south.credentials.create(
username='south',
password='Heading180', # FIXME: This code can be removed (probably)
created_by = self.user_sue, # - this case has been removed as we've gotten rid of the active flag, keeping
) # code around in case this has ramifications on some test failures.. if
# you find this message and all tests are passing, then feel free to remove this
# - anoek 2016-03-10
#self.cred_ops_south = self.team_ops_south.credentials.create(
# username='south',
# password='Heading180',
# created_by = self.user_sue,
#)
self.cred_ops_north = self.team_ops_north.credentials.create( self.cred_ops_north = self.team_ops_north.credentials.create(
username='north', username='north',

View File

@@ -637,8 +637,8 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
# Verify that the credential and inventory are null when they have # Verify that the credential and inventory are null when they have
# been deleted, can delete an ad hoc command without inventory or # been deleted, can delete an ad hoc command without inventory or
# credential. # credential.
self.credential.mark_inactive() self.credential.delete()
self.inventory.mark_inactive() self.inventory.delete()
with self.current_user('admin'): with self.current_user('admin'):
response = self.get(url, expect=200) response = self.get(url, expect=200)
self.assertEqual(response['credential'], None) self.assertEqual(response['credential'], None)
@@ -758,7 +758,7 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
tower_settings.AD_HOC_COMMANDS = ad_hoc_commands tower_settings.AD_HOC_COMMANDS = ad_hoc_commands
# Try to relaunch after the inventory has been marked inactive. # Try to relaunch after the inventory has been marked inactive.
self.inventory.mark_inactive() self.inventory.delete()
with self.current_user('admin'): with self.current_user('admin'):
response = self.get(url, expect=200) response = self.get(url, expect=200)
self.assertEqual(response['passwords_needed_to_start'], []) self.assertEqual(response['passwords_needed_to_start'], [])

View File

@@ -1,34 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
# AWX
from awx.main.tests.base import BaseTest
from command_base import BaseCommandMixin
__all__ = ['AgeDeletedCommandFunctionalTest']
class AgeDeletedCommandFunctionalTest(BaseCommandMixin, BaseTest):
def setUp(self):
super(AgeDeletedCommandFunctionalTest, self).setUp()
self.create_test_license_file()
self.setup_instances()
self.setup_users()
self.organization = self.make_organization(self.super_django_user)
self.credential = self.make_credential()
self.credential2 = self.make_credential()
self.credential.mark_inactive(True)
self.credential2.mark_inactive(True)
self.credential_active = self.make_credential()
self.super_django_user.mark_inactive(True)
def test_default(self):
result, stdout, stderr = self.run_command('age_deleted')
self.assertEqual(stdout, 'Aged %d items\n' % 3)
def test_type(self):
result, stdout, stderr = self.run_command('age_deleted', type='Credential')
self.assertEqual(stdout, 'Aged %d items\n' % 2)
def test_id_type(self):
result, stdout, stderr = self.run_command('age_deleted', type='Credential', id=self.credential.pk)
self.assertEqual(stdout, 'Aged %d items\n' % 1)

View File

@@ -15,7 +15,6 @@ import unittest2 as unittest
# Django # Django
from django.conf import settings from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command from django.core.management import call_command
from django.core.management.base import CommandError from django.core.management.base import CommandError
from django.utils.timezone import now from django.utils.timezone import now
@@ -232,126 +231,6 @@ class DumpDataTest(BaseCommandMixin, BaseTest):
self.assertEqual(result, None) self.assertEqual(result, None)
json.loads(stdout) json.loads(stdout)
class CleanupDeletedTest(BaseCommandMixin, BaseTest):
'''
Test cases for cleanup_deleted management command.
'''
def setUp(self):
self.start_redis()
super(CleanupDeletedTest, self).setUp()
self.create_test_inventories()
def tearDown(self):
super(CleanupDeletedTest, self).tearDown()
self.stop_redis()
def get_model_counts(self):
def get_models(m):
if not m._meta.abstract:
yield m
for sub in m.__subclasses__():
for subm in get_models(sub):
yield subm
counts = {}
for model in get_models(PrimordialModel):
active = model.objects.filter(active=True).count()
inactive = model.objects.filter(active=False).count()
counts[model] = (active, inactive)
return counts
def test_cleanup_our_models(self):
# Test with nothing to be deleted.
counts_before = self.get_model_counts()
self.assertFalse(sum(x[1] for x in counts_before.values()))
result, stdout, stderr = self.run_command('cleanup_deleted')
self.assertEqual(result, None)
counts_after = self.get_model_counts()
self.assertEqual(counts_before, counts_after)
# "Delete" some hosts.
for host in Host.objects.all():
host.mark_inactive()
# With no parameters, "days" defaults to 90, which won't cleanup any of
# the hosts we just removed.
counts_before = self.get_model_counts()
self.assertTrue(sum(x[1] for x in counts_before.values()))
result, stdout, stderr = self.run_command('cleanup_deleted')
self.assertEqual(result, None)
counts_after = self.get_model_counts()
self.assertEqual(counts_before, counts_after)
# Even with days=1, the hosts will remain.
counts_before = self.get_model_counts()
self.assertTrue(sum(x[1] for x in counts_before.values()))
result, stdout, stderr = self.run_command('cleanup_deleted', days=1)
self.assertEqual(result, None)
counts_after = self.get_model_counts()
self.assertEqual(counts_before, counts_after)
# With days=0, the hosts will be deleted.
counts_before = self.get_model_counts()
self.assertTrue(sum(x[1] for x in counts_before.values()))
result, stdout, stderr = self.run_command('cleanup_deleted', days=0)
self.assertEqual(result, None)
counts_after = self.get_model_counts()
self.assertNotEqual(counts_before, counts_after)
self.assertFalse(sum(x[1] for x in counts_after.values()))
return # Don't test how long it takes (for now).
# Create lots of hosts already marked as deleted.
t = time.time()
dtnow = now()
for x in xrange(1000):
hostname = "_deleted_%s_host-%d" % (dtnow.isoformat(), x)
host = self.inventories[0].hosts.create(name=hostname, active=False)
create_elapsed = time.time() - t
# Time how long it takes to cleanup deleted items, should be no more
# then the time taken to create them.
counts_before = self.get_model_counts()
self.assertTrue(sum(x[1] for x in counts_before.values()))
t = time.time()
result, stdout, stderr = self.run_command('cleanup_deleted', days=0)
cleanup_elapsed = time.time() - t
self.assertEqual(result, None)
counts_after = self.get_model_counts()
self.assertNotEqual(counts_before, counts_after)
self.assertFalse(sum(x[1] for x in counts_after.values()))
self.assertTrue(cleanup_elapsed < create_elapsed,
'create took %0.3fs, cleanup took %0.3fs, expected < %0.3fs' % (create_elapsed, cleanup_elapsed, create_elapsed))
def get_user_counts(self):
active = User.objects.filter(is_active=True).count()
inactive = User.objects.filter(is_active=False).count()
return active, inactive
def test_cleanup_user_model(self):
# Test with nothing to be deleted.
counts_before = self.get_user_counts()
self.assertFalse(counts_before[1])
result, stdout, stderr = self.run_command('cleanup_deleted')
self.assertEqual(result, None)
counts_after = self.get_user_counts()
self.assertEqual(counts_before, counts_after)
# "Delete some users".
for user in User.objects.all():
user.mark_inactive()
self.assertTrue(len(user.username) <= 30,
'len(%r) == %d' % (user.username, len(user.username)))
# With days=1, no users will be deleted.
counts_before = self.get_user_counts()
self.assertTrue(counts_before[1])
result, stdout, stderr = self.run_command('cleanup_deleted', days=1)
self.assertEqual(result, None)
counts_after = self.get_user_counts()
self.assertEqual(counts_before, counts_after)
# With days=0, inactive users will be deleted.
counts_before = self.get_user_counts()
self.assertTrue(counts_before[1])
result, stdout, stderr = self.run_command('cleanup_deleted', days=0)
self.assertEqual(result, None)
counts_after = self.get_user_counts()
self.assertNotEqual(counts_before, counts_after)
self.assertFalse(counts_after[1])
@override_settings(CELERY_ALWAYS_EAGER=True, @override_settings(CELERY_ALWAYS_EAGER=True,
CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
ANSIBLE_TRANSPORT='local') ANSIBLE_TRANSPORT='local')
@@ -641,12 +520,12 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
self.assertEqual(inventory_source.inventory_updates.count(), 1) self.assertEqual(inventory_source.inventory_updates.count(), 1)
inventory_update = inventory_source.inventory_updates.all()[0] inventory_update = inventory_source.inventory_updates.all()[0]
self.assertEqual(inventory_update.status, 'successful') self.assertEqual(inventory_update.status, 'successful')
for host in inventory.hosts.filter(active=True): for host in inventory.hosts:
if host.pk in (except_host_pks or []): if host.pk in (except_host_pks or []):
continue continue
source_pks = host.inventory_sources.values_list('pk', flat=True) source_pks = host.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks) self.assertTrue(inventory_source.pk in source_pks)
for group in inventory.groups.filter(active=True): for group in inventory.groups:
if group.pk in (except_group_pks or []): if group.pk in (except_group_pks or []):
continue continue
source_pks = group.inventory_sources.values_list('pk', flat=True) source_pks = group.inventory_sources.values_list('pk', flat=True)
@@ -814,7 +693,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
'lbservers', 'others']) 'lbservers', 'others'])
if overwrite: if overwrite:
expected_group_names.remove('lbservers') expected_group_names.remove('lbservers')
group_names = set(new_inv.groups.filter(active=True).values_list('name', flat=True)) group_names = set(new_inv.groups.values_list('name', flat=True))
self.assertEqual(expected_group_names, group_names) self.assertEqual(expected_group_names, group_names)
expected_host_names = set(['web1.example.com', 'web2.example.com', expected_host_names = set(['web1.example.com', 'web2.example.com',
'web3.example.com', 'db1.example.com', 'web3.example.com', 'db1.example.com',
@@ -824,13 +703,13 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
'fe80::1610:9fff:fedd:b654', '::1']) 'fe80::1610:9fff:fedd:b654', '::1'])
if overwrite: if overwrite:
expected_host_names.remove('lb.example.com') expected_host_names.remove('lb.example.com')
host_names = set(new_inv.hosts.filter(active=True).values_list('name', flat=True)) host_names = set(new_inv.hosts.values_list('name', flat=True))
self.assertEqual(expected_host_names, host_names) self.assertEqual(expected_host_names, host_names)
expected_inv_vars = {'vara': 'A', 'varc': 'C'} expected_inv_vars = {'vara': 'A', 'varc': 'C'}
if overwrite_vars: if overwrite_vars:
expected_inv_vars.pop('varc') expected_inv_vars.pop('varc')
self.assertEqual(new_inv.variables_dict, expected_inv_vars) self.assertEqual(new_inv.variables_dict, expected_inv_vars)
for host in new_inv.hosts.filter(active=True): for host in new_inv.hosts:
if host.name == 'web1.example.com': if host.name == 'web1.example.com':
self.assertEqual(host.variables_dict, self.assertEqual(host.variables_dict,
{'ansible_ssh_host': 'w1.example.net'}) {'ansible_ssh_host': 'w1.example.net'})
@@ -842,35 +721,35 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
self.assertEqual(host.variables_dict, {'lbvar': 'ni!'}) self.assertEqual(host.variables_dict, {'lbvar': 'ni!'})
else: else:
self.assertEqual(host.variables_dict, {}) self.assertEqual(host.variables_dict, {})
for group in new_inv.groups.filter(active=True): for group in new_inv.groups:
if group.name == 'servers': if group.name == 'servers':
expected_vars = {'varb': 'B', 'vard': 'D'} expected_vars = {'varb': 'B', 'vard': 'D'}
if overwrite_vars: if overwrite_vars:
expected_vars.pop('vard') expected_vars.pop('vard')
self.assertEqual(group.variables_dict, expected_vars) self.assertEqual(group.variables_dict, expected_vars)
children = set(group.children.filter(active=True).values_list('name', flat=True)) children = set(group.children.values_list('name', flat=True))
expected_children = set(['dbservers', 'webservers', 'lbservers']) expected_children = set(['dbservers', 'webservers', 'lbservers'])
if overwrite: if overwrite:
expected_children.remove('lbservers') expected_children.remove('lbservers')
self.assertEqual(children, expected_children) self.assertEqual(children, expected_children)
self.assertEqual(group.hosts.filter(active=True).count(), 0) self.assertEqual(group.hosts.count(), 0)
elif group.name == 'dbservers': elif group.name == 'dbservers':
self.assertEqual(group.variables_dict, {'dbvar': 'ugh'}) self.assertEqual(group.variables_dict, {'dbvar': 'ugh'})
self.assertEqual(group.children.filter(active=True).count(), 0) self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.filter(active=True).values_list('name', flat=True)) hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['db1.example.com','db2.example.com']) host_names = set(['db1.example.com','db2.example.com'])
self.assertEqual(hosts, host_names) self.assertEqual(hosts, host_names)
elif group.name == 'webservers': elif group.name == 'webservers':
self.assertEqual(group.variables_dict, {'webvar': 'blah'}) self.assertEqual(group.variables_dict, {'webvar': 'blah'})
self.assertEqual(group.children.filter(active=True).count(), 0) self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.filter(active=True).values_list('name', flat=True)) hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['web1.example.com','web2.example.com', host_names = set(['web1.example.com','web2.example.com',
'web3.example.com']) 'web3.example.com'])
self.assertEqual(hosts, host_names) self.assertEqual(hosts, host_names)
elif group.name == 'lbservers': elif group.name == 'lbservers':
self.assertEqual(group.variables_dict, {}) self.assertEqual(group.variables_dict, {})
self.assertEqual(group.children.filter(active=True).count(), 0) self.assertEqual(group.children.count(), 0)
hosts = set(group.hosts.filter(active=True).values_list('name', flat=True)) hosts = set(group.hosts.values_list('name', flat=True))
host_names = set(['lb.example.com']) host_names = set(['lb.example.com'])
self.assertEqual(hosts, host_names) self.assertEqual(hosts, host_names)
if overwrite: if overwrite:
@@ -920,7 +799,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
# Check hosts in dotcom group. # Check hosts in dotcom group.
group = new_inv.groups.get(name='dotcom') group = new_inv.groups.get(name='dotcom')
self.assertEqual(group.hosts.count(), 65) self.assertEqual(group.hosts.count(), 65)
for host in group.hosts.filter(active=True, name__startswith='web'): for host in group.hosts.filter( name__startswith='web'):
self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example') self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example')
# Check hosts in dotnet group. # Check hosts in dotnet group.
group = new_inv.groups.get(name='dotnet') group = new_inv.groups.get(name='dotnet')
@@ -928,7 +807,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
# Check hosts in dotorg group. # Check hosts in dotorg group.
group = new_inv.groups.get(name='dotorg') group = new_inv.groups.get(name='dotorg')
self.assertEqual(group.hosts.count(), 61) self.assertEqual(group.hosts.count(), 61)
for host in group.hosts.filter(active=True): for host in group.hosts:
if host.name.startswith('mx.'): if host.name.startswith('mx.'):
continue continue
self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example') self.assertEqual(host.variables_dict.get('ansible_ssh_user', ''), 'example')
@@ -936,7 +815,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
# Check hosts in dotus group. # Check hosts in dotus group.
group = new_inv.groups.get(name='dotus') group = new_inv.groups.get(name='dotus')
self.assertEqual(group.hosts.count(), 10) self.assertEqual(group.hosts.count(), 10)
for host in group.hosts.filter(active=True): for host in group.hosts:
if int(host.name[2:4]) % 2 == 0: if int(host.name[2:4]) % 2 == 0:
self.assertEqual(host.variables_dict.get('even_odd', ''), 'even') self.assertEqual(host.variables_dict.get('even_odd', ''), 'even')
else: else:
@@ -1090,7 +969,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
else: else:
return 0 return 0
def _check_largeinv_import(self, new_inv, nhosts, nhosts_inactive=0): def _check_largeinv_import(self, new_inv, nhosts):
self._start_time = time.time() self._start_time = time.time()
inv_file = os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'largeinv.py') inv_file = os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'largeinv.py')
ngroups = self._get_ngroups_for_nhosts(nhosts) ngroups = self._get_ngroups_for_nhosts(nhosts)
@@ -1103,9 +982,8 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
# Check that inventory is populated as expected within a reasonable # Check that inventory is populated as expected within a reasonable
# amount of time. Computed fields should also be updated. # amount of time. Computed fields should also be updated.
new_inv = Inventory.objects.get(pk=new_inv.pk) new_inv = Inventory.objects.get(pk=new_inv.pk)
self.assertEqual(new_inv.hosts.filter(active=True).count(), nhosts) self.assertEqual(new_inv.hosts.count(), nhosts)
self.assertEqual(new_inv.groups.filter(active=True).count(), ngroups) self.assertEqual(new_inv.groups.count(), ngroups)
self.assertEqual(new_inv.hosts.filter(active=False).count(), nhosts_inactive)
self.assertEqual(new_inv.total_hosts, nhosts) self.assertEqual(new_inv.total_hosts, nhosts)
self.assertEqual(new_inv.total_groups, ngroups) self.assertEqual(new_inv.total_groups, ngroups)
self.assertElapsedLessThan(120) self.assertElapsedLessThan(120)
@@ -1119,10 +997,10 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
self.assertEqual(new_inv.groups.count(), 0) self.assertEqual(new_inv.groups.count(), 0)
nhosts = 2000 nhosts = 2000
# Test initial import into empty inventory. # Test initial import into empty inventory.
self._check_largeinv_import(new_inv, nhosts, 0) self._check_largeinv_import(new_inv, nhosts)
# Test re-importing and overwriting. # Test re-importing and overwriting.
self._check_largeinv_import(new_inv, nhosts, 0) self._check_largeinv_import(new_inv, nhosts)
# Test re-importing with only half as many hosts. # Test re-importing with only half as many hosts.
self._check_largeinv_import(new_inv, nhosts / 2, nhosts / 2) self._check_largeinv_import(new_inv, nhosts / 2)
# Test re-importing that clears all hosts. # Test re-importing that clears all hosts.
self._check_largeinv_import(new_inv, 0, nhosts) self._check_largeinv_import(new_inv, 0)

View File

@@ -69,7 +69,7 @@ class InventoryTest(BaseTest):
def test_get_inventory_list(self): def test_get_inventory_list(self):
url = reverse('api:inventory_list') url = reverse('api:inventory_list')
qs = Inventory.objects.filter(active=True).distinct() qs = Inventory.objects.distinct()
# Check list view with invalid authentication. # Check list view with invalid authentication.
self.check_invalid_auth(url) self.check_invalid_auth(url)
@@ -226,6 +226,8 @@ class InventoryTest(BaseTest):
self.inventory_a.groups.create(name='group-a') self.inventory_a.groups.create(name='group-a')
self.inventory_b.hosts.create(name='host-b') self.inventory_b.hosts.create(name='host-b')
self.inventory_b.groups.create(name='group-b') self.inventory_b.groups.create(name='group-b')
a_pk = self.inventory_a.pk
b_pk = self.inventory_b.pk
# Check put to detail view with invalid authentication. # Check put to detail view with invalid authentication.
self.check_invalid_auth(url_a, methods=('delete',)) self.check_invalid_auth(url_a, methods=('delete',))
@@ -248,24 +250,16 @@ class InventoryTest(BaseTest):
self.delete(url_a, expect=204) self.delete(url_a, expect=204)
self.delete(url_b, expect=403) self.delete(url_b, expect=403)
# Verify that the inventory is marked inactive, along with all its # Verify that the inventory was deleted
# hosts and groups. assert Inventory.objects.filter(pk=a_pk).count() == 0
self.inventory_a = Inventory.objects.get(pk=self.inventory_a.pk)
self.assertFalse(self.inventory_a.active)
self.assertFalse(self.inventory_a.hosts.filter(active=True).count())
self.assertFalse(self.inventory_a.groups.filter(active=True).count())
# a super user can delete inventory records # a super user can delete inventory records
with self.current_user(self.super_django_user): with self.current_user(self.super_django_user):
self.delete(url_a, expect=404) self.delete(url_a, expect=404)
self.delete(url_b, expect=204) self.delete(url_b, expect=204)
# Verify that the inventory is marked inactive, along with all its # Verify that the inventory was deleted
# hosts and groups. assert Inventory.objects.filter(pk=b_pk).count() == 0
self.inventory_b = Inventory.objects.get(pk=self.inventory_b.pk)
self.assertFalse(self.inventory_b.active)
self.assertFalse(self.inventory_b.hosts.filter(active=True).count())
self.assertFalse(self.inventory_b.groups.filter(active=True).count())
def test_inventory_access_deleted_permissions(self): def test_inventory_access_deleted_permissions(self):
temp_org = self.make_organizations(self.super_django_user, 1)[0] temp_org = self.make_organizations(self.super_django_user, 1)[0]
@@ -423,7 +417,7 @@ class InventoryTest(BaseTest):
del_children_url = reverse('api:group_children_list', args=(del_group.pk,)) del_children_url = reverse('api:group_children_list', args=(del_group.pk,))
nondel_url = reverse('api:group_detail', nondel_url = reverse('api:group_detail',
args=(Group.objects.get(name='nondel').pk,)) args=(Group.objects.get(name='nondel').pk,))
del_group.mark_inactive() del_group.delete()
nondel_detail = self.get(nondel_url, expect=200, auth=self.get_normal_credentials()) nondel_detail = self.get(nondel_url, expect=200, auth=self.get_normal_credentials())
self.post(del_children_url, data=nondel_detail, expect=403, auth=self.get_normal_credentials()) self.post(del_children_url, data=nondel_detail, expect=403, auth=self.get_normal_credentials())
@@ -747,13 +741,11 @@ class InventoryTest(BaseTest):
# removed group should be automatically marked inactive once it no longer has any parents. # removed group should be automatically marked inactive once it no longer has any parents.
removed_group = Group.objects.get(pk=result['id']) removed_group = Group.objects.get(pk=result['id'])
self.assertTrue(removed_group.parents.count()) self.assertTrue(removed_group.parents.count())
self.assertTrue(removed_group.active)
for parent in removed_group.parents.all(): for parent in removed_group.parents.all():
parent_children_url = reverse('api:group_children_list', args=(parent.pk,)) parent_children_url = reverse('api:group_children_list', args=(parent.pk,))
data = {'id': removed_group.pk, 'disassociate': 1} data = {'id': removed_group.pk, 'disassociate': 1}
self.post(parent_children_url, data, expect=204, auth=self.get_super_credentials()) self.post(parent_children_url, data, expect=204, auth=self.get_super_credentials())
removed_group = Group.objects.get(pk=result['id']) removed_group = Group.objects.get(pk=result['id'])
#self.assertFalse(removed_group.active) # FIXME: Disabled for now because automatically deleting group with no parents is also disabled.
# Removing a group from a hierarchy should migrate its children to the # Removing a group from a hierarchy should migrate its children to the
# parent. The group itself will be deleted (marked inactive), and all # parent. The group itself will be deleted (marked inactive), and all
@@ -766,7 +758,6 @@ class InventoryTest(BaseTest):
with self.current_user(self.super_django_user): with self.current_user(self.super_django_user):
self.post(url, data, expect=204) self.post(url, data, expect=204)
gx3 = Group.objects.get(pk=gx3.pk) gx3 = Group.objects.get(pk=gx3.pk)
#self.assertFalse(gx3.active) # FIXME: Disabled for now....
self.assertFalse(gx3 in gx2.children.all()) self.assertFalse(gx3 in gx2.children.all())
#self.assertTrue(gx4 in gx2.children.all()) #self.assertTrue(gx4 in gx2.children.all())
@@ -944,13 +935,10 @@ class InventoryTest(BaseTest):
# Mark group C inactive. Its child groups and hosts should now also be # Mark group C inactive. Its child groups and hosts should now also be
# attached to group A. Group D hosts should be unchanged. Group C # attached to group A. Group D hosts should be unchanged. Group C
# should also no longer have any group or host relationships. # should also no longer have any group or host relationships.
g_c.mark_inactive() g_c.delete()
self.assertTrue(g_d in g_a.children.all()) self.assertTrue(g_d in g_a.children.all())
self.assertTrue(h_c in g_a.hosts.all()) self.assertTrue(h_c in g_a.hosts.all())
self.assertFalse(h_d in g_a.hosts.all()) self.assertFalse(h_d in g_a.hosts.all())
self.assertFalse(g_c.parents.all())
self.assertFalse(g_c.children.all())
self.assertFalse(g_c.hosts.all())
def test_safe_delete_recursion(self): def test_safe_delete_recursion(self):
# First hierarchy # First hierarchy
@@ -989,11 +977,9 @@ class InventoryTest(BaseTest):
self.assertTrue(other_sub_group in sub_group.children.all()) self.assertTrue(other_sub_group in sub_group.children.all())
# Now recursively remove its parent and the reference from subgroup should remain # Now recursively remove its parent and the reference from subgroup should remain
other_top_group.mark_inactive_recursive() other_top_group.delete_recursive()
other_top_group = Group.objects.get(pk=other_top_group.pk)
self.assertTrue(s2 in sub_group.all_hosts.all()) self.assertTrue(s2 in sub_group.all_hosts.all())
self.assertTrue(other_sub_group in sub_group.children.all()) self.assertTrue(other_sub_group in sub_group.children.all())
self.assertFalse(other_top_group.active)
def test_group_parents_and_children(self): def test_group_parents_and_children(self):
# Test for various levels of group parent/child relations, with hosts, # Test for various levels of group parent/child relations, with hosts,
@@ -1173,7 +1159,7 @@ class InventoryTest(BaseTest):
# Delete recently added hosts and verify the count drops. # Delete recently added hosts and verify the count drops.
hostnames4 = list('defg') hostnames4 = list('defg')
for host in Host.objects.filter(name__in=hostnames4): for host in Host.objects.filter(name__in=hostnames4):
host.mark_inactive() host.delete()
with self.current_user(self.super_django_user): with self.current_user(self.super_django_user):
response = self.get(url) response = self.get(url)
for n, d in enumerate(reversed(response['hosts'])): for n, d in enumerate(reversed(response['hosts'])):
@@ -1270,7 +1256,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
url = reverse('api:inventory_source_hosts_list', args=(inventory_source.pk,)) url = reverse('api:inventory_source_hosts_list', args=(inventory_source.pk,))
response = self.get(url, expect=200) response = self.get(url, expect=200)
self.assertNotEqual(response['count'], 0) self.assertNotEqual(response['count'], 0)
for host in inventory.hosts.filter(active=True): for host in inventory.hosts:
source_pks = host.inventory_sources.values_list('pk', flat=True) source_pks = host.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks) self.assertTrue(inventory_source.pk in source_pks)
self.assertTrue(host.has_inventory_sources) self.assertTrue(host.has_inventory_sources)
@@ -1284,12 +1270,12 @@ class InventoryUpdatesTest(BaseTransactionTest):
url = reverse('api:host_inventory_sources_list', args=(host.pk,)) url = reverse('api:host_inventory_sources_list', args=(host.pk,))
response = self.get(url, expect=200) response = self.get(url, expect=200)
self.assertNotEqual(response['count'], 0) self.assertNotEqual(response['count'], 0)
for group in inventory.groups.filter(active=True): for group in inventory.groups:
source_pks = group.inventory_sources.values_list('pk', flat=True) source_pks = group.inventory_sources.values_list('pk', flat=True)
self.assertTrue(inventory_source.pk in source_pks) self.assertTrue(inventory_source.pk in source_pks)
self.assertTrue(group.has_inventory_sources) self.assertTrue(group.has_inventory_sources)
self.assertTrue(group.children.filter(active=True).exists() or self.assertTrue(group.children.exists() or
group.hosts.filter(active=True).exists()) group.hosts.exists())
# Make sure EC2 instance ID groups and RDS groups are excluded. # Make sure EC2 instance ID groups and RDS groups are excluded.
if inventory_source.source == 'ec2' and not instance_id_group_ok: if inventory_source.source == 'ec2' and not instance_id_group_ok:
self.assertFalse(re.match(r'^i-[0-9a-f]{8}$', group.name, re.I), self.assertFalse(re.match(r'^i-[0-9a-f]{8}$', group.name, re.I),
@@ -1307,7 +1293,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.assertNotEqual(response['count'], 0) self.assertNotEqual(response['count'], 0)
# Try to set a source on a child group that was imported. Should not # Try to set a source on a child group that was imported. Should not
# be allowed. # be allowed.
for group in inventory_source.group.children.filter(active=True): for group in inventory_source.group.children:
inv_src_2 = group.inventory_source inv_src_2 = group.inventory_source
inv_src_url2 = reverse('api:inventory_source_detail', args=(inv_src_2.pk,)) inv_src_url2 = reverse('api:inventory_source_detail', args=(inv_src_2.pk,))
with self.current_user(self.super_django_user): with self.current_user(self.super_django_user):
@@ -1663,7 +1649,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
inventory_source.overwrite = True inventory_source.overwrite = True
inventory_source.save() inventory_source.save()
self.check_inventory_source(inventory_source, initial=False) self.check_inventory_source(inventory_source, initial=False)
for host in self.inventory.hosts.filter(active=True): for host in self.inventory.hosts:
self.assertEqual(host.variables_dict['ec2_instance_type'], instance_type) self.assertEqual(host.variables_dict['ec2_instance_type'], instance_type)
# Try invalid instance filters that should be ignored: # Try invalid instance filters that should be ignored:
@@ -1797,12 +1783,12 @@ class InventoryUpdatesTest(BaseTransactionTest):
inventory_source.save() inventory_source.save()
self.check_inventory_source(inventory_source, initial=False) self.check_inventory_source(inventory_source, initial=False)
# Verify that only the desired groups are returned. # Verify that only the desired groups are returned.
child_names = self.group.children.filter(active=True).values_list('name', flat=True) child_names = self.group.children.values_list('name', flat=True)
self.assertTrue('ec2' in child_names) self.assertTrue('ec2' in child_names)
self.assertTrue('regions' in child_names) self.assertTrue('regions' in child_names)
self.assertTrue(self.group.children.get(name='regions').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='regions').children.count())
self.assertTrue('types' in child_names) self.assertTrue('types' in child_names)
self.assertTrue(self.group.children.get(name='types').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='types').children.count())
self.assertFalse('keys' in child_names) self.assertFalse('keys' in child_names)
self.assertFalse('security_groups' in child_names) self.assertFalse('security_groups' in child_names)
self.assertFalse('tags' in child_names) self.assertFalse('tags' in child_names)
@@ -1819,27 +1805,27 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.check_inventory_source(inventory_source, initial=False, instance_id_group_ok=True) self.check_inventory_source(inventory_source, initial=False, instance_id_group_ok=True)
# Verify that only the desired groups are returned. # Verify that only the desired groups are returned.
# Skip vpcs as selected inventory may or may not have any. # Skip vpcs as selected inventory may or may not have any.
child_names = self.group.children.filter(active=True).values_list('name', flat=True) child_names = self.group.children.values_list('name', flat=True)
self.assertTrue('ec2' in child_names) self.assertTrue('ec2' in child_names)
self.assertFalse('tag_none' in child_names) self.assertFalse('tag_none' in child_names)
self.assertTrue('regions' in child_names) self.assertTrue('regions' in child_names)
self.assertTrue(self.group.children.get(name='regions').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='regions').children.count())
self.assertTrue('types' in child_names) self.assertTrue('types' in child_names)
self.assertTrue(self.group.children.get(name='types').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='types').children.count())
self.assertTrue('keys' in child_names) self.assertTrue('keys' in child_names)
self.assertTrue(self.group.children.get(name='keys').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='keys').children.count())
self.assertTrue('security_groups' in child_names) self.assertTrue('security_groups' in child_names)
self.assertTrue(self.group.children.get(name='security_groups').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='security_groups').children.count())
self.assertTrue('tags' in child_names) self.assertTrue('tags' in child_names)
self.assertTrue(self.group.children.get(name='tags').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='tags').children.count())
# Only check for tag_none as a child of tags if there is a tag_none group; # Only check for tag_none as a child of tags if there is a tag_none group;
# the test inventory *may* have tags set for all hosts. # the test inventory *may* have tags set for all hosts.
if self.inventory.groups.filter(name='tag_none').exists(): if self.inventory.groups.filter(name='tag_none').exists():
self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True)) self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True))
self.assertTrue('images' in child_names) self.assertTrue('images' in child_names)
self.assertTrue(self.group.children.get(name='images').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='images').children.count())
self.assertTrue('instances' in child_names) self.assertTrue('instances' in child_names)
self.assertTrue(self.group.children.get(name='instances').children.filter(active=True).count()) self.assertTrue(self.group.children.get(name='instances').children.count())
# Sync again with overwrite set to False after renaming a group that # Sync again with overwrite set to False after renaming a group that
# was created by the sync. With overwrite false, the renamed group and # was created by the sync. With overwrite false, the renamed group and
# the original group (created again by the sync) will both exist. # the original group (created again by the sync) will both exist.
@@ -1853,7 +1839,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
inventory_source.overwrite = False inventory_source.overwrite = False
inventory_source.save() inventory_source.save()
self.check_inventory_source(inventory_source, initial=False, instance_id_group_ok=True) self.check_inventory_source(inventory_source, initial=False, instance_id_group_ok=True)
child_names = self.group.children.filter(active=True).values_list('name', flat=True) child_names = self.group.children.values_list('name', flat=True)
self.assertTrue(region_group_original_name in self.group.children.get(name='regions').children.values_list('name', flat=True)) self.assertTrue(region_group_original_name in self.group.children.get(name='regions').children.values_list('name', flat=True))
self.assertTrue(region_group.name in self.group.children.get(name='regions').children.values_list('name', flat=True)) self.assertTrue(region_group.name in self.group.children.get(name='regions').children.values_list('name', flat=True))
# Replacement text should not be left in inventory source name. # Replacement text should not be left in inventory source name.

View File

@@ -96,7 +96,7 @@ class JobTemplateLaunchTest(BaseJobTestMixin, django.test.TransactionTestCase):
def test_credential_explicit(self): def test_credential_explicit(self):
# Explicit, credential # Explicit, credential
with self.current_user(self.user_sue): with self.current_user(self.user_sue):
self.cred_sue.mark_inactive() self.cred_sue.delete()
response = self.post(self.launch_url, {'credential': self.cred_doug.pk}, expect=202) response = self.post(self.launch_url, {'credential': self.cred_doug.pk}, expect=202)
j = Job.objects.get(pk=response['job']) j = Job.objects.get(pk=response['job'])
self.assertEqual(j.status, 'new') self.assertEqual(j.status, 'new')
@@ -105,7 +105,7 @@ class JobTemplateLaunchTest(BaseJobTestMixin, django.test.TransactionTestCase):
def test_credential_explicit_via_credential_id(self): def test_credential_explicit_via_credential_id(self):
# Explicit, credential # Explicit, credential
with self.current_user(self.user_sue): with self.current_user(self.user_sue):
self.cred_sue.mark_inactive() self.cred_sue.delete()
response = self.post(self.launch_url, {'credential_id': self.cred_doug.pk}, expect=202) response = self.post(self.launch_url, {'credential_id': self.cred_doug.pk}, expect=202)
j = Job.objects.get(pk=response['job']) j = Job.objects.get(pk=response['job'])
self.assertEqual(j.status, 'new') self.assertEqual(j.status, 'new')
@@ -131,15 +131,16 @@ class JobTemplateLaunchTest(BaseJobTestMixin, django.test.TransactionTestCase):
# Can't launch a job template without a credential defined (or if we # Can't launch a job template without a credential defined (or if we
# pass an invalid/inactive credential value). # pass an invalid/inactive credential value).
with self.current_user(self.user_sue): with self.current_user(self.user_sue):
self.cred_sue.mark_inactive() self.cred_sue.delete()
self.post(self.launch_url, {}, expect=400) self.post(self.launch_url, {}, expect=400)
self.post(self.launch_url, {'credential': 0}, expect=400) self.post(self.launch_url, {'credential': 0}, expect=400)
self.post(self.launch_url, {'credential_id': 0}, expect=400) self.post(self.launch_url, {'credential_id': 0}, expect=400)
self.post(self.launch_url, {'credential': 'one'}, expect=400) self.post(self.launch_url, {'credential': 'one'}, expect=400)
self.post(self.launch_url, {'credential_id': 'one'}, expect=400) self.post(self.launch_url, {'credential_id': 'one'}, expect=400)
self.cred_doug.mark_inactive() cred_doug_pk = self.cred_doug.pk
self.post(self.launch_url, {'credential': self.cred_doug.pk}, expect=400) self.cred_doug.delete()
self.post(self.launch_url, {'credential_id': self.cred_doug.pk}, expect=400) self.post(self.launch_url, {'credential': cred_doug_pk}, expect=400)
self.post(self.launch_url, {'credential_id': cred_doug_pk}, expect=400)
def test_explicit_unowned_cred(self): def test_explicit_unowned_cred(self):
# Explicitly specify a credential that we don't have access to # Explicitly specify a credential that we don't have access to
@@ -174,7 +175,7 @@ class JobTemplateLaunchTest(BaseJobTestMixin, django.test.TransactionTestCase):
def test_deleted_credential_fail(self): def test_deleted_credential_fail(self):
# Job Templates with deleted credentials cannot be launched. # Job Templates with deleted credentials cannot be launched.
self.cred_sue.mark_inactive() self.cred_sue.delete()
with self.current_user(self.user_sue): with self.current_user(self.user_sue):
self.post(self.launch_url, {}, expect=400) self.post(self.launch_url, {}, expect=400)
@@ -202,7 +203,7 @@ class JobTemplateLaunchPasswordsTest(BaseJobTestMixin, django.test.TransactionTe
passwords_required = ['ssh_password', 'become_password', 'ssh_key_unlock'] passwords_required = ['ssh_password', 'become_password', 'ssh_key_unlock']
# Job Templates with deleted credentials cannot be launched. # Job Templates with deleted credentials cannot be launched.
with self.current_user(self.user_sue): with self.current_user(self.user_sue):
self.cred_sue_ask.mark_inactive() self.cred_sue_ask.delete()
response = self.post(self.launch_url, {'credential_id': self.cred_sue_ask_many.pk}, expect=400) response = self.post(self.launch_url, {'credential_id': self.cred_sue_ask_many.pk}, expect=400)
for p in passwords_required: for p in passwords_required:
self.assertIn(p, response['passwords_needed_to_start']) self.assertIn(p, response['passwords_needed_to_start'])

View File

@@ -436,10 +436,8 @@ class OrganizationsTest(BaseTest):
self.delete(urls[0], expect=204, auth=self.get_super_credentials()) self.delete(urls[0], expect=204, auth=self.get_super_credentials())
# check that when we have deleted an object it comes back 404 via GET # check that when we have deleted an object it comes back 404 via GET
# but that it's still in the database as inactive
self.get(urls[1], expect=404, auth=self.get_normal_credentials()) self.get(urls[1], expect=404, auth=self.get_normal_credentials())
org1 = Organization.objects.get(pk=urldata1['id']) assert Organization.objects.filter(pk=urldata1['id']).count() == 0
self.assertEquals(org1.active, False)
# also check that DELETE on the collection doesn't work # also check that DELETE on the collection doesn't work
self.delete(self.collection(), expect=405, auth=self.get_super_credentials()) self.delete(self.collection(), expect=405, auth=self.get_super_credentials())

View File

@@ -162,14 +162,14 @@ class ProjectsTest(BaseTransactionTest):
set(Project.get_local_path_choices())) set(Project.get_local_path_choices()))
# return local paths are only the ones not used by any active project. # return local paths are only the ones not used by any active project.
qs = Project.objects.filter(active=True) qs = Project.objects
used_paths = qs.values_list('local_path', flat=True) used_paths = qs.values_list('local_path', flat=True)
self.assertFalse(set(response['project_local_paths']) & set(used_paths)) self.assertFalse(set(response['project_local_paths']) & set(used_paths))
for project in self.projects: for project in self.projects:
local_path = project.local_path local_path = project.local_path
response = self.get(url, expect=200, auth=self.get_super_credentials()) response = self.get(url, expect=200, auth=self.get_super_credentials())
self.assertTrue(local_path not in response['project_local_paths']) self.assertTrue(local_path not in response['project_local_paths'])
project.mark_inactive() project.delete()
response = self.get(url, expect=200, auth=self.get_super_credentials()) response = self.get(url, expect=200, auth=self.get_super_credentials())
self.assertTrue(local_path in response['project_local_paths']) self.assertTrue(local_path in response['project_local_paths'])
@@ -402,7 +402,7 @@ class ProjectsTest(BaseTransactionTest):
# ===================================================================== # =====================================================================
# TEAM PROJECTS # TEAM PROJECTS
team = Team.objects.filter(active=True, organization__pk=self.organizations[1].pk)[0] team = Team.objects.filter( organization__pk=self.organizations[1].pk)[0]
team_projects = reverse('api:team_projects_list', args=(team.pk,)) team_projects = reverse('api:team_projects_list', args=(team.pk,))
p1 = self.projects[0] p1 = self.projects[0]
@@ -419,7 +419,7 @@ class ProjectsTest(BaseTransactionTest):
# ===================================================================== # =====================================================================
# TEAMS USER MEMBERSHIP # TEAMS USER MEMBERSHIP
team = Team.objects.filter(active=True, organization__pk=self.organizations[1].pk)[0] team = Team.objects.filter( organization__pk=self.organizations[1].pk)[0]
team_users = reverse('api:team_users_list', args=(team.pk,)) team_users = reverse('api:team_users_list', args=(team.pk,))
for x in team.deprecated_users.all(): for x in team.deprecated_users.all():
team.deprecated_users.remove(x) team.deprecated_users.remove(x)
@@ -1262,7 +1262,7 @@ class ProjectUpdatesTest(BaseTransactionTest):
else: else:
self.check_project_update(project, should_fail=should_still_fail) self.check_project_update(project, should_fail=should_still_fail)
# Test that we can delete project updates. # Test that we can delete project updates.
for pu in project.project_updates.filter(active=True): for pu in project.project_updates:
pu_url = reverse('api:project_update_detail', args=(pu.pk,)) pu_url = reverse('api:project_update_detail', args=(pu.pk,))
with self.current_user(self.super_django_user): with self.current_user(self.super_django_user):
self.delete(pu_url, expect=204) self.delete(pu_url, expect=204)

View File

@@ -88,7 +88,8 @@ class InventoryScriptTest(BaseScriptTest):
inventory=inventory, inventory=inventory,
variables=variables) variables=variables)
if x in (3, 7): if x in (3, 7):
host.mark_inactive() host.delete()
continue
hosts.append(host) hosts.append(host)
# add localhost just to make sure it's thrown into all (Ansible github bug) # add localhost just to make sure it's thrown into all (Ansible github bug)
@@ -106,7 +107,8 @@ class InventoryScriptTest(BaseScriptTest):
inventory=inventory, inventory=inventory,
variables=variables) variables=variables)
if x == 2: if x == 2:
group.mark_inactive() group.delete()
continue
groups.append(group) groups.append(group)
group.hosts.add(hosts[x]) group.hosts.add(hosts[x])
group.hosts.add(hosts[x + 5]) group.hosts.add(hosts[x + 5])
@@ -144,12 +146,11 @@ class InventoryScriptTest(BaseScriptTest):
def test_list_with_inventory_id_as_argument(self): def test_list_with_inventory_id_as_argument(self):
inventory = self.inventories[0] inventory = self.inventories[0]
self.assertTrue(inventory.active)
rc, stdout, stderr = self.run_inventory_script(list=True, rc, stdout, stderr = self.run_inventory_script(list=True,
inventory=inventory.pk) inventory=inventory.pk)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
data = json.loads(stdout) data = json.loads(stdout)
groups = inventory.groups.filter(active=True) groups = inventory.groups
groupnames = [ x for x in groups.values_list('name', flat=True)] groupnames = [ x for x in groups.values_list('name', flat=True)]
# it's ok for all to be here because due to an Ansible inventory workaround # it's ok for all to be here because due to an Ansible inventory workaround
@@ -165,16 +166,13 @@ class InventoryScriptTest(BaseScriptTest):
self.assertTrue(isinstance(v['children'], (list,tuple))) self.assertTrue(isinstance(v['children'], (list,tuple)))
self.assertTrue(isinstance(v['hosts'], (list,tuple))) self.assertTrue(isinstance(v['hosts'], (list,tuple)))
self.assertTrue(isinstance(v['vars'], (dict))) self.assertTrue(isinstance(v['vars'], (dict)))
group = inventory.groups.get(active=True, name=k) group = inventory.groups.get(name=k)
hosts = group.hosts.filter(active=True) hosts = group.hosts
hostnames = hosts.values_list('name', flat=True) hostnames = hosts.values_list('name', flat=True)
self.assertEqual(set(v['hosts']), set(hostnames)) self.assertEqual(set(v['hosts']), set(hostnames))
else: else:
self.assertTrue(v['hosts'] == ['localhost']) self.assertTrue(v['hosts'] == ['localhost'])
for group in inventory.groups.filter(active=False):
self.assertFalse(group.name in data.keys(),
'deleted group %s should not be in data' % group)
# Command line argument for inventory ID should take precedence over # Command line argument for inventory ID should take precedence over
# environment variable. # environment variable.
inventory_pks = set(map(lambda x: x.pk, self.inventories)) inventory_pks = set(map(lambda x: x.pk, self.inventories))
@@ -187,12 +185,11 @@ class InventoryScriptTest(BaseScriptTest):
def test_list_with_inventory_id_in_environment(self): def test_list_with_inventory_id_in_environment(self):
inventory = self.inventories[1] inventory = self.inventories[1]
self.assertTrue(inventory.active)
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(list=True) rc, stdout, stderr = self.run_inventory_script(list=True)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
data = json.loads(stdout) data = json.loads(stdout)
groups = inventory.groups.filter(active=True) groups = inventory.groups
groupnames = list(groups.values_list('name', flat=True)) + ['all'] groupnames = list(groups.values_list('name', flat=True)) + ['all']
self.assertEqual(set(data.keys()), set(groupnames)) self.assertEqual(set(data.keys()), set(groupnames))
# Groups for this inventory should have hosts, variable data, and one # Groups for this inventory should have hosts, variable data, and one
@@ -202,14 +199,14 @@ class InventoryScriptTest(BaseScriptTest):
if k == 'all': if k == 'all':
self.assertEqual(v.get('vars', {}), inventory.variables_dict) self.assertEqual(v.get('vars', {}), inventory.variables_dict)
continue continue
group = inventory.groups.get(active=True, name=k) group = inventory.groups.get(name=k)
hosts = group.hosts.filter(active=True) hosts = group.hosts
hostnames = hosts.values_list('name', flat=True) hostnames = hosts.values_list('name', flat=True)
self.assertEqual(set(v.get('hosts', [])), set(hostnames)) self.assertEqual(set(v.get('hosts', [])), set(hostnames))
if group.variables: if group.variables:
self.assertEqual(v.get('vars', {}), group.variables_dict) self.assertEqual(v.get('vars', {}), group.variables_dict)
if k == 'group-3': if k == 'group-3':
children = group.children.filter(active=True) children = group.children
childnames = children.values_list('name', flat=True) childnames = children.values_list('name', flat=True)
self.assertEqual(set(v.get('children', [])), set(childnames)) self.assertEqual(set(v.get('children', [])), set(childnames))
else: else:
@@ -217,13 +214,12 @@ class InventoryScriptTest(BaseScriptTest):
def test_list_with_hostvars_inline(self): def test_list_with_hostvars_inline(self):
inventory = self.inventories[1] inventory = self.inventories[1]
self.assertTrue(inventory.active)
rc, stdout, stderr = self.run_inventory_script(list=True, rc, stdout, stderr = self.run_inventory_script(list=True,
inventory=inventory.pk, inventory=inventory.pk,
hostvars=True) hostvars=True)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
data = json.loads(stdout) data = json.loads(stdout)
groups = inventory.groups.filter(active=True) groups = inventory.groups
groupnames = list(groups.values_list('name', flat=True)) groupnames = list(groups.values_list('name', flat=True))
groupnames.extend(['all', '_meta']) groupnames.extend(['all', '_meta'])
self.assertEqual(set(data.keys()), set(groupnames)) self.assertEqual(set(data.keys()), set(groupnames))
@@ -237,15 +233,15 @@ class InventoryScriptTest(BaseScriptTest):
continue continue
if k == '_meta': if k == '_meta':
continue continue
group = inventory.groups.get(active=True, name=k) group = inventory.groups.get(name=k)
hosts = group.hosts.filter(active=True) hosts = group.hosts
hostnames = hosts.values_list('name', flat=True) hostnames = hosts.values_list('name', flat=True)
all_hostnames.update(hostnames) all_hostnames.update(hostnames)
self.assertEqual(set(v.get('hosts', [])), set(hostnames)) self.assertEqual(set(v.get('hosts', [])), set(hostnames))
if group.variables: if group.variables:
self.assertEqual(v.get('vars', {}), group.variables_dict) self.assertEqual(v.get('vars', {}), group.variables_dict)
if k == 'group-3': if k == 'group-3':
children = group.children.filter(active=True) children = group.children
childnames = children.values_list('name', flat=True) childnames = children.values_list('name', flat=True)
self.assertEqual(set(v.get('children', [])), set(childnames)) self.assertEqual(set(v.get('children', [])), set(childnames))
else: else:
@@ -267,8 +263,7 @@ class InventoryScriptTest(BaseScriptTest):
def test_valid_host(self): def test_valid_host(self):
# Host without variable data. # Host without variable data.
inventory = self.inventories[0] inventory = self.inventories[0]
self.assertTrue(inventory.active) host = inventory.hosts[2]
host = inventory.hosts.filter(active=True)[2]
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(host=host.name) rc, stdout, stderr = self.run_inventory_script(host=host.name)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
@@ -276,8 +271,7 @@ class InventoryScriptTest(BaseScriptTest):
self.assertEqual(data, {}) self.assertEqual(data, {})
# Host with variable data. # Host with variable data.
inventory = self.inventories[1] inventory = self.inventories[1]
self.assertTrue(inventory.active) host = inventory.hosts[4]
host = inventory.hosts.filter(active=True)[4]
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(host=host.name) rc, stdout, stderr = self.run_inventory_script(host=host.name)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
@@ -287,8 +281,7 @@ class InventoryScriptTest(BaseScriptTest):
def test_invalid_host(self): def test_invalid_host(self):
# Valid host, but not part of the specified inventory. # Valid host, but not part of the specified inventory.
inventory = self.inventories[0] inventory = self.inventories[0]
self.assertTrue(inventory.active) host = Host.objects.exclude(inventory=inventory)[0]
host = Host.objects.filter(active=True).exclude(inventory=inventory)[0]
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(host=host.name) rc, stdout, stderr = self.run_inventory_script(host=host.name)
self.assertNotEqual(rc, 0, stderr) self.assertNotEqual(rc, 0, stderr)
@@ -320,16 +313,15 @@ class InventoryScriptTest(BaseScriptTest):
def test_with_deleted_inventory(self): def test_with_deleted_inventory(self):
inventory = self.inventories[0] inventory = self.inventories[0]
inventory.mark_inactive() pk = inventory.pk
self.assertFalse(inventory.active) inventory.delete()
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(pk)
rc, stdout, stderr = self.run_inventory_script(list=True) rc, stdout, stderr = self.run_inventory_script(list=True)
self.assertNotEqual(rc, 0, stderr) self.assertNotEqual(rc, 0, stderr)
self.assertEqual(json.loads(stdout), {'failed': True}) self.assertEqual(json.loads(stdout), {'failed': True})
def test_without_list_or_host_argument(self): def test_without_list_or_host_argument(self):
inventory = self.inventories[0] inventory = self.inventories[0]
self.assertTrue(inventory.active)
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script() rc, stdout, stderr = self.run_inventory_script()
self.assertNotEqual(rc, 0, stderr) self.assertNotEqual(rc, 0, stderr)
@@ -337,7 +329,6 @@ class InventoryScriptTest(BaseScriptTest):
def test_with_both_list_and_host_arguments(self): def test_with_both_list_and_host_arguments(self):
inventory = self.inventories[0] inventory = self.inventories[0]
self.assertTrue(inventory.active)
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(list=True, host='blah') rc, stdout, stderr = self.run_inventory_script(list=True, host='blah')
self.assertNotEqual(rc, 0, stderr) self.assertNotEqual(rc, 0, stderr)
@@ -345,8 +336,7 @@ class InventoryScriptTest(BaseScriptTest):
def test_with_disabled_hosts(self): def test_with_disabled_hosts(self):
inventory = self.inventories[1] inventory = self.inventories[1]
self.assertTrue(inventory.active) for host in inventory.hosts.filter(enabled=True):
for host in inventory.hosts.filter(active=True, enabled=True):
host.enabled = False host.enabled = False
host.save(update_fields=['enabled']) host.save(update_fields=['enabled'])
os.environ['INVENTORY_ID'] = str(inventory.pk) os.environ['INVENTORY_ID'] = str(inventory.pk)
@@ -354,7 +344,7 @@ class InventoryScriptTest(BaseScriptTest):
rc, stdout, stderr = self.run_inventory_script(list=True) rc, stdout, stderr = self.run_inventory_script(list=True)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
data = json.loads(stdout) data = json.loads(stdout)
groups = inventory.groups.filter(active=True) groups = inventory.groups
groupnames = list(groups.values_list('name', flat=True)) + ['all'] groupnames = list(groups.values_list('name', flat=True)) + ['all']
self.assertEqual(set(data.keys()), set(groupnames)) self.assertEqual(set(data.keys()), set(groupnames))
for k,v in data.items(): for k,v in data.items():
@@ -362,15 +352,15 @@ class InventoryScriptTest(BaseScriptTest):
if k == 'all': if k == 'all':
self.assertEqual(v.get('vars', {}), inventory.variables_dict) self.assertEqual(v.get('vars', {}), inventory.variables_dict)
continue continue
group = inventory.groups.get(active=True, name=k) group = inventory.groups.get(name=k)
hosts = group.hosts.filter(active=True, enabled=True) hosts = group.hosts.filter(enabled=True)
hostnames = hosts.values_list('name', flat=True) hostnames = hosts.values_list('name', flat=True)
self.assertEqual(set(v.get('hosts', [])), set(hostnames)) self.assertEqual(set(v.get('hosts', [])), set(hostnames))
self.assertFalse(hostnames) self.assertFalse(hostnames)
if group.variables: if group.variables:
self.assertEqual(v.get('vars', {}), group.variables_dict) self.assertEqual(v.get('vars', {}), group.variables_dict)
if k == 'group-3': if k == 'group-3':
children = group.children.filter(active=True) children = group.children
childnames = children.values_list('name', flat=True) childnames = children.values_list('name', flat=True)
self.assertEqual(set(v.get('children', [])), set(childnames)) self.assertEqual(set(v.get('children', [])), set(childnames))
else: else:
@@ -379,7 +369,7 @@ class InventoryScriptTest(BaseScriptTest):
rc, stdout, stderr = self.run_inventory_script(list=True, all=True) rc, stdout, stderr = self.run_inventory_script(list=True, all=True)
self.assertEqual(rc, 0, stderr) self.assertEqual(rc, 0, stderr)
data = json.loads(stdout) data = json.loads(stdout)
groups = inventory.groups.filter(active=True) groups = inventory.groups
groupnames = list(groups.values_list('name', flat=True)) + ['all'] groupnames = list(groups.values_list('name', flat=True)) + ['all']
self.assertEqual(set(data.keys()), set(groupnames)) self.assertEqual(set(data.keys()), set(groupnames))
for k,v in data.items(): for k,v in data.items():
@@ -387,15 +377,15 @@ class InventoryScriptTest(BaseScriptTest):
if k == 'all': if k == 'all':
self.assertEqual(v.get('vars', {}), inventory.variables_dict) self.assertEqual(v.get('vars', {}), inventory.variables_dict)
continue continue
group = inventory.groups.get(active=True, name=k) group = inventory.groups.get(name=k)
hosts = group.hosts.filter(active=True) hosts = group.hosts
hostnames = hosts.values_list('name', flat=True) hostnames = hosts.values_list('name', flat=True)
self.assertEqual(set(v.get('hosts', [])), set(hostnames)) self.assertEqual(set(v.get('hosts', [])), set(hostnames))
self.assertTrue(hostnames) self.assertTrue(hostnames)
if group.variables: if group.variables:
self.assertEqual(v.get('vars', {}), group.variables_dict) self.assertEqual(v.get('vars', {}), group.variables_dict)
if k == 'group-3': if k == 'group-3':
children = group.children.filter(active=True) children = group.children
childnames = children.values_list('name', flat=True) childnames = children.values_list('name', flat=True)
self.assertEqual(set(v.get('children', [])), set(childnames)) self.assertEqual(set(v.get('children', [])), set(childnames))
else: else:

View File

@@ -592,26 +592,8 @@ class RunJobTest(BaseJobExecutionTest):
new_group.children.remove(self.group) new_group.children.remove(self.group)
new_group = Group.objects.get(pk=new_group.pk) new_group = Group.objects.get(pk=new_group.pk)
self.assertFalse(new_group.has_active_failures) self.assertFalse(new_group.has_active_failures)
# Mark host inactive (should clear flag on parent group and inventory) # Delete host (should clear flag on parent group and inventory)
self.host.mark_inactive()
self.group = Group.objects.get(pk=self.group.pk)
self.assertFalse(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk)
self.assertFalse(self.inventory.has_active_failures)
# Un-mark host as inactive (need to force update of flag on group and
# inventory)
host = self.host
host.name = '_'.join(host.name.split('_')[3:]) or 'undeleted host'
host.active = True
host.save()
host.update_computed_fields()
self.group = Group.objects.get(pk=self.group.pk)
self.assertTrue(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk)
self.assertTrue(self.inventory.has_active_failures)
# Delete host. (should clear flag)
self.host.delete() self.host.delete()
self.host = None
self.group = Group.objects.get(pk=self.group.pk) self.group = Group.objects.get(pk=self.group.pk)
self.assertFalse(self.group.has_active_failures) self.assertFalse(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk) self.inventory = Inventory.objects.get(pk=self.inventory.pk)
@@ -619,30 +601,7 @@ class RunJobTest(BaseJobExecutionTest):
def test_update_has_active_failures_when_job_removed(self): def test_update_has_active_failures_when_job_removed(self):
job = self.test_run_job_that_fails() job = self.test_run_job_that_fails()
# Mark job as inactive (should clear flags). # Delete (should clear flags).
job.mark_inactive()
self.host = Host.objects.get(pk=self.host.pk)
self.assertFalse(self.host.has_active_failures)
self.group = Group.objects.get(pk=self.group.pk)
self.assertFalse(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk)
self.assertFalse(self.inventory.has_active_failures)
# Un-mark job as inactive (need to force update of flag)
job.active = True
job.save()
# Need to manually update last_job on host...
host = Host.objects.get(pk=self.host.pk)
host.last_job = job
host.last_job_host_summary = JobHostSummary.objects.get(job=job, host=host)
host.save()
self.inventory.update_computed_fields()
self.host = Host.objects.get(pk=self.host.pk)
self.assertTrue(self.host.has_active_failures)
self.group = Group.objects.get(pk=self.group.pk)
self.assertTrue(self.group.has_active_failures)
self.inventory = Inventory.objects.get(pk=self.inventory.pk)
self.assertTrue(self.inventory.has_active_failures)
# Delete job entirely.
job.delete() job.delete()
self.host = Host.objects.get(pk=self.host.pk) self.host = Host.objects.get(pk=self.host.pk)
self.assertFalse(self.host.has_active_failures) self.assertFalse(self.host.has_active_failures)
@@ -662,8 +621,8 @@ class RunJobTest(BaseJobExecutionTest):
self.host = Host.objects.get(pk=self.host.pk) self.host = Host.objects.get(pk=self.host.pk)
self.assertEqual(self.host.last_job, job1) self.assertEqual(self.host.last_job, job1)
self.assertEqual(self.host.last_job_host_summary.job, job1) self.assertEqual(self.host.last_job_host_summary.job, job1)
# Mark job1 inactive (should update host.last_job to None). # Delete job1 (should update host.last_job to None).
job1.mark_inactive() job1.delete()
self.host = Host.objects.get(pk=self.host.pk) self.host = Host.objects.get(pk=self.host.pk)
self.assertEqual(self.host.last_job, None) self.assertEqual(self.host.last_job, None)
self.assertEqual(self.host.last_job_host_summary, None) self.assertEqual(self.host.last_job_host_summary, None)

View File

@@ -196,7 +196,7 @@ class UsersTest(BaseTest):
self.post(url, expect=201, data=new_user2, auth=self.get_normal_credentials()) self.post(url, expect=201, data=new_user2, auth=self.get_normal_credentials())
self.post(url, expect=400, data=new_user2, auth=self.get_normal_credentials()) self.post(url, expect=400, data=new_user2, auth=self.get_normal_credentials())
# Normal user cannot add users after his org is marked inactive. # Normal user cannot add users after his org is marked inactive.
self.organizations[0].mark_inactive() self.organizations[0].delete()
new_user3 = dict(username='blippy3') new_user3 = dict(username='blippy3')
self.post(url, expect=403, data=new_user3, auth=self.get_normal_credentials()) self.post(url, expect=403, data=new_user3, auth=self.get_normal_credentials())
@@ -316,7 +316,7 @@ class UsersTest(BaseTest):
remote_addr=remote_addr) remote_addr=remote_addr)
# Token auth should be denied if the user is inactive. # Token auth should be denied if the user is inactive.
self.normal_django_user.mark_inactive() self.normal_django_user.delete()
response = self.get(user_me_url, expect=401, auth=auth_token2, response = self.get(user_me_url, expect=401, auth=auth_token2,
remote_addr=remote_addr) remote_addr=remote_addr)
self.assertEqual(response['detail'], 'User inactive or deleted') self.assertEqual(response['detail'], 'User inactive or deleted')
@@ -422,7 +422,7 @@ class UsersTest(BaseTest):
# Normal user can no longer see all users after the organization he # Normal user can no longer see all users after the organization he
# admins is marked inactive, nor can he see any other users that were # admins is marked inactive, nor can he see any other users that were
# in that org, so he only sees himself. # in that org, so he only sees himself.
self.organizations[0].mark_inactive() self.organizations[0].delete()
data3 = self.get(url, expect=200, auth=self.get_normal_credentials()) data3 = self.get(url, expect=200, auth=self.get_normal_credentials())
self.assertEquals(data3['count'], 1) self.assertEquals(data3['count'], 1)

View File

@@ -209,7 +209,6 @@ REST_FRAMEWORK = {
'awx.api.permissions.ModelAccessPermission', 'awx.api.permissions.ModelAccessPermission',
), ),
'DEFAULT_FILTER_BACKENDS': ( 'DEFAULT_FILTER_BACKENDS': (
'awx.api.filters.ActiveOnlyBackend',
'awx.api.filters.TypeFilterBackend', 'awx.api.filters.TypeFilterBackend',
'awx.api.filters.FieldLookupBackend', 'awx.api.filters.FieldLookupBackend',
'rest_framework.filters.SearchFilter', 'rest_framework.filters.SearchFilter',

View File

@@ -90,7 +90,7 @@ def update_user_orgs(backend, details, user=None, *args, **kwargs):
org = Organization.objects.get_or_create(name=org_name)[0] org = Organization.objects.get_or_create(name=org_name)[0]
else: else:
try: try:
org = Organization.objects.filter(active=True).order_by('pk')[0] org = Organization.objects.order_by('pk')[0]
except IndexError: except IndexError:
continue continue
@@ -126,7 +126,7 @@ def update_user_teams(backend, details, user=None, *args, **kwargs):
org = Organization.objects.get_or_create(name=team_opts['organization'])[0] org = Organization.objects.get_or_create(name=team_opts['organization'])[0]
else: else:
try: try:
org = Organization.objects.filter(active=True).order_by('pk')[0] org = Organization.objects.order_by('pk')[0]
except IndexError: except IndexError:
continue continue