Mirror of https://github.com/ansible/awx.git (synced 2026-01-12 02:19:58 -03:30)

Merge branch 'devel' into workflow-channels
commit 34439fed9d

Makefile (13 lines changed)
@@ -284,18 +284,18 @@ virtualenv_tower:

requirements_ansible: virtualenv_ansible
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/ansible/bin/activate; \
- $(VENV_BASE)/ansible/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ;\
+ $(VENV_BASE)/ansible/bin/pip install --ignore-installed --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ;\
else \
- pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ; \
+ pip install --ignore-installed --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements_ansible.txt ; \
fi

# Install third-party requirements needed for Tower's environment.
requirements_tower: virtualenv_tower
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
- $(VENV_BASE)/tower/bin/pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ;\
+ $(VENV_BASE)/tower/bin/pip install --ignore-installed --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ;\
else \
- pip install --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ; \
+ pip install --ignore-installed --no-binary $(SRC_ONLY_PKGS) -r requirements/requirements.txt ; \
fi

requirements_tower_dev:

@@ -464,7 +464,10 @@ pep8: reports
@(set -o pipefail && $@ | tee reports/$@.report)

flake8: reports
- @(set -o pipefail && $@ | tee reports/$@.report)
+ @if [ "$(VENV_BASE)" ]; then \
+ . $(VENV_BASE)/tower/bin/activate; \
+ fi; \
+ (set -o pipefail && $@ | tee reports/$@.report)

pyflakes: reports
@(set -o pipefail && $@ | tee reports/$@.report)
@@ -17,6 +17,7 @@ try:
except ImportError: # pragma: no cover
MODE = 'production'

def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
@@ -33,6 +34,7 @@ def find_commands(management_dir):
pass
return commands

def prepare_env():
# Update the default settings environment variable based on current mode.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
@@ -21,6 +21,7 @@ from awx.main.models import UnifiedJob, AuthToken

logger = logging.getLogger('awx.api.authentication')

class TokenAuthentication(authentication.TokenAuthentication):
'''
Custom token authentication using tokens that expire and are associated
@@ -20,12 +20,14 @@ from rest_framework.filters import BaseFilterBackend
# Ansible Tower
from awx.main.utils import get_type_for_model, to_python_boolean

class MongoFilterBackend(BaseFilterBackend):

# FIX: Note that MongoEngine can't use the filter backends from DRF
def filter_queryset(self, request, queryset, view):
return queryset

class TypeFilterBackend(BaseFilterBackend):
'''
Filter on type field now returned with all objects.
@@ -62,6 +64,7 @@ class TypeFilterBackend(BaseFilterBackend):
# Return a 400 for invalid field names.
raise ParseError(*e.args)

class FieldLookupBackend(BaseFilterBackend):
'''
Filter using field lookups provided via query string parameters.
@@ -229,6 +232,7 @@ class FieldLookupBackend(BaseFilterBackend):
except ValidationError as e:
raise ParseError(e.messages)

class OrderByBackend(BaseFilterBackend):
'''
Filter to apply ordering based on query string parameters.
@@ -42,6 +42,7 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',

logger = logging.getLogger('awx.api.generics')

def get_view_name(cls, suffix=None):
'''
Wrapper around REST framework get_view_name() to support get_name() method
@@ -59,6 +60,7 @@ def get_view_name(cls, suffix=None):
return ('%s %s' % (name, suffix)) if suffix else name
return views.get_view_name(cls, suffix=None)

def get_view_description(cls, html=False):
'''
Wrapper around REST framework get_view_description() to support
@@ -78,6 +80,7 @@ def get_view_description(cls, html=False):
desc = '<div class="description">%s</div>' % desc
return mark_safe(desc)

class APIView(views.APIView):

def initialize_request(self, request, *args, **kwargs):
@@ -227,11 +230,13 @@ class GenericAPIView(generics.GenericAPIView, APIView):
d['settings'] = settings
return d

class SimpleListAPIView(generics.ListAPIView, GenericAPIView):

def get_queryset(self):
return self.request.user.get_queryset(self.model)

class ListAPIView(generics.ListAPIView, GenericAPIView):
# Base class for a read-only list view.

@@ -266,10 +271,12 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
fields.append(field.name)
return fields

class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):
# Base class for a list view that allows creating new objects.
pass

class ParentMixin(object):

def get_parent_object(self):
@@ -288,6 +295,7 @@ class ParentMixin(object):
if not self.request.user.can_access(*args):
raise PermissionDenied()

class SubListAPIView(ListAPIView, ParentMixin):
# Base class for a read-only sublist view.

@@ -315,6 +323,7 @@ class SubListAPIView(ListAPIView, ParentMixin):
sublist_qs = getattrd(parent, self.relationship).distinct()
return qs & sublist_qs

class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# Base class for a sublist view that allows for creating subobjects
# associated with the parent object.
@@ -367,6 +376,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
headers = {'Location': obj.get_absolute_url()}
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
# Base class for a sublist view that allows for creating subobjects and
# attaching/detaching them from the parent.
@@ -469,12 +479,13 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
else:
return self.attach(request, *args, **kwargs)

- '''
- Models for which you want the last instance to be deleted from the database
- when the last disassociate is called should inherit from this class. Further,
- the model should implement is_detached()
- '''

class DeleteLastUnattachLabelMixin(object):
+ '''
+ Models for which you want the last instance to be deleted from the database
+ when the last disassociate is called should inherit from this class. Further,
+ the model should implement is_detached()
+ '''
def unattach(self, request, *args, **kwargs):
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
if res:
@@ -489,12 +500,15 @@ class DeleteLastUnattachLabelMixin(object):

return res

class SubDetailAPIView(generics.RetrieveAPIView, GenericAPIView, ParentMixin):
pass

class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
pass

class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):

def update(self, request, *args, **kwargs):
@@ -509,6 +523,7 @@ class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
''' scrub any fields the user cannot/should not put/patch, based on user context. This runs after read-only serialization filtering '''
pass

class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):

def destroy(self, request, *args, **kwargs):
@@ -519,9 +534,11 @@ class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):
pass

class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
pass
@@ -184,6 +184,7 @@ class Metadata(metadata.SimpleMetadata):

return metadata

class RoleMetadata(Metadata):
def determine_metadata(self, request, view):
metadata = super(RoleMetadata, self).determine_metadata(request, view)
@@ -21,6 +21,7 @@ logger = logging.getLogger('awx.api.permissions')
__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission',
'TaskPermission', 'ProjectUpdatePermission', 'UserPermission']

class ModelAccessPermission(permissions.BasePermission):
'''
Default permissions class to check user access based on the model and
@@ -140,6 +141,7 @@ class ModelAccessPermission(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return self.has_permission(request, view, obj)

class JobTemplateCallbackPermission(ModelAccessPermission):
'''
Permission check used by job template callback view for requests from
@@ -165,6 +167,7 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
else:
return True

class TaskPermission(ModelAccessPermission):
'''
Permission checks used for API callbacks from running a task.
@@ -192,6 +195,7 @@ class TaskPermission(ModelAccessPermission):
else:
return False

class ProjectUpdatePermission(ModelAccessPermission):
'''
Permission check used by ProjectUpdateView to determine who can update projects
@@ -496,6 +496,7 @@ class BaseSerializer(serializers.ModelSerializer):
class EmptySerializer(serializers.Serializer):
pass

class BaseFactSerializer(BaseSerializer):

__metaclass__ = BaseSerializerMetaclass
@@ -509,6 +510,7 @@ class BaseFactSerializer(BaseSerializer):
ret['module'] = serializers.ChoiceField(choices=choices, read_only=True, required=False)
return ret

class UnifiedJobTemplateSerializer(BaseSerializer):

class Meta:
@@ -1293,6 +1295,7 @@ class GroupVariableDataSerializer(BaseVariableDataSerializer):
class Meta:
model = Group

class CustomInventoryScriptSerializer(BaseSerializer):

script = serializers.CharField(trim_whitespace=False)
@@ -1496,7 +1499,6 @@ class TeamSerializer(BaseSerializer):
return ret

class RoleSerializer(BaseSerializer):

class Meta:
@@ -1792,13 +1794,19 @@ class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
class LabelsListMixin(object):

def _summary_field_labels(self, obj):
- return {'count': obj.labels.count(), 'results': [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]}
+ label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]
+ if len(label_list) < 10:
+ label_ct = len(label_list)
+ else:
+ label_ct = obj.labels.count()
+ return {'count': label_ct, 'results': label_list}

def get_summary_fields(self, obj):
res = super(LabelsListMixin, self).get_summary_fields(obj)
res['labels'] = self._summary_field_labels(obj)
return res

class JobOptionsSerializer(LabelsListMixin, BaseSerializer):

class Meta:
@@ -2063,6 +2071,7 @@ class JobRelaunchSerializer(JobSerializer):
attrs = super(JobRelaunchSerializer, self).validate(attrs)
return attrs

class AdHocCommandSerializer(UnifiedJobSerializer):

class Meta:
@@ -2163,6 +2172,7 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):
))
return res

class SystemJobSerializer(UnifiedJobSerializer):

class Meta:
@@ -2179,6 +2189,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
res['cancel'] = reverse('api:system_job_cancel', args=(obj.pk,))
return res

class SystemJobCancelSerializer(SystemJobSerializer):

can_cancel = serializers.BooleanField(read_only=True)
@@ -2186,6 +2197,7 @@ class SystemJobCancelSerializer(SystemJobSerializer):
class Meta:
fields = ('can_cancel',)

class WorkflowJobTemplateSerializer(LabelsListMixin, UnifiedJobTemplateSerializer):
show_capabilities = ['start', 'edit', 'delete']

@@ -2211,10 +2223,12 @@ class WorkflowJobTemplateSerializer(LabelsListMixin, UnifiedJobTemplateSerialize
def validate_extra_vars(self, value):
return vars_validate_or_raise(value)

# TODO:
class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer):
pass

# TODO:
class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):

@@ -2242,10 +2256,12 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
ret['extra_vars'] = obj.display_extra_vars()
return ret

# TODO:
class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer):
pass

class WorkflowJobCancelSerializer(WorkflowJobSerializer):

can_cancel = serializers.BooleanField(read_only=True)
@@ -2341,6 +2357,7 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
"unified_job_template": _("Can not nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__})
return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs)

class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):
class Meta:
model = WorkflowJobNode
@@ -2357,14 +2374,16 @@ class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):
res['workflow_job'] = reverse('api:workflow_job_detail', args=(obj.workflow_job.pk,))
return res

class WorkflowJobNodeListSerializer(WorkflowJobNodeSerializer):
pass

class WorkflowJobNodeDetailSerializer(WorkflowJobNodeSerializer):
pass

class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer):

class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer):
'''
Influence the api browser sample data to not include workflow_job_template
when editing a WorkflowNode.
@@ -2379,18 +2398,23 @@ class WorkflowJobTemplateNodeDetailSerializer(WorkflowJobTemplateNodeSerializer)
field_kwargs.pop('queryset', None)
return field_class, field_kwargs

class WorkflowJobTemplateNodeListSerializer(WorkflowJobTemplateNodeSerializer):
pass

class JobListSerializer(JobSerializer, UnifiedJobListSerializer):
pass

class AdHocCommandListSerializer(AdHocCommandSerializer, UnifiedJobListSerializer):
pass

class SystemJobListSerializer(SystemJobSerializer, UnifiedJobListSerializer):
pass

class JobHostSummarySerializer(BaseSerializer):

class Meta:
@@ -2607,6 +2631,7 @@ class JobLaunchSerializer(BaseSerializer):
obj.credential = JT_credential
return attrs

class WorkflowJobLaunchSerializer(BaseSerializer):

can_start_without_user_input = serializers.BooleanField(read_only=True)
@@ -2665,6 +2690,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
obj.extra_vars = WFJT_extra_vars
return attrs

class NotificationTemplateSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']

@@ -2754,6 +2780,7 @@ class NotificationTemplateSerializer(BaseSerializer):
raise serializers.ValidationError(error_list)
return attrs

class NotificationSerializer(BaseSerializer):

class Meta:
@@ -2768,6 +2795,7 @@ class NotificationSerializer(BaseSerializer):
))
return res

class LabelSerializer(BaseSerializer):

class Meta:
@@ -2780,6 +2808,7 @@ class LabelSerializer(BaseSerializer):
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res

class ScheduleSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']

@@ -2853,6 +2882,7 @@ class ScheduleSerializer(BaseSerializer):
raise serializers.ValidationError(_("rrule parsing failed validation."))
return value

class ActivityStreamSerializer(BaseSerializer):

changes = serializers.SerializerMethodField()
@@ -2996,6 +3026,7 @@ class FactVersionSerializer(BaseFactSerializer):
res['fact_view'] = build_url('api:host_fact_compare_view', args=(obj.host.pk,), get=params)
return res

class FactSerializer(BaseFactSerializer):

class Meta:
@@ -9,6 +9,7 @@ from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework import status

def paginated(method):
"""Given an method with a Django REST Framework API method signature
(e.g. `def get(self, request, ...):`), abstract out boilerplate pagination
awx/api/views.py (223 lines changed)
File diff suppressed because it is too large
@@ -57,9 +57,11 @@ register(
category_slug='cows',
)

def _get_read_only_ansible_cow_selection_default():
return getattr(settings, 'ANSIBLE_COW_SELECTION', 'No default cow!')

register(
'READONLY_ANSIBLE_COW_SELECTION',
field_class=fields.CharField,
@@ -314,9 +314,6 @@ class Command(BaseCommand):
self.stdout.write(' No settings to migrate!')
for name, db_value in to_migrate.items():
display_value = json.dumps(db_value, indent=4)
- # Always encode "raw" strings as JSON.
- if isinstance(db_value, basestring):
- db_value = json.dumps(db_value)
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
action = 'No Change'
if not setting:
@@ -14,11 +14,6 @@ def copy_tower_settings(apps, schema_editor):
# LICENSE is stored as a string; convert it to a dict.
if tower_setting.key == 'LICENSE':
value = json.loads(value)
- # Anything else (e.g. TOWER_URL_BASE) that is stored as a string
- # needs to be converted to a JSON-encoded string to work with the
- # JSON field.
- elif tower_setting.value_type == 'string':
- value = json.dumps(value)
setting, created = Setting.objects.get_or_create(
key=tower_setting.key,
user=tower_setting.user,
awx/conf/migrations/0003_v310_JSONField_changes.py (new file, 20 lines)

@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations
import awx.main.fields

class Migration(migrations.Migration):

dependencies = [
('conf', '0002_v310_copy_tower_settings'),
]

operations = [
migrations.AlterField(
model_name='setting',
name='value',
field=awx.main.fields.JSONField(null=True),
),
]
@@ -7,11 +7,9 @@ import json
# Django
from django.db import models

- # Django-JSONField
- from jsonfield import JSONField

# Tower
from awx.main.models.base import CreatedModifiedModel
+ from awx.main.fields import JSONField

__all__ = ['Setting']

@@ -23,8 +21,6 @@ class Setting(CreatedModifiedModel):
)
value = JSONField(
null=True,
- # FIXME: Enable when we upgrade to JSONField with support:
- # load_kwargs={'object_pairs_hook': collections.OrderedDict},
)
user = models.ForeignKey(
'auth.User',

@@ -118,4 +118,5 @@ class SettingsRegistry(object):
logger.warning('Unable to retrieve default value for setting "%s".', setting, exc_info=True)
return field_instance

settings_registry = SettingsRegistry()
@@ -1,6 +1,5 @@
# Python
import contextlib
- import json
import logging
import threading
import time
@@ -142,16 +141,18 @@ class SettingsWrapper(UserSettingsHolder):
def _get_local(self, name):
self._preload_cache()
cache_key = Setting.get_cache_key(name)
- value = cache.get(cache_key, empty)
- logger.debug('cache get(%r, %r) -> %r', cache_key, empty, value)
- if value == SETTING_CACHE_NOTSET:
+ cache_value = cache.get(cache_key, empty)
+ logger.debug('cache get(%r, %r) -> %r', cache_key, empty, cache_value)
+ if cache_value == SETTING_CACHE_NOTSET:
value = empty
- elif value == SETTING_CACHE_NONE:
+ elif cache_value == SETTING_CACHE_NONE:
value = None
- elif value == SETTING_CACHE_EMPTY_LIST:
+ elif cache_value == SETTING_CACHE_EMPTY_LIST:
value = []
- elif value == SETTING_CACHE_EMPTY_DICT:
+ elif cache_value == SETTING_CACHE_EMPTY_DICT:
value = {}
else:
+ value = cache_value
field = settings_registry.get_setting_field(name)
if value is empty:
setting = None
@@ -159,9 +160,6 @@ class SettingsWrapper(UserSettingsHolder):
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
if setting:
value = setting.value
- # If None implies not set, convert when reading the value.
- if value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
- value = SETTING_CACHE_NOTSET
else:
value = SETTING_CACHE_NOTSET
if SETTING_CACHE_DEFAULTS:
@@ -169,8 +167,12 @@ class SettingsWrapper(UserSettingsHolder):
value = field.get_default()
except SkipField:
pass
- logger.debug('cache set(%r, %r, %r)', cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
- cache.set(cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
+ # If None implies not set, convert when reading the value.
+ if value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
+ value = SETTING_CACHE_NOTSET
+ if cache_value != value:
+ logger.debug('cache set(%r, %r, %r)', cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
+ cache.set(cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
if value == SETTING_CACHE_NOTSET and not SETTING_CACHE_DEFAULTS:
try:
value = field.get_default()
@@ -218,9 +220,6 @@ class SettingsWrapper(UserSettingsHolder):
logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
raise e

- # Always encode "raw" strings as JSON.
- if isinstance(db_value, basestring):
- db_value = json.dumps(db_value)
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
if not setting:
setting = Setting.objects.create(key=name, user=None, value=db_value)
@@ -3,7 +3,6 @@

# Python
import collections
import json
import sys

# Django
@@ -68,7 +67,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if self.category_slug not in category_slugs:
raise PermissionDenied()

- registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug)
+ registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False)
if self.category_slug == 'user':
return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
else:
@@ -100,9 +99,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if key == 'LICENSE':
continue
setattr(serializer.instance, key, value)
- # Always encode "raw" strings as JSON.
- if isinstance(value, basestring):
- value = json.dumps(value)
setting = settings_qs.filter(key=key).order_by('pk').first()
if not setting:
setting = Setting.objects.create(key=key, user=user, value=value)
@@ -5,6 +5,7 @@ import sys
# Based on http://stackoverflow.com/a/6879344/131141 -- Initialize tower display
# callback as early as possible to wrap ansible.display.Display methods.

def argv_ready(argv):
if argv and os.path.basename(argv[0]) in {'ansible', 'ansible-playbook'}:
import tower_display_callback # noqa

@@ -67,6 +67,7 @@ def with_verbosity(f):
return f(*args, **kwargs)
return wrapper

Display.verbose = with_verbosity(Display.verbose)

@@ -89,4 +90,5 @@ def display_with_context(f):
event_context.remove_local(uuid=None)
return wrapper

Display.display = display_with_context(Display.display)

@@ -134,4 +134,5 @@ class EventContext(object):
def dump_end(self, fileobj):
self.dump(fileobj, self.get_end_dict())

event_context = EventContext()
@@ -58,13 +58,16 @@ access_registry = {
# ...
}

class StateConflict(ValidationError):
status_code = 409

def register_access(model_class, access_class):
access_classes = access_registry.setdefault(model_class, [])
access_classes.append(access_class)

@property
def user_admin_role(self):
role = Role.objects.get(
@@ -76,9 +79,11 @@ def user_admin_role(self):
role.parents = [org.admin_role.pk for org in self.organizations]
return role

def user_accessible_objects(user, role_name):
return ResourceMixin._accessible_objects(User, user, role_name)

def get_user_queryset(user, model_class):
'''
Return a queryset for the given model_class containing only the instances
@@ -98,6 +103,7 @@ def get_user_queryset(user, model_class):
queryset = queryset.filter(pk__in=qs.values_list('pk', flat=True))
return queryset

def check_user_access(user, model_class, action, *args, **kwargs):
'''
Return True if user can perform action against model_class with the
@@ -117,6 +123,7 @@ def check_user_access(user, model_class, action, *args, **kwargs):
return result
return False

def get_user_capabilities(user, instance, **kwargs):
'''
Returns a dictionary of capabilities the user has on the particular
@@ -129,6 +136,7 @@ def get_user_capabilities(user, instance, **kwargs):
return access_class(user).get_user_capabilities(instance, **kwargs)
return None

def check_superuser(func):
'''
check_superuser is a decorator that provides a simple short circuit
@@ -141,6 +149,7 @@ def check_superuser(func):
return func(self, *args, **kwargs)
return wrapper

class BaseAccess(object):
'''
Base class for checking user access to a given model. Subclasses should
@@ -488,6 +497,7 @@ class OrganizationAccess(BaseAccess):
"active_jobs": active_jobs})
return True

class InventoryAccess(BaseAccess):
'''
I can see inventory when:
@@ -557,6 +567,7 @@ class InventoryAccess(BaseAccess):
def can_run_ad_hoc_commands(self, obj):
return self.user in obj.adhoc_role

class HostAccess(BaseAccess):
'''
I can see hosts whenever I can see their inventory.
@@ -611,6 +622,7 @@ class HostAccess(BaseAccess):
def can_delete(self, obj):
return obj and self.user in obj.inventory.admin_role

class GroupAccess(BaseAccess):
'''
I can see groups whenever I can see their inventory.
@@ -678,6 +690,7 @@ class GroupAccess(BaseAccess):
return self.user.can_access(InventorySource, 'start', obj.inventory_source, validate_license=validate_license)
return False

class InventorySourceAccess(BaseAccess):
'''
I can see inventory sources whenever I can see their group or inventory.
@@ -757,6 +770,7 @@ class InventoryUpdateAccess(BaseAccess):
def can_delete(self, obj):
return self.user in obj.inventory_source.inventory.admin_role

class CredentialAccess(BaseAccess):
'''
I can see credentials when:
@@ -829,6 +843,7 @@ class CredentialAccess(BaseAccess):
# return True
return self.can_change(obj, None)

class TeamAccess(BaseAccess):
'''
I can see a team when:
@@ -889,6 +904,7 @@ class TeamAccess(BaseAccess):
return super(TeamAccess, self).can_unattach(obj, sub_obj, relationship,
*args, **kwargs)

class ProjectAccess(BaseAccess):
'''
I can see projects when:
@@ -943,6 +959,7 @@ class ProjectAccess(BaseAccess):
def can_start(self, obj, validate_license=True):
return obj and self.user in obj.update_role

class ProjectUpdateAccess(BaseAccess):
'''
I can see project updates when I can see the project.
@@ -979,6 +996,7 @@ class ProjectUpdateAccess(BaseAccess):
def can_delete(self, obj):
return obj and self.user in obj.project.admin_role

class JobTemplateAccess(BaseAccess):
'''
I can see job templates when:
@@ -1175,6 +1193,7 @@ class JobTemplateAccess(BaseAccess):
"active_jobs": active_jobs})
return True

class JobAccess(BaseAccess):
'''
I can see jobs when:
@@ -1313,6 +1332,7 @@ class JobAccess(BaseAccess):
return True
return obj.job_template is not None and self.user in obj.job_template.admin_role

class SystemJobTemplateAccess(BaseAccess):
'''
I can only see/manage System Job Templates if I'm a super user
@@ -1325,6 +1345,7 @@ class SystemJobTemplateAccess(BaseAccess):
'''Only a superuser can start a job from a SystemJobTemplate'''
return False

class SystemJobAccess(BaseAccess):
'''
I can only see manage System Jobs if I'm a super user
@@ -1334,6 +1355,7 @@ class SystemJobAccess(BaseAccess):
def can_start(self, obj, validate_license=True):
return False # no relaunching of system jobs

# TODO:
class WorkflowJobTemplateNodeAccess(BaseAccess):
'''
@@ -1430,6 +1452,7 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
return self.wfjt_admin(obj) and self.check_same_WFJT(obj, sub_obj)

class WorkflowJobNodeAccess(BaseAccess):
'''
I can see a WorkflowJobNode if I have permission to...
@@ -1462,6 +1485,7 @@ class WorkflowJobNodeAccess(BaseAccess):
def can_delete(self, obj):
return False

# TODO: revisit for survey logic, notification attachments?
class WorkflowJobTemplateAccess(BaseAccess):
'''
@@ -1554,7 +1578,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
is_delete_allowed = self.user.is_superuser or self.user in obj.admin_role
if not is_delete_allowed:
return False
- active_jobs = [dict(type="job", id=o.id)
+ active_jobs = [dict(type="workflow_job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
@@ -1604,6 +1628,7 @@ class WorkflowJobAccess(BaseAccess):
return False
return self.can_delete(obj) or self.user == obj.created_by

class AdHocCommandAccess(BaseAccess):
'''
I can only see/run ad hoc commands when:
@@ -1660,6 +1685,7 @@ class AdHocCommandAccess(BaseAccess):
return True
return obj.inventory is not None and self.user in obj.inventory.admin_role

class AdHocCommandEventAccess(BaseAccess):
'''
I can see ad hoc command event records whenever I can read both ad hoc
@@ -1688,6 +1714,7 @@ class AdHocCommandEventAccess(BaseAccess):
def can_delete(self, obj):
return False

class JobHostSummaryAccess(BaseAccess):
'''
I can see job/host summary records whenever I can read both job and host.
@@ -1713,6 +1740,7 @@ class JobHostSummaryAccess(BaseAccess):
def can_delete(self, obj):
return False

class JobEventAccess(BaseAccess):
'''
I can see job event records whenever I can read both job and host.
@@ -1746,6 +1774,7 @@ class JobEventAccess(BaseAccess):
def can_delete(self, obj):
return False

class UnifiedJobTemplateAccess(BaseAccess):
'''
I can see a unified job template whenever I can see the same project,
@@ -1788,6 +1817,7 @@ class UnifiedJobTemplateAccess(BaseAccess):

return qs.all()

class UnifiedJobAccess(BaseAccess):
'''
I can see a unified job whenever I can see the same project update,
@@ -1838,6 +1868,7 @@ class UnifiedJobAccess(BaseAccess):
#)
return qs.all()

class ScheduleAccess(BaseAccess):
'''
I can see a schedule if I can see it's related unified job, I can create them or update them if I have write access
@@ -1878,6 +1909,7 @@ class ScheduleAccess(BaseAccess):
def can_delete(self, obj):
return self.can_change(obj, {})

class NotificationTemplateAccess(BaseAccess):
'''
I can see/use a notification_template if I have permission to
@@ -1926,6 +1958,7 @@ class NotificationTemplateAccess(BaseAccess):
return False
return self.user in obj.organization.admin_role

class NotificationAccess(BaseAccess):
'''
I can see/use a notification if I have permission to
@@ -1947,6 +1980,7 @@ class NotificationAccess(BaseAccess):
def can_delete(self, obj):
return self.user.can_access(NotificationTemplate, 'delete', obj.notification_template)

class LabelAccess(BaseAccess):
'''
I can see/use a Label if I have permission to associated organization
@@ -1980,6 +2014,7 @@ class LabelAccess(BaseAccess):
def can_delete(self, obj):
return self.can_change(obj, None)

class ActivityStreamAccess(BaseAccess):
'''
I can see activity stream events only when I have permission on all objects included in the event
@@ -2058,6 +2093,7 @@ class ActivityStreamAccess(BaseAccess):
def can_delete(self, obj):
return False

class CustomInventoryScriptAccess(BaseAccess):

model = CustomInventoryScript
@@ -2085,6 +2121,7 @@ class CustomInventoryScriptAccess(BaseAccess):
def can_delete(self, obj):
return self.can_admin(obj)

class RoleAccess(BaseAccess):
'''
- I can see roles when
@@ -77,6 +77,7 @@ register(
category_slug='system',
)

def _load_default_license_from_file():
try:
license_file = os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license')
@@ -88,6 +89,7 @@ def _load_default_license_from_file():
logger.warning('Could not read license from "%s".', license_file, exc_info=True)
return {}

register(
'LICENSE',
field_class=fields.DictField,
@@ -169,7 +171,7 @@ register(
'AWX_ANSIBLE_CALLBACK_PLUGINS',
field_class=fields.StringListField,
label=_('Ansible Callback Plugins'),
- help_text=_('List of paths for extra callback plugins to be used when running jobs.'),
+ help_text=_('List of paths to search for extra callback plugins to be used when running jobs.'),
category=_('Jobs'),
category_slug='jobs',
)
@@ -178,6 +180,7 @@ register(
'DEFAULT_JOB_TIMEOUT',
field_class=fields.IntegerField,
min_value=0,
default=0,
label=_('Default Job Timeout'),
help_text=_('Maximum time to allow jobs to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual job template will override this.'),
@@ -189,6 +192,7 @@ register(
'DEFAULT_INVENTORY_UPDATE_TIMEOUT',
field_class=fields.IntegerField,
min_value=0,
default=0,
label=_('Default Inventory Update Timeout'),
help_text=_('Maximum time to allow inventory updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual inventory source will override this.'),
@@ -200,6 +204,7 @@ register(
'DEFAULT_PROJECT_UPDATE_TIMEOUT',
field_class=fields.IntegerField,
min_value=0,
default=0,
label=_('Default Project Update Timeout'),
help_text=_('Maximum time to allow project updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual project will override this.'),
@@ -30,6 +30,7 @@ def user_from_token(auth_token):
except User.DoesNotExist:
return None

@channel_session
def ws_connect(message):
token = None
@@ -19,17 +19,28 @@ from django.db.models.fields.related import (
)
from django.utils.encoding import smart_text

+ # Django-JSONField
+ from jsonfield import JSONField as upstream_JSONField

# AWX
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.utils import get_current_apps

- __all__ = ['AutoOneToOneField', 'ImplicitRoleField']
+ __all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField']

+ class JSONField(upstream_JSONField):
+
+ def from_db_value(self, value, expression, connection, context):
+ if value in {'', None} and not self.null:
+ return {}
+ return super(JSONField, self).from_db_value(value, expression, connection, context)

# Based on AutoOneToOneField from django-annoying:
# https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py

class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
"""Descriptor for access to the object from its related class."""

@@ -46,6 +57,7 @@ class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
obj.save()
return obj

class AutoOneToOneField(models.OneToOneField):
"""OneToOneField that creates related object if it doesn't exist."""
@@ -4,6 +4,7 @@

# AWX
from awx.main.models import Instance

def is_ha_environment():
"""Return True if this is an HA environment, and False
otherwise.

@@ -13,6 +13,7 @@ from django.utils.timezone import now
# AWX
from awx.main.models import ActivityStream

class Command(NoArgsCommand):
'''
Management command to purge old activity stream events.

@@ -12,6 +12,7 @@ from django.utils.timezone import now
# AWX
from awx.main.models import * # noqa

class Command(BaseCommand):
'''
Management command to cleanup expired auth tokens

@@ -18,6 +18,7 @@ from awx.conf.license import feature_enabled
OLDER_THAN = 'older_than'
GRANULARITY = 'granularity'

class CleanupFacts(object):
def __init__(self):
self.timestamp = None
@@ -27,7 +28,7 @@ class CleanupFacts(object):
# Find all factVersion < pivot && > (pivot - granularity) grouped by host sorted by time descending (because it's indexed this way)
# foreach group
# Delete all except LAST entry (or Delete all except the FIRST entry, it's an arbitrary decision)
#
#
# pivot -= granularity
# group by host
def cleanup(self, older_than_abs, granularity, module=None):
@@ -89,6 +90,7 @@ class CleanupFacts(object):
deleted_count = self.cleanup(t - older_than, granularity, module=module)
print("Deleted %d facts." % deleted_count)

class Command(BaseCommand):
help = 'Cleanup facts. For each host older than the value specified, keep one fact scan for each time window (granularity).'
option_list = BaseCommand.option_list + (
@@ -142,4 +144,3 @@ class Command(BaseCommand):
raise CommandError('--granularity invalid value "%s"' % options[GRANULARITY])

cleanup_facts.run(older_than, granularity, module=options['module'])

@@ -14,6 +14,7 @@ from django.utils.timezone import now
# AWX
from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob

class Command(NoArgsCommand):
'''
Management command to cleanup old jobs and project updates.

@@ -4,6 +4,7 @@
from awx.main.models import Instance
from django.core.management.base import NoArgsCommand

class Command(NoArgsCommand):
"""List instances from the Tower database
"""

@@ -7,6 +7,7 @@ from django.conf import settings
from optparse import make_option
from django.core.management.base import BaseCommand

class Command(BaseCommand):
"""
Internal tower command.
@@ -17,8 +17,8 @@ from awx.main.models import * # noqa

logger = logging.getLogger('awx.main.commands.run_callback_receiver')

class CallbackBrokerWorker(ConsumerMixin):

class CallbackBrokerWorker(ConsumerMixin):
def __init__(self, connection):
self.connection = connection

@@ -51,6 +51,7 @@ class CallbackBrokerWorker(ConsumerMixin):
logger.error('Callback Task Processor Raised Exception: %r', exc)
message.ack()

class Command(NoArgsCommand):
'''
Save Job Callback receiver (see awx.plugins.callbacks.job_event_callback)
@@ -18,6 +18,7 @@ from awx.main.socket_queue import Socket

logger = logging.getLogger('awx.main.commands.run_fact_cache_receiver')

class FactCacheReceiver(object):
def __init__(self):
self.timestamp = None
@@ -91,6 +92,7 @@ class FactCacheReceiver(object):
else:
self.process_fact_message(message)

class Command(NoArgsCommand):
'''
blah blah
@@ -25,6 +25,7 @@ from socketio.namespace import BaseNamespace

logger = logging.getLogger('awx.main.commands.run_socketio_service')

class SocketSession(object):
def __init__(self, session_id, token_key, socket):
self.socket = weakref.ref(socket)
@@ -45,8 +46,8 @@ class SocketSession(object):
auth_token = auth_token[0]
return bool(not auth_token.is_expired())

class SocketSessionManager(object):

class SocketSessionManager(object):
def __init__(self):
self.SESSIONS_MAX = 1000
self.socket_sessions = []
@@ -79,8 +80,8 @@ class SocketSessionManager(object):
self._prune()
return session

class SocketController(object):

class SocketController(object):
def __init__(self, SocketSessionManager):
self.server = None
self.SocketSessionManager = SocketSessionManager
@@ -124,14 +125,15 @@ class SocketController(object):
self.server = server
return server

socketController = SocketController(SocketSessionManager())

#
# Socket session is attached to self.session['socket_session']
# self.session and self.socket.session point to the same dict
#
class TowerBaseNamespace(BaseNamespace):

def get_allowed_methods(self):
return ['recv_disconnect']

@@ -178,33 +180,33 @@ class TowerBaseNamespace(BaseNamespace):
if socket_session and not socket_session.is_valid():
self.disconnect(silent=False)

class TestNamespace(TowerBaseNamespace):

class TestNamespace(TowerBaseNamespace):
def recv_connect(self):
logger.info("Received client connect for test namespace from %s" % str(self.environ['REMOTE_ADDR']))
self.emit('test', "If you see this then you attempted to connect to the test socket endpoint")
super(TestNamespace, self).recv_connect()

class JobNamespace(TowerBaseNamespace):

class JobNamespace(TowerBaseNamespace):
def recv_connect(self):
logger.info("Received client connect for job namespace from %s" % str(self.environ['REMOTE_ADDR']))
super(JobNamespace, self).recv_connect()

class JobEventNamespace(TowerBaseNamespace):

class JobEventNamespace(TowerBaseNamespace):
def recv_connect(self):
logger.info("Received client connect for job event namespace from %s" % str(self.environ['REMOTE_ADDR']))
super(JobEventNamespace, self).recv_connect()

class AdHocCommandEventNamespace(TowerBaseNamespace):

class AdHocCommandEventNamespace(TowerBaseNamespace):
def recv_connect(self):
logger.info("Received client connect for ad hoc command event namespace from %s" % str(self.environ['REMOTE_ADDR']))
super(AdHocCommandEventNamespace, self).recv_connect()

class ScheduleNamespace(TowerBaseNamespace):

class ScheduleNamespace(TowerBaseNamespace):
def get_allowed_methods(self):
parent_allowed = super(ScheduleNamespace, self).get_allowed_methods()
return parent_allowed + ["schedule_changed"]
@@ -213,16 +215,16 @@ class ScheduleNamespace(TowerBaseNamespace):
logger.info("Received client connect for schedule namespace from %s" % str(self.environ['REMOTE_ADDR']))
super(ScheduleNamespace, self).recv_connect()

# Catch-all namespace.
# Deliver 'global' events over this namespace
class ControlNamespace(TowerBaseNamespace):

def recv_connect(self):
logger.warn("Received client connect for control namespace from %s" % str(self.environ['REMOTE_ADDR']))
super(ControlNamespace, self).recv_connect()

class TowerSocket(object):

class TowerSocket(object):
def __call__(self, environ, start_response):
path = environ['PATH_INFO'].strip('/') or 'index.html'
if path.startswith('socket.io'):
@@ -237,6 +239,7 @@ class TowerSocket(object):
start_response('404 Not Found', [])
return ['Tower version %s' % awx.__version__]

def notification_handler(server):
with Socket('websocket', 'r') as websocket:
for message in websocket.listen():
@@ -253,6 +256,7 @@ def notification_handler(server):
else:
socketController.broadcast_packet(packet)

class Command(NoArgsCommand):
'''
SocketIO event emitter Tower service
@@ -9,6 +9,7 @@ from django.core.management.base import BaseCommand
# AWX
from awx.main.models import * # noqa

class Command(BaseCommand):
'''
Emits some simple statistics suitable for external monitoring

@@ -9,10 +9,11 @@ from django.core.management.base import BaseCommand
from django.core.management.base import CommandError
from django.contrib.auth.models import User

class UpdatePassword(object):
def update_password(self, username, password):
changed = False
u = User.objects.get(username=username)
u = User.objects.get(username=username)
if not u:
raise RuntimeError("User not found")
check = u.check_password(password)
@@ -22,6 +23,7 @@ class UpdatePassword(object):
changed = True
return changed

class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--username', dest='username', action='store', type='string', default=None,
@@ -23,7 +23,7 @@ from awx.main.utils import timedelta_total_seconds
TEST_FACT_ANSIBLE = {
"ansible_swapfree_mb" : 4092,
"ansible_default_ipv6" : {

},
"ansible_distribution_release" : "trusty",
"ansible_system_vendor" : "innotek GmbH",
@@ -199,6 +199,7 @@ EXPERIMENT_DEFAULT = {
]
}

class Experiment(object):
def __init__(self, exp, fact_fixtures, raw_db, mongoengine_db):
self.db = raw_db
@@ -291,6 +292,7 @@ class Experiment(object):
print("Finished at: %s" % time_end)
print("Total runtime: %s seconds" % timedelta_total_seconds(time_end - time_start))

class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--drop', dest='drop', action='store_true', default=False,
@@ -334,4 +336,3 @@ class Command(BaseCommand):

self.experiment = Experiment(exp, FACT_FIXTURES, db, enginedb)
self.experiment.generate_workload()
@@ -20,6 +20,7 @@ class HostManager(models.Manager):
except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
return len(set(self.values_list('name', flat=True)))

class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.

@@ -68,6 +68,7 @@ class ActivityStreamMiddleware(threading.local):
if instance.id not in self.instance_ids:
self.instance_ids.append(instance.id)

class AuthTokenTimeoutMiddleware(object):
"""Presume that when the user includes the auth header, they go through the
authentication mechanism. Further, that mechanism is presumed to extend
awx/main/migrations/0050_v310_JSONField_changes.py (new file, 90 lines)

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
import awx.main.fields

class Migration(migrations.Migration):

dependencies = [
('main', '0049_v310_workflow_surveys'),
]

operations = [
migrations.AlterField(
model_name='adhoccommandevent',
name='event_data',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='job',
name='artifacts',
field=awx.main.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AlterField(
model_name='job',
name='survey_passwords',
field=awx.main.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AlterField(
model_name='jobevent',
name='event_data',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='jobtemplate',
name='survey_spec',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='notification',
name='body',
field=awx.main.fields.JSONField(default=dict, blank=True),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_configuration',
field=awx.main.fields.JSONField(default=dict),
),
migrations.AlterField(
model_name='project',
name='playbook_files',
field=awx.main.fields.JSONField(default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True),
),
migrations.AlterField(
model_name='schedule',
name='extra_data',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='unifiedjob',
name='job_env',
field=awx.main.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AlterField(
model_name='workflowjob',
name='survey_passwords',
field=awx.main.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AlterField(
model_name='workflowjobnode',
name='ancestor_artifacts',
field=awx.main.fields.JSONField(default={}, editable=False, blank=True),
),
migrations.AlterField(
model_name='workflowjobnode',
name='char_prompts',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='workflowjobtemplate',
name='survey_spec',
field=awx.main.fields.JSONField(default={}, blank=True),
),
migrations.AlterField(
model_name='workflowjobtemplatenode',
name='char_prompts',
field=awx.main.fields.JSONField(default={}, blank=True),
),
]
@@ -28,14 +28,19 @@ from awx.main.models.channels import * # noqa
# the dumpdata command; see https://github.com/alex/django-taggit/issues/155).
from django.core.serializers.python import Serializer as _PythonSerializer
_original_handle_m2m_field = _PythonSerializer.handle_m2m_field

def _new_handle_m2m_field(self, obj, field):
try:
field.rel.through._meta
except AttributeError:
return
return _original_handle_m2m_field(self, obj, field)

_PythonSerializer.handle_m2m_field = _new_handle_m2m_field

# Add custom methods to User model for permissions checks.
from django.contrib.auth.models import User # noqa
from awx.main.access import * # noqa
@@ -46,26 +51,32 @@ User.add_to_class('can_access', check_user_access)
User.add_to_class('accessible_objects', user_accessible_objects)
User.add_to_class('admin_role', user_admin_role)

@property
def user_get_organizations(user):
return Organization.objects.filter(member_role__members=user)

@property
def user_get_admin_of_organizations(user):
return Organization.objects.filter(admin_role__members=user)

@property
def user_get_auditor_of_organizations(user):
return Organization.objects.filter(auditor_role__members=user)

User.add_to_class('organizations', user_get_organizations)
User.add_to_class('admin_of_organizations', user_get_admin_of_organizations)
User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)

@property
def user_is_system_auditor(user):
return Role.singleton('system_auditor').members.filter(id=user.id).exists()

@user_is_system_auditor.setter
def user_is_system_auditor(user, tf):
if user.id:
@@ -74,6 +85,7 @@ def user_is_system_auditor(user, tf):
else:
Role.singleton('system_auditor').members.remove(user)

User.add_to_class('is_system_auditor', user_is_system_auditor)

# Import signal handlers only after models have been defined.
@ -17,13 +17,11 @@ from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse

# Django-JSONField
from jsonfield import JSONField

# AWX
from awx.main.models.base import * # noqa
from awx.main.models.unified_jobs import * # noqa
from awx.main.models.notifications import JobNotificationMixin
from awx.main.fields import JSONField

logger = logging.getLogger('awx.main.models.ad_hoc_commands')

@ -211,6 +209,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
def get_notification_friendly_name(self):
return "AdHoc Command"


class AdHocCommandEvent(CreatedModifiedModel):
'''
An event/message logged from the ad hoc event callback for each host.

@ -320,6 +320,7 @@ class CommonModelNameNotUnique(PrimordialModel):
unique=False,
)


class NotificationFieldsModel(BaseModel):

class Meta:

@ -1,5 +1,6 @@
from django.db import models


class ChannelGroup(models.Model):
group = models.CharField(max_length=200, unique=True)
channels = models.TextField()

@ -8,6 +8,7 @@ from jsonbfield.fields import JSONField

__all__ = ('Fact', )


class Fact(models.Model):
"""A model representing a fact returned from Ansible.
Facts are stored as JSON dictionaries.
@ -20,8 +21,8 @@ class Fact(models.Model):
help_text=_('Host for the facts that the fact scan captured.'),
)
timestamp = models.DateTimeField(
default=None,
editable=False,
default=None,
editable=False,
help_text=_('Date and time of the corresponding fact scan gathering time.')
)
module = models.CharField(max_length=128)

@ -39,9 +39,11 @@ class Instance(models.Model):
# NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
return "tower"


class TowerScheduleState(SingletonModel):
schedule_last_run = models.DateTimeField(auto_now_add=True)


class JobOrigin(models.Model):
"""A model representing the relationship between a unified job and
the instance that was responsible for starting that job.

@ -21,9 +21,6 @@ from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse

# Django-JSONField
from jsonfield import JSONField

# AWX
from awx.main.constants import CLOUD_PROVIDERS
from awx.main.models.base import * # noqa
@ -40,6 +37,7 @@ from awx.main.redact import PlainTextCleaner
from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin, SurveyJobTemplateMixin, SurveyJobMixin
from awx.main.models.base import PERM_INVENTORY_SCAN
from awx.main.fields import JSONField

from awx.main.consumers import emit_channel_notification

@ -190,6 +188,7 @@ class JobOptions(BaseModel):
else:
return []


class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin):
'''
A job template is a reusable job definition for applying a project (with
@ -394,6 +393,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
any_notification_templates = set(any_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_any=self.project.organization)))
return dict(error=list(error_notification_templates), success=list(success_notification_templates), any=list(any_notification_templates))


class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
'''
A job applies a project (with playbook) to an inventory source with a given
@ -648,6 +648,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
def get_notification_friendly_name(self):
return "Job"


class JobHostSummary(CreatedModifiedModel):
'''
Per-host statistics for each job.

@ -12,6 +12,7 @@ from awx.main.models.unified_jobs import UnifiedJobTemplate, UnifiedJob

__all__ = ('Label', )


class Label(CommonModelNameNotUnique):
'''
Generic Tag. Designed for tagging Job Templates, but expandable to other models.
@ -37,7 +38,7 @@ class Label(CommonModelNameNotUnique):
return \
Label.objects.filter(
organization=None,
jobtemplate_labels__isnull=True
unifiedjobtemplate_labels__isnull=True
)

def is_detached(self):
@ -55,4 +56,3 @@ class Label(CommonModelNameNotUnique):
return True
else:
return False


@ -5,17 +5,18 @@ import json
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User # noqa
from jsonfield import JSONField

# AWX
from awx.main.models.rbac import (
Role, RoleAncestorEntry, get_roles_on_resource
)
from awx.main.utils import parse_yaml_or_json
from awx.main.fields import JSONField


__all__ = ['ResourceMixin', 'SurveyJobTemplateMixin', 'SurveyJobMixin']


class ResourceMixin(models.Model):

class Meta:
@ -216,4 +217,3 @@ class SurveyJobMixin(models.Model):
return json.dumps(extra_vars)
else:
return self.extra_vars

@ -18,14 +18,14 @@ from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField

# Django-JSONField
from jsonfield import JSONField

logger = logging.getLogger('awx.main.models.notifications')

__all__ = ['NotificationTemplate', 'Notification']


class NotificationTemplate(CommonModel):

NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
@ -117,6 +117,7 @@ class NotificationTemplate(CommonModel):
notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
return backend_obj.send_messages([notification_obj])


class Notification(CreatedModifiedModel):
'''
A notification event emitted when a NotificationTemplate is run
@ -172,6 +173,7 @@ class Notification(CreatedModifiedModel):
def get_absolute_url(self):
return reverse('api:notification_detail', args=(self.pk,))


class JobNotificationMixin(object):
def get_notification_templates(self):
raise RuntimeError("Define me")
@ -194,4 +196,3 @@ class JobNotificationMixin(object):

def build_notification_failed_message(self):
return self._build_notification_message('failed')

@ -72,7 +72,6 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin):
return self.name



class Team(CommonModelNameNotUnique, ResourceMixin):
'''
A team is a group of users that work on common projects.
@ -191,6 +190,7 @@ class Profile(CreatedModifiedModel):
default='',
)


"""
Since expiration and session expiration is event driven a token could be
invalidated for both reasons. Further, we only support a single reason for a
@ -199,6 +199,8 @@ session token being invalid. For this case, mark the token as expired.
Note: Again, because the value of reason is event based. The reason may not be
set (i.e. may equal '') even though a session is expired or a limit is reached.
"""


class AuthToken(BaseModel):
'''
Custom authentication tokens per user with expiration and request-specific

@ -7,9 +7,6 @@ import os
import re
import urlparse

# JSONField
from jsonfield import JSONField

# Django
from django.conf import settings
from django.db import models
@ -34,6 +31,7 @@ from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.fields import JSONField

__all__ = ['Project', 'ProjectUpdate']

@ -393,6 +391,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
def get_absolute_url(self):
return reverse('api:project_detail', args=(self.pk,))


class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin):
'''
Internal job for tracking project updates from SCM.

@ -79,6 +79,7 @@ def check_singleton(func):
return func(*args, **kwargs)
return wrapper


@contextlib.contextmanager
def batch_role_ancestor_rebuilding(allow_nesting=False):
'''
@ -426,6 +427,7 @@ class Role(models.Model):
def is_ancestor_of(self, role):
return role.ancestors.filter(id=self.id).exists()


class RoleAncestorEntry(models.Model):

class Meta:

@ -11,14 +11,12 @@ from django.db import models
from django.db.models.query import QuerySet
from django.utils.timezone import now, make_aware, get_default_timezone

# Django-JSONField
from jsonfield import JSONField

# AWX
from awx.main.models.base import * # noqa
from awx.main.utils import ignore_inventory_computed_fields
from awx.main.consumers import emit_channel_notification
from django.core.urlresolvers import reverse
from awx.main.fields import JSONField

logger = logging.getLogger('awx.main.models.schedule')


@ -20,9 +20,6 @@ from django.utils.timezone import now
from django.utils.encoding import smart_text
from django.apps import apps

# Django-JSONField
from jsonfield import JSONField

# Django-Polymorphic
from polymorphic import PolymorphicModel

@ -35,6 +32,7 @@ from awx.main.models.schedules import Schedule
from awx.main.utils import decrypt_field, _inventory_updates
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.consumers import emit_channel_notification
from awx.main.fields import JSONField

__all__ = ['UnifiedJobTemplate', 'UnifiedJob']

@ -358,6 +356,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
dest_field.add(*list(src_field_value.all().values_list('id', flat=True)))
return unified_job


class UnifiedJobTypeStringMixin(object):
@classmethod
def _underscore_to_camel(cls, word):
@ -381,6 +380,7 @@ class UnifiedJobTypeStringMixin(object):
return None
return model.objects.get(id=job_id)


class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique, UnifiedJobTypeStringMixin):
'''
Concrete base class for unified job run by the task engine.

@ -9,8 +9,6 @@ from django.db import models
from django.core.urlresolvers import reverse
#from django import settings as tower_settings

from jsonfield import JSONField

# AWX
from awx.main.models import UnifiedJobTemplate, UnifiedJob
from awx.main.models.notifications import (
@ -26,6 +24,7 @@ from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin, SurveyJobTemplateMixin, SurveyJobMixin
from awx.main.redact import REPLACE_STR
from awx.main.utils import parse_yaml_or_json
from awx.main.fields import JSONField

from copy import copy

@ -33,6 +32,7 @@ __all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'Workflow

CHAR_PROMPTS_LIST = ['job_type', 'job_tags', 'skip_tags', 'limit']


class WorkflowNodeBase(CreatedModifiedModel):
class Meta:
abstract = True
@ -159,6 +159,7 @@ class WorkflowNodeBase(CreatedModifiedModel):
return ['workflow_job', 'unified_job_template',
'inventory', 'credential', 'char_prompts']


class WorkflowJobTemplateNode(WorkflowNodeBase):
workflow_job_template = models.ForeignKey(
'WorkflowJobTemplate',
@ -184,6 +185,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
create_kwargs[field_name] = getattr(self, field_name)
return WorkflowJobNode.objects.create(**create_kwargs)


class WorkflowJobNode(WorkflowNodeBase):
job = models.OneToOneField(
'UnifiedJob',
@ -206,7 +208,7 @@ class WorkflowJobNode(WorkflowNodeBase):
default={},
editable=False,
)


def get_absolute_url(self):
return reverse('api:workflow_job_node_detail', args=(self.pk,))

@ -260,6 +262,7 @@ class WorkflowJobNode(WorkflowNodeBase):
data['launch_type'] = 'workflow'
return data


class WorkflowJobOptions(BaseModel):
class Meta:
abstract = True
@ -271,8 +274,8 @@ class WorkflowJobOptions(BaseModel):

extra_vars_dict = VarsDictProperty('extra_vars', True)

class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):

class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):
class Meta:
app_label = 'main'

@ -374,6 +377,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
warning_data[node.pk] = node_prompts_warnings
return warning_data


class WorkflowJobInheritNodesMixin(object):
def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type):
old_related_nodes = self._get_all_by_type(old_node, node_type)
@ -415,10 +419,9 @@ class WorkflowJobInheritNodesMixin(object):
new_node = new_nodes[index]
for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
self._inherit_relationship(old_node, new_node, node_ids_map, node_type)



class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, WorkflowJobInheritNodesMixin):

class Meta:
app_label = 'main'
ordering = ('id',)

@ -3,6 +3,7 @@ import urlparse

REPLACE_STR = '$encrypted$'


class UriCleaner(object):
REPLACE_STR = REPLACE_STR
# https://regex101.com/r/sV2dO2/2
@ -51,6 +52,7 @@ class UriCleaner(object):

return redactedtext


class PlainTextCleaner(object):
REPLACE_STR = REPLACE_STR


@ -39,4 +39,5 @@ class ActivityStreamRegistrar(object):
m2m_attr = getattr(model, m2mfield.name)
m2m_changed.disconnect(dispatch_uid=str(self.__class__) + str(m2m_attr.through) + "_associate")


activity_stream_registrar = ActivityStreamRegistrar()

@ -35,6 +35,7 @@ from celery.task.control import inspect

logger = logging.getLogger('awx.main.scheduler')


class TaskManager():
def __init__(self):
self.graph = DependencyGraph()

@ -8,6 +8,7 @@ from awx.main.models import (
SystemJob,
)


class SimpleDAG(object):
''' A simple implementation of a directed acyclic graph '''


@ -2,6 +2,7 @@
# AWX
from awx.main.scheduler.dag_simple import SimpleDAG


class WorkflowDAG(SimpleDAG):
def __init__(self, workflow_job=None):
super(WorkflowDAG, self).__init__()

@ -9,6 +9,8 @@ from awx.main.scheduler.partial import (
AdHocCommandDict,
WorkflowJobDict,
)


class DependencyGraph(object):
PROJECT_UPDATES = 'project_updates'
INVENTORY_UPDATES = 'inventory_updates'

@ -14,6 +14,7 @@ from awx.main.models import (
WorkflowJob,
)


class PartialModelDict(object):
FIELDS = ()
model = None
@ -60,6 +61,7 @@ class PartialModelDict(object):
def task_impact(self):
raise RuntimeError("Inherit and implement me")


class JobDict(PartialModelDict):
FIELDS = (
'id', 'status', 'job_template_id', 'inventory_id', 'project_id',
@ -83,6 +85,7 @@ class JobDict(PartialModelDict):
start_args = start_args or {}
return start_args.get('inventory_sources_already_updated', [])


class ProjectUpdateDict(PartialModelDict):
FIELDS = (
'id', 'status', 'project_id', 'created', 'celery_task_id',
@ -105,6 +108,7 @@ class ProjectUpdateDict(PartialModelDict):
}
return [cls(o) for o in cls.model.objects.filter(**kv).values(*cls.get_db_values())]


class ProjectUpdateLatestDict(ProjectUpdateDict):
FIELDS = (
'id', 'status', 'project_id', 'created', 'finished',
@ -125,6 +129,7 @@ class ProjectUpdateLatestDict(ProjectUpdateDict):
results.append(cls(cls.model.objects.filter(id=qs[0].id).values(*cls.get_db_values())[0]))
return results


class InventoryUpdateDict(PartialModelDict):
#'inventory_source__update_on_launch',
#'inventory_source__update_cache_timeout',
@ -139,6 +144,7 @@ class InventoryUpdateDict(PartialModelDict):
def task_impact(self):
return 20


class InventoryUpdateLatestDict(InventoryUpdateDict):
#'inventory_source__update_on_launch',
#'inventory_source__update_cache_timeout',
@ -166,6 +172,7 @@ class InventoryUpdateLatestDict(InventoryUpdateDict):
results.append(cls(cls.model.objects.filter(id=qs[0].id).values(*cls.get_db_values())[0]))
return results


class InventorySourceDict(PartialModelDict):
FIELDS = (
'id',
@ -187,6 +194,7 @@ class InventorySourceDict(PartialModelDict):
}
return [cls(o) for o in cls.model.objects.filter(**kv).values(*cls.get_db_values())]


class SystemJobDict(PartialModelDict):
FIELDS = (
'id', 'created', 'status',
@ -206,6 +214,7 @@ class SystemJobDict(PartialModelDict):
}
return [cls(o) for o in cls.model.objects.filter(**kv).values(*cls.get_db_values())]


class AdHocCommandDict(PartialModelDict):
FIELDS = (
'id', 'created', 'status', 'inventory_id',
@ -218,6 +227,7 @@ class AdHocCommandDict(PartialModelDict):
def task_impact(self):
return 20


class WorkflowJobDict(PartialModelDict):
FIELDS = (
'id', 'created', 'status', 'workflow_job_template_id',

@ -21,18 +21,22 @@ logger = logging.getLogger('awx.main.scheduler')
# Would we need the request loop then? I think so. Even if we get the in-memory
# updated model, the call to schedule() may get stale data.


@task
def run_job_launch(job_id):
TaskManager().schedule()


@task
def run_job_complete(job_id):
TaskManager().schedule()


@task
def run_task_manager():
TaskManager().schedule()


@task
def run_fail_inconsistent_running_jobs():
with transaction.atomic():

@ -32,6 +32,7 @@ logger = logging.getLogger('awx.main.signals')
# Update has_active_failures for inventory/groups when a Host/Group is deleted,
# when a Host-Group or Group-Group relationship is updated, or when a Job is deleted


def emit_job_event_detail(sender, **kwargs):
instance = kwargs['instance']
created = kwargs['created']
@ -48,6 +49,7 @@ def emit_job_event_detail(sender, **kwargs):
event_serialized['group_name'] = "workflow_events"
emit_channel_notification('workflow_events-' + str(instance.job.workflow_job_id), event_serialized)


def emit_ad_hoc_command_event_detail(sender, **kwargs):
instance = kwargs['instance']
created = kwargs['created']
@ -60,6 +62,7 @@ def emit_ad_hoc_command_event_detail(sender, **kwargs):
event_serialized["group_name"] = "ad_hoc_command_events"
emit_channel_notification('ad_hoc_command_events-' + str(instance.ad_hoc_command_id), event_serialized)


def emit_update_inventory_computed_fields(sender, **kwargs):
logger.debug("In update inventory computed fields")
if getattr(_inventory_updates, 'is_updating', False):
@ -94,6 +97,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
else:
update_inventory_computed_fields.delay(inventory.id, True)


def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
if getattr(_inventory_updates, 'is_updating', False):
return
@ -114,6 +118,7 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
if inventory is not None:
update_inventory_computed_fields.delay(inventory.id, True)


def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwargs):
'When a role parent is added or removed, update our role hierarchy list'
if action == 'post_add':
@ -128,6 +133,7 @@ def rebuild_role_ancestor_list(reverse, model, instance, pk_set, action, **kwarg
else:
model.rebuild_role_ancestor_list([], [instance.id])


def sync_superuser_status_to_rbac(instance, **kwargs):
'When the is_superuser flag is changed on a user, reflect that in the membership of the System Admnistrator role'
if instance.is_superuser:
@ -135,6 +141,7 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
else:
Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).members.remove(instance)


def create_user_role(instance, **kwargs):
try:
Role.objects.get(
@ -149,6 +156,7 @@ def create_user_role(instance, **kwargs):
)
role.members.add(instance)


def org_admin_edit_members(instance, action, model, reverse, pk_set, **kwargs):
content_type = ContentType.objects.get_for_model(Organization)

@ -164,6 +172,7 @@ def org_admin_edit_members(instance, action, model, reverse, pk_set, **kwargs):
if action == 'pre_remove':
instance.content_object.admin_role.children.remove(user.admin_role)


def rbac_activity_stream(instance, sender, **kwargs):
user_type = ContentType.objects.get_for_model(User)
# Only if we are associating/disassociating
@ -198,11 +207,13 @@ def rbac_activity_stream(instance, sender, **kwargs):

activity_stream_associate(sender, instance, role=role, **kwargs)


def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
for l in instance.labels.all():
if l.is_candidate_for_detach():
l.delete()


post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
@ -228,6 +239,7 @@ pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJobT

# Migrate hosts, groups to parent group(s) whenever a group is deleted


@receiver(pre_delete, sender=Group)
def save_related_pks_before_group_delete(sender, **kwargs):
if getattr(_inventory_updates, 'is_removing', False):
@ -238,6 +250,7 @@ def save_related_pks_before_group_delete(sender, **kwargs):
instance._saved_hosts_pks = set(instance.hosts.values_list('pk', flat=True))
instance._saved_children_pks = set(instance.children.values_list('pk', flat=True))


@receiver(post_delete, sender=Group)
def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):
if getattr(_inventory_updates, 'is_removing', False):
@ -271,6 +284,7 @@ def migrate_children_from_deleted_group_to_parent_groups(sender, **kwargs):

# Update host pointers to last_job and last_job_host_summary when a job is deleted


def _update_host_last_jhs(host):
jhs_qs = JobHostSummary.objects.filter(host__pk=host.pk)
try:
@ -288,12 +302,14 @@ def _update_host_last_jhs(host):
if update_fields:
host.save(update_fields=update_fields)


@receiver(pre_delete, sender=Job)
def save_host_pks_before_job_delete(sender, **kwargs):
instance = kwargs['instance']
hosts_qs = Host.objects.filter( last_job__pk=instance.pk)
instance._saved_hosts_pks = set(hosts_qs.values_list('pk', flat=True))


@receiver(post_delete, sender=Job)
def update_host_last_job_after_job_deleted(sender, **kwargs):
instance = kwargs['instance']
@ -303,6 +319,7 @@ def update_host_last_job_after_job_deleted(sender, **kwargs):

# Set via ActivityStreamRegistrar to record activity stream events


class ActivityStreamEnabled(threading.local):
def __init__(self):
self.enabled = True
@ -310,8 +327,10 @@ class ActivityStreamEnabled(threading.local):
def __nonzero__(self):
return bool(self.enabled and getattr(settings, 'ACTIVITY_STREAM_ENABLED', True))


activity_stream_enabled = ActivityStreamEnabled()


@contextlib.contextmanager
def disable_activity_stream():
'''
@ -342,6 +361,7 @@ model_serializer_mapping = {
Notification: NotificationSerializer,
}


def activity_stream_create(sender, instance, created, **kwargs):
if created and activity_stream_enabled:
# Skip recording any inventory source directly associated with a group.
@ -364,6 +384,7 @@ def activity_stream_create(sender, instance, created, **kwargs):
if instance._meta.model_name != 'setting': # Is not conf.Setting instance
getattr(activity_entry, object1).add(instance)


def activity_stream_update(sender, instance, **kwargs):
if instance.id is None:
return
@ -387,6 +408,7 @@ def activity_stream_update(sender, instance, **kwargs):
if instance._meta.model_name != 'setting': # Is not conf.Setting instance
getattr(activity_entry, object1).add(instance)


def activity_stream_delete(sender, instance, **kwargs):
if not activity_stream_enabled:
return
@ -401,6 +423,7 @@ def activity_stream_delete(sender, instance, **kwargs):
object1=object1)
activity_entry.save()


def activity_stream_associate(sender, instance, **kwargs):
if not activity_stream_enabled:
return

@ -69,6 +69,7 @@ Try upgrading OpenSSH or providing your private key in an different format. \

logger = logging.getLogger('awx.main.tasks')


@celeryd_init.connect
def celery_startup(conf=None, **kwargs):
# Re-init all schedules
@ -81,6 +82,7 @@ def celery_startup(conf=None, **kwargs):
except Exception as e:
logger.error("Failed to rebuild schedule {}: {}".format(sch, e))


@task(queue='default')
def send_notifications(notification_list, job_id=None):
if not isinstance(notification_list, list):
@ -102,6 +104,7 @@ def send_notifications(notification_list, job_id=None):
if job_id is not None:
job_actual.notifications.add(notification)


@task(bind=True, queue='default')
def run_administrative_checks(self):
if not settings.TOWER_ADMIN_ALERTS:
@ -122,10 +125,12 @@ def run_administrative_checks(self):
tower_admin_emails,
fail_silently=True)


@task(bind=True, queue='default')
def cleanup_authtokens(self):
AuthToken.objects.filter(expires__lt=now()).delete()


@task(bind=True)
def cluster_node_heartbeat(self):
inst = Instance.objects.filter(hostname=settings.CLUSTER_HOST_ID)
@ -136,6 +141,7 @@ def cluster_node_heartbeat(self):
return
raise RuntimeError("Cluster Host Not Found: {}".format(settings.CLUSTER_HOST_ID))


@task(bind=True, queue='default')
def tower_periodic_scheduler(self):
run_now = now()
@ -165,6 +171,7 @@ def tower_periodic_scheduler(self):
emit_channel_notification('schedules-changed', dict(id=schedule.id, group_name="schedules"))
state.save()


def _send_notification_templates(instance, status_str):
if status_str not in ['succeeded', 'failed']:
raise ValueError(_("status_str must be either succeeded or failed"))
@ -192,6 +199,7 @@ def handle_work_success(self, result, task_actual):
from awx.main.scheduler.tasks import run_job_complete
run_job_complete.delay(instance.id)


@task(bind=True, queue='default')
def handle_work_error(self, task_id, subtasks=None):
print('Executing error task id %s, subtasks: %s' %
@ -231,6 +239,7 @@ def handle_work_error(self, task_id, subtasks=None):
run_job_complete.delay(first_instance.id)
pass


@task(queue='default')
def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
'''
@ -1222,6 +1231,7 @@ class RunProjectUpdate(BaseTask):
else:
logger.error("Could not find scm revision in check")


class RunInventoryUpdate(BaseTask):

name = 'awx.main.tasks.run_inventory_update'
@ -1556,6 +1566,7 @@ class RunInventoryUpdate(BaseTask):
def get_idle_timeout(self):
return getattr(settings, 'INVENTORY_UPDATE_IDLE_TIMEOUT', None)


class RunAdHocCommand(BaseTask):
'''
Celery task to run an ad hoc command using ansible.

@ -1,6 +1,8 @@
# Helps with test cases.
# Save all components of a uri (i.e. scheme, username, password, etc.) so that
# when we construct a uri string and decompose it, we can verify the decomposition


class URI(object):
DEFAULTS = {
'scheme' : 'http',

@ -41,6 +41,7 @@ TEST_PLAYBOOK = '''- hosts: mygroup
command: test 1 = 1
'''


class QueueTestMixin(object):
def start_queue(self):
self.start_rabbit()
@ -83,13 +84,17 @@ class QueueStartStopTestMixin(QueueTestMixin):
super(QueueStartStopTestMixin, self).tearDown()
self.terminate_queue()


class MockCommonlySlowTestMixin(object):
def __init__(self, *args, **kwargs):
from awx.api import generics
mock.patch.object(generics, 'get_view_description', return_value=None).start()
super(MockCommonlySlowTestMixin, self).__init__(*args, **kwargs)


ansible_version = get_ansible_version()


class BaseTestMixin(MockCommonlySlowTestMixin):
'''
Mixin with shared code for use by all test cases.
@ -675,17 +680,20 @@ class BaseTestMixin(MockCommonlySlowTestMixin):
u'expected no traceback, got:\n%s' %
job.result_traceback)


class BaseTest(BaseTestMixin, django.test.TestCase):
'''
Base class for unit tests.
'''


class BaseTransactionTest(BaseTestMixin, django.test.TransactionTestCase):
'''
Base class for tests requiring transactions (or where the test database
needs to be accessed by subprocesses).
'''


@override_settings(CELERY_ALWAYS_EAGER=True,
CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
ANSIBLE_TRANSPORT='local')
@ -697,6 +705,7 @@ class BaseLiveServerTest(BaseTestMixin, django.test.LiveServerTestCase):
super(BaseLiveServerTest, self).setUp()
settings.INTERNAL_API_URL = self.live_server_url


@override_settings(CELERY_ALWAYS_EAGER=True,
CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
ANSIBLE_TRANSPORT='local',

@ -11,22 +11,27 @@ from awx.main.tests.factories import (
create_workflow_job_template,
)


@pytest.fixture
def job_template_factory():
return create_job_template


@pytest.fixture
def organization_factory():
return create_organization


@pytest.fixture
def notification_template_factory():
return create_notification_template


@pytest.fixture
def survey_spec_factory():
return create_survey_spec


@pytest.fixture
def job_template_with_survey_passwords_factory(job_template_factory):
def rf(persisted):
@ -38,22 +43,27 @@ def job_template_with_survey_passwords_factory(job_template_factory):
return objects.job_template
return rf


@pytest.fixture
def job_with_secret_key_unit(job_with_secret_key_factory):
return job_with_secret_key_factory(persisted=False)


@pytest.fixture
def workflow_job_template_factory():
return create_workflow_job_template


@pytest.fixture
def get_ssh_version(mocker):
return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8')


@pytest.fixture
def job_template_with_survey_passwords_unit(job_template_with_survey_passwords_factory):
return job_template_with_survey_passwords_factory(persisted=False)


@pytest.fixture
def enterprise_license():
from awx.main.task_engine import TaskEnhancer

@ -25,6 +25,7 @@ from awx.main.models import (
# persisted=False
#


def mk_instance(persisted=True):
if not persisted:
raise RuntimeError('creating an Instance requires persisted=True')
@ -158,6 +159,7 @@ def mk_job_template(name, job_type='run',
jt.save()
return jt


def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
persisted=True):
job = WorkflowJob(status=status, extra_vars=json.dumps(extra_vars))
@ -168,6 +170,7 @@ def mk_workflow_job(status='new', workflow_job_template=None, extra_vars={},
job.save()
return job


def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, persisted=True):
if extra_vars:
extra_vars = json.dumps(extra_vars)
@ -182,6 +185,7 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None,
wfjt.save()
return wfjt


def mk_workflow_job_template_node(workflow_job_template=None,
unified_job_template=None,
success_nodes=None,
@ -197,6 +201,7 @@ def mk_workflow_job_template_node(workflow_job_template=None,
workflow_node.save()
return workflow_node


def mk_workflow_job_node(unified_job_template=None,
success_nodes=None,
failure_nodes=None,

@ -2,6 +2,7 @@ from collections import namedtuple

from .exc import NotUnique


def generate_objects(artifacts, kwargs):
'''generate_objects takes a list of artifacts that are supported by
a create function and compares it to the kwargs passed in to the create

@ -87,6 +87,7 @@ def apply_roles(roles, objects, persisted):
else:
raise RuntimeError('unable to add non-user {} for members list of {}'.format(member_str, obj_str))


def generate_users(organization, teams, superuser, persisted, **kwargs):
'''generate_users evaluates a mixed list of User objects and strings.
If a string is encountered a user with that username is created and added to the lookup dict.
@ -112,6 +113,7 @@ def generate_users(organization, teams, superuser, persisted, **kwargs):
users[p1] = mk_user(p1, organization=organization, team=None, is_superuser=superuser, persisted=persisted)
return users


def generate_teams(organization, persisted, **kwargs):
'''generate_teams evalutes a mixed list of Team objects and strings.
If a string is encountered a team with that string name is created and added to the lookup dict.
@ -126,6 +128,7 @@ def generate_teams(organization, persisted, **kwargs):
teams[t] = mk_team(t, organization=organization, persisted=persisted)
return teams


def create_survey_spec(variables=None, default_type='integer', required=True):
'''
Returns a valid survey spec for a job template, based on the input
@ -176,6 +179,7 @@ def create_survey_spec(variables=None, default_type='integer', required=True):
# or encapsulated by specific factory fixtures in a conftest
#


def create_job_template(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["job_template", "jobs",
"organization",
@ -260,6 +264,7 @@ def create_job_template(name, roles=None, persisted=True, **kwargs):
organization=org,
survey=spec,)


def create_organization(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["organization",
"teams", "users",
@ -319,6 +324,7 @@ def create_organization(name, roles=None, persisted=True, **kwargs):
notification_templates=_Mapped(notification_templates),
inventories=_Mapped(inventories))


def create_notification_template(name, roles=None, persisted=True, **kwargs):
Objects = generate_objects(["notification_template",
"organization",
@ -346,6 +352,7 @@ def create_notification_template(name, roles=None, persisted=True, **kwargs):
superusers=_Mapped(superusers),
teams=teams)


def generate_workflow_job_template_nodes(workflow_job_template,
persisted,
**kwargs):
@ -372,6 +379,7 @@ def generate_workflow_job_template_nodes(workflow_job_template,
for related_index in workflow_job_template_nodes[i][node_type]:
getattr(new_node, node_type).add(new_nodes[related_index])


# TODO: Implement survey and jobs
def create_workflow_job_template(name, organization=None, persisted=True, **kwargs):
Objects = generate_objects(["workflow_job_template",

@ -7,13 +7,16 @@ from awx.main.access import ActivityStreamAccess

from django.core.urlresolvers import reverse


def mock_feature_enabled(feature):
return True


@pytest.fixture
def activity_stream_entry(organization, org_admin):
return ActivityStream.objects.filter(organization__pk=organization.pk, user=org_admin, operation='associate').first()


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_get_activity_stream_list(monkeypatch, organization, get, user, settings):
@ -23,6 +26,7 @@ def test_get_activity_stream_list(monkeypatch, organization, get, user, settings

assert response.status_code == 200


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_basic_fields(monkeypatch, organization, get, user, settings):
@ -43,6 +47,7 @@ def test_basic_fields(monkeypatch, organization, get, user, settings):
assert 'organization' in response.data['summary_fields']
assert response.data['summary_fields']['organization'][0]['name'] == 'test-org'


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_middleware_actor_added(monkeypatch, post, get, user, settings):
@ -65,6 +70,7 @@ def test_middleware_actor_added(monkeypatch, post, get, user, settings):
assert response.status_code == 200
assert response.data['summary_fields']['actor']['username'] == 'admin-poster'


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_rbac_stream_resource_roles(activity_stream_entry, organization, org_admin, settings):
@ -74,6 +80,7 @@ def test_rbac_stream_resource_roles(activity_stream_entry, organization, org_adm
assert activity_stream_entry.role.first() == organization.admin_role
assert activity_stream_entry.object_relationship_type == 'awx.main.models.organization.Organization.admin_role'


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_rbac_stream_user_roles(activity_stream_entry, organization, org_admin, settings):
@ -83,6 +90,7 @@ def test_rbac_stream_user_roles(activity_stream_entry, organization, org_admin,
assert activity_stream_entry.role.first() == organization.admin_role
assert activity_stream_entry.object_relationship_type == 'awx.main.models.organization.Organization.admin_role'


@pytest.mark.django_db
@pytest.mark.activity_stream_access
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@ -94,6 +102,7 @@ def test_stream_access_cant_change(activity_stream_entry, organization, org_admi
assert not access.can_change(activity_stream_entry, {'organization': None})
assert not access.can_delete(activity_stream_entry)


@pytest.mark.django_db
@pytest.mark.activity_stream_access
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@ -129,6 +138,7 @@ def test_stream_queryset_hides_shows_items(
assert queryset.filter(team__pk=team.pk, operation='create').count() == 1
assert queryset.filter(notification_template__pk=notification_template.pk, operation='create').count() == 1


@pytest.mark.django_db
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
def test_stream_user_direct_role_updates(get, post, organization_factory):

@ -4,7 +4,6 @@ import pytest
from django.core.urlresolvers import reverse



"""
def run_test_ad_hoc_command(self, **kwargs):
# Post to list to start a new ad hoc command.
@ -23,6 +22,7 @@ from django.core.urlresolvers import reverse
return self.post(url, data, expect=expect)
"""


@pytest.fixture
def post_adhoc(post, inventory, machine_credential):
def f(url, data, user, expect=201):
@ -46,7 +46,6 @@ def post_adhoc(post, inventory, machine_credential):
return f



@pytest.mark.django_db
def test_admin_post_ad_hoc_command_list(admin, post_adhoc, inventory, machine_credential):
res = post_adhoc(reverse('api:ad_hoc_command_list'), {}, admin, expect=201)
@ -65,35 +64,42 @@ def test_admin_post_ad_hoc_command_list(admin, post_adhoc, inventory, machine_cr
def test_empty_post_403(admin, post):
post(reverse('api:ad_hoc_command_list'), {}, admin, expect=400)


@pytest.mark.django_db
def test_empty_put_405(admin, put):
put(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)


@pytest.mark.django_db
def test_empty_patch_405(admin, patch):
patch(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)


@pytest.mark.django_db
def test_empty_delete_405(admin, delete):
delete(reverse('api:ad_hoc_command_list'), admin, expect=405)


@pytest.mark.django_db
def test_user_post_ad_hoc_command_list(alice, post_adhoc, inventory, machine_credential):
inventory.adhoc_role.members.add(alice)
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=201)


@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_xfail(alice, post_adhoc, inventory, machine_credential):
inventory.read_role.members.add(alice) # just read access? no dice.
machine_credential.use_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)


@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_without_creds(alice, post_adhoc, inventory, machine_credential):
inventory.adhoc_role.members.add(alice)
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)


@pytest.mark.django_db
def test_user_post_ad_hoc_command_list_without_inventory(alice, post_adhoc, inventory, machine_credential):
machine_credential.use_role.members.add(alice)
@ -134,15 +140,17 @@ def test_get_inventory_ad_hoc_command_list(admin, alice, post_adhoc, get, invent
def test_bad_data1(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'module_name': 'command', 'module_args': None}, admin, expect=400)


@pytest.mark.django_db
def test_bad_data2(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'job_type': 'baddata'}, admin, expect=400)


@pytest.mark.django_db
def test_bad_data3(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'verbosity': -1}, admin, expect=400)


@pytest.mark.django_db
def test_bad_data4(admin, post_adhoc):
post_adhoc(reverse('api:ad_hoc_command_list'), {'forks': -1}, admin, expect=400)

@ -16,6 +16,7 @@ def test_user_role_view_access(rando, inventory, mocker, post):
inventory.admin_role, rando, 'members', data,
skip_sub_obj_read_check=False)


@pytest.mark.django_db
def test_team_role_view_access(rando, team, inventory, mocker, post):
"Assure correct access method is called when assigning teams new roles"
@ -30,6 +31,7 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
inventory.admin_role, team, 'member_role.parents', data,
skip_sub_obj_read_check=False)


@pytest.mark.django_db
def test_role_team_view_access(rando, team, inventory, mocker, post):
"""Assure that /role/N/teams/ enforces the same permission restrictions

@ -8,6 +8,7 @@ from django.core.urlresolvers import reverse
# user credential creation
#


@pytest.mark.django_db
def test_create_user_credential_via_credentials_list(post, get, alice):
response = post(reverse('api:credential_list'), {
@ -21,6 +22,7 @@ def test_create_user_credential_via_credentials_list(post, get, alice):
assert response.status_code == 200
assert response.data['count'] == 1


@pytest.mark.django_db
def test_credential_validation_error_with_bad_user(post, admin):
response = post(reverse('api:credential_list'), {
@ -31,6 +33,7 @@ def test_credential_validation_error_with_bad_user(post, admin):
assert response.status_code == 400
assert response.data['user'][0] == 'Incorrect type. Expected pk value, received unicode.'


@pytest.mark.django_db
def test_create_user_credential_via_user_credentials_list(post, get, alice):
response = post(reverse('api:user_credentials_list', args=(alice.pk,)), {
@ -44,6 +47,7 @@ def test_create_user_credential_via_user_credentials_list(post, get, alice):
assert response.status_code == 200
assert response.data['count'] == 1


@pytest.mark.django_db
def test_create_user_credential_via_credentials_list_xfail(post, alice, bob):
response = post(reverse('api:credential_list'), {
@ -53,6 +57,7 @@ def test_create_user_credential_via_credentials_list_xfail(post, alice, bob):
}, alice)
assert response.status_code == 403


@pytest.mark.django_db
def test_create_user_credential_via_user_credentials_list_xfail(post, alice, bob):
response = post(reverse('api:user_credentials_list', args=(bob.pk,)), {
@ -67,6 +72,7 @@ def test_create_user_credential_via_user_credentials_list_xfail(post, alice, bob
# team credential creation
#


@pytest.mark.django_db
def test_create_team_credential(post, get, team, organization, org_admin, team_member):
response = post(reverse('api:credential_list'), {
@ -83,6 +89,7 @@ def test_create_team_credential(post, get, team, organization, org_admin, team_m
# Assure that credential's organization is implictly set to team's org
assert response.data['results'][0]['summary_fields']['organization']['id'] == team.organization.id


@pytest.mark.django_db
def test_create_team_credential_via_team_credentials_list(post, get, team, org_admin, team_member):
response = post(reverse('api:team_credentials_list', args=(team.pk,)), {
@ -96,6 +103,7 @@ def test_create_team_credential_via_team_credentials_list(post, get, team, org_a
assert response.status_code == 200
assert response.data['count'] == 1


@pytest.mark.django_db
def test_create_team_credential_by_urelated_user_xfail(post, team, organization, alice, team_member):
response = post(reverse('api:credential_list'), {
@ -106,6 +114,7 @@ def test_create_team_credential_by_urelated_user_xfail(post, team, organization,
}, alice)
assert response.status_code == 403


@pytest.mark.django_db
def test_create_team_credential_by_team_member_xfail(post, team, organization, alice, team_member):
# Members can't add credentials, only org admins.. for now?
@ -122,6 +131,7 @@ def test_create_team_credential_by_team_member_xfail(post, team, organization, a
# Permission granting
#


@pytest.mark.django_db
def test_grant_org_credential_to_org_user_through_role_users(post, credential, organization, org_admin, org_member):
credential.organization = organization
@ -131,6 +141,7 @@ def test_grant_org_credential_to_org_user_through_role_users(post, credential, o
}, org_admin)
assert response.status_code == 204


@pytest.mark.django_db
def test_grant_org_credential_to_org_user_through_user_roles(post, credential, organization, org_admin, org_member):
credential.organization = organization
@ -140,6 +151,7 @@ def test_grant_org_credential_to_org_user_through_user_roles(post, credential, o
}, org_admin)
assert response.status_code == 204


@pytest.mark.django_db
def test_grant_org_credential_to_non_org_user_through_role_users(post, credential, organization, org_admin, alice):
credential.organization = organization
@ -149,6 +161,7 @@ def test_grant_org_credential_to_non_org_user_through_role_users(post, credentia
}, org_admin)
assert response.status_code == 400


@pytest.mark.django_db
def test_grant_org_credential_to_non_org_user_through_user_roles(post, credential, organization, org_admin, alice):
credential.organization = organization
@ -158,6 +171,7 @@ def test_grant_org_credential_to_non_org_user_through_user_roles(post, credentia
}, org_admin)
assert response.status_code == 400


@pytest.mark.django_db
def test_grant_private_credential_to_user_through_role_users(post, credential, alice, bob):
# normal users can't do this
@ -167,6 +181,7 @@ def test_grant_private_credential_to_user_through_role_users(post, credential, a
}, alice)
assert response.status_code == 400


@pytest.mark.django_db
def test_grant_private_credential_to_org_user_through_role_users(post, credential, org_admin, org_member):
# org admins can't either
@ -176,6 +191,7 @@ def test_grant_private_credential_to_org_user_through_role_users(post, credentia
}, org_admin)
assert response.status_code == 400


@pytest.mark.django_db
def test_sa_grant_private_credential_to_user_through_role_users(post, credential, admin, bob):
# but system admins can
@ -184,6 +200,7 @@ def test_sa_grant_private_credential_to_user_through_role_users(post, credential
}, admin)
assert response.status_code == 204


@pytest.mark.django_db
def test_grant_private_credential_to_user_through_user_roles(post, credential, alice, bob):
# normal users can't do this
@ -193,6 +210,7 @@ def test_grant_private_credential_to_user_through_user_roles(post, credential, a
}, alice)
assert response.status_code == 400


@pytest.mark.django_db
def test_grant_private_credential_to_org_user_through_user_roles(post, credential, org_admin, org_member):
# org admins can't either
@ -202,6 +220,7 @@ def test_grant_private_credential_to_org_user_through_user_roles(post, credentia
}, org_admin)
assert response.status_code == 400


@pytest.mark.django_db
def test_sa_grant_private_credential_to_user_through_user_roles(post, credential, admin, bob):
# but system admins can
@ -210,6 +229,7 @@ def test_sa_grant_private_credential_to_user_through_user_roles(post, credential
}, admin)
assert response.status_code == 204


@pytest.mark.django_db
def test_grant_org_credential_to_team_through_role_teams(post, credential, organization, org_admin, org_auditor, team):
assert org_auditor not in credential.read_role
@ -221,6 +241,7 @@ def test_grant_org_credential_to_team_through_role_teams(post, credential, organ
assert response.status_code == 204
assert org_auditor in credential.read_role


@pytest.mark.django_db
def test_grant_org_credential_to_team_through_team_roles(post, credential, organization, org_admin, org_auditor, team):
assert org_auditor not in credential.read_role
@ -232,6 +253,7 @@ def test_grant_org_credential_to_team_through_team_roles(post, credential, organ
assert response.status_code == 204
assert org_auditor in credential.read_role


@pytest.mark.django_db
def test_sa_grant_private_credential_to_team_through_role_teams(post, credential, admin, team):
# not even a system admin can grant a private cred to a team though
@ -240,6 +262,7 @@ def test_sa_grant_private_credential_to_team_through_role_teams(post, credential
}, admin)
assert response.status_code == 400


@pytest.mark.django_db
def test_sa_grant_private_credential_to_team_through_team_roles(post, credential, admin, team):
# not even a system admin can grant a private cred to a team though
@ -249,12 +272,11 @@ def test_sa_grant_private_credential_to_team_through_team_roles(post, credential
assert response.status_code == 400



#
# organization credentials
#


@pytest.mark.django_db
def test_create_org_credential_as_not_admin(post, organization, org_member):
response = post(reverse('api:credential_list'), {
@ -264,6 +286,7 @@ def test_create_org_credential_as_not_admin(post, organization, org_member):
}, org_member)
assert response.status_code == 403


@pytest.mark.django_db
def test_create_org_credential_as_admin(post, organization, org_admin):
response = post(reverse('api:credential_list'), {
@ -273,6 +296,7 @@ def test_create_org_credential_as_admin(post, organization, org_admin):
}, org_admin)
assert response.status_code == 201


@pytest.mark.django_db
def test_credential_detail(post, get, organization, org_admin):
response = post(reverse('api:credential_list'), {
@ -288,6 +312,7 @@ def test_credential_detail(post, get, organization, org_admin):
related_fields = response.data['related']
assert 'organization' in related_fields


@pytest.mark.django_db
def test_list_created_org_credentials(post, get, organization, org_admin, org_member):
response = post(reverse('api:credential_list'), {
@ -336,6 +361,7 @@ def test_cant_change_organization(patch, credential, organization, org_admin):
}, org_admin)
assert response.status_code == 403


@pytest.mark.django_db
def test_cant_add_organization(patch, credential, organization, org_admin):
assert credential.organization is None
@ -350,6 +376,7 @@ def test_cant_add_organization(patch, credential, organization, org_admin):
# Openstack Credentials
#


@pytest.mark.django_db
def test_openstack_create_ok(post, organization, admin):
data = {
@ -364,6 +391,7 @@ def test_openstack_create_ok(post, organization, admin):
response = post(reverse('api:credential_list'), data, admin)
assert response.status_code == 201


@pytest.mark.django_db
def test_openstack_create_fail_required_fields(post, organization, admin):
data = {
@ -383,6 +411,7 @@ def test_openstack_create_fail_required_fields(post, organization, admin):
# misc xfail conditions
#


@pytest.mark.django_db
def test_create_credential_missing_user_team_org_xfail(post, admin):
# Must specify one of user, team, or organization
@ -391,4 +420,3 @@ def test_create_credential_missing_user_team_org_xfail(post, admin):
'username': 'someusername',
}, admin)
assert response.status_code == 400

|
||||
|
||||
|
||||
@@ -13,12 +13,15 @@ from awx.main.utils import timestamp_apiformat
from django.core.urlresolvers import reverse
from django.utils import timezone


def mock_feature_enabled(feature):
return True


def mock_feature_disabled(feature):
return False


def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params={}, host_count=1):
hosts = hosts(host_count=host_count)
fact_scans(fact_scans=3, timestamp_epoch=epoch)
|
||||
@ -28,6 +31,7 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), get_params=
|
||||
|
||||
return (hosts[0], response)
|
||||
|
||||
|
||||
def check_url(url1_full, fact_known, module):
|
||||
url1_split = urlparse.urlsplit(url1_full)
|
||||
url1 = url1_split.path
|
||||
@ -42,16 +46,19 @@ def check_url(url1_full, fact_known, module):
|
||||
url2_params_sorted = sorted(url2_params, key=lambda val: val[0])
|
||||
assert urllib.urlencode(url1_params_sorted) == urllib.urlencode(url2_params_sorted)
|
||||
|
||||
|
||||
def check_response_facts(facts_known, response):
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
assert fact_known.module == response.data['results'][i]['module']
|
||||
assert timestamp_apiformat(fact_known.timestamp) == response.data['results'][i]['timestamp']
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
|
||||
def check_system_tracking_feature_forbidden(response):
assert 402 == response.status_code
assert 'Your license does not permit use of system tracking.' == response.data['detail']
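For context on the 402 checks that follow: these fact endpoints are gated behind the system-tracking license feature, and the tests flip the gate by patching awx.api.views.feature_enabled with a stub. A minimal sketch of that wiring is below; the URL name is an assumption (the hunks do not show how the real tests build it), while the patch target, fixtures and expected 402 payload are taken from the surrounding hunks.

import mock
import pytest
from django.core.urlresolvers import reverse


def mock_feature_disabled(feature):
    return False


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
@pytest.mark.django_db
def test_fact_versions_license_sketch(hosts, get, user):
    host = hosts(host_count=1)[0]
    # 'api:host_fact_versions_list' is assumed here for illustration.
    response = get(reverse('api:host_fact_versions_list', args=(host.pk,)), user('admin', True))
    assert response.status_code == 402
    assert response.data['detail'] == 'Your license does not permit use of system tracking.'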
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -62,6 +69,7 @@ def test_system_tracking_license_get(hosts, get, user):
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -72,6 +80,7 @@ def test_system_tracking_license_options(hosts, options, user):
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -85,6 +94,7 @@ def test_no_facts_db(hosts, get, user):
|
||||
}
|
||||
assert response_expected == response.data
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_basic_fields(hosts, fact_scans, get, user):
|
||||
@ -101,6 +111,7 @@ def test_basic_fields(hosts, fact_scans, get, user):
|
||||
assert 'timestamp' in results[0]
|
||||
assert 'module' in results[0]
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -117,6 +128,7 @@ def test_basic_options_fields(hosts, fact_scans, options, user):
|
||||
assert ("services", "Services") in response.data['actions']['GET']['module']['choices']
|
||||
assert ("packages", "Packages") in response.data['actions']['GET']['module']['choices']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_related_fact_view(hosts, fact_scans, get, user):
|
||||
@ -130,6 +142,7 @@ def test_related_fact_view(hosts, fact_scans, get, user):
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_multiple_hosts(hosts, fact_scans, get, user):
|
||||
@ -143,6 +156,7 @@ def test_multiple_hosts(hosts, fact_scans, get, user):
|
||||
for i, fact_known in enumerate(facts_known):
|
||||
check_url(response.data['results'][i]['related']['fact_view'], fact_known, fact_known.module)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_to_from(hosts, fact_scans, get, user):
|
||||
@ -159,6 +173,7 @@ def test_param_to_from(hosts, fact_scans, get, user):
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_module(hosts, fact_scans, get, user):
|
||||
@ -174,6 +189,7 @@ def test_param_module(hosts, fact_scans, get, user):
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_from(hosts, fact_scans, get, user):
|
||||
@ -189,6 +205,7 @@ def test_param_from(hosts, fact_scans, get, user):
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_param_to(hosts, fact_scans, get, user):
|
||||
@ -204,6 +221,7 @@ def test_param_to(hosts, fact_scans, get, user):
|
||||
|
||||
check_response_facts(facts_known, response)
|
||||
|
||||
|
||||
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
|
||||
hosts = hosts(host_count=1)
|
||||
fact_scans(fact_scans=1)
|
||||
@ -214,6 +232,7 @@ def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
|
||||
response = get(url, user_obj)
|
||||
return response
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -224,6 +243,7 @@ def test_normal_user_403(hosts, fact_scans, get, user, team):
|
||||
assert 403 == response.status_code
|
||||
assert "You do not have permission to perform this action." == response.data['detail']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -233,6 +253,7 @@ def test_super_user_ok(hosts, fact_scans, get, user, team):
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -244,6 +265,7 @@ def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -255,4 +277,3 @@ def test_user_admin_403(organization, organizations, hosts, fact_scans, get, use
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
|
||||
|
||||
assert 403 == response.status_code
|
||||
|
||||
|
||||
@@ -6,12 +6,15 @@ from awx.main.utils import timestamp_apiformat
from django.core.urlresolvers import reverse
from django.utils import timezone


def mock_feature_enabled(feature):
return True


def mock_feature_disabled(feature):
return False


# TODO: Consider making the fact_scan() fixture a Class, instead of a function, and move this method into it
def find_fact(facts, host_id, module_name, timestamp):
for f in facts:
@@ -19,6 +22,7 @@ def find_fact(facts, host_id, module_name, timestamp):
return f
raise RuntimeError('fact <%s, %s, %s> not found in %s', (host_id, module_name, timestamp, facts))
|
||||
|
||||
|
||||
def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name='ansible', get_params={}):
|
||||
hosts = hosts(host_count=1)
|
||||
facts = fact_scans(fact_scans=1, timestamp_epoch=epoch)
|
||||
@ -29,10 +33,12 @@ def setup_common(hosts, fact_scans, get, user, epoch=timezone.now(), module_name
|
||||
fact_known = find_fact(facts, hosts[0].id, module_name, epoch)
|
||||
return (fact_known, response)
|
||||
|
||||
|
||||
def check_system_tracking_feature_forbidden(response):
|
||||
assert 402 == response.status_code
|
||||
assert 'Your license does not permit use of system tracking.' == response.data['detail']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -43,6 +49,7 @@ def test_system_tracking_license_get(hosts, get, user):
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -53,6 +60,7 @@ def test_system_tracking_license_options(hosts, options, user):
|
||||
|
||||
check_system_tracking_feature_forbidden(response)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_no_fact_found(hosts, get, user):
|
||||
@ -66,6 +74,7 @@ def test_no_fact_found(hosts, get, user):
|
||||
assert 404 == response.status_code
|
||||
assert expected_response == response.data
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_basic_fields(hosts, fact_scans, get, user):
|
||||
@ -88,6 +97,7 @@ def test_basic_fields(hosts, fact_scans, get, user):
|
||||
assert 'host' in response.data['related']
|
||||
assert reverse('api:host_detail', args=(hosts[0].pk,)) == response.data['related']['host']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_content(hosts, fact_scans, get, user, fact_ansible_json):
|
||||
@ -98,6 +108,7 @@ def test_content(hosts, fact_scans, get, user, fact_ansible_json):
|
||||
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
|
||||
assert fact_known.module == response.data['module']
|
||||
|
||||
|
||||
def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name):
|
||||
params = {
|
||||
'module': module_name
|
||||
@ -108,16 +119,19 @@ def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name)
|
||||
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
|
||||
assert module_name == response.data['module']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_search_by_module_packages(hosts, fact_scans, get, user, fact_packages_json):
|
||||
_test_search_by_module(hosts, fact_scans, get, user, fact_packages_json, 'packages')
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_search_by_module_services(hosts, fact_scans, get, user, fact_services_json):
|
||||
_test_search_by_module(hosts, fact_scans, get, user, fact_services_json, 'services')
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packages_json):
|
||||
@ -128,6 +142,7 @@ def test_search_by_timestamp_and_module(hosts, fact_scans, get, user, fact_packa
|
||||
|
||||
assert fact_known.id == response.data['id']
|
||||
|
||||
|
||||
def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
|
||||
hosts = hosts(host_count=1)
|
||||
fact_scans(fact_scans=1)
|
||||
@ -138,6 +153,7 @@ def _test_user_access_control(hosts, fact_scans, get, user_obj, team_obj):
|
||||
response = get(url, user_obj)
|
||||
return response
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -148,6 +164,7 @@ def test_normal_user_403(hosts, fact_scans, get, user, team):
|
||||
assert 403 == response.status_code
|
||||
assert "You do not have permission to perform this action." == response.data['detail']
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -157,6 +174,7 @@ def test_super_user_ok(hosts, fact_scans, get, user, team):
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -168,6 +186,7 @@ def test_user_admin_ok(organization, hosts, fact_scans, get, user, team):
|
||||
|
||||
assert 200 == response.status_code
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.ac
|
||||
@pytest.mark.django_db
|
||||
@ -179,4 +198,3 @@ def test_user_admin_403(organization, organizations, hosts, fact_scans, get, use
|
||||
response = _test_user_access_control(hosts, fact_scans, get, user_admin, team)
|
||||
|
||||
assert 403 == response.status_code
|
||||
|
||||
|
||||
@ -4,6 +4,7 @@ import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_fields(hosts, fact_scans, get, user):
|
||||
hosts = hosts(host_count=1)
|
||||
|
||||
@ -2,6 +2,7 @@ import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_inventory_source_notification_on_cloud_only(get, post, group_factory, user, notification_template):
|
||||
u = user('admin', True)
|
||||
@ -48,6 +49,7 @@ def test_create_inventory_group(post, inventory, alice, role_field, expected_sta
|
||||
getattr(inventory, role_field).members.add(alice)
|
||||
post(reverse('api:inventory_groups_list', args=(inventory.id,)), data, alice, expect=expected_status_code)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("role_field,expected_status_code", [
|
||||
(None, 403),
|
||||
('admin_role', 201),
|
||||
@ -106,6 +108,7 @@ def test_create_inventory_host(post, inventory, alice, role_field, expected_stat
|
||||
getattr(inventory, role_field).members.add(alice)
|
||||
post(reverse('api:inventory_hosts_list', args=(inventory.id,)), data, alice, expect=expected_status_code)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("role_field,expected_status_code", [
|
||||
(None, 403),
|
||||
('admin_role', 201),
|
||||
@ -149,6 +152,7 @@ def test_delete_inventory_host(delete, host, alice, role_field, expected_status_
|
||||
getattr(host.inventory, role_field).members.add(alice)
|
||||
delete(reverse('api:host_detail', args=(host.id,)), alice, expect=expected_status_code)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("role_field,expected_status_code", [
|
||||
(None, 403),
|
||||
('admin_role', 202),
|
||||
|
||||
@ -8,6 +8,7 @@ from awx.main.models.jobs import Job, JobTemplate
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def runtime_data(organization):
|
||||
cred_obj = Credential.objects.create(name='runtime-cred', kind='ssh', username='test_user2', password='pas4word2')
|
||||
@ -22,10 +23,12 @@ def runtime_data(organization):
|
||||
credential=cred_obj.pk,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_with_links(machine_credential, inventory):
|
||||
return Job.objects.create(name='existing-job', credential=machine_credential, inventory=inventory)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_prompts(project, inventory, machine_credential):
|
||||
def rf(on_off):
|
||||
@ -45,6 +48,7 @@ def job_template_prompts(project, inventory, machine_credential):
|
||||
)
|
||||
return rf
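The rf(on_off) closure returned by job_template_prompts above is the usual pytest fixture-factory pattern: the fixture hands back a callable so a single fixture can build a JobTemplate with prompts switched on or off per test. A generic, hypothetical illustration of that pattern (nothing below is AWX-specific):

import pytest


@pytest.fixture
def thing_factory():
    # The fixture returns a builder instead of a finished object.
    def rf(enabled):
        return {'name': 'example', 'enabled': enabled}
    return rf


def test_thing_disabled(thing_factory):
    assert thing_factory(False)['enabled'] is False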
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_prompts_null(project):
|
||||
return JobTemplate.objects.create(
|
||||
@ -62,6 +66,7 @@ def job_template_prompts_null(project):
|
||||
ask_credential_on_launch=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bad_scan_JT(job_template_prompts):
|
||||
job_template = job_template_prompts(True)
|
||||
@ -69,6 +74,7 @@ def bad_scan_JT(job_template_prompts):
|
||||
job_template.save()
|
||||
return job_template
|
||||
|
||||
|
||||
# End of setup, tests start here
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
@ -98,6 +104,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
|
||||
assert 'job_tags' in response.data['ignored_fields']
|
||||
assert 'skip_tags' in response.data['ignored_fields']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
|
||||
@ -115,6 +122,7 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
|
||||
|
||||
mock_job.signal_start.assert_called_once_with(**runtime_data)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_accept_null_tags(job_template_prompts, post, admin_user, mocker):
|
||||
@ -129,6 +137,7 @@ def test_job_accept_null_tags(job_template_prompts, post, admin_user, mocker):
|
||||
|
||||
mock_job.signal_start.assert_called_once_with(job_tags='', skip_tags='')
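The assertion above relies on the launch tests replacing real job creation with a mock, so nothing is dispatched to the task system while the view's handling of prompted fields is checked. The sketch below shows that technique; the patch target and the exact view behaviour are assumptions modelled on the hunks in this file, not a test from the commit.

import pytest
from django.core.urlresolvers import reverse

from awx.main.models.jobs import Job, JobTemplate


@pytest.mark.django_db
def test_launch_signal_start_sketch(job_template_prompts, post, admin_user, mocker):
    job_template = job_template_prompts(True)
    mock_job = mocker.MagicMock(spec=Job, id=968)
    # Patch job creation so the launch view never touches Celery.
    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
    post(reverse('api:job_template_launch', args=[job_template.pk]),
         dict(job_tags='', skip_tags=''), admin_user, expect=201)
    # The prompted tag fields are forwarded to signal_start(), as asserted above.
    mock_job.signal_start.assert_called_once_with(job_tags='', skip_tags='')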
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null, post, rando, mocker):
|
||||
@ -154,6 +163,7 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
|
||||
assert job_id == 968
|
||||
mock_job.signal_start.assert_called_once_with(**runtime_data)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, admin_user):
|
||||
@ -168,6 +178,7 @@ def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, po
|
||||
assert response.data['inventory'] == [u'Invalid pk "87865" - object does not exist.']
|
||||
assert response.data['credential'] == [u'Invalid pk "48474" - object does not exist.']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, admin_user):
|
||||
@ -179,6 +190,7 @@ def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_promp
|
||||
|
||||
assert response.data['extra_vars'] == ['Must be a valid JSON or YAML dictionary.']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user):
|
||||
@ -190,6 +202,7 @@ def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user
|
||||
|
||||
assert response.data['inventory'] == ["Job Template 'inventory' is missing or undefined."]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime_data, post, rando):
|
||||
@ -202,6 +215,7 @@ def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime
|
||||
|
||||
assert response.data['detail'] == u'You do not have permission to perform this action.'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_fails_without_credential_access(job_template_prompts, runtime_data, post, rando):
|
||||
@ -214,6 +228,7 @@ def test_job_launch_fails_without_credential_access(job_template_prompts, runtim
|
||||
|
||||
assert response.data['detail'] == u'You do not have permission to perform this action.'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
|
||||
@ -225,6 +240,7 @@ def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
|
||||
|
||||
assert 'job_type' in response.data
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post, admin_user):
|
||||
@ -236,6 +252,7 @@ def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post,
|
||||
|
||||
assert 'inventory' in response.data
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_relaunch_copy_vars(job_with_links, machine_credential, inventory,
|
||||
@ -251,6 +268,7 @@ def test_job_relaunch_copy_vars(job_with_links, machine_credential, inventory,
|
||||
assert second_job.inventory == job_with_links.inventory
|
||||
assert second_job.limit == 'my_server'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_relaunch_resource_access(job_with_links, user):
|
||||
@ -271,6 +289,7 @@ def test_job_relaunch_resource_access(job_with_links, user):
|
||||
job_with_links.inventory.use_role.members.add(inventory_user)
|
||||
assert not inventory_user.can_access(Job, 'start', job_with_links)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
|
||||
deploy_jobtemplate.extra_vars = '{"job_template_var": 3}'
|
||||
@ -291,6 +310,7 @@ def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
|
||||
assert 'job_launch_var' in final_job_extra_vars
|
||||
assert job_obj.credential.id == machine_credential.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user):
|
||||
|
||||
@ -9,6 +9,7 @@ from awx.main.migrations import _save_password_keys as save_password_keys
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.apps import apps
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"grant_project, grant_credential, grant_inventory, expect", [
|
||||
@ -34,6 +35,7 @@ def test_create(post, project, machine_credential, inventory, alice, grant_proje
|
||||
'playbook': 'helloworld.yml',
|
||||
}, alice, expect=expect)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"grant_project, grant_credential, grant_inventory, expect", [
|
||||
@ -62,6 +64,7 @@ def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project
|
||||
'playbook': 'alt-helloworld.yml',
|
||||
}, alice, expect=expect)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_edit_playbook(patch, job_template_factory, alice):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
@ -79,6 +82,7 @@ def test_edit_playbook(patch, job_template_factory, alice):
|
||||
'playbook': 'helloworld.yml',
|
||||
}, alice, expect=403)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_edit_nonsenstive(patch, job_template_factory, alice):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
@ -104,6 +108,8 @@ def test_edit_nonsenstive(patch, job_template_factory, alice):
|
||||
}, alice, expect=200)
|
||||
print(res.data)
|
||||
assert res.data['name'] == 'updated'
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def jt_copy_edit(job_template_factory, project):
|
||||
objects = job_template_factory(
|
||||
@ -111,6 +117,7 @@ def jt_copy_edit(job_template_factory, project):
|
||||
project=project)
|
||||
return objects.job_template
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_role_user(post, organization_factory, job_template_factory):
|
||||
objects = organization_factory("org",
|
||||
@ -127,10 +134,8 @@ def test_job_template_role_user(post, organization_factory, job_template_factory
|
||||
assert response.status_code == 204
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
|
||||
|
||||
# Grant random user JT admin access only
|
||||
jt_copy_edit.admin_role.members.add(rando)
|
||||
jt_copy_edit.save()
|
||||
@ -143,6 +148,7 @@ def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
|
||||
post_response = post(reverse('api:job_template_list', args=[]), user=rando, data=post_data)
|
||||
assert post_response.status_code == 403
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_jt_no_inventory(job_template_factory):
|
||||
# A user should be able to create a scan job without a project, but an inventory is required
|
||||
@ -175,6 +181,7 @@ def test_scan_jt_no_inventory(job_template_factory):
|
||||
assert not serializer.is_valid()
|
||||
assert 'inventory' in serializer.errors
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_jt_surveys(inventory):
|
||||
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
|
||||
@ -183,6 +190,7 @@ def test_scan_jt_surveys(inventory):
|
||||
assert not serializer.is_valid()
|
||||
assert "survey_enabled" in serializer.errors
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_jt_without_project(inventory):
|
||||
data = dict(name="Test", job_type="run",
|
||||
@ -198,6 +206,7 @@ def test_jt_without_project(inventory):
|
||||
serializer = JobTemplateSerializer(data=data)
|
||||
assert serializer.is_valid()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_disallow_template_delete_on_running_job(job_template_factory, delete, admin_user):
|
||||
objects = job_template_factory('jt',
|
||||
@ -210,6 +219,7 @@ def test_disallow_template_delete_on_running_job(job_template_factory, delete, a
|
||||
delete_response = delete(reverse('api:job_template_detail', args=[objects.job_template.pk]), user=admin_user)
|
||||
assert delete_response.status_code == 409
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_save_survey_passwords_to_job(job_template_with_survey_passwords):
|
||||
"""Test that when a new job is created, the survey_passwords field is
|
||||
@ -217,6 +227,7 @@ def test_save_survey_passwords_to_job(job_template_with_survey_passwords):
|
||||
job = job_template_with_survey_passwords.create_unified_job()
|
||||
assert job.survey_passwords == {'SSN': '$encrypted$', 'secret_key': '$encrypted$'}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_save_survey_passwords_on_migration(job_template_with_survey_passwords):
|
||||
"""Test that when upgrading to 3.0.2, the jobs connected to a JT that has
|
||||
|
||||
@ -2,6 +2,7 @@ import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def organization_resource_creator(organization, user):
|
||||
def rf(users, admins, job_templates, projects, inventories, teams):
|
||||
@ -40,6 +41,7 @@ def organization_resource_creator(organization, user):
|
||||
return organization
|
||||
return rf
|
||||
|
||||
|
||||
COUNTS_PRIMES = {
|
||||
'users': 11,
|
||||
'admins': 5,
|
||||
@ -57,10 +59,12 @@ COUNTS_ZEROS = {
|
||||
'teams': 0
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def resourced_organization(organization_resource_creator):
|
||||
return organization_resource_creator(**COUNTS_PRIMES)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_counts_detail_admin(resourced_organization, user, get):
|
||||
# Check that all types of resources are counted by a superuser
|
||||
@ -72,6 +76,7 @@ def test_org_counts_detail_admin(resourced_organization, user, get):
|
||||
counts = response.data['summary_fields']['related_field_counts']
|
||||
assert counts == COUNTS_PRIMES
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_counts_detail_member(resourced_organization, user, get):
|
||||
# Check that a non-admin org member can only see users / admin in detail view
|
||||
@ -90,6 +95,7 @@ def test_org_counts_detail_member(resourced_organization, user, get):
|
||||
'teams': 0
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_counts_list_admin(resourced_organization, user, get):
|
||||
# Check that all types of resources are counted by a superuser
|
||||
@ -100,6 +106,7 @@ def test_org_counts_list_admin(resourced_organization, user, get):
|
||||
counts = response.data['results'][0]['summary_fields']['related_field_counts']
|
||||
assert counts == COUNTS_PRIMES
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_counts_list_member(resourced_organization, user, get):
|
||||
# Check that a non-admin user can only see the full project and
|
||||
@ -119,6 +126,7 @@ def test_org_counts_list_member(resourced_organization, user, get):
|
||||
'teams': 0
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_new_org_zero_counts(user, post):
|
||||
# Check that a POST to the organization list endpoint returns
|
||||
@ -132,6 +140,7 @@ def test_new_org_zero_counts(user, post):
|
||||
counts_dict = new_org_list['summary_fields']['related_field_counts']
|
||||
assert counts_dict == COUNTS_ZEROS
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_two_organizations(resourced_organization, organizations, user, get):
|
||||
# Check correct results for two organizations are returned
|
||||
@ -150,6 +159,7 @@ def test_two_organizations(resourced_organization, organizations, user, get):
|
||||
assert counts[org_id_full] == COUNTS_PRIMES
|
||||
assert counts[org_id_zero] == COUNTS_ZEROS
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_JT_counted(resourced_organization, user, get):
|
||||
admin_user = user('admin', True)
|
||||
@ -170,6 +180,7 @@ def test_scan_JT_counted(resourced_organization, user, get):
|
||||
assert detail_response.status_code == 200
|
||||
assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_JT_associated_with_project(organizations, project, user, get):
|
||||
# Check that adding a project to an organization gets the project's JT
|
||||
|
||||
@ -172,6 +172,7 @@ def mock_access_method(mocker):
|
||||
mock_method.__name__ = 'bars' # Required for a logging statement
|
||||
return mock_method
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAccessListCapabilities:
|
||||
"""
|
||||
@ -240,6 +241,7 @@ def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_m
|
||||
inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
|
||||
assert response.data['results'][0]['summary_fields']['user_capabilities']['unattach'] == 'foobar'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_method, get):
|
||||
# Add to same organization so that alice and bob can see each other
|
||||
@ -254,6 +256,7 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
|
||||
organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
|
||||
assert response.data['results'][0]['summary_fields']['user_capabilities']['unattach'] == 'foobar'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_team_roles_unattach_functional(team, team_member, inventory, get):
|
||||
team.member_role.children.add(inventory.admin_role)
|
||||
@ -262,6 +265,7 @@ def test_team_roles_unattach_functional(team, team_member, inventory, get):
|
||||
# the inventory admin_role grants that ability
|
||||
assert response.data['results'][0]['summary_fields']['user_capabilities']['unattach']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_roles_unattach_functional(organization, alice, bob, get):
|
||||
organization.member_role.members.add(alice)
|
||||
@ -278,6 +282,7 @@ def test_prefetch_jt_capabilities(job_template, rando):
|
||||
cache_list_capabilities(qs, ['admin', 'execute'], JobTemplate, rando)
|
||||
assert qs[0].capabilities_cache == {'edit': False, 'start': True}
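The assertion above shows the contract of cache_list_capabilities: it walks the queryset once and stores a capabilities_cache dict ('edit', 'start', 'adhoc', 'copy', ...) on each row so the serializer does not have to run per-object access checks. A usage sketch follows; the import path is an assumption (these hunks do not show the test module's imports) and the role setup is illustrative.

import pytest
# Import path assumed; adjust to wherever cache_list_capabilities actually lives.
from awx.api.serializers import cache_list_capabilities
from awx.main.models import JobTemplate


@pytest.mark.django_db
def test_prefetch_capabilities_sketch(job_template, rando):
    job_template.execute_role.members.add(rando)
    qs = JobTemplate.objects.all()
    # 'admin' feeds the 'edit' flag, 'execute' feeds 'start'.
    cache_list_capabilities(qs, ['admin', 'execute'], JobTemplate, rando)
    assert qs[0].capabilities_cache == {'edit': False, 'start': True}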
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_prefetch_group_capabilities(group, rando):
|
||||
group.inventory.adhoc_role.members.add(rando)
|
||||
@ -285,6 +290,7 @@ def test_prefetch_group_capabilities(group, rando):
|
||||
cache_list_capabilities(qs, ['inventory.admin', 'inventory.adhoc'], Group, rando)
|
||||
assert qs[0].capabilities_cache == {'edit': False, 'adhoc': True}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_prefetch_jt_copy_capability(job_template, project, inventory, machine_credential, rando):
|
||||
job_template.project = project
|
||||
@ -309,11 +315,13 @@ def test_prefetch_jt_copy_capability(job_template, project, inventory, machine_c
|
||||
]}], JobTemplate, rando)
|
||||
assert qs[0].capabilities_cache == {'copy': True}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_manual_projects_no_update(project, get, admin_user):
|
||||
response = get(reverse('api:project_detail', args=[project.pk]), admin_user, expect=200)
|
||||
assert not response.data['summary_fields']['user_capabilities']['start']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_group_update_capabilities_possible(group, inventory_source, admin_user):
|
||||
group.inventory_source = inventory_source
|
||||
@ -322,6 +330,7 @@ def test_group_update_capabilities_possible(group, inventory_source, admin_user)
|
||||
capabilities = get_user_capabilities(admin_user, group, method_list=['start'])
|
||||
assert capabilities['start']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_group_update_capabilities_impossible(group, inventory_source, admin_user):
|
||||
inventory_source.source = ""
|
||||
@ -332,6 +341,7 @@ def test_group_update_capabilities_impossible(group, inventory_source, admin_use
|
||||
capabilities = get_user_capabilities(admin_user, group, method_list=['start'])
|
||||
assert not capabilities['start']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_license_check_not_called(mocker, job_template, project, org_admin, get):
|
||||
job_template.project = project
|
||||
@ -340,4 +350,3 @@ def test_license_check_not_called(mocker, job_template, project, org_admin, get)
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license', mock_license_check):
|
||||
get(reverse('api:job_template_detail', args=[job_template.pk]), org_admin, expect=200)
|
||||
assert not mock_license_check.called
|
||||
|
||||
|
||||
@ -3,6 +3,7 @@ import pytest
|
||||
from django.core.urlresolvers import reverse
|
||||
from awx.main.models import Role
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_indirect_access_list(get, organization, project, team_factory, user, admin):
|
||||
project_admin = user('project_admin')
|
||||
|
||||
@ -2,6 +2,7 @@ import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_visible_to_orphaned_users(get, alice):
|
||||
names = set()
|
||||
|
||||
@ -11,17 +11,18 @@ from django.core.urlresolvers import reverse
|
||||
# AWX
|
||||
from awx.conf.models import Setting
|
||||
|
||||
'''
Ensures that tests don't pick up dev container license file
'''

@pytest.fixture
def mock_no_license_file(mocker):
'''
Ensures that tests don't pick up dev container license file
'''
os.environ['AWX_LICENSE_FILE'] = '/does_not_exist'
return None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_license_cannot_be_removed_via_system_settings(mock_no_license_file, get, put, patch, delete, admin, enterprise_license):
|
||||
|
||||
url = reverse('api:setting_singleton_detail', args=('system',))
|
||||
response = get(url, user=admin, expect=200)
|
||||
assert not response.data['LICENSE']
|
||||
|
||||
@ -16,11 +16,13 @@ def mock_no_surveys(self, add_host=False, feature=None, check_expiration=True):
|
||||
else:
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_with_survey(job_template_factory):
|
||||
objects = job_template_factory('jt', project='prj', survey='submitted_email')
|
||||
return objects.job_template
|
||||
|
||||
|
||||
# Survey license-based denial tests
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
|
||||
@pytest.mark.django_db
|
||||
@ -31,6 +33,7 @@ def test_survey_spec_view_denied(job_template_with_survey, get, admin_user):
|
||||
args=(job_template_with_survey.id,)), admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Your license does not allow adding surveys.'
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -39,6 +42,7 @@ def test_deny_enabling_survey(deploy_jobtemplate, patch, admin_user):
|
||||
data=dict(survey_enabled=True), user=admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -48,6 +52,7 @@ def test_job_start_blocked_without_survey_license(job_template_with_survey, admi
|
||||
with pytest.raises(LicenseForbids):
|
||||
access.can_start(job_template_with_survey)
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -65,6 +70,7 @@ def test_deny_creating_with_survey(project, post, admin_user):
|
||||
user=admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
|
||||
# Test normal operations with survey license work
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@ -73,6 +79,7 @@ def test_survey_spec_view_allowed(deploy_jobtemplate, get, admin_user):
|
||||
get(reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
|
||||
admin_user, expect=200)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -83,6 +90,7 @@ def test_survey_spec_sucessful_creation(survey_spec_factory, job_template, post,
|
||||
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
|
||||
assert updated_jt.survey_spec == survey_input_data
|
||||
|
||||
|
||||
# Tests related to survey content validation
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@ -96,6 +104,7 @@ def test_survey_spec_non_dict_error(deploy_jobtemplate, post, admin_user):
|
||||
user=admin_user, expect=400)
|
||||
assert response.data['error'] == "Survey question 0 is not a json object."
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -106,6 +115,7 @@ def test_survey_spec_dual_names_error(survey_spec_factory, deploy_jobtemplate, p
|
||||
user=user('admin', True), expect=400)
|
||||
assert response.data['error'] == "'variable' 'submitter_email' duplicated in survey question 1."
|
||||
|
||||
|
||||
# Test actions that should be allowed with non-survey license
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@ -115,6 +125,7 @@ def test_disable_survey_access_without_license(job_template_with_survey, admin_u
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_change(job_template_with_survey, dict(survey_enabled=False))
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -124,6 +135,7 @@ def test_delete_survey_access_without_license(job_template_with_survey, admin_us
|
||||
assert access.can_change(job_template_with_survey, dict(survey_spec=None))
|
||||
assert access.can_change(job_template_with_survey, dict(survey_spec={}))
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -137,6 +149,7 @@ def test_job_start_allowed_with_survey_spec(job_template_factory, admin_user):
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_start(job_template_with_survey, {})
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
@ -146,6 +159,7 @@ def test_job_template_delete_access_with_survey(job_template_with_survey, admin_
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_delete(job_template_with_survey)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@ -157,6 +171,7 @@ def test_delete_survey_spec_without_license(job_template_with_survey, delete, ad
|
||||
new_jt = JobTemplate.objects.get(pk=job_template_with_survey.pk)
|
||||
assert new_jt.survey_spec == {}
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', lambda self, **kwargs: True)
|
||||
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
|
||||
lambda self, extra_vars: mock.MagicMock(spec=Job, id=968))
|
||||
@ -174,6 +189,7 @@ def test_launch_survey_enabled_but_no_survey_spec(job_template_factory, post, ad
|
||||
dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)
|
||||
assert 'survey_var' in response.data['ignored_fields']['extra_vars']
|
||||
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
|
||||
lambda self: mock.MagicMock(spec=Job, id=968))
|
||||
@ -191,6 +207,7 @@ def test_launch_with_non_empty_survey_spec_no_license(job_template_factory, post
|
||||
obj.save()
|
||||
post(reverse('api:job_template_launch', args=[obj.pk]), {}, admin_user, expect=201)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_redact_survey_passwords_in_activity_stream(job_template_with_survey_passwords):
|
||||
|
||||
@ -35,6 +35,7 @@ def test_cases(project):
|
||||
ret.append(e)
|
||||
return ret
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def negative_test_cases(job_factory):
|
||||
ret = []
|
||||
@ -53,6 +54,7 @@ formats = [
|
||||
('html', 'text/html'),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("format,content_type", formats)
|
||||
@pytest.mark.django_db
|
||||
def test_project_update_redaction_enabled(get, format, content_type, test_cases, admin):
|
||||
@ -66,6 +68,7 @@ def test_project_update_redaction_enabled(get, format, content_type, test_cases,
|
||||
assert test_data['uri'].password not in content
|
||||
assert content.count(test_data['uri'].host) == test_data['occurrences']
|
||||
|
||||
|
||||
@pytest.mark.parametrize("format,content_type", formats)
|
||||
@pytest.mark.django_db
|
||||
def test_job_redaction_disabled(get, format, content_type, negative_test_cases, admin):
|
||||
@ -80,7 +83,6 @@ def test_job_redaction_disabled(get, format, content_type, negative_test_cases,
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_options_fields_choices(instance, options, user):
|
||||
|
||||
url = reverse('api:unified_job_list')
|
||||
response = options(url, None, user('admin', True))
|
||||
|
||||
@ -89,5 +91,3 @@ def test_options_fields_choices(instance, options, user):
|
||||
assert UnifiedJob.LAUNCH_TYPE_CHOICES == response.data['actions']['GET']['launch_type']['choices']
|
||||
assert 'choice' == response.data['actions']['GET']['status']['type']
|
||||
assert UnifiedJob.STATUS_CHOICES == response.data['actions']['GET']['status']['choices']
|
||||
|
||||
|
||||
|
||||
@ -7,6 +7,7 @@ from django.core.urlresolvers import reverse
|
||||
# user creation
|
||||
#
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_create(post, admin):
|
||||
response = post(reverse('api:user_list'), {
|
||||
@ -19,6 +20,7 @@ def test_user_create(post, admin):
|
||||
}, admin)
|
||||
assert response.status_code == 201
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_fail_double_create_user(post, admin):
|
||||
response = post(reverse('api:user_list'), {
|
||||
@ -41,6 +43,7 @@ def test_fail_double_create_user(post, admin):
|
||||
}, admin)
|
||||
assert response.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_delete_create_user(post, delete, admin):
|
||||
response = post(reverse('api:user_list'), {
|
||||
|
||||
@ -3,6 +3,7 @@ import time
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fact_msg_base(inventory, hosts):
|
||||
host_objs = hosts(1)
|
||||
@ -13,6 +14,7 @@ def fact_msg_base(inventory, hosts):
|
||||
'inventory_id': inventory.id
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fact_msg_small(fact_msg_base):
|
||||
fact_msg_base['facts'] = {
|
||||
@ -77,7 +79,7 @@ def fact_msg_small(fact_msg_base):
|
||||
}
|
||||
}
|
||||
return fact_msg_base
|
||||
|
||||
|
||||
|
||||
'''
|
||||
Facts sent from ansible to our fact cache reciever.
|
||||
@ -92,18 +94,20 @@ key of 'ansible'
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fact_msg_ansible(fact_msg_base, fact_ansible_json):
|
||||
fact_msg_base['facts'] = fact_ansible_json
|
||||
return fact_msg_base
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fact_msg_packages(fact_msg_base, fact_packages_json):
|
||||
fact_msg_base['facts']['packages'] = fact_packages_json
|
||||
return fact_msg_base
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fact_msg_services(fact_msg_base, fact_services_json):
|
||||
fact_msg_base['facts']['services'] = fact_services_json
|
||||
return fact_msg_base
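For readers who have not seen the receiver's input, the message these fixtures assemble is roughly the dictionary below. The field names follow fact_msg_base and the docstring above; every value is invented for illustration. Note the asymmetry visible in the fixtures: 'packages' and 'services' facts sit under a module key inside 'facts', while fact_msg_ansible assigns the whole ansible facts blob to 'facts' directly.

# Illustrative shape only; values are made up.
example_fact_msg = {
    'host': 'hostname1',
    'date_key': 1461432529,   # epoch seconds identifying the scan
    'inventory_id': 1,
    'facts': {
        'packages': {'acl': [{'name': 'acl', 'version': '2.2.52'}]},
        'services': {'sshd': {'name': 'sshd', 'state': 'running'}},
    },
}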
|
||||
|
||||
|
||||
@ -16,12 +16,15 @@ from awx.main.management.commands.cleanup_facts import CleanupFacts, Command
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
|
||||
|
||||
def mock_feature_enabled(feature):
|
||||
return True
|
||||
|
||||
|
||||
def mock_feature_disabled(feature):
|
||||
return False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cleanup_granularity(fact_scans, hosts):
|
||||
epoch = timezone.now()
|
||||
@ -35,11 +38,12 @@ def test_cleanup_granularity(fact_scans, hosts):
|
||||
deleted_count = cleanup_facts.cleanup(timestamp_future, granularity)
|
||||
assert 60 == deleted_count
|
||||
|
||||
'''
Delete half of the scans
'''

@pytest.mark.django_db
def test_cleanup_older_than(fact_scans, hosts):
'''
Delete half of the scans
'''
epoch = timezone.now()
hosts(5)
fact_scans(28, timestamp_epoch=epoch)
|
||||
@ -51,6 +55,7 @@ def test_cleanup_older_than(fact_scans, hosts):
|
||||
deleted_count = cleanup_facts.cleanup(fact_middle.timestamp, granularity)
|
||||
assert 210 == deleted_count
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_cleanup_older_than_granularity_module(fact_scans, hosts):
|
||||
epoch = timezone.now()
|
||||
@ -65,11 +70,11 @@ def test_cleanup_older_than_granularity_module(fact_scans, hosts):
|
||||
assert 20 == deleted_count
|
||||
|
||||
|
||||
'''
Reduce the granularity of half of the facts scans, by half.
'''
@pytest.mark.django_db
def test_cleanup_logic(fact_scans, hosts):
'''
Reduce the granularity of half of the facts scans, by half.
'''
epoch = timezone.now()
hosts = hosts(5)
fact_scans(60, timestamp_epoch=epoch)
|
||||
@ -95,6 +100,7 @@ def test_cleanup_logic(fact_scans, hosts):
|
||||
timestamp_pivot -= granularity
|
||||
assert fact.timestamp == timestamp_pivot
|
||||
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.license_feature
|
||||
@ -104,6 +110,7 @@ def test_system_tracking_feature_disabled(mocker):
|
||||
cmd.handle(None)
|
||||
assert 'The System Tracking feature is not enabled for your Tower instance' in err.value
|
||||
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_parameters_ok(mocker):
|
||||
@ -117,6 +124,7 @@ def test_parameters_ok(mocker):
|
||||
cmd.handle(None, **kv)
|
||||
run.assert_called_once_with(relativedelta(days=1), relativedelta(days=1), module=None)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_string_time_to_timestamp_ok():
|
||||
kvs = [
|
||||
@ -146,6 +154,7 @@ def test_string_time_to_timestamp_ok():
|
||||
res = cmd.string_time_to_timestamp(kv['time'])
|
||||
assert kv['timestamp'] == res
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_string_time_to_timestamp_invalid():
|
||||
kvs = [
|
||||
@ -175,6 +184,7 @@ def test_string_time_to_timestamp_invalid():
|
||||
res = cmd.string_time_to_timestamp(kv['time'])
|
||||
assert res is None
|
||||
|
||||
|
||||
@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
def test_parameters_fail(mocker):
|
||||
@ -197,4 +207,3 @@ def test_parameters_fail(mocker):
|
||||
with pytest.raises(CommandError) as err:
|
||||
cmd.handle(None, older_than=kv['older_than'], granularity=kv['granularity'])
|
||||
assert kv['msg'] in err.value
|
||||
|
||||
|
||||
@ -10,6 +10,7 @@ from django.core.management import call_command
|
||||
|
||||
from awx.main.management.commands.update_password import UpdatePassword
|
||||
|
||||
|
||||
def run_command(name, *args, **options):
|
||||
command_runner = options.pop('command_runner', call_command)
|
||||
stdin_fileobj = options.pop('stdin_fileobj', None)
|
||||
|
||||
@ -14,6 +14,7 @@ from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiv
|
||||
from awx.main.models.fact import Fact
|
||||
from awx.main.models.inventory import Host
|
||||
|
||||
|
||||
# TODO: Check that timestamp and other attributes are as expected
|
||||
def check_process_fact_message_module(fact_returned, data, module_name):
|
||||
date_key = data['date_key']
|
||||
@ -36,6 +37,7 @@ def check_process_fact_message_module(fact_returned, data, module_name):
|
||||
assert timestamp == fact_returned.timestamp
|
||||
assert module_name == fact_returned.module
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_process_fact_message_ansible(fact_msg_ansible):
|
||||
receiver = FactCacheReceiver()
|
||||
@ -43,6 +45,7 @@ def test_process_fact_message_ansible(fact_msg_ansible):
|
||||
|
||||
check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_process_fact_message_packages(fact_msg_packages):
|
||||
receiver = FactCacheReceiver()
|
||||
@ -50,6 +53,7 @@ def test_process_fact_message_packages(fact_msg_packages):
|
||||
|
||||
check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_process_fact_message_services(fact_msg_services):
|
||||
receiver = FactCacheReceiver()
|
||||
@ -57,15 +61,16 @@ def test_process_fact_message_services(fact_msg_services):
|
||||
|
||||
check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
|
||||
|
||||
'''
We pickypack our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>. Where facts is a json blob of all the facts.
This makes it hard to decipher what facts are new/changed.
Because of this, we handle the same fact module data being sent multiple times
and just keep the newest version.
'''

@pytest.mark.django_db
def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
'''
We pickypack our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>. Where facts is a json blob of all the facts.
This makes it hard to decipher what facts are new/changed.
Because of this, we handle the same fact module data being sent multiple times
and just keep the newest version.
'''
#epoch = timezone.now()
epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
fact_scans(fact_scans=1, timestamp_epoch=epoch)
|
||||
@ -82,6 +87,7 @@ def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible):
|
||||
assert key in fact_obj.facts
|
||||
assert fact_msg_ansible['facts'] == (json.loads(fact_obj.facts) if isinstance(fact_obj.facts, unicode) else fact_obj.facts) # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
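A behavioural sketch of the rule spelled out in the docstring above, kept separate from the real test: resending the same module payload should overwrite the stored copy rather than add a second Fact row. The fixture and receiver class come from this diff; the count-based assertion is a hypothetical restatement of the documented behaviour, not a test from the commit.

import pytest

from awx.main.management.commands.run_fact_cache_receiver import FactCacheReceiver
from awx.main.models.fact import Fact


@pytest.mark.django_db
def test_resend_keeps_newest_sketch(fact_msg_ansible):
    receiver = FactCacheReceiver()
    receiver.process_fact_message(fact_msg_ansible)
    receiver.process_fact_message(fact_msg_ansible)
    # Only one 'ansible' Fact should remain for the host after the resend.
    assert Fact.objects.filter(module='ansible').count() == 1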
|
||||
|
||||
|
||||
# Ensure that the message flows from the socket through to process_fact_message()
@pytest.mark.django_db
def test_run_receiver(mocker, fact_msg_ansible):
@ -43,24 +43,27 @@ from awx.main.models.notifications import (
Notification
)

'''
Disable all django model signals.
'''

@pytest.fixture(scope="session", autouse=False)
def disable_signals():
'''
Disable all django model signals.
'''
mocked = mock.patch('django.dispatch.Signal.send', autospec=True)
mocked.start()

'''
FIXME: Not sure how "far" just setting the BROKER_URL will get us.
We may need to influence CELERY's configuration like we do in the old unit tests (see base.py)

Allows django signal code to execute without the need for redis
'''
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
'''
FIXME: Not sure how "far" just setting the BROKER_URL will get us.
We may need to influence CELERY's configuration like we do in the old unit tests (see base.py)

Allows django signal code to execute without the need for redis
'''
settings.BROKER_URL='memory://localhost/'
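`disable_signals` is session-scoped but not autouse, so a test opts in by naming it. A usage sketch follows; the model and assertion below are placeholders, not part of this diff:

# Hypothetical opt-in usage: requesting the fixture patches Signal.send for
# the session, so model saves run without their signal handlers.
@pytest.mark.django_db
def test_create_without_signal_side_effects(disable_signals, organization):
    team = organization.teams.create(name='signal-free-team')
    assert team.pk is not None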
@pytest.fixture
def user():
def u(name, is_superuser=False):
@ -72,6 +75,7 @@ def user():
return user
return u
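`user` returns a factory rather than a single user object; the `admin`, `alice`, and `bob` fixtures further down call it as `user(name, is_superuser)`, and a test can do the same:

# Usage sketch based on how admin/alice/bob are built later in this file.
@pytest.mark.django_db
def test_user_factory_flags(user):
    boss = user('boss', True)      # superuser
    intern = user('intern')        # defaults to is_superuser=False
    assert boss.is_superuser and not intern.is_superuser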
@pytest.fixture
def check_jobtemplate(project, inventory, credential):
return \
@ -83,6 +87,7 @@ def check_jobtemplate(project, inventory, credential):
name='check-job-template'
)

@pytest.fixture
def deploy_jobtemplate(project, inventory, credential):
return \
@ -94,10 +99,12 @@ def deploy_jobtemplate(project, inventory, credential):
name='deploy-job-template'
)

@pytest.fixture
def team(organization):
return organization.teams.create(name='test-team')

@pytest.fixture
def team_member(user, team):
ret = user('team-member', False)
@ -115,6 +122,7 @@ def project(instance, organization):
)
return prj

@pytest.fixture
def project_factory(organization):
def factory(name):
@ -128,12 +136,14 @@ def project_factory(organization):
return prj
return factory

@pytest.fixture
def job_factory(job_template, admin):
def factory(job_template=job_template, initial_state='new', created_by=admin):
return job_template.create_job(created_by=created_by, status=initial_state)
return factory
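`job_factory` follows the same factory pattern; its keyword defaults are visible in the signature above, so a test only overrides what it needs. The status assertion below assumes the created job keeps the state it was created with:

# Usage sketch; initial_state comes straight from the factory signature above.
@pytest.mark.django_db
def test_job_factory_initial_state(job_factory):
    job = job_factory(initial_state='pending')
    assert job.status == 'pending'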
@pytest.fixture
def team_factory(organization):
def factory(name):
@ -146,35 +156,43 @@ def team_factory(organization):
return t
return factory

@pytest.fixture
def user_project(user):
owner = user('owner')
return Project.objects.create(name="test-user-project", created_by=owner, description="test-user-project-desc")

@pytest.fixture
def instance(settings):
return Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="instance.example.org", capacity=100)

@pytest.fixture
def organization(instance):
return Organization.objects.create(name="test-org", description="test-org-desc")

@pytest.fixture
def credential():
return Credential.objects.create(kind='aws', name='test-cred', username='something', password='secret')

@pytest.fixture
def machine_credential():
return Credential.objects.create(name='machine-cred', kind='ssh', username='test_user', password='pas4word')

@pytest.fixture
def org_credential(organization):
return Credential.objects.create(kind='aws', name='test-cred', username='something', password='secret', organization=organization)

@pytest.fixture
def inventory(organization):
return organization.inventories.create(name="test-inv")

@pytest.fixture
def inventory_factory(organization):
def factory(name, org=organization):
@ -185,10 +203,12 @@ def inventory_factory(organization):
return inv
return factory

@pytest.fixture
def label(organization):
return organization.labels.create(name="test-label", description="test-label-desc")

@pytest.fixture
def notification_template(organization):
return NotificationTemplate.objects.create(name='test-notification_template',
@ -197,6 +217,7 @@ def notification_template(organization):
notification_configuration=dict(url="http://localhost",
headers={"Test": "Header"}))

@pytest.fixture
def notification(notification_template):
return Notification.objects.create(notification_template=notification_template,
@ -206,27 +227,33 @@ def notification(notification_template):
recipients='admin@redhat.com',
subject='email subject')

@pytest.fixture
def job_template_with_survey_passwords(job_template_with_survey_passwords_factory):
return job_template_with_survey_passwords_factory(persisted=True)

@pytest.fixture
def admin(user):
return user('admin', True)

@pytest.fixture
def alice(user):
return user('alice', False)

@pytest.fixture
def bob(user):
return user('bob', False)

@pytest.fixture
def rando(user):
"Rando, the random user that doesn't have access to anything"
return user('rando', False)

@pytest.fixture
def org_admin(user, organization):
ret = user('org-admin', False)
@ -234,6 +261,7 @@ def org_admin(user, organization):
organization.member_role.members.add(ret)
return ret

@pytest.fixture
def org_auditor(user, organization):
ret = user('org-auditor', False)
@ -241,12 +269,14 @@ def org_auditor(user, organization):
organization.member_role.members.add(ret)
return ret

@pytest.fixture
def org_member(user, organization):
ret = user('org-member', False)
organization.member_role.members.add(ret)
return ret

@pytest.fixture
def organizations(instance):
def rf(organization_count=1):
@ -257,6 +287,7 @@ def organizations(instance):
return orgs
return rf

@pytest.fixture
def group_factory(inventory):
def g(name):
@ -266,6 +297,7 @@ def group_factory(inventory):
return Group.objects.create(inventory=inventory, name=name)
return g

@pytest.fixture
def hosts(group_factory):
group1 = group_factory('group-1')
@ -281,23 +313,28 @@ def hosts(group_factory):
return hosts
return rf

@pytest.fixture
def group(inventory):
return inventory.groups.create(name='single-group')

@pytest.fixture
def inventory_source(group, inventory):
return InventorySource.objects.create(name=group.name, group=group,
inventory=inventory, source='gce')

@pytest.fixture
def inventory_update(inventory_source):
return InventoryUpdate.objects.create(inventory_source=inventory_source)

@pytest.fixture
def host(group, inventory):
return group.hosts.create(name='single-host', inventory=inventory)

@pytest.fixture
def permissions():
return {
@ -339,36 +376,42 @@ def _request(verb):
return response
return rf

@pytest.fixture
def post():
return _request('post')

@pytest.fixture
def get():
return _request('get')

@pytest.fixture
def put():
return _request('put')

@pytest.fixture
def patch():
return _request('patch')

@pytest.fixture
def delete():
return _request('delete')

@pytest.fixture
def head():
return _request('head')

@pytest.fixture
def options():
return _request('options')
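All of the verb fixtures above delegate to the `_request(verb)` closure factory, whose body is mostly elided by the hunk; it presumably builds a request for the given verb and returns the view's response. A self-contained sketch of that pattern (the argument list and the dict return value are assumptions standing in for the real request/response objects):

# Stand-alone illustration of the closure-factory pattern behind _request().
def _request_sketch(verb):
    def rf(url, data=None, user=None):
        return {'verb': verb, 'url': url, 'data': data, 'user': user}
    return rf

post = _request_sketch('post')
assert post('/api/v1/hosts/', data={'name': 'h1'})['verb'] == 'post'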
@pytest.fixture
def fact_scans(group_factory, fact_ansible_json, fact_packages_json, fact_services_json):
group1 = group_factory('group-1')
@ -391,27 +434,33 @@ def fact_scans(group_factory, fact_ansible_json, fact_packages_json, fact_servic
return facts
return rf

def _fact_json(module_name):
current_dir = os.path.dirname(os.path.realpath(__file__))
with open('%s/%s.json' % (current_dir, module_name)) as f:
return json.load(f)

@pytest.fixture
def fact_ansible_json():
return _fact_json('ansible')

@pytest.fixture
def fact_packages_json():
return _fact_json('packages')

@pytest.fixture
def fact_services_json():
return _fact_json('services')
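`fact_scans` and the `*_json` fixtures above are consumed by the fact timeline tests later in this diff; their calling convention is visible there and, combined with the `hosts` factory, looks like this (the exact shape of the returned lists is an assumption):

from django.utils import timezone

# Usage mirrors the fact timeline tests further down in this diff.
@pytest.mark.django_db
def test_fact_fixture_wiring(hosts, fact_scans):
    epoch = timezone.now()
    host_list = hosts(host_count=2)
    facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
    assert host_list and facts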
@pytest.fixture
def permission_inv_read(organization, inventory, team):
return Permission.objects.create(inventory=inventory, team=team, permission_type=PERM_INVENTORY_READ)

@pytest.fixture
def job_template(organization):
jt = JobTemplate(name='test-job_template')
@ -419,6 +468,7 @@ def job_template(organization):

return jt

@pytest.fixture
def job_template_labels(organization, job_template):
job_template.labels.create(name="label-1", organization=organization)
@ -8,6 +8,7 @@ from datetime import datetime
from awx.main.models import Host
from awx.main.task_engine import TaskEnhancer

@pytest.mark.django_db
def test_license_writer(inventory, admin):
task_enhancer = TaskEnhancer(
@ -50,6 +51,7 @@ def test_license_writer(inventory, admin):
assert vdata['compliant'] is False
assert vdata['subscription_name']

@pytest.mark.django_db
def test_expired_licenses():
task_enhancer = TaskEnhancer(
@ -5,6 +5,7 @@ from django.utils import timezone

from awx.main.models import Fact

@pytest.mark.django_db
def test_newest_scan_exact(hosts, fact_scans):
epoch = timezone.now()
@ -20,18 +21,19 @@ def test_newest_scan_exact(hosts, fact_scans):

assert fact_found == fact_known

'''
Show me the most recent state of the system at any point in time;
or, said differently:
For any timestamp, get the first scan that is <= the timestamp.
'''

'''
Ensure the most recent scan run is the scan returned.
Query by future date.
'''
@pytest.mark.django_db
def test_newest_scan_less_than(hosts, fact_scans):
'''
Show me the most recent state of the system at any point in time;
or, said differently:
For any timestamp, get the first scan that is <= the timestamp.
'''

'''
Ensure the most recent scan run is the scan returned.
Query by future date.
'''
epoch = timezone.now()
timestamp_future = epoch + timedelta(days=10)
hosts = hosts(host_count=2)
@ -48,11 +50,12 @@ def test_newest_scan_less_than(hosts, fact_scans):

assert fact_found == fact_known

'''
Tests querying a Fact that is in the middle of the fact scan timeline, but not at an exact timestamp.
'''

@pytest.mark.django_db
def test_query_middle_of_timeline(hosts, fact_scans):
'''
Tests querying a Fact that is in the middle of the fact scan timeline, but not at an exact timestamp.
'''
epoch = timezone.now()
timestamp_middle = epoch + timedelta(days=1, hours=3)
hosts = hosts(host_count=2)
@ -69,11 +72,12 @@ def test_query_middle_of_timeline(hosts, fact_scans):

assert fact_found == fact_known

'''
Query time less than any fact scan. Should return None.
'''

@pytest.mark.django_db
def test_query_result_empty(hosts, fact_scans):
'''
Query time less than any fact scan. Should return None.
'''
epoch = timezone.now()
timestamp_less = epoch - timedelta(days=1)
hosts = hosts(host_count=2)
@ -83,11 +87,12 @@ def test_query_result_empty(hosts, fact_scans):

assert fact_found is None

'''
Query by a fact module other than 'ansible'.
'''

@pytest.mark.django_db
def test_by_module(hosts, fact_scans):
'''
Query by a fact module other than 'ansible'.
'''
epoch = timezone.now()
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
@ -108,4 +113,3 @@ def test_by_module(hosts, fact_scans):

assert fact_found_services == fact_known_services
assert fact_found_packages == fact_known_packages
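The docstrings above describe a "latest scan at or before a given timestamp" lookup. Expressed directly in the Django ORM, that behaviour reads roughly as follows; this is an illustration of the described semantics, not the actual model method these tests call (which the hunks elide), and it assumes Fact carries host, module, and timestamp fields as the fixtures suggest:

from awx.main.models import Fact

# Illustration only: newest Fact for a host/module at or before `timestamp`.
def newest_fact_at(host_id, module, timestamp):
    return (Fact.objects
                .filter(host__id=host_id, module=module,
                        timestamp__lte=timestamp)
                .order_by('-timestamp')
                .first())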
@ -5,6 +5,7 @@ from django.utils import timezone

from awx.main.models import Fact

def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now(), module_name='ansible', ts_known=None):
hosts = hosts(host_count=2)
facts = fact_scans(fact_scans=3, timestamp_epoch=epoch)
@ -20,6 +21,7 @@ def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now
fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
return (facts_known, fact_objs)
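`Fact.get_timeline` is driven below with optional `ts_from`/`ts_to` bounds, and the tests pin down its semantics: `ts_from` is exclusive, `ts_to` is inclusive, and results come back newest first. A sketch of an equivalent ORM query, as an illustration rather than the model's real implementation:

from awx.main.models import Fact

# Semantics implied by the tests below: from-exclusive, to-inclusive,
# ordered newest first. Illustration only.
def timeline_sketch(host_id, module=None, ts_from=None, ts_to=None):
    qs = Fact.objects.filter(host__id=host_id)
    if module:
        qs = qs.filter(module=module)
    if ts_from:
        qs = qs.filter(timestamp__gt=ts_from)
    if ts_to:
        qs = qs.filter(timestamp__lte=ts_to)
    return qs.order_by('-timestamp')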
@pytest.mark.django_db
def test_all(hosts, fact_scans):
epoch = timezone.now()
@ -30,6 +32,7 @@ def test_all(hosts, fact_scans):
assert 9 == len(facts_known)
assert 9 == len(fact_objs)

@pytest.mark.django_db
def test_all_ansible(hosts, fact_scans):
epoch = timezone.now()
@ -43,6 +46,7 @@ def test_all_ansible(hosts, fact_scans):
for i in xrange(len(facts_known) - 1, 0):
assert facts_known[i].id == fact_objs[i].id

@pytest.mark.django_db
def test_empty_db(hosts, fact_scans):
hosts = hosts(host_count=2)
@ -54,6 +58,7 @@ def test_empty_db(hosts, fact_scans):

assert 0 == len(fact_objs)

@pytest.mark.django_db
def test_no_results(hosts, fact_scans):
epoch = timezone.now()
@ -63,6 +68,7 @@ def test_no_results(hosts, fact_scans):
(facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
assert 0 == len(fact_objs)

@pytest.mark.django_db
def test_exact_same_equal(hosts, fact_scans):
epoch = timezone.now()
@ -74,6 +80,7 @@ def test_exact_same_equal(hosts, fact_scans):

assert facts_known[0].id == fact_objs[0].id

@pytest.mark.django_db
def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):
epoch = timezone.now()
@ -87,6 +94,7 @@ def test_exact_from_exclusive_to_inclusive(hosts, fact_scans):

assert facts_known[0].id == fact_objs[0].id

@pytest.mark.django_db
def test_to_lte(hosts, fact_scans):
epoch = timezone.now()
@ -101,6 +109,7 @@ def test_to_lte(hosts, fact_scans):
for i in xrange(0, len(fact_objs)):
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id

@pytest.mark.django_db
def test_from_gt(hosts, fact_scans):
epoch = timezone.now()
@ -115,6 +124,7 @@ def test_from_gt(hosts, fact_scans):
for i in xrange(0, len(fact_objs)):
assert facts_known_subset[len(facts_known_subset) - i - 1].id == fact_objs[i].id

@pytest.mark.django_db
def test_no_ts(hosts, fact_scans):
epoch = timezone.now()
@ -125,5 +135,3 @@ def test_no_ts(hosts, fact_scans):

for i in xrange(len(facts_known) - 1, 0):
assert facts_known[i].id == fact_objs[i].id
@ -14,7 +14,6 @@ from django.test import TransactionTestCase

@pytest.mark.django_db
class TestWorkflowDAGFunctional(TransactionTestCase):

def workflow_job(self):
wfj = WorkflowJob.objects.create()
nodes = [WorkflowJobNode.objects.create(workflow_job=wfj) for i in range(0, 5)]
@ -35,6 +34,7 @@ class TestWorkflowDAGFunctional(TransactionTestCase):
with self.assertNumQueries(4):
dag._init_graph(wfj)
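The `assertNumQueries(4)` guard above pins DAG initialisation to a fixed query budget. The pattern itself is standard Django testing machinery; a generic, self-contained example (unrelated to the workflow models) looks like:

from django.contrib.auth.models import User
from django.test import TransactionTestCase

# Generic query-budget guard, the same pattern used by the workflow test above.
class QueryBudgetExample(TransactionTestCase):
    def test_listing_users_is_one_query(self):
        User.objects.bulk_create(User(username='u%d' % i) for i in range(5))
        with self.assertNumQueries(1):
            list(User.objects.all())  # a single SELECT regardless of row count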
@pytest.mark.django_db
class TestWorkflowJob:
@pytest.fixture
@ -95,9 +95,9 @@ class TestWorkflowJob:
assert queued_node.get_job_kwargs()['extra_vars'] == {'a': 42, 'b': 43}
assert queued_node.ancestor_artifacts == {'a': 42, 'b': 43}

@pytest.mark.django_db
class TestWorkflowJobTemplate:

@pytest.fixture
def wfjt(self, workflow_job_template_factory):
wfjt = workflow_job_template_factory('test').workflow_job_template
@ -134,6 +134,7 @@ class TestWorkflowJobTemplate:
assert (test_view.is_valid_relation(nodes[2], node_assoc_1) ==
{'Error': 'Cannot associate failure_nodes when always_nodes have been associated.'})

@pytest.mark.django_db
class TestWorkflowJobFailure:
"""
Some files were not shown because too many files have changed in this diff.