diff --git a/.gitignore b/.gitignore
index b928791ed5..b452f7ee24 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,6 +53,7 @@ npm-debug.log
/DEBUG
# Testing
+.cache
.coverage
.tox
coverage.xml
@@ -60,6 +61,7 @@ htmlcov
pep8.txt
scratch
testem.log
+awx/awx_test.sqlite3-journal
# Mac OS X
*.DS_Store
diff --git a/Makefile b/Makefile
index 3bc4119191..c5735982b9 100644
--- a/Makefile
+++ b/Makefile
@@ -154,7 +154,7 @@ endif
.PHONY: clean rebase push requirements requirements_dev requirements_jenkins \
real-requirements real-requirements_dev real-requirements_jenkins \
- develop refresh adduser syncdb migrate dbchange dbshell runserver celeryd \
+ develop refresh adduser migrate dbchange dbshell runserver celeryd \
receiver test test_unit test_coverage coverage_html test_jenkins dev_build \
release_build release_clean sdist rpmtar mock-rpm mock-srpm rpm-sign \
build-ui sync-ui test-ui build-ui-for-coverage test-ui-for-coverage \
@@ -280,13 +280,9 @@ refresh: clean requirements_dev version_file develop migrate
adduser:
$(PYTHON) manage.py createsuperuser
-# Create initial database tables (excluding migrations).
-syncdb:
- $(PYTHON) manage.py syncdb --noinput
-
# Create database tables and apply any new migrations.
-migrate: syncdb
- $(PYTHON) manage.py migrate --noinput
+migrate:
+ $(PYTHON) manage.py migrate --noinput --fake-initial
# Run after making changes to the models to create a new migration.
dbchange:
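Context for the Makefile change: Django 1.7+ removed the `syncdb` command, and `migrate --fake-initial` marks initial migrations as applied when their tables already exist (as on upgraded databases). A minimal sketch of the same invocation from Python, assuming `DJANGO_SETTINGS_MODULE` is set:

```python
# Equivalent of `manage.py migrate --noinput --fake-initial` from code.
# fake_initial tells Django to skip initial migrations whose tables already exist.
import django
from django.core.management import call_command

django.setup()  # assumes DJANGO_SETTINGS_MODULE is configured
call_command('migrate', interactive=False, fake_initial=True)
```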
diff --git a/awx/__init__.py b/awx/__init__.py
index 701b44347e..d597dff532 100644
--- a/awx/__init__.py
+++ b/awx/__init__.py
@@ -75,10 +75,26 @@ def prepare_env():
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
# Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
- from django.db.backends import BaseDatabaseWrapper
- from django.db.backends.util import CursorWrapper
+ from django.db.backends.base.base import BaseDatabaseWrapper
+ from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
+ # Use the default devserver addr/port defined in settings for runserver.
+ default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
+ default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
+ from django.core.management.commands import runserver as core_runserver
+ original_handle = core_runserver.Command.handle
+
+ def handle(self, *args, **options):
+ if not options.get('addrport'):
+ options['addrport'] = '%s:%d' % (default_addr, int(default_port))
+ elif options.get('addrport').isdigit():
+ options['addrport'] = '%s:%d' % (default_addr, int(options['addrport']))
+ return original_handle(self, *args, **options)
+
+ core_runserver.Command.handle = handle
+
+
def manage():
# Prepare the AWX environment.
prepare_env()
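The patched `handle()` accepts three invocation forms. A small standalone illustration of the fallback logic (hypothetical default values; the real ones come from `DEVSERVER_DEFAULT_ADDR`/`DEVSERVER_DEFAULT_PORT`):

```python
# How the patched runserver resolves its addrport argument, assuming
# default_addr='127.0.0.1' and default_port=8013 (hypothetical values).
def resolve_addrport(addrport, default_addr='127.0.0.1', default_port=8013):
    if not addrport:
        return '%s:%d' % (default_addr, default_port)
    if addrport.isdigit():
        return '%s:%d' % (default_addr, int(addrport))
    return addrport

assert resolve_addrport(None) == '127.0.0.1:8013'      # manage.py runserver
assert resolve_addrport('8080') == '127.0.0.1:8080'    # manage.py runserver 8080
assert resolve_addrport('0.0.0.0:80') == '0.0.0.0:80'  # explicit addr:port wins
```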
diff --git a/awx/api/filters.py b/awx/api/filters.py
index bc3f05d37c..73afbc178b 100644
--- a/awx/api/filters.py
+++ b/awx/api/filters.py
@@ -8,9 +8,10 @@ import re
from django.core.exceptions import FieldError, ValidationError
from django.db import models
from django.db.models import Q
-from django.db.models.related import RelatedObject
from django.db.models.fields import FieldDoesNotExist
+from django.db.models.fields.related import ForeignObjectRel
from django.contrib.contenttypes.models import ContentType
+from django.utils.encoding import force_text
# Django REST Framework
from rest_framework.exceptions import ParseError
@@ -46,7 +47,7 @@ class TypeFilterBackend(BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
try:
types = None
- for key, value in request.QUERY_PARAMS.items():
+ for key, value in request.query_params.items():
if key == 'type':
if ',' in value:
types = value.split(',')
@@ -107,23 +108,21 @@ class FieldLookupBackend(BaseFilterBackend):
'last_updated': 'last_job_run',
}.get(name, name)
+ new_parts.append(name)
+
if name == 'pk':
field = model._meta.pk
else:
field = model._meta.get_field_by_name(name)[0]
- if n < (len(parts) - 2):
- if getattr(field, 'rel', None):
- model = field.rel.to
- else:
- model = field.model
- new_parts.append(name)
+ model = getattr(field, 'related_model', None) or field.model
+
if parts:
new_parts.append(parts[-1])
new_lookup = '__'.join(new_parts)
return field, new_lookup
def to_python_related(self, value):
- value = unicode(value)
+ value = force_text(value)
if value.lower() in ('none', 'null'):
return None
else:
@@ -134,7 +133,7 @@ class FieldLookupBackend(BaseFilterBackend):
return to_python_boolean(value, allow_none=True)
elif isinstance(field, models.BooleanField):
return to_python_boolean(value)
- elif isinstance(field, RelatedObject):
+ elif isinstance(field, ForeignObjectRel):
return self.to_python_related(value)
else:
return field.to_python(value)
@@ -159,12 +158,12 @@ class FieldLookupBackend(BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
try:
- # Apply filters specified via QUERY_PARAMS. Each entry in the lists
+ # Apply filters specified via query_params. Each entry in the lists
# below is (negate, field, value).
and_filters = []
or_filters = []
chain_filters = []
- for key, values in request.QUERY_PARAMS.lists():
+ for key, values in request.query_params.lists():
if key in self.RESERVED_NAMES:
continue
@@ -246,7 +245,7 @@ class OrderByBackend(BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
try:
order_by = None
- for key, value in request.QUERY_PARAMS.items():
+ for key, value in request.query_params.items():
if key in ('order', 'order_by'):
order_by = value
if ',' in value:
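These changes track two renames: DRF 3's `request.query_params` (replacing `QUERY_PARAMS`) and Django 1.8's refactored `Meta` API, where reverse relations are `ForeignObjectRel` instances and relational fields expose `related_model`. A hedged sketch of walking a lookup path with the new attributes (illustrative helper, not AWX code):

```python
# Walk a lookup such as 'inventory__organization__name' across related models,
# using the Django 1.8 _meta attributes the patched filter backend relies on.
def resolve_lookup(model, lookup):
    field = None
    for name in lookup.split('__'):
        field = model._meta.pk if name == 'pk' else model._meta.get_field(name)
        # related_model is set on both forward and reverse relations in 1.8+.
        model = getattr(field, 'related_model', None) or field.model
    return field
```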
diff --git a/awx/api/generics.py b/awx/api/generics.py
index 65ce0fde90..6618263742 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -12,6 +12,7 @@ from django.conf import settings
from django.db import connection
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
+from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
# Django REST Framework
@@ -19,7 +20,6 @@ from rest_framework.authentication import get_authorization_header
from rest_framework.exceptions import PermissionDenied
from rest_framework import generics
from rest_framework.response import Response
-from rest_framework.request import clone_request
from rest_framework import status
from rest_framework import views
@@ -155,18 +155,6 @@ class APIView(views.APIView):
context = self.get_description_context()
return render_to_string(template_list, context)
- def metadata(self, request):
- '''
- Add version number where view was added to Tower.
- '''
- ret = super(APIView, self).metadata(request)
- added_in_version = '1.2'
- for version in ('3.0.0', '2.4.0', '2.3.0', '2.2.0', '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
- if getattr(self, 'new_in_%s' % version.replace('.', ''), False):
- added_in_version = version
- break
- ret['added_in_version'] = added_in_version
- return ret
class GenericAPIView(generics.GenericAPIView, APIView):
# Base class for all model-based views.
@@ -188,8 +176,12 @@ class GenericAPIView(generics.GenericAPIView, APIView):
def get_queryset(self):
#if hasattr(self.request.user, 'get_queryset'):
# return self.request.user.get_queryset(self.model)
- #else:
- return super(GenericAPIView, self).get_queryset()
+ if self.queryset is not None:
+ return self.queryset._clone()
+ elif self.model is not None:
+ return self.model._default_manager.all()
+ else:
+ return super(GenericAPIView, self).get_queryset()
def get_description_context(self):
# Set instance attributes needed to get serializer metadata.
@@ -201,69 +193,13 @@ class GenericAPIView(generics.GenericAPIView, APIView):
if hasattr(self.model, "_meta"):
if hasattr(self.model._meta, "verbose_name"):
d.update({
- 'model_verbose_name': unicode(self.model._meta.verbose_name),
- 'model_verbose_name_plural': unicode(self.model._meta.verbose_name_plural),
+ 'model_verbose_name': smart_text(self.model._meta.verbose_name),
+ 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
})
- d.update({'serializer_fields': self.get_serializer().metadata()})
+ d['serializer_fields'] = self.metadata_class().get_serializer_info(self.get_serializer())
d['settings'] = settings
return d
- def metadata(self, request):
- '''
- Add field information for GET requests (so field names/labels are
- available even when we can't POST/PUT).
- '''
- ret = super(GenericAPIView, self).metadata(request)
- actions = ret.get('actions', {})
- # Remove read only fields from PUT/POST data.
- for method in ('POST', 'PUT'):
- fields = actions.get(method, {})
- for field, meta in fields.items():
- if not isinstance(meta, dict):
- continue
- if meta.pop('read_only', False):
- fields.pop(field)
- if 'GET' in self.allowed_methods:
- cloned_request = clone_request(request, 'GET')
- try:
- # Test global permissions
- self.check_permissions(cloned_request)
- # Test object permissions
- if hasattr(self, 'retrieve'):
- try:
- self.get_object()
- except Http404:
- # Http404 should be acceptable and the serializer
- # metadata should be populated. Except this so the
- # outer "else" clause of the try-except-else block
- # will be executed.
- pass
- except (exceptions.APIException, PermissionDenied):
- pass
- else:
- # If user has appropriate permissions for the view, include
- # appropriate metadata about the fields that should be supplied.
- serializer = self.get_serializer()
- actions['GET'] = serializer.metadata()
- if hasattr(serializer, 'get_types'):
- ret['types'] = serializer.get_types()
- # Remove fields labeled as write_only, remove field attributes
- # that aren't relevant for retrieving data.
- for field, meta in actions['GET'].items():
- if not isinstance(meta, dict):
- continue
- meta.pop('required', None)
- meta.pop('read_only', None)
- meta.pop('default', None)
- meta.pop('min_length', None)
- meta.pop('max_length', None)
- if meta.pop('write_only', False):
- actions['GET'].pop(field)
- if actions:
- ret['actions'] = actions
- if getattr(self, 'search_fields', None):
- ret['search_fields'] = self.search_fields
- return ret
class MongoAPIView(GenericAPIView):
@@ -337,8 +273,8 @@ class SubListAPIView(ListAPIView):
def get_description_context(self):
d = super(SubListAPIView, self).get_description_context()
d.update({
- 'parent_model_verbose_name': unicode(self.parent_model._meta.verbose_name),
- 'parent_model_verbose_name_plural': unicode(self.parent_model._meta.verbose_name_plural),
+ 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
+ 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
})
return d
@@ -388,10 +324,10 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# Make a copy of the data provided (since it's readonly) in order to
# inject additional data.
- if hasattr(request.DATA, 'dict'):
- data = request.DATA.dict()
+ if hasattr(request.data, 'dict'):
+ data = request.data.dict()
else:
- data = request.DATA
+ data = request.data
# add the parent key to the post data using the pk from the URL
parent_key = getattr(self, 'parent_key', None)
@@ -405,7 +341,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
status=status.HTTP_400_BAD_REQUEST)
# Verify we have permission to add the object as given.
- if not request.user.can_access(self.model, 'add', serializer.init_data):
+ if not request.user.can_access(self.model, 'add', serializer.initial_data):
raise PermissionDenied()
# save the object through the serializer, reload and returned the saved
@@ -424,8 +360,8 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
created = False
parent = self.get_parent_object()
relationship = getattr(parent, self.relationship)
- sub_id = request.DATA.get('id', None)
- data = request.DATA
+ sub_id = request.data.get('id', None)
+ data = request.data
# Create the sub object if an ID is not provided.
if not sub_id:
@@ -462,7 +398,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def unattach(self, request, *args, **kwargs):
- sub_id = request.DATA.get('id', None)
+ sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg='"id" is required to disassociate')
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -486,10 +422,10 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def post(self, request, *args, **kwargs):
- if not isinstance(request.DATA, dict):
+ if not isinstance(request.data, dict):
return Response('invalid type for post data',
status=status.HTTP_400_BAD_REQUEST)
- if 'disassociate' in request.DATA:
+ if 'disassociate' in request.data:
return self.unattach(request, *args, **kwargs)
else:
return self.attach(request, *args, **kwargs)
@@ -499,9 +435,6 @@ class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
- def pre_save(self, obj):
- super(RetrieveUpdateAPIView, self).pre_save(obj)
-
def update(self, request, *args, **kwargs):
self.update_filter(request, *args, **kwargs)
return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)
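Most of the churn in this file is mechanical DRF 2.x to 3.x renames. A quick reference sketch (standalone; assumes a configured Django settings module):

```python
# DRF 2.x -> 3.x renames used throughout this patch:
#   request.DATA         -> request.data
#   request.QUERY_PARAMS -> request.query_params
#   serializer.init_data -> serializer.initial_data
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.request import Request

factory = APIRequestFactory()
request = Request(factory.post('/?page=2', {'name': 'demo'}, format='json'),
                  parsers=[JSONParser()])
print(request.data)          # {'name': 'demo'}
print(request.query_params)  # <QueryDict: {'page': ['2']}>
```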
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
new file mode 100644
index 0000000000..46ea3f36da
--- /dev/null
+++ b/awx/api/metadata.py
@@ -0,0 +1,139 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+# Django
+from django.core.exceptions import PermissionDenied
+from django.http import Http404
+
+# Django REST Framework
+from rest_framework import exceptions
+from rest_framework import metadata
+from rest_framework import serializers
+from rest_framework.request import clone_request
+
+# Ansible Tower
+from awx.main.models import InventorySource
+
+
+class Metadata(metadata.SimpleMetadata):
+
+ def get_field_info(self, field):
+ field_info = super(Metadata, self).get_field_info(field)
+
+ # Indicate if a field has a default value.
+ # FIXME: Still isn't showing all default values?
+ try:
+ field_info['default'] = field.get_default()
+ except serializers.SkipField:
+ pass
+
+ # Indicate if a field is write-only.
+ if getattr(field, 'write_only', False):
+ field_info['write_only'] = True
+
+ # Update choices to be a list of 2-tuples instead of list of dicts with
+ # value/display_name.
+ if 'choices' in field_info:
+ choices = []
+ for choice in field_info['choices']:
+ if isinstance(choice, dict):
+ choices.append((choice.get('value'), choice.get('display_name')))
+ else:
+ choices.append(choice)
+ field_info['choices'] = choices
+
+ # Special handling of inventory source_region choices that vary based on
+ # selected inventory source.
+ if field.field_name == 'source_regions':
+ for cp in ('azure', 'ec2', 'gce', 'rax'):
+ get_regions = getattr(InventorySource, 'get_%s_region_choices' % cp)
+ field_info['%s_region_choices' % cp] = get_regions()
+
+ # Special handling of group_by choices for EC2.
+ if field.field_name == 'group_by':
+ for cp in ('ec2',):
+ get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp)
+ field_info['%s_group_by_choices' % cp] = get_group_by_choices()
+
+ # Update type of fields returned...
+ if field.field_name == 'type':
+ field_info['type'] = 'multiple choice'
+ elif field.field_name == 'url':
+ field_info['type'] = 'string'
+ elif field.field_name in ('related', 'summary_fields'):
+ field_info['type'] = 'object'
+ elif field.field_name in ('created', 'modified'):
+ field_info['type'] = 'datetime'
+
+ return field_info
+
+ def determine_actions(self, request, view):
+ # Add field information for GET requests (so field names/labels are
+ # available even when we can't POST/PUT).
+ actions = {}
+ for method in {'GET', 'PUT', 'POST'} & set(view.allowed_methods):
+ view.request = clone_request(request, method)
+ try:
+ # Test global permissions
+ if hasattr(view, 'check_permissions'):
+ view.check_permissions(view.request)
+ # Test object permissions
+ if method == 'PUT' and hasattr(view, 'get_object'):
+ view.get_object()
+ except (exceptions.APIException, PermissionDenied, Http404):
+ continue
+ else:
+ # If user has appropriate permissions for the view, include
+ # appropriate metadata about the fields that should be supplied.
+ serializer = view.get_serializer()
+ actions[method] = self.get_serializer_info(serializer)
+ finally:
+ view.request = request
+
+ for field, meta in actions[method].items():
+ if not isinstance(meta, dict):
+ continue
+
+ # Add type choices if available from the serializer.
+ if field == 'type' and hasattr(serializer, 'get_type_choices'):
+ meta['choices'] = serializer.get_type_choices()
+
+ # For GET method, remove meta attributes that aren't relevant
+ # when reading a field and remove write-only fields.
+ if method == 'GET':
+ meta.pop('required', None)
+ meta.pop('read_only', None)
+ meta.pop('default', None)
+ meta.pop('min_length', None)
+ meta.pop('max_length', None)
+ if meta.pop('write_only', False):
+ actions['GET'].pop(field)
+
+ # For PUT/POST methods, remove read-only fields.
+ if method in ('PUT', 'POST'):
+ if meta.pop('read_only', False):
+ actions[method].pop(field)
+
+ return actions
+
+ def determine_metadata(self, request, view):
+ metadata = super(Metadata, self).determine_metadata(request, view)
+
+ # Add version number in which view was added to Tower.
+ added_in_version = '1.2'
+ for version in ('3.0.0', '2.4.0', '2.3.0', '2.2.0', '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
+ if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
+ added_in_version = version
+ break
+ metadata['added_in_version'] = added_in_version
+
+ # Add type(s) handled by this view/serializer.
+ serializer = view.get_serializer()
+ if hasattr(serializer, 'get_types'):
+ metadata['types'] = serializer.get_types()
+
+ # Add search fields if available from the view.
+ if getattr(view, 'search_fields', None):
+ metadata['search_fields'] = view.search_fields
+
+ return metadata
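This new `Metadata` class reimplements the per-view `metadata()` overrides removed from generics.py on top of DRF 3's `SimpleMetadata`, and is what serves OPTIONS requests once wired in. A sketch of the hookup (the exact settings location in AWX is an assumption):

```python
# settings.py (sketch): route OPTIONS metadata through the custom class globally...
REST_FRAMEWORK = {
    'DEFAULT_METADATA_CLASS': 'awx.api.metadata.Metadata',
}

# ...or per view, which is what self.metadata_class() in generics.py resolves to:
# class SomeListView(ListCreateAPIView):
#     metadata_class = Metadata
```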
diff --git a/awx/api/pagination.py b/awx/api/pagination.py
index ac8ba62123..822e6065ee 100644
--- a/awx/api/pagination.py
+++ b/awx/api/pagination.py
@@ -2,36 +2,26 @@
# All Rights Reserved.
# Django REST Framework
-from rest_framework import serializers, pagination
-from rest_framework.templatetags.rest_framework import replace_query_param
+from rest_framework import pagination
+from rest_framework.utils.urls import remove_query_param, replace_query_param
-class NextPageField(pagination.NextPageField):
- '''Pagination field to output URL path.'''
- def to_native(self, value):
- if not value.has_next():
+class Pagination(pagination.PageNumberPagination):
+
+ page_size_query_param = 'page_size'
+
+ def get_next_link(self):
+ if not self.page.has_next():
return None
- page = value.next_page_number()
- request = self.context.get('request')
- url = request and request.get_full_path() or ''
- return replace_query_param(url, self.page_field, page)
+ url = self.request and self.request.get_full_path() or ''
+ page_number = self.page.next_page_number()
+ return replace_query_param(url, self.page_query_param, page_number)
-class PreviousPageField(pagination.NextPageField):
- '''Pagination field to output URL path.'''
-
- def to_native(self, value):
- if not value.has_previous():
+ def get_previous_link(self):
+ if not self.page.has_previous():
return None
- page = value.previous_page_number()
- request = self.context.get('request')
- url = request and request.get_full_path() or ''
- return replace_query_param(url, self.page_field, page)
-
-class PaginationSerializer(pagination.BasePaginationSerializer):
- '''
- Custom pagination serializer to output only URL path (without host/port).
- '''
-
- count = serializers.Field(source='paginator.count')
- next = NextPageField(source='*')
- previous = PreviousPageField(source='*')
+ url = self.request and self.request.get_full_path() or ''
+ page_number = self.page.previous_page_number()
+ if page_number == 1:
+ return remove_query_param(url, self.page_query_param)
+ return replace_query_param(url, self.page_query_param, page_number)
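DRF 3.1 moved pagination from serializer classes to pluggable pagination classes; the subclass above keeps Tower's relative URLs and `page_size` query parameter. A wiring sketch (the `PAGE_SIZE` value here is an assumption):

```python
# settings.py (sketch): activate the custom paginator for all list views.
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
    'PAGE_SIZE': 25,  # assumed default; clients may override via ?page_size=
}
```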
diff --git a/awx/api/permissions.py b/awx/api/permissions.py
index 95b640f72f..8f535f7adf 100644
--- a/awx/api/permissions.py
+++ b/awx/api/permissions.py
@@ -61,7 +61,7 @@ class ModelAccessPermission(permissions.BasePermission):
else:
if obj:
return True
- return check_user_access(request.user, view.model, 'add', request.DATA)
+ return check_user_access(request.user, view.model, 'add', request.data)
def check_put_permissions(self, request, view, obj=None):
if not obj:
@@ -70,10 +70,10 @@ class ModelAccessPermission(permissions.BasePermission):
return True
if getattr(view, 'is_variable_data', False):
return check_user_access(request.user, view.model, 'change', obj,
- dict(variables=request.DATA))
+ dict(variables=request.data))
else:
return check_user_access(request.user, view.model, 'change', obj,
- request.DATA)
+ request.data)
def check_patch_permissions(self, request, view, obj=None):
return self.check_put_permissions(request, view, obj)
@@ -127,7 +127,7 @@ class ModelAccessPermission(permissions.BasePermission):
def has_permission(self, request, view, obj=None):
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)',
- request.user, request.method, request.DATA,
+ request.user, request.method, request.data,
view.__class__.__name__, obj)
try:
response = self.check_permissions(request, view, obj)
@@ -156,7 +156,7 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
# Require method to be POST, host_config_key to be specified and match
# the requested job template, and require the job template to be
# active in order to proceed.
- host_config_key = request.DATA.get('host_config_key', '')
+ host_config_key = request.data.get('host_config_key', '')
if request.method.lower() != 'post':
raise PermissionDenied()
elif not host_config_key:
diff --git a/awx/api/renderers.py b/awx/api/renderers.py
index fd60520db2..1897028333 100644
--- a/awx/api/renderers.py
+++ b/awx/api/renderers.py
@@ -4,6 +4,7 @@
# Django REST Framework
from rest_framework import renderers
+
class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
'''
Customizations to the default browsable API renderer.
@@ -16,14 +17,16 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
return renderers.JSONRenderer()
return renderer
- def get_raw_data_form(self, view, method, request):
+ def get_raw_data_form(self, data, view, method, request):
+ # Set a flag on the view to indicate to the view/serializer that we're
+ # creating a raw data form for the browsable API.
try:
setattr(view, '_raw_data_form_marker', True)
- return super(BrowsableAPIRenderer, self).get_raw_data_form(view, method, request)
+ return super(BrowsableAPIRenderer, self).get_raw_data_form(data, view, method, request)
finally:
delattr(view, '_raw_data_form_marker')
- def get_rendered_html_form(self, view, method, request):
+ def get_rendered_html_form(self, data, view, method, request):
'''Never show auto-generated form (only raw form).'''
obj = getattr(view, 'object', None)
if not self.show_form_for_method(view, method, request, obj):
@@ -31,9 +34,10 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
if method in ('DELETE', 'OPTIONS'):
return True # Don't actually need to return a form
- def get_context(self, data, accepted_media_type, renderer_context):
- context = super(BrowsableAPIRenderer, self).get_context(data, accepted_media_type, renderer_context)
- return context
+ def get_filter_form(self, data, view, request):
+ # Don't show filter form in browsable API.
+ return
+
class PlainTextRenderer(renderers.BaseRenderer):
@@ -45,9 +49,12 @@ class PlainTextRenderer(renderers.BaseRenderer):
data = unicode(data)
return data.encode(self.charset)
+
class DownloadTextRenderer(PlainTextRenderer):
+
format = "txt_download"
+
class AnsiTextRenderer(PlainTextRenderer):
media_type = 'text/plain'
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index c59614f53f..f655b35f4e 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -2,14 +2,15 @@
# All Rights Reserved.
# Python
-import functools
+import copy
import json
import re
import logging
+from collections import OrderedDict
from dateutil import rrule
from ast import literal_eval
-from rest_framework_mongoengine.serializers import MongoEngineModelSerializer, MongoEngineModelSerializerOptions
+from rest_framework_mongoengine.serializers import DocumentSerializer
# PyYAML
import yaml
@@ -19,16 +20,16 @@ from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
-from django.core.exceptions import ObjectDoesNotExist
-from django.db.models.fields import BLANK_CHOICE_DASH
-from django.utils.datastructures import SortedDict
+from django.core.exceptions import ObjectDoesNotExist, ValidationError as DjangoValidationError
+from django.db import models
# from django.utils.translation import ugettext_lazy as _
-from django.utils.encoding import smart_str
+from django.utils.encoding import force_text, smart_text
# Django REST Framework
-from rest_framework.compat import get_concrete_model
+from rest_framework.exceptions import ValidationError
from rest_framework import fields
from rest_framework import serializers
+from rest_framework.utils.serializer_helpers import ReturnList
# Django-Polymorphic
from polymorphic import PolymorphicModel
@@ -90,48 +91,6 @@ SUMMARIZABLE_FK_FIELDS = {
'source_script': ('name', 'description'),
}
-# Monkeypatch REST framework to include default value and write_only flag in
-# field metadata.
-def add_metadata_default(f):
- @functools.wraps(f)
- def _add_metadata_default(self, *args, **kwargs):
- metadata = f(self, *args, **kwargs)
- if hasattr(self, 'get_default_value'):
- default = self.get_default_value()
- if default is None and metadata.get('type', '') != 'field':
- default = getattr(self, 'empty', None)
- if default or not getattr(self, 'required', False):
- metadata['default'] = default
- if getattr(self, 'write_only', False):
- metadata['write_only'] = True
- return metadata
- return _add_metadata_default
-
-fields.Field.metadata = add_metadata_default(fields.Field.metadata)
-
-class ChoiceField(fields.ChoiceField):
-
- def __init__(self, *args, **kwargs):
- super(ChoiceField, self).__init__(*args, **kwargs)
- if not self.required:
- # Remove extra blank option if one is already present (for writable
- # field) or if present at all for read-only fields.
- if ([x[0] for x in self.choices].count(u'') > 1 or self.get_default_value() != u'' or self.read_only) \
- and BLANK_CHOICE_DASH[0] in self.choices:
- self.choices = [x for x in self.choices
- if x != BLANK_CHOICE_DASH[0]]
-
- def metadata(self):
- metadata = super(ChoiceField, self).metadata()
- metadata['choices'] = self.choices or []
- if not self.choices:
- metadata.pop('default', None)
- return metadata
-
-# Monkeypatch REST framework to replace default ChoiceField used by
-# ModelSerializer.
-serializers.ChoiceField = ChoiceField
-
class BaseSerializerMetaclass(serializers.SerializerMetaclass):
'''
@@ -154,17 +113,46 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
# Define fields as 'foo' and 'bar'; ignore base class fields.
fields = ('foo', 'bar')
+ # Extra field kwargs dicts are also merged from base classes.
+ extra_kwargs = {
+ 'foo': {'required': True},
+ 'bar': {'read_only': True},
+ }
+
+ # If a subclass were to define extra_kwargs as:
+ extra_kwargs = {
+ 'foo': {'required': False, 'default': ''},
+ 'bar': {'label': 'New Label for Bar'},
+ }
+
+ # The resulting value of extra_kwargs would be:
+ extra_kwargs = {
+ 'foo': {'required': False, 'default': ''},
+ 'bar': {'read_only': True, 'label': 'New Label for Bar'},
+ }
+
+ # Extra field kwargs cannot be removed in subclasses, only replaced.
+
'''
+ @staticmethod
+ def _is_list_of_strings(x):
+ return isinstance(x, (list, tuple)) and all([isinstance(y, basestring) for y in x])
+
+ @staticmethod
+ def _is_extra_kwargs(x):
+ return isinstance(x, dict) and all([isinstance(k, basestring) and isinstance(v, dict) for k,v in x.items()])
+
@classmethod
def _update_meta(cls, base, meta, other=None):
for attr in dir(other):
if attr.startswith('_'):
continue
- meta_val = getattr(meta, attr, [])
- val = getattr(other, attr, [])
- # Special handling for lists of strings (field names).
- if isinstance(val, (list, tuple)) and all([isinstance(x, basestring) for x in val]):
+ val = getattr(other, attr)
+ meta_val = getattr(meta, attr, None)
+ # Special handling for lists/tuples of strings (field names).
+ if cls._is_list_of_strings(val) and cls._is_list_of_strings(meta_val or []):
+ meta_val = meta_val or []
new_vals = []
except_vals = []
if base: # Merge values from all bases.
@@ -180,6 +168,20 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
for v in new_vals:
if v not in except_vals and v not in val:
val.append(v)
+ val = tuple(val)
+ # Merge extra_kwargs dicts from base classes.
+ elif cls._is_extra_kwargs(val) and cls._is_extra_kwargs(meta_val or {}):
+ meta_val = meta_val or {}
+ new_val = {}
+ if base:
+ for k,v in meta_val.items():
+ new_val[k] = copy.deepcopy(v)
+ for k,v in val.items():
+ new_val.setdefault(k, {}).update(copy.deepcopy(v))
+ val = new_val
+ # Any other values are copied in case they are mutable objects.
+ else:
+ val = copy.deepcopy(val)
setattr(meta, attr, val)
def __new__(cls, name, bases, attrs):
@@ -191,64 +193,34 @@ class BaseSerializerMetaclass(serializers.SerializerMetaclass):
return super(BaseSerializerMetaclass, cls).__new__(cls, name, bases, attrs)
-class BaseSerializerOptions(serializers.ModelSerializerOptions):
-
- def __init__(self, meta):
- super(BaseSerializerOptions, self).__init__(meta)
- self.summary_fields = getattr(meta, 'summary_fields', ())
- self.summarizable_fields = getattr(meta, 'summarizable_fields', ())
-
-
class BaseSerializer(serializers.ModelSerializer):
__metaclass__ = BaseSerializerMetaclass
- _options_class = BaseSerializerOptions
class Meta:
fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
'modified', 'name', 'description')
summary_fields = () # FIXME: List of field names from this serializer that should be used when included as part of another's summary_fields.
summarizable_fields = () # FIXME: List of field names on this serializer that should be included in summary_fields.
+ extra_kwargs = {
+ 'description': {
+ 'allow_null': True,
+ },
+ }
# add the URL and related resources
- type = serializers.SerializerMethodField('get_type')
- url = serializers.SerializerMethodField('get_url')
+ type = serializers.SerializerMethodField()
+ url = serializers.SerializerMethodField()
related = serializers.SerializerMethodField('_get_related')
summary_fields = serializers.SerializerMethodField('_get_summary_fields')
# make certain fields read only
- created = serializers.SerializerMethodField('get_created')
- modified = serializers.SerializerMethodField('get_modified')
- active = serializers.SerializerMethodField('get_active')
-
- def get_fields(self):
- opts = get_concrete_model(self.opts.model)._meta
- ret = super(BaseSerializer, self).get_fields()
- for key, field in ret.items():
- if key == 'id' and not getattr(field, 'help_text', None):
- field.help_text = 'Database ID for this %s.' % unicode(opts.verbose_name)
- elif key == 'type':
- field.help_text = 'Data type for this %s.' % unicode(opts.verbose_name)
- field.type_label = 'multiple choice'
- elif key == 'url':
- field.help_text = 'URL for this %s.' % unicode(opts.verbose_name)
- field.type_label = 'string'
- elif key == 'related':
- field.help_text = 'Data structure with URLs of related resources.'
- field.type_label = 'object'
- elif key == 'summary_fields':
- field.help_text = 'Data structure with name/description for related resources.'
- field.type_label = 'object'
- elif key == 'created':
- field.help_text = 'Timestamp when this %s was created.' % unicode(opts.verbose_name)
- field.type_label = 'datetime'
- elif key == 'modified':
- field.help_text = 'Timestamp when this %s was last modified.' % unicode(opts.verbose_name)
- field.type_label = 'datetime'
- return ret
+ created = serializers.SerializerMethodField()
+ modified = serializers.SerializerMethodField()
+ active = serializers.SerializerMethodField()
def get_type(self, obj):
- return get_type_for_model(self.opts.model)
+ return get_type_for_model(self.Meta.model)
def get_types(self):
return [self.get_type(None)]
@@ -263,7 +235,7 @@ class BaseSerializer(serializers.ModelSerializer):
}
choices = []
for t in self.get_types():
- name = type_name_map.get(t, unicode(get_model_for_type(t)._meta.verbose_name).title())
+ name = type_name_map.get(t, force_text(get_model_for_type(t)._meta.verbose_name).title())
choices.append((t, name))
return choices
@@ -279,7 +251,7 @@ class BaseSerializer(serializers.ModelSerializer):
return {} if obj is None else self.get_related(obj)
def get_related(self, obj):
- res = SortedDict()
+ res = OrderedDict()
if getattr(obj, 'created_by', None) and obj.created_by.is_active:
res['created_by'] = reverse('api:user_detail', args=(obj.created_by.pk,))
if getattr(obj, 'modified_by', None) and obj.modified_by.is_active:
@@ -292,7 +264,7 @@ class BaseSerializer(serializers.ModelSerializer):
def get_summary_fields(self, obj):
# Return values for certain fields on related objects, to simplify
# displaying lists of items without additional API requests.
- summary_fields = SortedDict()
+ summary_fields = OrderedDict()
for fk, related_fields in SUMMARIZABLE_FK_FIELDS.items():
try:
# A few special cases where we don't want to access the field
@@ -311,7 +283,7 @@ class BaseSerializer(serializers.ModelSerializer):
continue
if hasattr(fkval, 'is_active') and not fkval.is_active:
continue
- summary_fields[fk] = SortedDict()
+ summary_fields[fk] = OrderedDict()
for field in related_fields:
fval = getattr(fkval, field, None)
if fval is None and field == 'type':
@@ -327,11 +299,11 @@ class BaseSerializer(serializers.ModelSerializer):
except ObjectDoesNotExist:
pass
if getattr(obj, 'created_by', None) and obj.created_by.is_active:
- summary_fields['created_by'] = SortedDict()
+ summary_fields['created_by'] = OrderedDict()
for field in SUMMARIZABLE_FK_FIELDS['user']:
summary_fields['created_by'][field] = getattr(obj.created_by, field)
if getattr(obj, 'modified_by', None) and obj.modified_by.is_active:
- summary_fields['modified_by'] = SortedDict()
+ summary_fields['modified_by'] = OrderedDict()
for field in SUMMARIZABLE_FK_FIELDS['user']:
summary_fields['modified_by'][field] = getattr(obj.modified_by, field)
return summary_fields
@@ -360,12 +332,63 @@ class BaseSerializer(serializers.ModelSerializer):
else:
return obj.active
- def get_validation_exclusions(self, instance=None):
- # Override base class method to continue to use model validation for
- # fields (including optional ones), appears this was broken by DRF
- # 2.3.13 update.
- cls = self.opts.model
- opts = get_concrete_model(cls)._meta
+ def build_standard_field(self, field_name, model_field):
+ field_class, field_kwargs = super(BaseSerializer, self).build_standard_field(field_name, model_field)
+
+ # Update help text for common fields.
+ opts = self.Meta.model._meta.concrete_model._meta
+ if field_name == 'id':
+ field_kwargs.setdefault('help_text', 'Database ID for this %s.' % smart_text(opts.verbose_name))
+ elif field_name == 'name':
+ field_kwargs['help_text'] = 'Name of this %s.' % smart_text(opts.verbose_name)
+ elif field_name == 'description':
+ field_kwargs['help_text'] = 'Optional description of this %s.' % smart_text(opts.verbose_name)
+ elif field_name == 'type':
+ field_kwargs['help_text'] = 'Data type for this %s.' % smart_text(opts.verbose_name)
+ elif field_name == 'url':
+ field_kwargs['help_text'] = 'URL for this %s.' % smart_text(opts.verbose_name)
+ elif field_name == 'related':
+ field_kwargs['help_text'] = 'Data structure with URLs of related resources.'
+ elif field_name == 'summary_fields':
+ field_kwargs['help_text'] = 'Data structure with name/description for related resources.'
+ elif field_name == 'created':
+ field_kwargs['help_text'] = 'Timestamp when this %s was created.' % smart_text(opts.verbose_name)
+ elif field_name == 'modified':
+ field_kwargs['help_text'] = 'Timestamp when this %s was last modified.' % smart_text(opts.verbose_name)
+
+ # Pass model field default onto the serializer field if field is not read-only.
+ if model_field.has_default() and not field_kwargs.get('read_only', False):
+ field_kwargs['default'] = model_field.get_default()
+ # Enforce minimum value of 0 for PositiveIntegerFields.
+ if isinstance(model_field, (models.PositiveIntegerField, models.PositiveSmallIntegerField)) and 'choices' not in field_kwargs:
+ field_kwargs['min_value'] = 0
+ # Update verbosity choices from settings (for job templates, jobs, ad hoc commands).
+ if field_name == 'verbosity' and 'choices' in field_kwargs:
+ field_kwargs['choices'] = getattr(settings, 'VERBOSITY_CHOICES', field_kwargs['choices'])
+ return field_class, field_kwargs
+
+ def build_relational_field(self, field_name, relation_info):
+ field_class, field_kwargs = super(BaseSerializer, self).build_relational_field(field_name, relation_info)
+ # Don't include choices for foreign key fields.
+ field_kwargs.pop('choices', None)
+ return field_class, field_kwargs
+
+ def validate_description(self, value):
+ # Description should always be empty string, never null.
+ return value or u''
+
+ def run_validation(self, data=fields.empty):
+ try:
+ return super(BaseSerializer, self).run_validation(data)
+ except ValidationError as exc:
+ # Work around a possible DRF bug when exc.detail is a list instead of a dict.
+ raise ValidationError(detail=serializers.get_validation_error_detail(exc))
+
+ def get_validation_exclusions(self, obj=None):
+ # Borrowed from DRF 2.x - return model fields that should be excluded
+ # from model validation.
+ cls = self.Meta.model
+ opts = cls._meta.concrete_model._meta
exclusions = [field.name for field in opts.fields + opts.many_to_many]
for field_name, field in self.fields.items():
field_name = field.source or field_name
@@ -378,49 +401,72 @@ class BaseSerializer(serializers.ModelSerializer):
exclusions.remove(field_name)
return exclusions
- def to_native(self, obj):
+ def validate(self, attrs):
+ attrs = super(BaseSerializer, self).validate(attrs)
+ try:
+ # Create/update a model instance and run its full_clean() method to
+ # do any validation implemented on the model class.
+ exclusions = self.get_validation_exclusions(self.instance)
+ obj = self.instance or self.Meta.model()
+ for k,v in attrs.items():
+ if k not in exclusions:
+ setattr(obj, k, v)
+ obj.full_clean(exclude=exclusions)
+ # full_clean may modify values on the instance; copy those changes
+ # back to attrs so they are saved.
+ for k in attrs.keys():
+ if k not in exclusions:
+ attrs[k] = getattr(obj, k)
+ except DjangoValidationError as exc:
+ # DjangoValidationError may contain a list or dict; normalize into a
+ # dict where the keys are the field name and the values are a list
+ # of error messages, then raise as a DRF ValidationError. DRF would
+ # normally convert any DjangoValidationError to a non-field specific
+ # error message; here we preserve field-specific errors raised from
+ # the model's full_clean method.
+ d = exc.update_error_dict({})
+ for k,v in d.items():
+ v = v if isinstance(v, list) else [v]
+ v2 = []
+ for e in v:
+ if isinstance(e, DjangoValidationError):
+ v2.extend(list(e))
+ elif isinstance(e, list):
+ v2.extend(e)
+ else:
+ v2.append(e)
+ d[k] = map(force_text, v2)
+ raise ValidationError(d)
+ return attrs
+
+ def to_representation(self, obj):
+ # FIXME: Doesn't get called anymore for a new raw data form!
# When rendering the raw data form, create an instance of the model so
# that the model defaults will be filled in.
view = self.context.get('view', None)
parent_key = getattr(view, 'parent_key', None)
if not obj and hasattr(view, '_raw_data_form_marker'):
- obj = self.opts.model()
+ obj = self.Meta.model()
# FIXME: Would be nice to include any posted data for the raw data
# form, so that a submission with errors can be modified in place
# and resubmitted.
- ret = super(BaseSerializer, self).to_native(obj)
+ ret = super(BaseSerializer, self).to_representation(obj)
# Remove parent key from raw form data, since it will be automatically
# set by the sub list create view.
if parent_key and hasattr(view, '_raw_data_form_marker'):
ret.pop(parent_key, None)
return ret
- def metadata(self):
- fields = super(BaseSerializer, self).metadata()
- for field, meta in fields.items():
- if not isinstance(meta, dict):
- continue
- if field == 'type':
- meta['choices'] = self.get_type_choices()
- #if meta.get('type', '') == 'field':
- # meta['type'] = 'id'
- return fields
+class BaseFactSerializer(DocumentSerializer):
-class BaseFactSerializerOptions(MongoEngineModelSerializerOptions):
- def __init__(self, meta):
- super(BaseFactSerializerOptions, self).__init__(meta)
-
-
-class BaseFactSerializer(MongoEngineModelSerializer):
- _options_class = BaseFactSerializerOptions
__metaclass__ = BaseSerializerMetaclass
def get_fields(self):
ret = super(BaseFactSerializer, self).get_fields()
if 'module' in ret and feature_enabled('system_tracking'):
choices = [(o, o.title()) for o in FactVersion.objects.all().only('module').distinct('module')]
- ret['module'] = ChoiceField(source='module', choices=choices, read_only=True, required=False)
+ ret['module'] = serializers.ChoiceField(source='module', choices=choices, read_only=True, required=False)
return ret
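The new `validate()` above ports DRF 2's implicit model validation to DRF 3, where serializers no longer call `Model.full_clean()`. A self-contained sketch of the core pattern (generic names, not AWX code):

```python
from django.core.exceptions import ValidationError as DjangoValidationError
from rest_framework import serializers

class ModelCleanMixin(object):
    """Run Django model validation and surface errors per-field in DRF 3."""
    def validate(self, attrs):
        obj = self.instance or self.Meta.model()
        for key, value in attrs.items():
            setattr(obj, key, value)
        try:
            obj.full_clean()
        except DjangoValidationError as exc:
            # Sketch assumes full_clean raised field-keyed errors; message_dict
            # keeps field names, where DRF alone would collapse everything
            # into non_field_errors.
            raise serializers.ValidationError(exc.message_dict)
        return attrs
```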
@@ -447,7 +493,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
else:
return super(UnifiedJobTemplateSerializer, self).get_types()
- def to_native(self, obj):
+ def to_representation(self, obj):
serializer_class = None
if type(self) is UnifiedJobTemplateSerializer:
if isinstance(obj, Project):
@@ -458,16 +504,14 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
serializer_class = JobTemplateSerializer
if serializer_class:
serializer = serializer_class(instance=obj)
- return serializer.to_native(obj)
+ return serializer.to_representation(obj)
else:
- return super(UnifiedJobTemplateSerializer, self).to_native(obj)
+ return super(UnifiedJobTemplateSerializer, self).to_representation(obj)
class UnifiedJobSerializer(BaseSerializer):
- result_stdout = serializers.SerializerMethodField('get_result_stdout')
- unified_job_template = serializers.Field(source='unified_job_template_id', label='unified job template')
- job_env = serializers.CharField(source='job_env', label='job env', read_only=True)
+ result_stdout = serializers.SerializerMethodField()
class Meta:
model = UnifiedJob
@@ -475,6 +519,16 @@ class UnifiedJobSerializer(BaseSerializer):
'failed', 'started', 'finished', 'elapsed', 'job_args',
'job_cwd', 'job_env', 'job_explanation', 'result_stdout',
'result_traceback')
+ extra_kwargs = {
+ 'unified_job_template': {
+ 'source': 'unified_job_template_id',
+ 'label': 'unified job template',
+ },
+ 'job_env': {
+ 'read_only': True,
+ 'label': 'job_env',
+ }
+ }
def get_types(self):
if type(self) is UnifiedJobSerializer:
@@ -498,7 +552,7 @@ class UnifiedJobSerializer(BaseSerializer):
res['stdout'] = reverse('api:ad_hoc_command_stdout', args=(obj.pk,))
return res
- def to_native(self, obj):
+ def to_representation(self, obj):
serializer_class = None
if type(self) is UnifiedJobSerializer:
if isinstance(obj, ProjectUpdate):
@@ -513,9 +567,9 @@ class UnifiedJobSerializer(BaseSerializer):
serializer_class = SystemJobSerializer
if serializer_class:
serializer = serializer_class(instance=obj)
- ret = serializer.to_native(obj)
+ ret = serializer.to_representation(obj)
else:
- ret = super(UnifiedJobSerializer, self).to_native(obj)
+ ret = super(UnifiedJobSerializer, self).to_representation(obj)
if 'elapsed' in ret:
ret['elapsed'] = float(ret['elapsed'])
return ret
@@ -527,11 +581,11 @@ class UnifiedJobSerializer(BaseSerializer):
tower_settings.STDOUT_MAX_BYTES_DISPLAY)
return obj.result_stdout
+
class UnifiedJobListSerializer(UnifiedJobSerializer):
class Meta:
- exclude = ('*', 'job_args', 'job_cwd', 'job_env', 'result_traceback',
- 'result_stdout')
+ fields = ('*', '-job_args', '-job_cwd', '-job_env', '-result_traceback', '-result_stdout')
def get_types(self):
if type(self) is UnifiedJobListSerializer:
@@ -539,7 +593,7 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):
else:
return super(UnifiedJobListSerializer, self).get_types()
- def to_native(self, obj):
+ def to_representation(self, obj):
serializer_class = None
if type(self) is UnifiedJobListSerializer:
if isinstance(obj, ProjectUpdate):
@@ -554,9 +608,9 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):
serializer_class = SystemJobListSerializer
if serializer_class:
serializer = serializer_class(instance=obj)
- ret = serializer.to_native(obj)
+ ret = serializer.to_representation(obj)
else:
- ret = super(UnifiedJobListSerializer, self).to_native(obj)
+ ret = super(UnifiedJobListSerializer, self).to_representation(obj)
if 'elapsed' in ret:
ret['elapsed'] = float(ret['elapsed'])
return ret
@@ -564,7 +618,7 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):
class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
- result_stdout = serializers.SerializerMethodField('get_result_stdout')
+ result_stdout = serializers.SerializerMethodField()
class Meta:
fields = ('result_stdout',)
@@ -583,8 +637,8 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
return super(UnifiedJobStdoutSerializer, self).get_types()
# TODO: Needed?
- #def to_native(self, obj):
- # ret = super(UnifiedJobStdoutSerializer, self).to_native(obj)
+ #def to_representation(self, obj):
+ # ret = super(UnifiedJobStdoutSerializer, self).to_representation(obj)
# return ret.get('result_stdout', '')
@@ -599,32 +653,26 @@ class UserSerializer(BaseSerializer):
fields = ('*', '-name', '-description', '-modified',
'-summary_fields', 'username', 'first_name', 'last_name',
'email', 'is_superuser', 'password', 'ldap_dn')
+
- def to_native(self, obj):
- ret = super(UserSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(UserSerializer, self).to_representation(obj)
ret.pop('password', None)
- ret.fields.pop('password', None)
if obj:
ret['auth'] = obj.social_auth.values('provider', 'uid')
return ret
- def get_validation_exclusions(self):
- ret = super(UserSerializer, self).get_validation_exclusions()
+ def get_validation_exclusions(self, obj=None):
+ ret = super(UserSerializer, self).get_validation_exclusions(obj)
ret.append('password')
return ret
- def restore_object(self, attrs, instance=None):
- new_password = attrs.pop('password', None)
- # first time creating, password required
- if instance is None and new_password in (None, ''):
- self._errors = {'password': ['Password required for new User']}
- return
- instance = super(UserSerializer, self).restore_object(attrs, instance)
- instance._new_password = new_password
- return instance
+ def validate_password(self, value):
+ if not self.instance and value in (None, ''):
+ raise serializers.ValidationError('Password required for new User')
+ return value
- def save_object(self, obj, **kwargs):
- new_password = getattr(obj, '_new_password', None)
+ def _update_password(self, obj, new_password):
# For now we're not raising an error, just not saving password for
# users managed by LDAP who already have an unusable password set.
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
@@ -641,9 +689,22 @@ class UserSerializer(BaseSerializer):
new_password = None
if new_password:
obj.set_password(new_password)
- if not obj.password:
+ obj.save(update_fields=['password'])
+ elif not obj.password:
obj.set_unusable_password()
- return super(UserSerializer, self).save_object(obj, **kwargs)
+ obj.save(update_fields=['password'])
+
+ def create(self, validated_data):
+ new_password = validated_data.pop('password', None)
+ obj = super(UserSerializer, self).create(validated_data)
+ self._update_password(obj, new_password)
+ return obj
+
+ def update(self, obj, validated_data):
+ new_password = validated_data.pop('password', None)
+ obj = super(UserSerializer, self).update(obj, validated_data)
+ self._update_password(obj, new_password)
+ return obj
def get_related(self, obj):
res = super(UserSerializer, self).get_related(obj)
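DRF 3 drops `restore_object()`/`save_object()` in favor of explicit `create()`/`update()` hooks, which is why the password handling above moves into `_update_password()` called from both. A minimal sketch of the pattern (hypothetical serializer and model):

```python
class AccountSerializer(serializers.ModelSerializer):  # hypothetical example
    def create(self, validated_data):
        # Pop write-only data before the model write, then apply it separately.
        password = validated_data.pop('password', None)
        obj = super(AccountSerializer, self).create(validated_data)
        if password:
            obj.set_password(password)  # assumes a Django user-like model
            obj.save(update_fields=['password'])
        return obj

    def update(self, obj, validated_data):
        validated_data.pop('password', None)  # handled separately, as above
        return super(AccountSerializer, self).update(obj, validated_data)
```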
@@ -658,36 +719,36 @@ class UserSerializer(BaseSerializer):
))
return res
- def _validate_ldap_managed_field(self, attrs, source):
+ def _validate_ldap_managed_field(self, value, field_name):
if not getattr(settings, 'AUTH_LDAP_SERVER_URI', None) or not feature_enabled('ldap'):
- return attrs
+ return value
try:
- is_ldap_user = bool(self.object.profile.ldap_dn)
+ is_ldap_user = bool(self.instance and self.instance.profile.ldap_dn)
except AttributeError:
is_ldap_user = False
if is_ldap_user:
ldap_managed_fields = ['username']
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_ATTR_MAP', {}).keys())
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
- if source in ldap_managed_fields and source in attrs:
- if attrs[source] != getattr(self.object, source):
- raise serializers.ValidationError('Unable to change %s on user managed by LDAP' % source)
- return attrs
+ if field_name in ldap_managed_fields:
+ if value != getattr(self.instance, field_name):
+ raise serializers.ValidationError('Unable to change %s on user managed by LDAP' % field_name)
+ return value
- def validate_username(self, attrs, source):
- return self._validate_ldap_managed_field(attrs, source)
+ def validate_username(self, value):
+ return self._validate_ldap_managed_field(value, 'username')
- def validate_first_name(self, attrs, source):
- return self._validate_ldap_managed_field(attrs, source)
+ def validate_first_name(self, value):
+ return self._validate_ldap_managed_field(value, 'first_name')
- def validate_last_name(self, attrs, source):
- return self._validate_ldap_managed_field(attrs, source)
+ def validate_last_name(self, value):
+ return self._validate_ldap_managed_field(value, 'last_name')
- def validate_email(self, attrs, source):
- return self._validate_ldap_managed_field(attrs, source)
+ def validate_email(self, value):
+ return self._validate_ldap_managed_field(value, 'email')
- def validate_is_superuser(self, attrs, source):
- return self._validate_ldap_managed_field(attrs, source)
+ def validate_is_superuser(self, value):
+ return self._validate_ldap_managed_field(value, 'is_superuser')
class OrganizationSerializer(BaseSerializer):
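Field-level validator signatures also changed: DRF 2's `validate_<field>(self, attrs, source)` becomes DRF 3's `validate_<field>(self, value)`, returning the (possibly cleaned) value. An illustrative sketch:

```python
class DemoSerializer(serializers.Serializer):  # illustrative only
    username = serializers.CharField()

    # DRF 2.x: def validate_username(self, attrs, source): ... return attrs
    def validate_username(self, value):
        if value in ('all', '_meta'):
            raise serializers.ValidationError('Invalid username')
        return value
```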
@@ -714,6 +775,11 @@ class ProjectOptionsSerializer(BaseSerializer):
class Meta:
fields = ('*', 'local_path', 'scm_type', 'scm_url', 'scm_branch',
'scm_clean', 'scm_delete_on_update', 'credential')
+ extra_kwargs = {
+ 'scm_type': {
+ 'allow_null': True
+ }
+ }
def get_related(self, obj):
res = super(ProjectOptionsSerializer, self).get_related(obj)
@@ -722,24 +788,33 @@ class ProjectOptionsSerializer(BaseSerializer):
args=(obj.credential.pk,))
return res
- def validate_local_path(self, attrs, source):
+ def validate_scm_type(self, value):
+ return value or u''
+
+ def validate(self, attrs):
+ errors = {}
+
# Don't allow assigning a local_path used by another project.
# Don't allow assigning a local_path when scm_type is set.
valid_local_paths = Project.get_local_path_choices()
- if self.object:
- scm_type = attrs.get('scm_type', self.object.scm_type) or u''
+ if self.instance:
+ scm_type = attrs.get('scm_type', self.instance.scm_type) or u''
else:
scm_type = attrs.get('scm_type', u'') or u''
- if self.object and not scm_type:
- valid_local_paths.append(self.object.local_path)
+ if self.instance and not scm_type:
+ valid_local_paths.append(self.instance.local_path)
if scm_type:
- attrs.pop(source, None)
- if source in attrs and attrs[source] not in valid_local_paths:
- raise serializers.ValidationError('Invalid path choice')
- return attrs
+ attrs.pop('local_path', None)
+ if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths:
+ errors['local_path'] = 'Invalid path choice'
- def to_native(self, obj):
- ret = super(ProjectOptionsSerializer, self).to_native(obj)
+ if errors:
+ raise serializers.ValidationError(errors)
+
+ return super(ProjectOptionsSerializer, self).validate(attrs)
+
+ def to_representation(self, obj):
+ ret = super(ProjectOptionsSerializer, self).to_representation(obj)
if obj is not None and 'credential' in ret and (not obj.credential or not obj.credential.active):
ret['credential'] = None
return ret
@@ -747,17 +822,18 @@ class ProjectOptionsSerializer(BaseSerializer):
class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
- playbooks = serializers.Field(source='playbooks', help_text='Array of playbooks available within this project.')
- scm_delete_on_next_update = serializers.BooleanField(source='scm_delete_on_next_update', read_only=True)
- status = ChoiceField(source='status', choices=Project.PROJECT_STATUS_CHOICES, read_only=True, required=False)
- last_update_failed = serializers.BooleanField(source='last_update_failed', read_only=True)
- last_updated = serializers.DateTimeField(source='last_updated', read_only=True)
+ playbooks = serializers.ReadOnlyField(help_text='Array of playbooks available within this project.')
+ scm_delete_on_next_update = serializers.BooleanField(read_only=True)
+ status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True, required=False)
+ last_update_failed = serializers.BooleanField(read_only=True)
+ last_updated = serializers.DateTimeField(read_only=True)
class Meta:
model = Project
- fields = ('*', 'scm_delete_on_next_update', 'scm_update_on_launch',
+ fields = ('*', 'playbooks', 'scm_delete_on_next_update', 'scm_update_on_launch',
'scm_update_cache_timeout') + \
('last_update_failed', 'last_updated') # Backwards compatibility
+
def get_related(self, obj):
res = super(ProjectSerializer, self).get_related(obj)
@@ -786,14 +862,16 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
model = Project
fields = ('playbooks',)
- def to_native(self, obj):
- ret = super(ProjectPlaybooksSerializer, self).to_native(obj)
- return ret.get('playbooks', [])
+ @property
+ def data(self):
+ ret = super(ProjectPlaybooksSerializer, self).data
+ ret = ret.get('playbooks', [])
+ return ReturnList(ret, serializer=self)
class ProjectUpdateViewSerializer(ProjectSerializer):
- can_update = serializers.BooleanField(source='can_update', read_only=True)
+ can_update = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_update',)
@@ -821,7 +899,7 @@ class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSeriali
class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):
- can_cancel = serializers.BooleanField(source='can_cancel', read_only=True)
+ can_cancel = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_cancel',)
@@ -829,15 +907,15 @@ class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):
class BaseSerializerWithVariables(BaseSerializer):
- def validate_variables(self, attrs, source):
+ def validate_variables(self, value):
try:
- json.loads(attrs.get(source, '').strip() or '{}')
+ json.loads(value.strip() or '{}')
except ValueError:
try:
- yaml.safe_load(attrs[source])
+ yaml.safe_load(value)
except yaml.YAMLError:
raise serializers.ValidationError('Must be valid JSON or YAML')
- return attrs
+ return value
class InventorySerializer(BaseSerializerWithVariables):
@@ -868,8 +946,8 @@ class InventorySerializer(BaseSerializerWithVariables):
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res
- def to_native(self, obj):
- ret = super(InventorySerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(InventorySerializer, self).to_representation(obj)
if obj is not None and 'organization' in ret and (not obj.organization or not obj.organization.active):
ret['organization'] = None
return ret
@@ -880,7 +958,7 @@ class InventoryDetailSerializer(InventorySerializer):
class Meta:
fields = ('*', 'can_run_ad_hoc_commands')
- can_run_ad_hoc_commands = serializers.SerializerMethodField('get_can_run_ad_hoc_commands')
+ can_run_ad_hoc_commands = serializers.SerializerMethodField()
def get_can_run_ad_hoc_commands(self, obj):
view = self.context.get('view', None)
@@ -890,8 +968,7 @@ class InventoryDetailSerializer(InventorySerializer):
class InventoryScriptSerializer(InventorySerializer):
class Meta:
- fields = ('id',)
- exclude = ('id',)
+ fields = ()
class HostSerializer(BaseSerializerWithVariables):
@@ -901,7 +978,7 @@ class HostSerializer(BaseSerializerWithVariables):
fields = ('*', 'inventory', 'enabled', 'instance_id', 'variables',
'has_active_failures', 'has_inventory_sources', 'last_job',
'last_job_host_summary')
- readonly_fields = ('last_job', 'last_job_host_summary')
+ read_only_fields = ('last_job', 'last_job_host_summary')
def get_related(self, obj):
res = super(HostSerializer, self).get_related(obj)
@@ -951,25 +1028,25 @@ class HostSerializer(BaseSerializerWithVariables):
if port < 1 or port > 65535:
raise ValueError
except ValueError:
- raise serializers.ValidationError(u'Invalid port specification: %s' % unicode(port))
+ raise serializers.ValidationError(u'Invalid port specification: %s' % force_text(port))
return name, port
- def validate_name(self, attrs, source):
- name = unicode(attrs.get(source, ''))
+ def validate_name(self, value):
+ name = force_text(value or '')
# Validate here only, update in main validate method.
host, port = self._get_host_port_from_name(name)
- return attrs
+ return value
def validate(self, attrs):
- name = unicode(attrs.get('name', ''))
+ name = force_text(attrs.get('name', ''))
host, port = self._get_host_port_from_name(name)
if port:
attrs['name'] = host
- if self.object:
- variables = unicode(attrs.get('variables', self.object.variables) or '')
+ if self.instance:
+ variables = force_text(attrs.get('variables', self.instance.variables) or '')
else:
- variables = unicode(attrs.get('variables', ''))
+ variables = force_text(attrs.get('variables', ''))
try:
vars_dict = json.loads(variables.strip() or '{}')
vars_dict['ansible_ssh_port'] = port
@@ -984,10 +1061,10 @@ class HostSerializer(BaseSerializerWithVariables):
except (yaml.YAMLError, TypeError):
raise serializers.ValidationError('Must be valid JSON or YAML')
- return attrs
+ return super(HostSerializer, self).validate(attrs)
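
Worked example of the merge above, assuming a host posted with 'name': 'web1:2222': _get_host_port_from_name() splits the name, and validate() then rewrites the attrs roughly as follows (values illustrative).

    import json

    attrs = {'name': 'web1:2222'}
    host, port = 'web1', 2222          # as parsed from the name
    attrs['name'] = host
    vars_dict = {'ansible_ssh_port': port}
    attrs['variables'] = json.dumps(vars_dict)
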
- def to_native(self, obj):
- ret = super(HostSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(HostSerializer, self).to_representation(obj)
if not obj:
return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active):
@@ -1028,14 +1105,13 @@ class GroupSerializer(BaseSerializerWithVariables):
res['inventory_source'] = reverse('api:inventory_source_detail', args=(obj.inventory_source.pk,))
return res
- def validate_name(self, attrs, source):
- name = attrs.get(source, '')
- if name in ('all', '_meta'):
+ def validate_name(self, value):
+ if value in ('all', '_meta'):
raise serializers.ValidationError('Invalid group name')
- return attrs
+ return value
- def to_native(self, obj):
- ret = super(GroupSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(GroupSerializer, self).to_representation(obj)
if obj is not None and 'inventory' in ret and (not obj.inventory or not obj.inventory.active):
ret['inventory'] = None
return ret
@@ -1063,18 +1139,18 @@ class BaseVariableDataSerializer(BaseSerializer):
class Meta:
fields = ('variables',)
- def to_native(self, obj):
+ def to_representation(self, obj):
if obj is None:
return {}
- ret = super(BaseVariableDataSerializer, self).to_native(obj)
+ ret = super(BaseVariableDataSerializer, self).to_representation(obj)
try:
return json.loads(ret.get('variables', '') or '{}')
except ValueError:
return yaml.safe_load(ret.get('variables', ''))
- def from_native(self, data, files):
+ def to_internal_value(self, data):
data = {'variables': json.dumps(data)}
- return super(BaseVariableDataSerializer, self).from_native(data, files)
+ return super(BaseVariableDataSerializer, self).to_internal_value(data)
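
from_native(data, files) splits in DRF 3 into to_internal_value(data) inbound and to_representation(obj) outbound; the hunk above keeps the old behaviour of accepting an arbitrary dict and persisting it as a JSON string. A trimmed, hypothetical standalone version of that round-trip:

    import json

    from rest_framework import serializers

    class VariableDataSerializer(serializers.Serializer):
        variables = serializers.CharField(allow_blank=True)

        def to_internal_value(self, data):
            # Accept an arbitrary dict; store it as a JSON string.
            return super(VariableDataSerializer, self).to_internal_value(
                {'variables': json.dumps(data)})

        def to_representation(self, obj):
            # Render the stored string back out as a parsed dict.
            ret = super(VariableDataSerializer, self).to_representation(obj)
            return json.loads(ret.get('variables', '') or '{}')
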
class InventoryVariableDataSerializer(BaseVariableDataSerializer):
@@ -1100,14 +1176,13 @@ class CustomInventoryScriptSerializer(BaseSerializer):
model = CustomInventoryScript
fields = ('*', "script", "organization")
- def validate_script(self, attrs, source):
- script_contents = attrs.get(source, '')
- if not script_contents.startswith("#!"):
+ def validate_script(self, value):
+ if not value.startswith("#!"):
raise serializers.ValidationError('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python')
- return attrs
+ return value
- def to_native(self, obj):
- ret = super(CustomInventoryScriptSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(CustomInventoryScriptSerializer, self).to_representation(obj)
if obj is None:
return ret
request = self.context.get('request', None)
@@ -1128,6 +1203,14 @@ class InventorySourceOptionsSerializer(BaseSerializer):
class Meta:
fields = ('*', 'source', 'source_path', 'source_script', 'source_vars', 'credential',
'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars')
+ extra_kwargs = {
+ 'instance_filters': {
+ 'allow_null': True,
+ },
+ 'group_by': {
+ 'allow_null': True,
+ },
+ }
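
extra_kwargs is the DRF 3 replacement for redeclaring a model field just to flip one option. A sketch of the shape, assuming some ModelSerializer whose model has these columns (the model name is a placeholder, not part of this diff):

    class ExampleSerializer(serializers.ModelSerializer):
        class Meta:
            model = ExampleModel  # hypothetical model
            fields = ('name', 'instance_filters', 'group_by')
            extra_kwargs = {
                # tweak a single option without redeclaring the field
                'instance_filters': {'allow_null': True},
                'group_by': {'allow_null': True},
            }
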
def get_related(self, obj):
res = super(InventorySourceOptionsSerializer, self).get_related(obj)
@@ -1138,58 +1221,44 @@ class InventorySourceOptionsSerializer(BaseSerializer):
res['source_script'] = reverse('api:inventory_script_detail', args=(obj.source_script.pk,))
return res
- def validate_source(self, attrs, source):
- # TODO: Validate
- # src = attrs.get(source, '')
- # obj = self.object
- return attrs
-
- def validate_source_script(self, attrs, source):
- src = attrs.get(source, None)
- if 'source' in attrs and attrs.get('source', '') == 'custom':
- if src is None or src == '':
- raise serializers.ValidationError("source_script must be provided")
- try:
- if src.organization != self.object.inventory.organization:
- raise serializers.ValidationError("source_script does not belong to the same organization as the inventory")
- except Exception:
- # TODO: Log
- raise serializers.ValidationError("source_script doesn't exist")
- return attrs
-
- def validate_source_vars(self, attrs, source):
+ def validate_source_vars(self, value):
# source_env must be blank, a valid JSON or YAML dict, or ...
# FIXME: support key=value pairs.
try:
- json.loads(attrs.get(source, '').strip() or '{}')
- return attrs
+ json.loads((value or '').strip() or '{}')
+ return value
except ValueError:
pass
try:
- yaml.safe_load(attrs[source])
- return attrs
+ yaml.safe_load(value)
+ return value
except yaml.YAMLError:
pass
raise serializers.ValidationError('Must be valid JSON or YAML')
- def validate_source_regions(self, attrs, source):
- # FIXME
- return attrs
+ def validate(self, attrs):
+ # TODO: Validate source, validate source_regions
+ errors = {}
- def metadata(self):
- metadata = super(InventorySourceOptionsSerializer, self).metadata()
- field_opts = metadata.get('source_regions', {})
- for cp in ('azure', 'ec2', 'gce', 'rax'):
- get_regions = getattr(self.opts.model, 'get_%s_region_choices' % cp)
- field_opts['%s_region_choices' % cp] = get_regions()
- field_opts = metadata.get('group_by', {})
- for cp in ('ec2',):
- get_group_by_choices = getattr(self.opts.model, 'get_%s_group_by_choices' % cp)
- field_opts['%s_group_by_choices' % cp] = get_group_by_choices()
- return metadata
+ source_script = attrs.get('source_script', None)
+ if 'source' in attrs and attrs.get('source', '') == 'custom':
+ if source_script is None or source_script == '':
+ errors['source_script'] = 'source_script must be provided'
+ else:
+ try:
+ if source_script.organization != self.instance.inventory.organization:
+ errors['source_script'] = 'source_script does not belong to the same organization as the inventory'
+ except Exception:
+ # TODO: Log
+ errors['source_script'] = 'source_script doesn\'t exist'
- def to_native(self, obj):
- ret = super(InventorySourceOptionsSerializer, self).to_native(obj)
+ if errors:
+ raise serializers.ValidationError(errors)
+
+ return super(InventorySourceOptionsSerializer, self).validate(attrs)
+
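
Raising serializers.ValidationError with a dict rather than a string is what lets a single object-level validate() report per-field errors in DRF 3's usual {field: [messages]} response shape. A pared-down, hypothetical sketch of the pattern:

    class SourceSerializer(serializers.Serializer):
        source = serializers.CharField()
        source_script = serializers.CharField(required=False, allow_blank=True)

        def validate(self, attrs):
            errors = {}
            if attrs.get('source') == 'custom' and not attrs.get('source_script'):
                # keyed by field name, so clients see it on that field
                errors['source_script'] = 'source_script must be provided'
            if errors:
                raise serializers.ValidationError(errors)
            return super(SourceSerializer, self).validate(attrs)
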
+ def to_representation(self, obj):
+ ret = super(InventorySourceOptionsSerializer, self).to_representation(obj)
if obj is None:
return ret
if 'credential' in ret and (not obj.credential or not obj.credential.active):
@@ -1199,9 +1268,9 @@ class InventorySourceOptionsSerializer(BaseSerializer):
class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer):
- status = ChoiceField(source='status', choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True, required=False)
- last_update_failed = serializers.BooleanField(source='last_update_failed', read_only=True)
- last_updated = serializers.DateTimeField(source='last_updated', read_only=True)
+ status = serializers.ChoiceField(choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True, required=False)
+ last_update_failed = serializers.BooleanField(read_only=True)
+ last_updated = serializers.DateTimeField(read_only=True)
class Meta:
model = InventorySource
@@ -1233,8 +1302,8 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
args=(obj.last_update.pk,))
return res
- def to_native(self, obj):
- ret = super(InventorySourceSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(InventorySourceSerializer, self).to_representation(obj)
if obj is None:
return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active):
@@ -1246,7 +1315,7 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
class InventorySourceUpdateSerializer(InventorySourceSerializer):
- can_update = serializers.BooleanField(source='can_update', read_only=True)
+ can_update = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_update',)
@@ -1274,7 +1343,7 @@ class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSer
class InventoryUpdateCancelSerializer(InventoryUpdateSerializer):
- can_cancel = serializers.BooleanField(source='can_cancel', read_only=True)
+ can_cancel = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_cancel',)
@@ -1299,8 +1368,8 @@ class TeamSerializer(BaseSerializer):
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))
return res
- def to_native(self, obj):
- ret = super(TeamSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(TeamSerializer, self).to_representation(obj)
if obj is not None and 'organization' in ret and (not obj.organization or not obj.organization.active):
ret['organization'] = None
return ret
@@ -1338,10 +1407,11 @@ class PermissionSerializer(BaseSerializer):
if attrs.get('permission_type', None) in ('run', 'check') and not attrs.get('project', None):
raise serializers.ValidationError('project is required when '
'assigning deployment permissions')
- return attrs
- def to_native(self, obj):
- ret = super(PermissionSerializer, self).to_native(obj)
+ return super(PermissionSerializer, self).validate(attrs)
+
+ def to_representation(self, obj):
+ ret = super(PermissionSerializer, self).to_representation(obj)
if obj is None:
return ret
if 'user' in ret and (not obj.user or not obj.user.is_active):
@@ -1373,22 +1443,22 @@ class CredentialSerializer(BaseSerializer):
'become_method', 'become_username', 'become_password',
'vault_password')
- def to_native(self, obj):
- ret = super(CredentialSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(CredentialSerializer, self).to_representation(obj)
if obj is not None and 'user' in ret and (not obj.user or not obj.user.is_active):
ret['user'] = None
if obj is not None and 'team' in ret and (not obj.team or not obj.team.active):
ret['team'] = None
# Replace the actual encrypted value with the string $encrypted$.
for field in Credential.PASSWORD_FIELDS:
- if field in ret and unicode(ret[field]).startswith('$encrypted$'):
+ if field in ret and force_text(ret[field]).startswith('$encrypted$'):
ret[field] = '$encrypted$'
return ret
- def restore_object(self, attrs, instance=None):
+ def validate(self, attrs):
# If the value sent to the API startswith $encrypted$, ignore it.
for field in Credential.PASSWORD_FIELDS:
- if unicode(attrs.get(field, '')).startswith('$encrypted$'):
+ if force_text(attrs.get(field, '')).startswith('$encrypted$'):
attrs.pop(field, None)
# If creating a credential from a view that automatically sets the
@@ -1400,8 +1470,7 @@ class CredentialSerializer(BaseSerializer):
if parent_key == 'team':
attrs['user'] = None
- instance = super(CredentialSerializer, self).restore_object(attrs, instance)
- return instance
+ return super(CredentialSerializer, self).validate(attrs)
def get_related(self, obj):
res = super(CredentialSerializer, self).get_related(obj)
@@ -1436,8 +1505,8 @@ class JobOptionsSerializer(BaseSerializer):
args=(obj.cloud_credential.pk,))
return res
- def to_native(self, obj):
- ret = super(JobOptionsSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(JobOptionsSerializer, self).to_representation(obj)
if obj is None:
return ret
if 'inventory' in ret and (not obj.inventory or not obj.inventory.active):
@@ -1452,25 +1521,23 @@ class JobOptionsSerializer(BaseSerializer):
ret['cloud_credential'] = None
return ret
- def validate_project(self, attrs, source):
- project = attrs.get('project', None)
- if not project and attrs.get('job_type') != PERM_INVENTORY_SCAN:
- raise serializers.ValidationError("This field is required.")
- return attrs
+ def validate(self, attrs):
+ if 'project' in self.fields and 'playbook' in self.fields:
+ project = attrs.get('project', None)
+ playbook = attrs.get('playbook', '')
+ if not project and attrs.get('job_type') != PERM_INVENTORY_SCAN:
+ raise serializers.ValidationError({'project': 'This field is required.'})
+ if project and playbook and force_text(playbook) not in project.playbooks:
+ raise serializers.ValidationError({'playbook': 'Playbook not found for project'})
+ if project and not playbook:
+ raise serializers.ValidationError({'playbook': 'Must select playbook for project'})
- def validate_playbook(self, attrs, source):
- project = attrs.get('project', None)
- playbook = attrs.get('playbook', '')
- if project and playbook and smart_str(playbook) not in project.playbooks:
- raise serializers.ValidationError('Playbook not found for project')
- if project and not playbook:
- raise serializers.ValidationError('Must select playbook for project')
- return attrs
+ return super(JobOptionsSerializer, self).validate(attrs)
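
A DRF 3 field validator only ever sees its own value, so the old validate_project/validate_playbook pair, each of which inspected the other field, has to collapse into validate(attrs); the 'project' in self.fields guard keeps the check from firing in subclasses that trim the field list. Minimal sketch with hypothetical fields:

    class PairSerializer(serializers.Serializer):
        project = serializers.CharField(required=False, allow_blank=True)
        playbook = serializers.CharField(required=False, allow_blank=True)

        def validate(self, attrs):
            # cross-field rules live here, not in validate_<field>
            if 'project' in self.fields and 'playbook' in self.fields:
                if attrs.get('project') and not attrs.get('playbook'):
                    raise serializers.ValidationError(
                        {'playbook': 'Must select playbook for project'})
            return super(PairSerializer, self).validate(attrs)
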
class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
- status = ChoiceField(source='status', choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES, read_only=True, required=False)
+ status = serializers.ChoiceField(choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES, read_only=True, required=False)
class Meta:
model = JobTemplate
@@ -1511,17 +1578,19 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
d['recent_jobs'] = [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in obj.jobs.filter(active=True).order_by('-created')[:10]]
return d
- def validate_survey_enabled(self, attrs, source):
- survey_enabled = attrs[source] if source in attrs else False
- job_type = attrs['job_type'] if 'job_type' in attrs else None
+ def validate(self, attrs):
+ survey_enabled = attrs.get('survey_enabled', False)
+ job_type = attrs.get('job_type', None)
if survey_enabled and job_type == PERM_INVENTORY_SCAN:
- raise serializers.ValidationError("Survey Enabled can not be used with scan jobs")
- return attrs
+ raise serializers.ValidationError({'survey_enabled': 'Survey Enabled cannot be used with scan jobs'})
+
+ return super(JobTemplateSerializer, self).validate(attrs)
+
class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
- passwords_needed_to_start = serializers.Field(source='passwords_needed_to_start')
- ask_variables_on_launch = serializers.Field(source='ask_variables_on_launch')
+ passwords_needed_to_start = serializers.ReadOnlyField()
+ ask_variables_on_launch = serializers.ReadOnlyField()
class Meta:
model = Job
@@ -1546,10 +1615,10 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
res['relaunch'] = reverse('api:job_relaunch', args=(obj.pk,))
return res
- def from_native(self, data, files):
+ def to_internal_value(self, data):
# When creating a new job and a job template is specified, populate any
# fields not provided in data from the job template.
- if not self.object and isinstance(data, dict) and 'job_template' in data:
+ if not self.instance and isinstance(data, dict) and 'job_template' in data:
try:
job_template = JobTemplate.objects.get(pk=data['job_template'])
except JobTemplate.DoesNotExist:
@@ -1575,10 +1644,10 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
data.setdefault('force_handlers', job_template.force_handlers)
data.setdefault('skip_tags', job_template.skip_tags)
data.setdefault('start_at_task', job_template.start_at_task)
- return super(JobSerializer, self).from_native(data, files)
+ return super(JobSerializer, self).to_internal_value(data)
- def to_native(self, obj):
- ret = super(JobSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ ret = super(JobSerializer, self).to_representation(obj)
if obj is None:
return ret
if 'job_template' in ret and (not obj.job_template or not obj.job_template.active):
@@ -1599,7 +1668,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
class JobCancelSerializer(JobSerializer):
- can_cancel = serializers.BooleanField(source='can_cancel', read_only=True)
+ can_cancel = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_cancel',)
@@ -1607,13 +1676,20 @@ class JobCancelSerializer(JobSerializer):
class JobRelaunchSerializer(JobSerializer):
- passwords_needed_to_start = serializers.SerializerMethodField('get_passwords_needed_to_start')
+ passwords_needed_to_start = serializers.SerializerMethodField()
class Meta:
fields = ('passwords_needed_to_start',)
- def to_native(self, obj):
- res = super(JobRelaunchSerializer, self).to_native(obj)
+ def to_internal_value(self, data):
+ obj = self.context.get('obj')
+ all_data = self.to_representation(obj)
+ all_data.update(data)
+ ret = super(JobRelaunchSerializer, self).to_internal_value(all_data)
+ return ret
+
+ def to_representation(self, obj):
+ res = super(JobRelaunchSerializer, self).to_representation(obj)
view = self.context.get('view', None)
if hasattr(view, '_raw_data_form_marker'):
password_keys = dict([(p, u'') for p in self.get_passwords_needed_to_start(obj)])
@@ -1625,7 +1701,7 @@ class JobRelaunchSerializer(JobSerializer):
return obj.passwords_needed_to_start
return ''
- def validate_passwords_needed_to_start(self, attrs, source):
+ def validate_passwords_needed_to_start(self, value):
obj = self.context.get('obj')
data = self.context.get('data')
@@ -1634,7 +1710,7 @@ class JobRelaunchSerializer(JobSerializer):
provided = dict([(field, data.get(field, '')) for field in needed])
if not all(provided.values()):
raise serializers.ValidationError(needed)
- return attrs
+ return value
def validate(self, attrs):
obj = self.context.get('obj')
@@ -1644,19 +1720,35 @@ class JobRelaunchSerializer(JobSerializer):
raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined"]))
if obj.inventory is None or not obj.inventory.active:
raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined"]))
+ attrs = super(JobRelaunchSerializer, self).validate(attrs)
return attrs
class AdHocCommandSerializer(UnifiedJobSerializer):
- name = serializers.CharField(source='name', read_only=True)
- module_name = ChoiceField(source='module_name', label='module name', required=bool(not AdHocCommand.MODULE_NAME_DEFAULT), choices=AdHocCommand.MODULE_NAME_CHOICES, default=AdHocCommand.MODULE_NAME_DEFAULT)
-
class Meta:
model = AdHocCommand
fields = ('*', 'job_type', 'inventory', 'limit', 'credential',
'module_name', 'module_args', 'forks', 'verbosity',
- 'become_enabled')
- exclude = ('unified_job_template', 'description')
+ 'become_enabled', '-unified_job_template', '-description')
+ extra_kwargs = {
+ 'name': {
+ 'read_only': True,
+ },
+ }
+
+ def build_standard_field(self, field_name, model_field):
+ field_class, field_kwargs = super(AdHocCommandSerializer, self).build_standard_field(field_name, model_field)
+ # Load module name choices dynamically from DB settings.
+ if field_name == 'module_name':
+ field_class = serializers.ChoiceField
+ module_name_choices = [(x, x) for x in tower_settings.AD_HOC_COMMANDS]
+ module_name_default = 'command' if 'command' in [x[0] for x in module_name_choices] else ''
+ field_kwargs['choices'] = module_name_choices
+ field_kwargs['required'] = bool(not module_name_default)
+ field_kwargs['default'] = module_name_default or serializers.empty
+ field_kwargs['allow_blank'] = bool(module_name_default)
+ field_kwargs.pop('max_length', None)
+ return field_class, field_kwargs
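
build_standard_field(field_name, model_field) is the DRF 3 ModelSerializer hook used above: it returns the (field class, field kwargs) pair before the field is instantiated, which is what makes request-time choices possible. A stripped-down sketch; load_module_choices() is hypothetical:

    class DynamicChoicesSerializer(serializers.ModelSerializer):
        def build_standard_field(self, field_name, model_field):
            field_class, field_kwargs = super(
                DynamicChoicesSerializer, self).build_standard_field(
                    field_name, model_field)
            if field_name == 'module_name':
                field_class = serializers.ChoiceField
                field_kwargs['choices'] = load_module_choices()  # hypothetical
                # ChoiceField does not accept CharField's max_length kwarg
                field_kwargs.pop('max_length', None)
            return field_class, field_kwargs
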
def get_related(self, obj):
res = super(AdHocCommandSerializer, self).get_related(obj)
@@ -1672,14 +1764,14 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
res['relaunch'] = reverse('api:ad_hoc_command_relaunch', args=(obj.pk,))
return res
- def to_native(self, obj):
+ def to_representation(self, obj):
# In raw data form, populate limit field from host/group name.
view = self.context.get('view', None)
parent_model = getattr(view, 'parent_model', None)
if not (obj and obj.pk) and view and hasattr(view, '_raw_data_form_marker'):
if not obj:
- obj = self.opts.model()
- ret = super(AdHocCommandSerializer, self).to_native(obj)
+ obj = self.Meta.model()
+ ret = super(AdHocCommandSerializer, self).to_representation(obj)
# Hide inventory and limit fields from raw data, since they will be set
# automatically by sub list create view.
if not (obj and obj.pk) and view and hasattr(view, '_raw_data_form_marker'):
@@ -1699,7 +1791,7 @@ class AdHocCommandSerializer(UnifiedJobSerializer):
class AdHocCommandCancelSerializer(AdHocCommandSerializer):
- can_cancel = serializers.BooleanField(source='can_cancel', read_only=True)
+ can_cancel = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_cancel',)
@@ -1710,7 +1802,7 @@ class AdHocCommandRelaunchSerializer(AdHocCommandSerializer):
class Meta:
fields = ()
- def to_native(self, obj):
+ def to_representation(self, obj):
if obj:
return dict([(p, u'') for p in obj.passwords_needed_to_start])
else:
@@ -1749,7 +1841,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
class SystemJobCancelSerializer(SystemJobSerializer):
- can_cancel = serializers.BooleanField(source='can_cancel', read_only=True)
+ can_cancel = serializers.BooleanField(read_only=True)
class Meta:
fields = ('can_cancel',)
@@ -1793,7 +1885,7 @@ class JobHostSummarySerializer(BaseSerializer):
class JobEventSerializer(BaseSerializer):
event_display = serializers.CharField(source='get_event_display2', read_only=True)
- event_level = serializers.IntegerField(source='event_level', read_only=True)
+ event_level = serializers.IntegerField(read_only=True)
class Meta:
model = JobEvent
@@ -1837,6 +1929,15 @@ class AdHocCommandEventSerializer(BaseSerializer):
'counter', 'event_display', 'event_data', 'failed',
'changed', 'host', 'host_name')
+ def to_internal_value(self, data):
+ ret = super(AdHocCommandEventSerializer, self).to_internal_value(data)
+ # AdHocCommandAdHocCommandEventsList should be the only view creating
+ # AdHocCommandEvent instances, so keep the ad_hoc_command it sets, even
+ # though ad_hoc_command is a read-only field.
+ if 'ad_hoc_command' in data:
+ ret['ad_hoc_command'] = data['ad_hoc_command']
+ return ret
+
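
to_internal_value() drops read-only fields from the validated data, hence the explicit re-add above for the one trusted view that supplies it. Standalone sketch with hypothetical names:

    class EventSerializer(serializers.Serializer):
        counter = serializers.IntegerField()
        parent = serializers.IntegerField(read_only=True)

        def to_internal_value(self, data):
            ret = super(EventSerializer, self).to_internal_value(data)
            if 'parent' in data:
                # trusted internal caller set this; keep it despite read_only
                ret['parent'] = data['parent']
            return ret
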
def get_related(self, obj):
res = super(AdHocCommandEventSerializer, self).get_related(obj)
res.update(dict(
@@ -1846,13 +1947,14 @@ class AdHocCommandEventSerializer(BaseSerializer):
res['host'] = reverse('api:host_detail', args=(obj.host.pk,))
return res
+
class JobLaunchSerializer(BaseSerializer):
- passwords_needed_to_start = serializers.Field(source='passwords_needed_to_start')
- can_start_without_user_input = serializers.BooleanField(source='can_start_without_user_input', read_only=True)
- variables_needed_to_start = serializers.Field(source='variables_needed_to_start')
- credential_needed_to_start = serializers.SerializerMethodField('get_credential_needed_to_start')
- survey_enabled = serializers.SerializerMethodField('get_survey_enabled')
+ passwords_needed_to_start = serializers.ReadOnlyField()
+ can_start_without_user_input = serializers.BooleanField(read_only=True)
+ variables_needed_to_start = serializers.ReadOnlyField()
+ credential_needed_to_start = serializers.SerializerMethodField()
+ survey_enabled = serializers.SerializerMethodField()
class Meta:
model = JobTemplate
@@ -1860,10 +1962,10 @@ class JobLaunchSerializer(BaseSerializer):
'ask_variables_on_launch', 'survey_enabled', 'variables_needed_to_start',
'credential', 'credential_needed_to_start',)
read_only_fields = ('ask_variables_on_launch',)
- write_only_fields = ('credential','extra_vars',)
+ write_only_fields = ('credential', 'extra_vars',)
- def to_native(self, obj):
- res = super(JobLaunchSerializer, self).to_native(obj)
+ def to_representation(self, obj):
+ res = super(JobLaunchSerializer, self).to_representation(obj)
view = self.context.get('view', None)
if obj and hasattr(view, '_raw_data_form_marker'):
if obj.passwords_needed_to_start:
@@ -1881,32 +1983,25 @@ class JobLaunchSerializer(BaseSerializer):
return obj.survey_enabled and 'spec' in obj.survey_spec
return False
- def validate_credential(self, attrs, source):
+ def validate(self, attrs):
+ errors = {}
obj = self.context.get('obj')
- credential = attrs.get(source, None) or (obj and obj.credential)
- if not credential or not credential.active:
- raise serializers.ValidationError('Credential not provided')
- attrs[source] = credential
- return attrs
-
- def validate_passwords_needed_to_start(self, attrs, source):
- obj = self.context.get('obj')
- passwords = self.context.get('passwords')
data = self.context.get('data')
- credential = attrs.get('credential', None) or obj.credential
+ credential = attrs.get('credential', None) or (obj and obj.credential)
+ if not credential or not credential.active:
+ errors['credential'] = 'Credential not provided'
+
# fill passwords dict with request data passwords
if credential and credential.passwords_needed:
+ passwords = self.context.get('passwords')
try:
for p in credential.passwords_needed:
passwords[p] = data[p]
except KeyError:
- raise serializers.ValidationError(credential.passwords_needed)
- return attrs
+ errors['passwords_needed_to_start'] = credential.passwords_needed
- def validate(self, attrs):
- obj = self.context.get('obj')
- extra_vars = attrs.get('extra_vars', {})
+ extra_vars = force_text(attrs.get('extra_vars', {}))
try:
extra_vars = literal_eval(extra_vars)
extra_vars = json.dumps(extra_vars)
@@ -1919,7 +2014,7 @@ class JobLaunchSerializer(BaseSerializer):
try:
extra_vars = yaml.safe_load(extra_vars)
except (yaml.YAMLError, TypeError, AttributeError):
- raise serializers.ValidationError(dict(extra_vars=['Must be valid JSON or YAML']))
+ errors['extra_vars'] = 'Must be valid JSON or YAML'
if not isinstance(extra_vars, dict):
extra_vars = {}
@@ -1927,15 +2022,20 @@ class JobLaunchSerializer(BaseSerializer):
if self.get_survey_enabled(obj):
validation_errors = obj.survey_variable_validation(extra_vars)
if validation_errors:
- raise serializers.ValidationError(dict(variables_needed_to_start=validation_errors))
+ errors['variables_needed_to_start'] = validation_errors
if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None or not obj.project.active):
- raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined"]))
+ errors['project'] = 'Job Template Project is missing or undefined'
if obj.inventory is None or not obj.inventory.active:
- raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined"]))
+ errors['inventory'] = 'Job Template Inventory is missing or undefined'
+ if errors:
+ raise serializers.ValidationError(errors)
+
+ attrs = super(JobLaunchSerializer, self).validate(attrs)
return attrs
+
class ScheduleSerializer(BaseSerializer):
class Meta:
@@ -1951,11 +2051,10 @@ class ScheduleSerializer(BaseSerializer):
res['unified_job_template'] = obj.unified_job_template.get_absolute_url()
return res
- def validate_unified_job_template(self, attrs, source):
- ujt = attrs[source]
- if type(ujt) == InventorySource and ujt.source not in SCHEDULEABLE_PROVIDERS:
+ def validate_unified_job_template(self, value):
+ if type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS:
raise serializers.ValidationError('Inventory Source must be a cloud resource')
- return attrs
+ return value
# We reject rrules if:
# - DTSTART is not included
@@ -1967,8 +2066,8 @@ class ScheduleSerializer(BaseSerializer):
# - BYWEEKNO
# - Multiple DTSTART or RRULE elements
# - COUNT > 999
- def validate_rrule(self, attrs, source):
- rrule_value = attrs[source]
+ def validate_rrule(self, value):
+ rrule_value = value
multi_by_month_day = ".*?BYMONTHDAY[\:\=][0-9]+,-*[0-9]+"
multi_by_month = ".*?BYMONTH[\:\=][0-9]+,[0-9]+"
by_day_with_numeric_prefix = ".*?BYDAY[\:\=][0-9]+[a-zA-Z]{2}"
@@ -2008,12 +2107,12 @@ class ScheduleSerializer(BaseSerializer):
except Exception:
# TODO: Log
raise serializers.ValidationError("rrule parsing failed validation")
- return attrs
+ return value
class ActivityStreamSerializer(BaseSerializer):
- changes = serializers.SerializerMethodField('get_changes')
- object_association = serializers.SerializerMethodField('get_object_association')
+ changes = serializers.SerializerMethodField()
+ object_association = serializers.SerializerMethodField()
class Meta:
model = ActivityStream
@@ -2067,7 +2166,7 @@ class ActivityStreamSerializer(BaseSerializer):
return rel
def get_summary_fields(self, obj):
- summary_fields = SortedDict()
+ summary_fields = OrderedDict()
for fk, related_fields in SUMMARIZABLE_FK_FIELDS.items():
try:
if not hasattr(obj, fk):
@@ -2108,7 +2207,8 @@ class ActivityStreamSerializer(BaseSerializer):
first_name = obj.actor.first_name,
last_name = obj.actor.last_name)
return summary_fields
-
+
+
class TowerSettingsSerializer(BaseSerializer):
class Meta:
@@ -2116,32 +2216,43 @@ class TowerSettingsSerializer(BaseSerializer):
fields = ('key', 'description', 'category', 'value', 'value_type', 'user')
read_only_fields = ('description', 'category', 'value_type', 'user')
- def from_native(self, data, files):
+ def __init__(self, instance=None, data=serializers.empty, **kwargs):
+ if instance is None and data is not serializers.empty and 'key' in data:
+ try:
+ instance = TowerSettings.objects.get(key=data['key'])
+ except TowerSettings.DoesNotExist:
+ pass
+ super(TowerSettingsSerializer, self).__init__(instance, data, **kwargs)
+
+ def to_representation(self, obj):
+ ret = super(TowerSettingsSerializer, self).to_representation(obj)
+ ret['value'] = getattr(obj, 'value_converted', obj.value)
+ return ret
+
+ def to_internal_value(self, data):
if data['key'] not in settings.TOWER_SETTINGS_MANIFEST:
self._errors = {'key': 'Key {0} is not a valid settings key'.format(data['key'])}
return
- current_val = TowerSettings.objects.filter(key=data['key'])
- if current_val.exists():
- current_val.delete()
+ ret = super(TowerSettingsSerializer, self).to_internal_value(data)
manifest_val = settings.TOWER_SETTINGS_MANIFEST[data['key']]
- data['description'] = manifest_val['description']
- data['category'] = manifest_val['category']
- data['value_type'] = manifest_val['type']
- return super(TowerSettingsSerializer, self).from_native(data, files)
+ ret['description'] = manifest_val['description']
+ ret['category'] = manifest_val['category']
+ ret['value_type'] = manifest_val['type']
+ return ret
def validate(self, attrs):
manifest = settings.TOWER_SETTINGS_MANIFEST
if attrs['key'] not in manifest:
raise serializers.ValidationError(dict(key=["Key {0} is not a valid settings key".format(attrs['key'])]))
# TODO: Type checking/coercion, contextual validation
- return attrs
+ return super(TowerSettingsSerializer, self).validate(attrs)
+
+ def _create(self, validated_data):
+ current_val = TowerSettings.objects.filter(key=validated_data['key'])
+ if current_val.exists():
+ return self.update(current_val[0], validated_data)
+ return super(TowerSettingsSerializer, self).create(validated_data)
- def save_object(self, obj, **kwargs):
- manifest_val = settings.TOWER_SETTINGS_MANIFEST[obj.key]
- obj.description = manifest_val['description']
- obj.category = manifest_val['category']
- obj.value_type = manifest_val['type']
- return super(TowerSettingsSerializer, self).save_object(obj, **kwargs)
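
save_object() no longer exists in DRF 3, so the old delete-then-recreate upsert becomes two hooks above: __init__ binds the serializer to an existing row when the natural key is already present (so save() routes to update()), and _create falls back to create() otherwise. A compact, hypothetical sketch of the binding half:

    class UpsertByKeySerializer(serializers.ModelSerializer):
        def __init__(self, instance=None, data=serializers.empty, **kwargs):
            if instance is None and data is not serializers.empty and 'key' in data:
                # bind to the existing row so save() calls update()
                instance = self.Meta.model.objects.filter(key=data['key']).first()
            super(UpsertByKeySerializer, self).__init__(instance, data, **kwargs)
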
class AuthTokenSerializer(serializers.Serializer):
diff --git a/awx/api/test_api.py b/awx/api/test_api.py
new file mode 100644
index 0000000000..d11a45e088
--- /dev/null
+++ b/awx/api/test_api.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+from awx.api.tests import * # noqa
diff --git a/awx/api/utils/decorators.py b/awx/api/utils/decorators.py
index 8b25c25baf..223125597c 100644
--- a/awx/api/utils/decorators.py
+++ b/awx/api/utils/decorators.py
@@ -1,7 +1,7 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
-from ordereddict import OrderedDict
+from collections import OrderedDict
import copy
import functools
@@ -23,21 +23,22 @@ def paginated(method):
def func(self, request, *args, **kwargs):
# Manually spin up pagination.
# How many results do we show?
- limit = api_settings.PAGINATE_BY
- if request.QUERY_PARAMS.get(api_settings.PAGINATE_BY_PARAM, False):
- limit = request.QUERY_PARAMS[api_settings.PAGINATE_BY_PARAM]
- if api_settings.MAX_PAGINATE_BY:
- limit = min(api_settings.MAX_PAGINATE_BY, limit)
+ paginator_class = api_settings.DEFAULT_PAGINATION_CLASS
+ limit = paginator_class.page_size
+ if request.query_params.get(paginator_class.page_size_query_param, False):
+ limit = request.query_params[paginator_class.page_size_query_param]
+ if paginator_class.max_page_size:
+ limit = min(paginator_class.max_page_size, limit)
limit = int(limit)
# Get the order parameter if it's given
- if request.QUERY_PARAMS.get("ordering", False):
- ordering = request.QUERY_PARAMS["ordering"]
+ if request.query_params.get("ordering", False):
+ ordering = request.query_params["ordering"]
else:
ordering = None
# What page are we on?
- page = int(request.QUERY_PARAMS.get('page', 1))
+ page = int(request.query_params.get('page', 1))
offset = (page - 1) * limit
# Add the limit, offset, page, and order variables to the keyword arguments
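
Only the setting names change here (PAGINATE_BY and friends become page_size, page_size_query_param, and max_page_size on DEFAULT_PAGINATION_CLASS); the arithmetic is unchanged. Assuming page_size=25 and max_page_size=200, a request for ?page_size=500&page=3 works out to:

    limit = min(200, int('500'))   # clamped by max_page_size -> 200
    page = 3
    offset = (page - 1) * limit    # rows to skip -> 400
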
diff --git a/awx/api/views.py b/awx/api/views.py
index fc085acf89..9a41e779ea 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -12,6 +12,7 @@ import socket
import sys
import errno
from base64 import b64encode
+from collections import OrderedDict
# Django
from django.conf import settings
@@ -21,7 +22,7 @@ from django.core.exceptions import FieldError
from django.db.models import Q, Count
from django.db import IntegrityError, transaction
from django.shortcuts import get_object_or_404
-from django.utils.datastructures import SortedDict
+from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
from django.utils.timezone import now
from django.views.decorators.csrf import csrf_exempt
@@ -31,14 +32,16 @@ from django.http import HttpResponse
# Django REST Framework
from rest_framework.exceptions import PermissionDenied, ParseError
-from rest_framework.parsers import YAMLParser
from rest_framework.permissions import AllowAny, IsAuthenticated
-from rest_framework.renderers import YAMLRenderer
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.views import exception_handler
from rest_framework import status
+# Django REST Framework YAML
+from rest_framework_yaml.parsers import YAMLParser
+from rest_framework_yaml.renderers import YAMLRenderer
+
# MongoEngine
import mongoengine
@@ -71,7 +74,7 @@ from awx.fact.models import * # noqa
from awx.main.utils import emit_websocket_notification
from awx.main.conf import tower_settings
-def api_exception_handler(exc):
+def api_exception_handler(exc, context):
'''
Override default API exception handler to catch IntegrityError exceptions.
'''
@@ -79,8 +82,7 @@ def api_exception_handler(exc):
exc = ParseError(exc.args[0])
if isinstance(exc, FieldError):
exc = ParseError(exc.args[0])
- return exception_handler(exc)
-
+ return exception_handler(exc, context)
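
DRF 3 invokes the exception handler with a second context argument, hence the signature change. The handler itself is wired up through settings; assuming it stays in this module, the wiring would look like:

    REST_FRAMEWORK = {
        # dotted path to the function defined above
        'EXCEPTION_HANDLER': 'awx.api.views.api_exception_handler',
    }
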
class ApiRootView(APIView):
@@ -110,7 +112,7 @@ class ApiV1RootView(APIView):
def get(self, request, format=None):
''' list top level resources '''
- data = SortedDict()
+ data = OrderedDict()
data['authtoken'] = reverse('api:auth_token_view')
data['ping'] = reverse('api:api_v1_ping_view')
data['config'] = reverse('api:api_v1_config_view')
@@ -224,20 +226,20 @@ class ApiV1ConfigView(APIView):
def post(self, request):
if not request.user.is_superuser:
return Response(None, status=status.HTTP_404_NOT_FOUND)
- if not type(request.DATA) == dict:
+ if not type(request.data) == dict:
return Response({"error": "Invalid license data"}, status=status.HTTP_400_BAD_REQUEST)
- if "eula_accepted" not in request.DATA:
+ if "eula_accepted" not in request.data:
return Response({"error": "Missing 'eula_accepted' property"}, status=status.HTTP_400_BAD_REQUEST)
try:
- eula_accepted = to_python_boolean(request.DATA["eula_accepted"])
+ eula_accepted = to_python_boolean(request.data["eula_accepted"])
except ValueError:
return Response({"error": "'eula_accepted' value is invalid"}, status=status.HTTP_400_BAD_REQUEST)
if not eula_accepted:
return Response({"error": "'eula_accepted' must be True"}, status=status.HTTP_400_BAD_REQUEST)
- request.DATA.pop("eula_accepted")
+ request.data.pop("eula_accepted")
try:
- data_actual = json.dumps(request.DATA)
+ data_actual = json.dumps(request.data)
except Exception:
# FIX: Log
return Response({"error": "Invalid JSON"}, status=status.HTTP_400_BAD_REQUEST)
@@ -306,7 +308,7 @@ class DashboardView(APIView):
def get(self, request, format=None):
''' Show Dashboard Details '''
- data = SortedDict()
+ data = OrderedDict()
data['related'] = {'jobs_graph': reverse('api:dashboard_jobs_graph_view'),
'inventory_graph': reverse('api:dashboard_inventory_graph_view')}
user_inventory = get_user_queryset(request.user, Inventory)
@@ -411,8 +413,8 @@ class DashboardJobsGraphView(APIView):
new_in_200 = True
def get(self, request, format=None):
- period = request.QUERY_PARAMS.get('period', 'month')
- job_type = request.QUERY_PARAMS.get('job_type', 'all')
+ period = request.query_params.get('period', 'month')
+ job_type = request.query_params.get('job_type', 'all')
user_unified_jobs = get_user_queryset(request.user, UnifiedJob)
@@ -460,7 +462,7 @@ class DashboardInventoryGraphView(APIView):
new_in_200 = True
def get(self, request, format=None):
- period = request.QUERY_PARAMS.get('period', 'month')
+ period = request.query_params.get('period', 'month')
end_date = now()
if period == 'month':
@@ -476,7 +478,7 @@ class DashboardInventoryGraphView(APIView):
start_date = start_date.replace(minute=0, second=0, microsecond=0)
delta = dateutil.relativedelta.relativedelta(hours=1)
else:
- raise ParseError(u'Unknown period "%s"' % unicode(period))
+ raise ParseError(u'Unknown period "%s"' % force_text(period))
host_stats = []
date = start_date
@@ -527,7 +529,7 @@ class AuthView(APIView):
new_in_240 = True
def get(self, request):
- data = SortedDict()
+ data = OrderedDict()
err_backend, err_message = request.session.get('social_auth_error', (None, None))
auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS).items()
# Return auth backends in consistent order: Google, GitHub, SAML.
@@ -567,27 +569,27 @@ class AuthTokenView(APIView):
model = AuthToken
def post(self, request):
- serializer = self.serializer_class(data=request.DATA)
+ serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
request_hash = AuthToken.get_request_hash(self.request)
try:
- token = AuthToken.objects.filter(user=serializer.object['user'],
+ token = AuthToken.objects.filter(user=serializer.validated_data['user'],
request_hash=request_hash,
expires__gt=now(),
reason='')[0]
token.refresh()
except IndexError:
- token = AuthToken.objects.create(user=serializer.object['user'],
+ token = AuthToken.objects.create(user=serializer.validated_data['user'],
request_hash=request_hash)
# Get user un-expired tokens that are not invalidated that are
# over the configured limit.
# Mark them as invalid and inform the user
- invalid_tokens = AuthToken.get_tokens_over_limit(serializer.object['user'])
+ invalid_tokens = AuthToken.get_tokens_over_limit(serializer.validated_data['user'])
for t in invalid_tokens:
# TODO: send socket notification
emit_websocket_notification('/socket.io/control',
'limit_reached',
- dict(reason=unicode(AuthToken.reason_long('limit_reached'))),
+ dict(reason=force_text(AuthToken.reason_long('limit_reached'))),
token_key=t.key)
t.invalidate(reason='limit_reached')
@@ -769,7 +771,7 @@ class ProjectList(ListCreateAPIView):
# Not optimal, but make sure the project status and last_updated fields
# are up to date here...
projects_qs = Project.objects.filter(active=True)
- projects_qs = projects_qs.select_related('current_update', 'last_updated')
+ projects_qs = projects_qs.select_related('current_job', 'last_job')
for project in projects_qs:
project._set_status_and_last_job_run()
return super(ProjectList, self).get(request, *args, **kwargs)
@@ -994,15 +996,15 @@ class UserDetail(RetrieveUpdateDestroyAPIView):
def update_filter(self, request, *args, **kwargs):
''' make sure non-read-only fields that can only be edited by admins, are only edited by admins '''
obj = self.get_object()
- can_change = request.user.can_access(User, 'change', obj, request.DATA)
- can_admin = request.user.can_access(User, 'admin', obj, request.DATA)
+ can_change = request.user.can_access(User, 'change', obj, request.data)
+ can_admin = request.user.can_access(User, 'admin', obj, request.data)
if can_change and not can_admin:
admin_only_edit_fields = ('last_name', 'first_name', 'username',
'is_active', 'is_superuser')
changed = {}
for field in admin_only_edit_fields:
left = getattr(obj, field, None)
- right = request.DATA.get(field, None)
+ right = request.data.get(field, None)
if left is not None and right is not None and left != right:
changed[field] = (left, right)
if changed:
@@ -1061,11 +1063,11 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
serializer_class = CustomInventoryScriptSerializer
def destroy(self, request, *args, **kwargs):
- obj = self.get_object()
- can_delete = request.user.can_access(self.model, 'delete', obj)
+ instance = self.get_object()
+ can_delete = request.user.can_access(self.model, 'delete', instance)
if not can_delete:
raise PermissionDenied("Cannot delete inventory script")
- for inv_src in InventorySource.objects.filter(source_script=obj):
+ for inv_src in InventorySource.objects.filter(source_script=instance):
inv_src.source_script = None
inv_src.save()
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)
@@ -1137,10 +1139,10 @@ class InventorySingleFactView(MongoAPIView):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
- fact_key = request.QUERY_PARAMS.get("fact_key", None)
- fact_value = request.QUERY_PARAMS.get("fact_value", None)
- datetime_spec = request.QUERY_PARAMS.get("timestamp", None)
- module_spec = request.QUERY_PARAMS.get("module", None)
+ fact_key = request.query_params.get("fact_key", None)
+ fact_value = request.query_params.get("fact_value", None)
+ datetime_spec = request.query_params.get("timestamp", None)
+ module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
@@ -1231,9 +1233,9 @@ class HostFactVersionsList(MongoListAPIView):
filter_backends = (MongoFilterBackend,)
def get_queryset(self):
- from_spec = self.request.QUERY_PARAMS.get('from', None)
- to_spec = self.request.QUERY_PARAMS.get('to', None)
- module_spec = self.request.QUERY_PARAMS.get('module', None)
+ from_spec = self.request.query_params.get('from', None)
+ to_spec = self.request.query_params.get('to', None)
+ module_spec = self.request.query_params.get('module', None)
if not feature_enabled("system_tracking"):
raise LicenseForbids("Your license does not permit use "
@@ -1285,10 +1287,10 @@ class HostSingleFactView(MongoAPIView):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
- fact_key = request.QUERY_PARAMS.get("fact_key", None)
- fact_value = request.QUERY_PARAMS.get("fact_value", None)
- datetime_spec = request.QUERY_PARAMS.get("timestamp", None)
- module_spec = request.QUERY_PARAMS.get("module", None)
+ fact_key = request.query_params.get("fact_key", None)
+ fact_value = request.query_params.get("fact_value", None)
+ datetime_spec = request.query_params.get("timestamp", None)
+ module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
@@ -1310,8 +1312,8 @@ class HostFactCompareView(MongoAPIView):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
- datetime_spec = request.QUERY_PARAMS.get('datetime', None)
- module_spec = request.QUERY_PARAMS.get('module', "ansible")
+ datetime_spec = request.query_params.get('datetime', None)
+ module_spec = request.query_params.get('module', "ansible")
datetime_actual = dateutil.parser.parse(datetime_spec) if datetime_spec is not None else now()
host_obj = self.get_parent_object()
@@ -1333,7 +1335,7 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
relationship = 'children'
def unattach(self, request, *args, **kwargs):
- sub_id = request.DATA.get('id', None)
+ sub_id = request.data.get('id', None)
if sub_id is not None:
return super(GroupChildrenList, self).unattach(request, *args, **kwargs)
parent = self.get_parent_object()
@@ -1345,7 +1347,7 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
Special case for disassociating a child group from the parent. If the
child group has no more parents, then automatically mark it inactive.
'''
- sub_id = request.DATA.get('id', None)
+ sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg='"id" is required to disassociate')
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -1394,12 +1396,12 @@ class GroupHostsList(SubListCreateAttachDetachAPIView):
def create(self, request, *args, **kwargs):
parent_group = Group.objects.get(id=self.kwargs['pk'])
- existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.DATA['name'])
- if existing_hosts.count() > 0 and ('variables' not in request.DATA or
- request.DATA['variables'] == '' or
- request.DATA['variables'] == '{}' or
- request.DATA['variables'] == '---'):
- request.DATA['id'] = existing_hosts[0].id
+ existing_hosts = Host.objects.filter(inventory=parent_group.inventory, name=request.data['name'])
+ if existing_hosts.count() > 0 and ('variables' not in request.data or
+ request.data['variables'] == '' or
+ request.data['variables'] == '{}' or
+ request.data['variables'] == '---'):
+ request.data['id'] = existing_hosts[0].id
return self.attach(request, *args, **kwargs)
return super(GroupHostsList, self).create(request, *args, **kwargs)
@@ -1483,10 +1485,10 @@ class GroupSingleFactView(MongoAPIView):
raise LicenseForbids('Your license does not permit use '
'of system tracking.')
- fact_key = request.QUERY_PARAMS.get("fact_key", None)
- fact_value = request.QUERY_PARAMS.get("fact_value", None)
- datetime_spec = request.QUERY_PARAMS.get("timestamp", None)
- module_spec = request.QUERY_PARAMS.get("module", None)
+ fact_key = request.query_params.get("fact_key", None)
+ fact_value = request.query_params.get("fact_value", None)
+ datetime_spec = request.query_params.get("timestamp", None)
+ module_spec = request.query_params.get("module", None)
if fact_key is None or fact_value is None or module_spec is None:
return Response({"error": "Missing fields"}, status=status.HTTP_400_BAD_REQUEST)
@@ -1547,33 +1549,33 @@ class InventoryScriptView(RetrieveAPIView):
filter_backends = ()
def retrieve(self, request, *args, **kwargs):
- self.object = self.get_object()
- hostname = request.QUERY_PARAMS.get('host', '')
- hostvars = bool(request.QUERY_PARAMS.get('hostvars', ''))
- show_all = bool(request.QUERY_PARAMS.get('all', ''))
+ obj = self.get_object()
+ hostname = request.query_params.get('host', '')
+ hostvars = bool(request.query_params.get('hostvars', ''))
+ show_all = bool(request.query_params.get('all', ''))
if show_all:
hosts_q = dict(active=True)
else:
hosts_q = dict(active=True, enabled=True)
if hostname:
- host = get_object_or_404(self.object.hosts, name=hostname, **hosts_q)
+ host = get_object_or_404(obj.hosts, name=hostname, **hosts_q)
data = host.variables_dict
else:
- data = SortedDict()
- if self.object.variables_dict:
- all_group = data.setdefault('all', SortedDict())
- all_group['vars'] = self.object.variables_dict
+ data = OrderedDict()
+ if obj.variables_dict:
+ all_group = data.setdefault('all', OrderedDict())
+ all_group['vars'] = obj.variables_dict
# Add hosts without a group to the all group.
- groupless_hosts_qs = self.object.hosts.filter(groups__isnull=True, **hosts_q).order_by('name')
+ groupless_hosts_qs = obj.hosts.filter(groups__isnull=True, **hosts_q).order_by('name')
groupless_hosts = list(groupless_hosts_qs.values_list('name', flat=True))
if groupless_hosts:
- all_group = data.setdefault('all', SortedDict())
+ all_group = data.setdefault('all', OrderedDict())
all_group['hosts'] = groupless_hosts
# Build in-memory mapping of groups and their hosts.
- group_hosts_kw = dict(group__inventory_id=self.object.id, group__active=True,
- host__inventory_id=self.object.id, host__active=True)
+ group_hosts_kw = dict(group__inventory_id=obj.id, group__active=True,
+ host__inventory_id=obj.id, host__active=True)
if 'enabled' in hosts_q:
group_hosts_kw['host__enabled'] = hosts_q['enabled']
group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
@@ -1586,8 +1588,8 @@ class InventoryScriptView(RetrieveAPIView):
# Build in-memory mapping of groups and their children.
group_parents_qs = Group.parents.through.objects.filter(
- from_group__inventory_id=self.object.id, from_group__active=True,
- to_group__inventory_id=self.object.id, to_group__active=True,
+ from_group__inventory_id=obj.id, from_group__active=True,
+ to_group__inventory_id=obj.id, to_group__active=True,
)
group_parents_qs = group_parents_qs.order_by('from_group__name')
group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name', 'to_group_id')
@@ -1597,28 +1599,27 @@ class InventoryScriptView(RetrieveAPIView):
group_children.append(from_group_name)
# Now use in-memory maps to build up group info.
- for group in self.object.groups.filter(active=True):
- group_info = SortedDict()
+ for group in obj.groups.filter(active=True):
+ group_info = OrderedDict()
group_info['hosts'] = group_hosts_map.get(group.id, [])
group_info['children'] = group_children_map.get(group.id, [])
group_info['vars'] = group.variables_dict
data[group.name] = group_info
if hostvars:
- data.setdefault('_meta', SortedDict())
- data['_meta'].setdefault('hostvars', SortedDict())
- for host in self.object.hosts.filter(**hosts_q):
+ data.setdefault('_meta', OrderedDict())
+ data['_meta'].setdefault('hostvars', OrderedDict())
+ for host in obj.hosts.filter(**hosts_q):
data['_meta']['hostvars'][host.name] = host.variables_dict
# workaround for Ansible inventory bug (github #3687), localhost
# must be explicitly listed in the all group for dynamic inventory
# scripts to pick it up.
localhost_names = ('localhost', '127.0.0.1', '::1')
- localhosts_qs = self.object.hosts.filter(name__in=localhost_names,
- **hosts_q)
+ localhosts_qs = obj.hosts.filter(name__in=localhost_names, **hosts_q)
localhosts = list(localhosts_qs.values_list('name', flat=True))
if localhosts:
- all_group = data.setdefault('all', SortedDict())
+ all_group = data.setdefault('all', OrderedDict())
all_group_hosts = all_group.get('hosts', [])
all_group_hosts.extend(localhosts)
all_group['hosts'] = sorted(set(all_group_hosts))
@@ -1657,13 +1658,6 @@ class InventoryTreeView(RetrieveAPIView):
group_children_map)
return Response(tree_data)
- def get_description_context(self):
- d = super(InventoryTreeView, self).get_description_context()
- d.update({
- 'serializer_fields': GroupTreeSerializer().metadata(),
- })
- return d
-
class InventoryInventorySourcesList(SubListAPIView):
model = InventorySource
@@ -1828,23 +1822,23 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()
- if 'credential' not in request.DATA and 'credential_id' in request.DATA:
- request.DATA['credential'] = request.DATA['credential_id']
+ if 'credential' not in request.data and 'credential_id' in request.data:
+ request.data['credential'] = request.data['credential_id']
passwords = {}
- serializer = self.serializer_class(data=request.DATA, context={'obj': obj, 'data': request.DATA, 'passwords': passwords})
+ serializer = self.serializer_class(instance=obj, data=request.data, context={'obj': obj, 'data': request.data, 'passwords': passwords})
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
- # At this point, a credential is gauranteed to exist at serializer.object.credential
- if not request.user.can_access(Credential, 'read', serializer.object.credential):
+ # At this point, a credential is guaranteed to exist at serializer.instance.credential
+ if not request.user.can_access(Credential, 'read', serializer.instance.credential):
raise PermissionDenied()
kv = {
- 'credential': serializer.object.credential.pk,
+ 'credential': serializer.instance.credential.pk,
}
- if 'extra_vars' in request.DATA:
- kv['extra_vars'] = request.DATA['extra_vars']
+ if 'extra_vars' in request.data:
+ kv['extra_vars'] = request.data['extra_vars']
kv.update(passwords)
new_job = obj.create_unified_job(**kv)
@@ -1892,7 +1886,7 @@ class JobTemplateSurveySpec(GenericAPIView):
if not request.user.can_access(self.model, 'change', obj, None):
raise PermissionDenied()
try:
- obj.survey_spec = json.dumps(request.DATA)
+ obj.survey_spec = json.dumps(request.data)
except ValueError:
# TODO: Log
return Response(dict(error="Invalid JSON when parsing survey spec"), status=status.HTTP_400_BAD_REQUEST)
@@ -2040,7 +2034,7 @@ class JobTemplateCallback(GenericAPIView):
def post(self, request, *args, **kwargs):
extra_vars = None
if request.content_type == "application/json":
- extra_vars = request.DATA.get("extra_vars", None)
+ extra_vars = request.data.get("extra_vars", None)
# Permission class should have already validated host_config_key.
job_template = self.get_object()
# Attempt to find matching hosts based on remote address.
@@ -2144,8 +2138,8 @@ class SystemJobTemplateLaunch(GenericAPIView):
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()
- new_job = obj.create_unified_job(**request.DATA)
- new_job.signal_start(**request.DATA)
+ new_job = obj.create_unified_job(**request.data)
+ new_job.signal_start(**request.data)
data = dict(system_job=new_job.id)
return Response(data, status=status.HTTP_202_ACCEPTED)
@@ -2223,7 +2217,7 @@ class JobStart(GenericAPIView):
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()
if obj.can_start:
- result = obj.signal_start(**request.DATA)
+ result = obj.signal_start(**request.data)
if not result:
data = dict(passwords_needed_to_start=obj.passwords_needed_to_start)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -2262,15 +2256,15 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
if not request.user.can_access(self.model, 'start', obj):
raise PermissionDenied()
- # Note: is_valid() may modify request.DATA
+ # Note: is_valid() may modify request.data
# It will remove any key/value pair whose key is not in the 'passwords_needed_to_start' list
- serializer = self.serializer_class(data=request.DATA, context={'obj': obj, 'data': request.DATA})
+ serializer = self.serializer_class(data=request.data, context={'obj': obj, 'data': request.data})
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
obj.launch_type = 'relaunch'
new_job = obj.copy()
- result = new_job.signal_start(**request.DATA)
+ result = new_job.signal_start(**request.data)
if not result:
data = dict(passwords_needed_to_start=new_job.passwords_needed_to_start)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -2357,13 +2351,11 @@ class JobJobEventsList(BaseJobEventsList):
# Post allowed for job event callback only.
def post(self, request, *args, **kwargs):
parent_obj = get_object_or_404(self.parent_model, pk=self.kwargs['pk'])
- data = request.DATA.copy()
+ data = request.data.copy()
data['job'] = parent_obj.pk
serializer = self.get_serializer(data=data)
if serializer.is_valid():
- self.pre_save(serializer.object)
- self.object = serializer.save(force_insert=True)
- self.post_save(self.object, created=True)
+ self.instance = serializer.save()
headers = {'Location': serializer.data['url']}
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
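The rewritten post() above follows the DRF 3 create flow: serializer.object and the view-level pre_save()/post_save() hooks are gone, and serializer.save() both persists and returns the new instance. A minimal standalone sketch of where that per-instance logic now lives (hypothetical serializer; create() returns a dict just to keep the example self-contained):

from django.conf import settings
settings.configure()
from rest_framework import serializers

class EventSketchSerializer(serializers.Serializer):  # hypothetical stand-in
    job = serializers.IntegerField()
    event = serializers.CharField()

    def create(self, validated_data):
        # DRF 2's pre_save()/save(force_insert=True)/post_save() collapse
        # into create(); save() returns whatever this returns.
        return dict(validated_data)

ser = EventSketchSerializer(data={'job': 1, 'event': 'runner_on_ok'})
assert ser.is_valid()
instance = ser.save()
assert instance['job'] == 1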
@@ -2392,16 +2384,16 @@ class JobJobPlaysList(BaseJobEventsList):
# doing this here for the moment until/unless we need to implement more
# complex filtering (since we aren't under a serializer)
- if "id__in" in request.QUERY_PARAMS:
- qs = qs.filter(id__in=[int(filter_id) for filter_id in request.QUERY_PARAMS["id__in"].split(",")])
- elif "id__gt" in request.QUERY_PARAMS:
- qs = qs.filter(id__gt=request.QUERY_PARAMS['id__gt'])
- elif "id__lt" in request.QUERY_PARAMS:
- qs = qs.filter(id__lt=request.QUERY_PARAMS['id__lt'])
- if "failed" in request.QUERY_PARAMS:
- qs = qs.filter(failed=(request.QUERY_PARAMS['failed'].lower() == 'true'))
- if "play__icontains" in request.QUERY_PARAMS:
- qs = qs.filter(play__icontains=request.QUERY_PARAMS['play__icontains'])
+ if "id__in" in request.query_params:
+ qs = qs.filter(id__in=[int(filter_id) for filter_id in request.query_params["id__in"].split(",")])
+ elif "id__gt" in request.query_params:
+ qs = qs.filter(id__gt=request.query_params['id__gt'])
+ elif "id__lt" in request.query_params:
+ qs = qs.filter(id__lt=request.query_params['id__lt'])
+ if "failed" in request.query_params:
+ qs = qs.filter(failed=(request.query_params['failed'].lower() == 'true'))
+ if "play__icontains" in request.query_params:
+ qs = qs.filter(play__icontains=request.query_params['play__icontains'])
count = qs.count()
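One caveat the rename does not change: the id__in parsing above calls int() on raw client input, which raises ValueError on malformed values and surfaces as a 500 rather than a 400. A hedged sketch of a more defensive parse, purely illustrative:

def parse_id_in(raw):
    # Returns a list of ints, or None when any element is malformed.
    try:
        return [int(part) for part in raw.split(',') if part]
    except ValueError:
        return None

assert parse_id_in('1,2,3') == [1, 2, 3]
assert parse_id_in('1,oops') is None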
@@ -2465,10 +2457,10 @@ class JobJobTasksList(BaseJobEventsList):
return ({'detail': 'job not found'}, -1, status.HTTP_404_NOT_FOUND)
job = job[0]
- if 'event_id' not in request.QUERY_PARAMS:
+ if 'event_id' not in request.query_params:
return ({'detail': '"event_id" not provided'}, -1, status.HTTP_400_BAD_REQUEST)
- parent_task = job.job_events.filter(pk=int(request.QUERY_PARAMS.get('event_id', -1)))
+ parent_task = job.job_events.filter(pk=int(request.query_params.get('event_id', -1)))
if not parent_task.exists():
return ({'detail': 'parent event not found'}, -1, status.HTTP_404_NOT_FOUND)
parent_task = parent_task[0]
@@ -2507,16 +2499,16 @@ class JobJobTasksList(BaseJobEventsList):
# doing this here for the moment until/unless we need to implement more
# complex filtering (since we aren't under a serializer)
- if "id__in" in request.QUERY_PARAMS:
- qs = qs.filter(id__in=[int(filter_id) for filter_id in request.QUERY_PARAMS["id__in"].split(",")])
- elif "id__gt" in request.QUERY_PARAMS:
- qs = qs.filter(id__gt=request.QUERY_PARAMS['id__gt'])
- elif "id__lt" in request.QUERY_PARAMS:
- qs = qs.filter(id__lt=request.QUERY_PARAMS['id__lt'])
- if "failed" in request.QUERY_PARAMS:
- qs = qs.filter(failed=(request.QUERY_PARAMS['failed'].lower() == 'true'))
- if "task__icontains" in request.QUERY_PARAMS:
- qs = qs.filter(task__icontains=request.QUERY_PARAMS['task__icontains'])
+ if "id__in" in request.query_params:
+ qs = qs.filter(id__in=[int(filter_id) for filter_id in request.query_params["id__in"].split(",")])
+ elif "id__gt" in request.query_params:
+ qs = qs.filter(id__gt=request.query_params['id__gt'])
+ elif "id__lt" in request.query_params:
+ qs = qs.filter(id__lt=request.query_params['id__lt'])
+ if "failed" in request.query_params:
+ qs = qs.filter(failed=(request.query_params['failed'].lower() == 'true'))
+ if "task__icontains" in request.query_params:
+ qs = qs.filter(task__icontains=request.query_params['task__icontains'])
if ordering is not None:
qs = qs.order_by(ordering)
@@ -2594,7 +2586,7 @@ class AdHocCommandList(ListCreateAPIView):
def create(self, request, *args, **kwargs):
# Inject inventory ID and limit if parent objects is a host/group.
if hasattr(self, 'get_parent_object') and not getattr(self, 'parent_key', None):
- data = request.DATA
+ data = request.data
# HACK: Make request data mutable.
if getattr(data, '_mutable', None) is False:
data._mutable = True
@@ -2604,11 +2596,11 @@ class AdHocCommandList(ListCreateAPIView):
data['limit'] = parent_obj.name
# Check for passwords needed before creating ad hoc command.
- credential_pk = get_pk_from_dict(request.DATA, 'credential')
+ credential_pk = get_pk_from_dict(request.data, 'credential')
if credential_pk:
credential = get_object_or_400(Credential, pk=credential_pk)
needed = credential.passwords_needed
- provided = dict([(field, request.DATA.get(field, '')) for field in needed])
+ provided = dict([(field, request.data.get(field, '')) for field in needed])
if not all(provided.values()):
data = dict(passwords_needed_to_start=needed)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -2619,7 +2611,7 @@ class AdHocCommandList(ListCreateAPIView):
# Start ad hoc command running when created.
ad_hoc_command = get_object_or_400(self.model, pk=response.data['id'])
- result = ad_hoc_command.signal_start(**request.DATA)
+ result = ad_hoc_command.signal_start(**request.data)
if not result:
data = dict(passwords_needed_to_start=ad_hoc_command.passwords_needed_to_start)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
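The "_mutable" hack above exists because the QueryDict backing request.data for form-encoded requests is immutable; flipping the private flag (or copying the data) is required before injecting the inventory/limit defaults. A standalone sketch (settings.configure() only so QueryDict can resolve DEFAULT_CHARSET):

from django.conf import settings
settings.configure()
from django.http import QueryDict

data = QueryDict('module_name=ping', mutable=False)
if getattr(data, '_mutable', None) is False:
    data._mutable = True  # private flag, as in the view above; data.copy() also works
data['inventory'] = '42'
data['limit'] = 'webservers'
assert data['limit'] == 'webservers'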
@@ -2702,21 +2694,21 @@ class AdHocCommandRelaunch(GenericAPIView):
data[field[:-3]] = getattr(obj, field)
else:
data[field] = getattr(obj, field)
- serializer = self.get_serializer(data=data)
+ serializer = AdHocCommandSerializer(data=data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
# Check for passwords needed before copying ad hoc command.
needed = obj.passwords_needed_to_start
- provided = dict([(field, request.DATA.get(field, '')) for field in needed])
+ provided = dict([(field, request.data.get(field, '')) for field in needed])
if not all(provided.values()):
data = dict(passwords_needed_to_start=needed)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
# Copy and start the new ad hoc command.
new_ad_hoc_command = obj.copy()
- result = new_ad_hoc_command.signal_start(**request.DATA)
+ result = new_ad_hoc_command.signal_start(**request.data)
if not result:
data = dict(passwords_needed_to_start=new_ad_hoc_command.passwords_needed_to_start)
return Response(data, status=status.HTTP_400_BAD_REQUEST)
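The passwords_needed check above leans on an all() idiom worth spelling out: all() over an empty collection is True, so a command that needs no passwords passes trivially, while a single blank value forces the 400. Standalone illustration:

needed = ['ssh_password', 'become_password']
request_data = {'ssh_password': 'secret'}  # become_password not supplied
provided = dict([(field, request_data.get(field, '')) for field in needed])
assert not all(provided.values())  # -> passwords_needed_to_start / HTTP 400
assert all({}.values())            # nothing needed -> check passes trivially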
@@ -2773,13 +2765,11 @@ class AdHocCommandAdHocCommandEventsList(BaseAdHocCommandEventsList):
if request.user:
raise PermissionDenied()
parent_obj = get_object_or_404(self.parent_model, pk=self.kwargs['pk'])
- data = request.DATA.copy()
- data['ad_hoc_command'] = parent_obj.pk
+ data = request.data.copy()
+ data['ad_hoc_command'] = parent_obj
serializer = self.get_serializer(data=data)
if serializer.is_valid():
- self.pre_save(serializer.object)
- self.object = serializer.save(force_insert=True)
- self.post_save(self.object, created=True)
+ self.instance = serializer.save()
headers = {'Location': serializer.data['url']}
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
@@ -2870,11 +2860,11 @@ class UnifiedJobStdout(RetrieveAPIView):
return Response(response_message)
if request.accepted_renderer.format in ('html', 'api', 'json'):
- content_format = request.QUERY_PARAMS.get('content_format', 'html')
- content_encoding = request.QUERY_PARAMS.get('content_encoding', None)
- start_line = request.QUERY_PARAMS.get('start_line', 0)
- end_line = request.QUERY_PARAMS.get('end_line', None)
- dark_val = request.QUERY_PARAMS.get('dark', '')
+ content_format = request.query_params.get('content_format', 'html')
+ content_encoding = request.query_params.get('content_encoding', None)
+ start_line = request.query_params.get('start_line', 0)
+ end_line = request.query_params.get('end_line', None)
+ dark_val = request.query_params.get('dark', '')
dark = bool(dark_val and dark_val[0].lower() in ('1', 't', 'y'))
content_only = bool(request.accepted_renderer.format in ('api', 'json'))
dark_bg = (content_only and dark) or (not content_only and (dark or not dark_val))
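The dark flag handling above inspects only the first character of the query value, so "true", "t", "yes", and "1" all enable the dark background. Extracted as a standalone function for illustration:

def parse_dark(dark_val):
    return bool(dark_val and dark_val[0].lower() in ('1', 't', 'y'))

assert parse_dark('true') and parse_dark('Yes') and parse_dark('1')
assert not parse_dark('') and not parse_dark('false') and not parse_dark('0')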
@@ -2973,7 +2963,7 @@ class SettingsList(ListCreateAPIView):
def get_queryset(self):
class SettingsIntermediary(object):
def __init__(self, key, description, category, value,
- value_type, user):
+ value_type, user=None):
self.key = key
self.description = description
self.category = category
@@ -3004,8 +2994,7 @@ class SettingsList(ListCreateAPIView):
m_entry['description'],
m_entry['category'],
m_entry['default'],
- m_entry['type'],
- None))
+ m_entry['type']))
return settings_actual
def delete(self, request, *args, **kwargs):
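Giving user a default in SettingsIntermediary above lets the manifest-backed entries, which have no owning user, drop the trailing None at their call site, as the second hunk shows. Minimal illustration (the setting key is hypothetical):

class SettingsIntermediary(object):
    def __init__(self, key, description, category, value, value_type, user=None):
        self.key, self.value, self.user = key, value, user

# Manifest defaults no longer pass an explicit None for user.
s = SettingsIntermediary('AUTH_TOKEN_EXPIRATION',  # hypothetical key
                         'Token lifetime (seconds)', 'auth', 1800, 'int')
assert s.user is None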
@@ -3023,7 +3012,7 @@ class SettingsReset(APIView):
# NOTE: Extend more with user settings
if not request.user.can_access(TowerSettings, 'delete', None):
raise PermissionDenied()
- settings_key = request.DATA.get('key', None)
+ settings_key = request.data.get('key', None)
if settings_key is not None:
TowerSettings.objects.filter(key=settings_key).delete()
return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/awx/fact/test_fact.py b/awx/fact/test_fact.py
new file mode 100644
index 0000000000..43070b2bbe
--- /dev/null
+++ b/awx/fact/test_fact.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+from awx.fact.tests import * # noqa
diff --git a/awx/main/access.py b/awx/main/access.py
index ad87506ab2..e17fc59b02 100644
--- a/awx/main/access.py
+++ b/awx/main/access.py
@@ -684,7 +684,7 @@ class ProjectAccess(BaseAccess):
def get_queryset(self):
qs = Project.objects.filter(active=True).distinct()
- qs = qs.select_related('modified_by', 'credential', 'current_update', 'last_update')
+ qs = qs.select_related('modified_by', 'credential', 'current_job', 'last_job')
if self.user.is_superuser:
return qs
team_ids = set(Team.objects.filter(users__in=[self.user]).values_list('id', flat=True))
@@ -1280,7 +1280,7 @@ class AdHocCommandEventAccess(BaseAccess):
def get_queryset(self):
qs = self.model.objects.distinct()
- qs = qs.select_related('created_by', 'modified_by', 'ad_hoc_command', 'host')
+ qs = qs.select_related('ad_hoc_command', 'host')
if self.user.is_superuser:
return qs
@@ -1308,8 +1308,7 @@ class JobHostSummaryAccess(BaseAccess):
def get_queryset(self):
qs = self.model.objects.distinct()
- qs = qs.select_related('created_by', 'modified_by', 'job', 'job__job_template',
- 'host')
+ qs = qs.select_related('job', 'job__job_template', 'host')
if self.user.is_superuser:
return qs
job_qs = self.user.get_queryset(Job)
@@ -1334,8 +1333,7 @@ class JobEventAccess(BaseAccess):
def get_queryset(self):
qs = self.model.objects.distinct()
- qs = qs.select_related('created_by', 'modified_by', 'job', 'job__job_template',
- 'host', 'parent')
+ qs = qs.select_related('job', 'job__job_template', 'host', 'parent')
qs = qs.prefetch_related('hosts', 'children')
# Filter certain "internal" events generated by async polling.
@@ -1495,7 +1493,6 @@ class ActivityStreamAccess(BaseAccess):
def get_queryset(self):
qs = self.model.objects.distinct()
- #qs = qs.select_related('created_by')
qs = qs.select_related('actor')
qs = qs.prefetch_related('organization', 'user', 'inventory', 'host', 'group', 'inventory_source',
'inventory_update', 'credential', 'team', 'project', 'project_update',
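The trimmed select_related() calls above keep only the relations the event and summary views actually traverse; select_related() follows ForeignKey/OneToOne joins, while the many-to-many hosts/children still need prefetch_related(). Schematically, the resulting JobEvent queryset (a sketch of the code above, not standalone-runnable):

qs = (JobEvent.objects.distinct()
      .select_related('job', 'job__job_template', 'host', 'parent')  # FK joins
      .prefetch_related('hosts', 'children'))  # M2M: separate IN queries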
diff --git a/awx/main/conf.py b/awx/main/conf.py
index 11f206fd40..e506432f21 100644
--- a/awx/main/conf.py
+++ b/awx/main/conf.py
@@ -37,16 +37,14 @@ class TowerConfiguration(object):
if key not in settings_manifest:
raise AttributeError("Tower Setting with key '{0}' does not exist".format(key))
settings_entry = settings_manifest[key]
- settings_actual = TowerSettings.objects.filter(key=key)
- if not settings_actual.exists():
+ try:
+ settings_actual = TowerSettings.objects.get(key=key)
+ except TowerSettings.DoesNotExist:
settings_actual = TowerSettings(key=key,
description=settings_entry['description'],
category=settings_entry['category'],
- value=value,
value_type=settings_entry['type'])
- else:
- settings_actual = settings_actual[0]
- settings_actual.value = value
+ settings_actual.value_converted = value
settings_actual.save()
tower_settings = TowerConfiguration()
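The conf.py change above swaps a filter().exists() probe plus indexed fetch for a single get() guarded by DoesNotExist, and routes assignment through value_converted so the value is serialized per its declared type. A hedged, schematic sketch of the pattern (model passed in; value_converted is assumed to be the type-aware property seen in the hunk):

def set_setting(model, key, value, manifest_entry):
    # Get-or-build: one SELECT in the common case, no separate exists() probe.
    try:
        obj = model.objects.get(key=key)
    except model.DoesNotExist:
        obj = model(key=key, value_type=manifest_entry['type'])
    obj.value_converted = value  # assumed type-aware setter, as in the hunk
    obj.save()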
diff --git a/awx/main/fields.py b/awx/main/fields.py
index 0c4ee95ec7..85c8c4ff5f 100644
--- a/awx/main/fields.py
+++ b/awx/main/fields.py
@@ -5,9 +5,6 @@
from django.db import models
from django.db.models.fields.related import SingleRelatedObjectDescriptor
-# South
-from south.modelsinspector import add_introspection_rules
-
__all__ = ['AutoOneToOneField']
# Based on AutoOneToOneField from django-annoying:
@@ -20,8 +17,8 @@ class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
try:
return super(AutoSingleRelatedObjectDescriptor,
self).__get__(instance, instance_type)
- except self.related.model.DoesNotExist:
- obj = self.related.model(**{self.related.field.name: instance})
+ except self.related.related_model.DoesNotExist:
+ obj = self.related.related_model(**{self.related.field.name: instance})
if self.related.field.rel.parent_link:
raise NotImplementedError('not supported with polymorphic!')
for f in instance._meta.local_fields:
@@ -35,6 +32,3 @@ class AutoOneToOneField(models.OneToOneField):
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
AutoSingleRelatedObjectDescriptor(related))
-
-add_introspection_rules([([AutoOneToOneField], [], {})],
- [r'^awx\.main\.fields\.AutoOneToOneField'])
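The deleted add_introspection_rules() call was South-specific; Django's built-in migrations (1.7+) introspect custom fields through Field.deconstruct() instead. Since AutoOneToOneField adds no constructor arguments beyond OneToOneField's, the inherited deconstruct() already round-trips, roughly:

from django.db import models

class AutoOneToOneField(models.OneToOneField):
    """OneToOneField that creates the related object on first access."""
    # No custom __init__ kwargs, so the inherited deconstruct() serializes
    # this field for migrations with no South-style registration needed.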
diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py
index 964389d560..187a04af7e 100644
--- a/awx/main/management/commands/inventory_import.py
+++ b/awx/main/management/commands/inventory_import.py
@@ -1110,13 +1110,14 @@ class Command(NoArgsCommand):
for db_group in self.inventory.groups.filter(name__in=group_names):
mem_group = self.all_group.all_groups[db_group.name]
group_group_count += len(mem_group.children)
- child_names = set([g.name for g in mem_group.children])
- db_children_qs = self.inventory.groups.filter(name__in=child_names)
- # FIXME: May fail unit tests when len(child_names) > 1000.
- for db_child in db_children_qs.filter(children__id=db_group.id):
- self.logger.info('Group "%s" already child of group "%s"', db_child.name, db_group.name)
- for db_child in db_children_qs.exclude(children__id=db_group.id):
- self._batch_add_m2m(db_group.children, db_child)
+ all_child_names = sorted([g.name for g in mem_group.children])
+ for offset2 in xrange(0, len(all_child_names), self._batch_size):
+ child_names = all_child_names[offset2:(offset2 + self._batch_size)]
+ db_children_qs = self.inventory.groups.filter(name__in=child_names)
+ for db_child in db_children_qs.filter(children__id=db_group.id):
+ self.logger.info('Group "%s" already child of group "%s"', db_child.name, db_group.name)
+ for db_child in db_children_qs.exclude(children__id=db_group.id):
+ self._batch_add_m2m(db_group.children, db_child)
self.logger.info('Group "%s" added as child of "%s"', db_child.name, db_group.name)
self._batch_add_m2m(db_group.children, flush=True)
if settings.SQL_DEBUG:
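The batching above addresses the old FIXME: a single name__in over every child name can exceed the backend's bound-parameter limit (SQLite defaults to 999 variables), so the names are processed in _batch_size slices. The slicing idiom, standalone (range here, xrange in the Python 2 code above):

def batched(items, batch_size):
    for offset in range(0, len(items), batch_size):
        yield items[offset:offset + batch_size]

assert list(batched(list('abcde'), 2)) == [['a', 'b'], ['c', 'd'], ['e']]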
diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py
index fb7ae401dd..d06ed1edd8 100644
--- a/awx/main/management/commands/run_callback_receiver.py
+++ b/awx/main/management/commands/run_callback_receiver.py
@@ -16,7 +16,7 @@ from django.conf import settings
from django.core.management.base import NoArgsCommand
from django.db import transaction, DatabaseError
from django.utils.dateparse import parse_datetime
-from django.utils.tzinfo import FixedOffset
+from django.utils.timezone import FixedOffset
from django.db import connection
# AWX
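The import move above tracks upstream: django.utils.tzinfo was deprecated in Django 1.7 and removed in 1.9, and its FixedOffset has an equivalent in django.utils.timezone (itself deprecated much later in favor of datetime.timezone). Typical use when attaching a zone to the naive datetimes parse_datetime can return:

from django.utils.dateparse import parse_datetime
from django.utils.timezone import FixedOffset

stamp = parse_datetime('2016-01-01T12:00:00')     # no offset -> naive datetime
if stamp is not None and stamp.tzinfo is None:
    stamp = stamp.replace(tzinfo=FixedOffset(0))  # assume UTC when unspecified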
diff --git a/awx/main/managers.py b/awx/main/managers.py
index 951b44bd05..047f2999ca 100644
--- a/awx/main/managers.py
+++ b/awx/main/managers.py
@@ -12,10 +12,11 @@ class HostManager(models.Manager):
def active_count(self):
"""Return count of active, unique hosts for licensing."""
- try:
- return self.filter(active=True, inventory__active=True).distinct('name').count()
- except NotImplementedError: # For unit tests only, SQLite doesn't support distinct('name')
- return len(set(self.filter(active=True, inventory__active=True).values_list('name', flat=True)))
+        # FIXME: Restore the distinct('name') fast path once the Django
+        # upgrade settles; column-level DISTINCT is PostgreSQL-only, and
+        # SQLite (used by the unit tests) raises NotImplementedError for it.
+        return len(set(self.filter(active=True, inventory__active=True).values_list('name', flat=True)))
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py
new file mode 100644
index 0000000000..79e8d8d6dd
--- /dev/null
+++ b/awx/main/migrations/0001_initial.py
@@ -0,0 +1,991 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import django.utils.timezone
+import jsonfield.fields
+import django.db.models.deletion
+from django.conf import settings
+import taggit.managers
+import awx.main.fields
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('taggit', '0002_auto_20150616_2121'),
+ ('contenttypes', '0002_remove_content_type_name'),
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ]
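+    # Editor's note (hedged): swappable_dependency() ties this initial schema
+    # to whatever AUTH_USER_MODEL resolves to, so the migration works with a
+    # custom user model as well as django.contrib.auth's default User.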
+
+ operations = [
+ migrations.CreateModel(
+ name='ActivityStream',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('operation', models.CharField(max_length=13, choices=[(b'create', 'Entity Created'), (b'update', 'Entity Updated'), (b'delete', 'Entity Deleted'), (b'associate', 'Entity Associated with another Entity'), (b'disassociate', 'Entity was Disassociated with another Entity')])),
+ ('timestamp', models.DateTimeField(auto_now_add=True)),
+ ('changes', models.TextField(blank=True)),
+ ('object_relationship_type', models.TextField(blank=True)),
+ ('object1', models.TextField()),
+ ('object2', models.TextField()),
+ ('actor', models.ForeignKey(related_name='activity_stream', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='AdHocCommandEvent',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_unreachable', 'Host Unreachable')])),
+ ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
+ ('failed', models.BooleanField(default=False, editable=False)),
+ ('changed', models.BooleanField(default=False, editable=False)),
+ ('counter', models.PositiveIntegerField(default=0)),
+ ],
+ options={
+ 'ordering': ('-pk',),
+ },
+ ),
+ migrations.CreateModel(
+ name='AuthToken',
+ fields=[
+ ('key', models.CharField(max_length=40, serialize=False, primary_key=True)),
+ ('created', models.DateTimeField(auto_now_add=True)),
+ ('modified', models.DateTimeField(auto_now=True)),
+ ('expires', models.DateTimeField(default=django.utils.timezone.now)),
+ ('request_hash', models.CharField(default=b'', max_length=40, blank=True)),
+ ('reason', models.CharField(default=b'', help_text='Reason the auth token was invalidated.', max_length=1024, blank=True)),
+ ('user', models.ForeignKey(related_name='auth_tokens', to=settings.AUTH_USER_MODEL)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Credential',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('kind', models.CharField(default=b'ssh', max_length=32, choices=[(b'ssh', 'Machine'), (b'scm', 'Source Control'), (b'aws', 'Amazon Web Services'), (b'rax', 'Rackspace'), (b'vmware', 'VMware vCenter'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'openstack', 'OpenStack')])),
+ ('cloud', models.BooleanField(default=False, editable=False)),
+ ('host', models.CharField(default=b'', help_text='The hostname or IP address to use.', max_length=1024, verbose_name='Host', blank=True)),
+ ('username', models.CharField(default=b'', help_text='Username for this credential.', max_length=1024, verbose_name='Username', blank=True)),
+ ('password', models.CharField(default=b'', help_text='Password for this credential (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='Password', blank=True)),
+ ('security_token', models.CharField(default=b'', help_text='Security Token for this credential', max_length=1024, verbose_name='Security Token', blank=True)),
+ ('project', models.CharField(default=b'', help_text='The identifier for the project.', max_length=100, verbose_name='Project', blank=True)),
+ ('ssh_key_data', models.TextField(default=b'', help_text='RSA or DSA private key to be used instead of password.', verbose_name='SSH private key', blank=True)),
+ ('ssh_key_unlock', models.CharField(default=b'', help_text='Passphrase to unlock SSH private key if encrypted (or "ASK" to prompt the user for machine credentials).', max_length=1024, verbose_name='SSH key unlock', blank=True)),
+ ('become_method', models.CharField(default=b'', help_text='Privilege escalation method.', max_length=32, blank=True, choices=[(b'', 'None'), (b'sudo', 'Sudo'), (b'su', 'Su'), (b'pbrun', 'Pbrun'), (b'pfexec', 'Pfexec')])),
+ ('become_username', models.CharField(default=b'', help_text='Privilege escalation username.', max_length=1024, blank=True)),
+ ('become_password', models.CharField(default=b'', help_text='Password for privilege escalation method.', max_length=1024, blank=True)),
+ ('vault_password', models.CharField(default=b'', help_text='Vault password (or "ASK" to prompt the user).', max_length=1024, blank=True)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'credential', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ ],
+ options={
+ 'ordering': ('kind', 'name'),
+ },
+ ),
+ migrations.CreateModel(
+ name='CustomInventoryScript',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('script', models.TextField(default=b'', help_text='Inventory script contents', blank=True)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'custominventoryscript', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ options={
+ 'ordering': ('name',),
+ },
+ ),
+ migrations.CreateModel(
+ name='Group',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('variables', models.TextField(default=b'', help_text='Group variables in JSON or YAML format.', blank=True)),
+ ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts directly or indirectly in this group.', editable=False)),
+ ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether this group has any hosts with active failures.', editable=False)),
+ ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this group with active failures.', editable=False)),
+ ('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of child groups contained within this group.', editable=False)),
+ ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of child groups within this group that have active failures.', editable=False)),
+ ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this group was created/updated from any external inventory sources.', editable=False)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ options={
+ 'ordering': ('name',),
+ },
+ ),
+ migrations.CreateModel(
+ name='Host',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('enabled', models.BooleanField(default=True, help_text='Is this host online and available for running jobs?')),
+ ('instance_id', models.CharField(default=b'', max_length=100, blank=True)),
+ ('variables', models.TextField(default=b'', help_text='Host variables in JSON or YAML format.', blank=True)),
+ ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether the last job failed for this host.', editable=False)),
+ ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this host was created/updated from any external inventory sources.', editable=False)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ options={
+ 'ordering': ('inventory', 'name'),
+ },
+ ),
+ migrations.CreateModel(
+ name='Instance',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('uuid', models.CharField(unique=True, max_length=40)),
+ ('hostname', models.CharField(unique=True, max_length=250)),
+ ('primary', models.BooleanField(default=False)),
+ ('created', models.DateTimeField(auto_now_add=True)),
+ ('modified', models.DateTimeField(auto_now=True)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Inventory',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(unique=True, max_length=512)),
+ ('variables', models.TextField(default=b'', help_text='Inventory variables in JSON or YAML format.', blank=True)),
+ ('has_active_failures', models.BooleanField(default=False, help_text='Flag indicating whether any hosts in this inventory have failed.', editable=False)),
+ ('total_hosts', models.PositiveIntegerField(default=0, help_text='Total number of hosts in this inventory.', editable=False)),
+ ('hosts_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of hosts in this inventory with active failures.', editable=False)),
+ ('total_groups', models.PositiveIntegerField(default=0, help_text='Total number of groups in this inventory.', editable=False)),
+ ('groups_with_active_failures', models.PositiveIntegerField(default=0, help_text='Number of groups in this inventory with active failures.', editable=False)),
+ ('has_inventory_sources', models.BooleanField(default=False, help_text='Flag indicating whether this inventory has any external inventory sources.', editable=False)),
+ ('total_inventory_sources', models.PositiveIntegerField(default=0, help_text='Total number of external inventory sources configured within this inventory.', editable=False)),
+ ('inventory_sources_with_failures', models.PositiveIntegerField(default=0, help_text='Number of external inventory sources in this inventory with failures.', editable=False)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'inventory', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ options={
+ 'ordering': ('name',),
+ 'verbose_name_plural': 'inventories',
+ },
+ ),
+ migrations.CreateModel(
+ name='JobEvent',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('event', models.CharField(max_length=100, choices=[(b'runner_on_failed', 'Host Failed'), (b'runner_on_ok', 'Host OK'), (b'runner_on_error', 'Host Failure'), (b'runner_on_skipped', 'Host Skipped'), (b'runner_on_unreachable', 'Host Unreachable'), (b'runner_on_no_hosts', 'No Hosts Remaining'), (b'runner_on_async_poll', 'Host Polling'), (b'runner_on_async_ok', 'Host Async OK'), (b'runner_on_async_failed', 'Host Async Failure'), (b'runner_on_file_diff', 'File Difference'), (b'playbook_on_start', 'Playbook Started'), (b'playbook_on_notify', 'Running Handlers'), (b'playbook_on_no_hosts_matched', 'No Hosts Matched'), (b'playbook_on_no_hosts_remaining', 'No Hosts Remaining'), (b'playbook_on_task_start', 'Task Started'), (b'playbook_on_vars_prompt', 'Variables Prompted'), (b'playbook_on_setup', 'Gathering Facts'), (b'playbook_on_import_for_host', 'internal: on Import for Host'), (b'playbook_on_not_import_for_host', 'internal: on Not Import for Host'), (b'playbook_on_play_start', 'Play Started'), (b'playbook_on_stats', 'Playbook Complete')])),
+ ('event_data', jsonfield.fields.JSONField(default={}, blank=True)),
+ ('failed', models.BooleanField(default=False, editable=False)),
+ ('changed', models.BooleanField(default=False, editable=False)),
+ ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('play', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('role', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('task', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('counter', models.PositiveIntegerField(default=0)),
+ ('host', models.ForeignKey(related_name='job_events_as_primary_host', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
+ ('hosts', models.ManyToManyField(related_name='job_events', editable=False, to='main.Host')),
+ ('parent', models.ForeignKey(related_name='children', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.JobEvent', null=True)),
+ ],
+ options={
+ 'ordering': ('pk',),
+ },
+ ),
+ migrations.CreateModel(
+ name='JobHostSummary',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('host_name', models.CharField(default=b'', max_length=1024, editable=False)),
+ ('changed', models.PositiveIntegerField(default=0, editable=False)),
+ ('dark', models.PositiveIntegerField(default=0, editable=False)),
+ ('failures', models.PositiveIntegerField(default=0, editable=False)),
+ ('ok', models.PositiveIntegerField(default=0, editable=False)),
+ ('processed', models.PositiveIntegerField(default=0, editable=False)),
+ ('skipped', models.PositiveIntegerField(default=0, editable=False)),
+ ('failed', models.BooleanField(default=False, editable=False)),
+ ('host', models.ForeignKey(related_name='job_host_summaries', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True)),
+ ],
+ options={
+ 'ordering': ('-pk',),
+ 'verbose_name_plural': 'job host summaries',
+ },
+ ),
+ migrations.CreateModel(
+ name='JobOrigin',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(auto_now_add=True)),
+ ('modified', models.DateTimeField(auto_now=True)),
+ ('instance', models.ForeignKey(to='main.Instance')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Organization',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(unique=True, max_length=512)),
+ ('admins', models.ManyToManyField(related_name='admin_of_organizations', to=settings.AUTH_USER_MODEL, blank=True)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'organization', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ ('users', models.ManyToManyField(related_name='organizations', to=settings.AUTH_USER_MODEL, blank=True)),
+ ],
+ options={
+ 'ordering': ('name',),
+ },
+ ),
+ migrations.CreateModel(
+ name='Permission',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('permission_type', models.CharField(max_length=64, choices=[(b'read', 'Read Inventory'), (b'write', 'Edit Inventory'), (b'admin', 'Administrate Inventory'), (b'run', 'Deploy To Inventory'), (b'check', 'Deploy To Inventory (Dry Run)'), (b'scan', 'Scan an Inventory'), (b'create', 'Create a Job Template')])),
+ ('run_ad_hoc_commands', models.BooleanField(default=False, help_text='Execute Commands on the Inventory')),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('inventory', models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'permission', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Profile',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('ldap_dn', models.CharField(default=b'', max_length=1024)),
+ ('user', awx.main.fields.AutoOneToOneField(related_name='profile', editable=False, to=settings.AUTH_USER_MODEL)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Schedule',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(unique=True, max_length=512)),
+ ('enabled', models.BooleanField(default=True)),
+ ('dtstart', models.DateTimeField(default=None, null=True, editable=False)),
+ ('dtend', models.DateTimeField(default=None, null=True, editable=False)),
+ ('rrule', models.CharField(max_length=255)),
+ ('next_run', models.DateTimeField(default=None, null=True, editable=False)),
+ ('extra_data', jsonfield.fields.JSONField(default={}, blank=True)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'schedule', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ ],
+ options={
+ 'ordering': ['-next_run'],
+ },
+ ),
+ migrations.CreateModel(
+ name='Team',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('created_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('modified_by', models.ForeignKey(related_name="{u'class': 'team', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ('organization', models.ForeignKey(related_name='teams', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', null=True)),
+ ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
+ ('users', models.ManyToManyField(related_name='teams', to=settings.AUTH_USER_MODEL, blank=True)),
+ ],
+ options={
+ 'ordering': ('organization__name', 'name'),
+ },
+ ),
+ migrations.CreateModel(
+ name='UnifiedJob',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
+ ('launch_type', models.CharField(default=b'manual', max_length=20, editable=False, choices=[(b'manual', 'Manual'), (b'relaunch', 'Relaunch'), (b'callback', 'Callback'), (b'scheduled', 'Scheduled'), (b'dependency', 'Dependency')])),
+ ('cancel_flag', models.BooleanField(default=False, editable=False)),
+ ('status', models.CharField(default=b'new', max_length=20, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled')])),
+ ('failed', models.BooleanField(default=False, editable=False)),
+ ('started', models.DateTimeField(default=None, null=True, editable=False)),
+ ('finished', models.DateTimeField(default=None, null=True, editable=False)),
+ ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)),
+ ('job_args', models.TextField(default=b'', editable=False, blank=True)),
+ ('job_cwd', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
+ ('job_env', jsonfield.fields.JSONField(default={}, editable=False, blank=True)),
+ ('job_explanation', models.TextField(default=b'', editable=False, blank=True)),
+ ('start_args', models.TextField(default=b'', editable=False, blank=True)),
+ ('result_stdout_text', models.TextField(default=b'', editable=False, blank=True)),
+ ('result_stdout_file', models.TextField(default=b'', editable=False, blank=True)),
+ ('result_traceback', models.TextField(default=b'', editable=False, blank=True)),
+ ('celery_task_id', models.CharField(default=b'', max_length=100, editable=False, blank=True)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='UnifiedJobTemplate',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('description', models.TextField(default=b'', blank=True)),
+ ('active', models.BooleanField(default=True, editable=False)),
+ ('name', models.CharField(max_length=512)),
+ ('old_pk', models.PositiveIntegerField(default=None, null=True, editable=False)),
+ ('last_job_failed', models.BooleanField(default=False, editable=False)),
+ ('last_job_run', models.DateTimeField(default=None, null=True, editable=False)),
+ ('has_schedules', models.BooleanField(default=False, editable=False)),
+ ('next_job_run', models.DateTimeField(default=None, null=True, editable=False)),
+ ('status', models.CharField(default=b'ok', max_length=32, editable=False, choices=[(b'new', 'New'), (b'pending', 'Pending'), (b'waiting', 'Waiting'), (b'running', 'Running'), (b'successful', 'Successful'), (b'failed', 'Failed'), (b'error', 'Error'), (b'canceled', 'Canceled'), (b'never updated', b'Never Updated'), (b'ok', b'OK'), (b'missing', b'Missing'), (b'none', 'No External Source'), (b'updating', 'Updating')])),
+ ],
+ ),
+ migrations.CreateModel(
+ name='AdHocCommand',
+ fields=[
+ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
+ ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('module_name', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('module_args', models.TextField(default=b'', blank=True)),
+ ('forks', models.PositiveIntegerField(default=0, blank=True)),
+ ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
+ ('become_enabled', models.BooleanField(default=False)),
+ ],
+ bases=('main.unifiedjob',),
+ ),
+ migrations.CreateModel(
+ name='InventorySource',
+ fields=[
+ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
+ ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
+ ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
+ ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
+ ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
+ ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+ ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
+ ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
+ ('update_on_launch', models.BooleanField(default=False)),
+ ('update_cache_timeout', models.PositiveIntegerField(default=0)),
+ ],
+ bases=('main.unifiedjobtemplate', models.Model),
+ ),
+ migrations.CreateModel(
+ name='InventoryUpdate',
+ fields=[
+ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ ('source', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'Local File, Directory or Script'), (b'rax', 'Rackspace Cloud Servers'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure'), (b'vmware', 'VMware vCenter'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')])),
+ ('source_path', models.CharField(default=b'', max_length=1024, editable=False, blank=True)),
+ ('source_vars', models.TextField(default=b'', help_text='Inventory source variables in YAML or JSON format.', blank=True)),
+ ('source_regions', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('instance_filters', models.CharField(default=b'', help_text='Comma-separated list of filter expressions (EC2 only). Hosts are imported when ANY of the filters match.', max_length=1024, blank=True)),
+ ('group_by', models.CharField(default=b'', help_text='Limit groups automatically created from inventory source (EC2 only).', max_length=1024, blank=True)),
+ ('overwrite', models.BooleanField(default=False, help_text='Overwrite local groups and hosts from remote inventory source.')),
+ ('overwrite_vars', models.BooleanField(default=False, help_text='Overwrite local variables from remote inventory source.')),
+ ('license_error', models.BooleanField(default=False, editable=False)),
+ ],
+ bases=('main.unifiedjob', models.Model),
+ ),
+ migrations.CreateModel(
+ name='Job',
+ fields=[
+ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
+ ('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('forks', models.PositiveIntegerField(default=0, blank=True)),
+ ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
+ ('extra_vars', models.TextField(default=b'', blank=True)),
+ ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('force_handlers', models.BooleanField(default=False)),
+ ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('become_enabled', models.BooleanField(default=False)),
+ ],
+ options={
+ 'ordering': ('id',),
+ },
+ bases=('main.unifiedjob', models.Model),
+ ),
+ migrations.CreateModel(
+ name='JobTemplate',
+ fields=[
+ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
+ ('job_type', models.CharField(default=b'run', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check'), (b'scan', 'Scan')])),
+ ('playbook', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('forks', models.PositiveIntegerField(default=0, blank=True)),
+ ('limit', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('verbosity', models.PositiveIntegerField(default=0, blank=True, choices=[(0, b'0 (Normal)'), (1, b'1 (Verbose)'), (2, b'2 (More Verbose)'), (3, b'3 (Debug)'), (4, b'4 (Connection Debug)'), (5, b'5 (WinRM Debug)')])),
+ ('extra_vars', models.TextField(default=b'', blank=True)),
+ ('job_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('force_handlers', models.BooleanField(default=False)),
+ ('skip_tags', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('start_at_task', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('become_enabled', models.BooleanField(default=False)),
+ ('host_config_key', models.CharField(default=b'', max_length=1024, blank=True)),
+ ('ask_variables_on_launch', models.BooleanField(default=False)),
+ ('survey_enabled', models.BooleanField(default=False)),
+ ('survey_spec', jsonfield.fields.JSONField(default={}, blank=True)),
+ ],
+ options={
+ 'ordering': ('name',),
+ },
+ bases=('main.unifiedjobtemplate', models.Model),
+ ),
+ migrations.CreateModel(
+ name='Project',
+ fields=[
+ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
+ ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
+ ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
+ ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
+ ('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+ ('scm_clean', models.BooleanField(default=False)),
+ ('scm_delete_on_update', models.BooleanField(default=False)),
+ ('scm_delete_on_next_update', models.BooleanField(default=False, editable=False)),
+ ('scm_update_on_launch', models.BooleanField(default=False)),
+ ('scm_update_cache_timeout', models.PositiveIntegerField(default=0, blank=True)),
+ ],
+ options={
+ 'ordering': ('id',),
+ },
+ bases=('main.unifiedjobtemplate', models.Model),
+ ),
+ migrations.CreateModel(
+ name='ProjectUpdate',
+ fields=[
+ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ ('local_path', models.CharField(help_text='Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.', max_length=1024, blank=True)),
+ ('scm_type', models.CharField(default=b'', max_length=8, verbose_name='SCM Type', blank=True, choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion')])),
+ ('scm_url', models.CharField(default=b'', max_length=1024, verbose_name='SCM URL', blank=True)),
+ ('scm_branch', models.CharField(default=b'', help_text='Specific branch, tag or commit to checkout.', max_length=256, verbose_name='SCM Branch', blank=True)),
+ ('scm_clean', models.BooleanField(default=False)),
+ ('scm_delete_on_update', models.BooleanField(default=False)),
+ ],
+ bases=('main.unifiedjob', models.Model),
+ ),
+ migrations.CreateModel(
+ name='SystemJob',
+ fields=[
+ ('unifiedjob_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJob')),
+ ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+ ('extra_vars', models.TextField(default=b'', blank=True)),
+ ],
+ options={
+ 'ordering': ('id',),
+ },
+ bases=('main.unifiedjob', models.Model),
+ ),
+ migrations.CreateModel(
+ name='SystemJobTemplate',
+ fields=[
+ ('unifiedjobtemplate_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='main.UnifiedJobTemplate')),
+ ('job_type', models.CharField(default=b'', max_length=32, blank=True, choices=[(b'cleanup_jobs', 'Remove jobs older than a certain number of days'), (b'cleanup_activitystream', 'Remove activity stream entries older than a certain number of days'), (b'cleanup_deleted', 'Purge previously deleted items from the database'), (b'cleanup_facts', 'Purge and/or reduce the granularity of system tracking data')])),
+ ],
+ bases=('main.unifiedjobtemplate', models.Model),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='created_by',
+ field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='current_job',
+ field=models.ForeignKey(related_name='unifiedjobtemplate_as_current_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='last_job',
+ field=models.ForeignKey(related_name='unifiedjobtemplate_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJob', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='modified_by',
+ field=models.ForeignKey(related_name="{u'class': 'unifiedjobtemplate', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='next_schedule',
+ field=models.ForeignKey(related_name='unifiedjobtemplate_as_next_schedule+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Schedule', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='polymorphic_ctype',
+ field=models.ForeignKey(related_name='polymorphic_main.unifiedjobtemplate_set+', editable=False, to='contenttypes.ContentType', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjobtemplate',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='created_by',
+ field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='dependent_jobs',
+ field=models.ManyToManyField(related_name='_unifiedjob_dependent_jobs_+', editable=False, to='main.UnifiedJob'),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='modified_by',
+ field=models.ForeignKey(related_name="{u'class': 'unifiedjob', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='polymorphic_ctype',
+ field=models.ForeignKey(related_name='polymorphic_main.unifiedjob_set+', editable=False, to='contenttypes.ContentType', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='schedule',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Schedule', null=True),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='unifiedjob',
+ name='unified_job_template',
+ field=models.ForeignKey(related_name='unifiedjob_unified_jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.UnifiedJobTemplate', null=True),
+ ),
+ migrations.AddField(
+ model_name='schedule',
+ name='unified_job_template',
+ field=models.ForeignKey(related_name='schedules', to='main.UnifiedJobTemplate'),
+ ),
+ migrations.AddField(
+ model_name='permission',
+ name='team',
+ field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Team', null=True),
+ ),
+ migrations.AddField(
+ model_name='permission',
+ name='user',
+ field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='joborigin',
+ name='unified_job',
+ field=models.OneToOneField(related_name='job_origin', to='main.UnifiedJob'),
+ ),
+ migrations.AddField(
+ model_name='inventory',
+ name='organization',
+ field=models.ForeignKey(related_name='inventories', to='main.Organization', help_text='Organization containing this inventory.'),
+ ),
+ migrations.AddField(
+ model_name='inventory',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='inventory',
+ field=models.ForeignKey(related_name='hosts', to='main.Inventory'),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='last_job_host_summary',
+ field=models.ForeignKey(related_name='hosts_as_last_job_summary+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, editable=False, to='main.JobHostSummary', null=True),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='modified_by',
+ field=models.ForeignKey(related_name="{u'class': 'host', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='hosts',
+ field=models.ManyToManyField(help_text='Hosts associated directly with this group.', related_name='groups', to='main.Host', blank=True),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='inventory',
+ field=models.ForeignKey(related_name='groups', to='main.Inventory'),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='modified_by',
+ field=models.ForeignKey(related_name="{u'class': 'group', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='parents',
+ field=models.ManyToManyField(related_name='children', to='main.Group', blank=True),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='custominventoryscript',
+ name='organization',
+ field=models.ForeignKey(related_name='custom_inventory_scripts', on_delete=django.db.models.deletion.SET_NULL, to='main.Organization', help_text='Organization owning this inventory script', null=True),
+ ),
+ migrations.AddField(
+ model_name='custominventoryscript',
+ name='tags',
+ field=taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags'),
+ ),
+ migrations.AddField(
+ model_name='credential',
+ name='team',
+ field=models.ForeignKey(related_name='credentials', default=None, blank=True, to='main.Team', null=True),
+ ),
+ migrations.AddField(
+ model_name='credential',
+ name='user',
+ field=models.ForeignKey(related_name='credentials', default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
+ ),
+ migrations.AddField(
+ model_name='adhoccommandevent',
+ name='host',
+ field=models.ForeignKey(related_name='ad_hoc_command_events', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Host', null=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='credential',
+ field=models.ManyToManyField(to='main.Credential', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='custom_inventory_script',
+ field=models.ManyToManyField(to='main.CustomInventoryScript', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='group',
+ field=models.ManyToManyField(to='main.Group', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='host',
+ field=models.ManyToManyField(to='main.Host', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='inventory',
+ field=models.ManyToManyField(to='main.Inventory', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='organization',
+ field=models.ManyToManyField(to='main.Organization', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='permission',
+ field=models.ManyToManyField(to='main.Permission', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='schedule',
+ field=models.ManyToManyField(to='main.Schedule', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='team',
+ field=models.ManyToManyField(to='main.Team', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='unified_job',
+ field=models.ManyToManyField(related_name='_activitystream_unified_job_+', to='main.UnifiedJob', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='unified_job_template',
+ field=models.ManyToManyField(related_name='_activitystream_unified_job_template_+', to='main.UnifiedJobTemplate', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='user',
+ field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
+ ),
+ migrations.AlterUniqueTogether(
+ name='unifiedjobtemplate',
+ unique_together=set([('polymorphic_ctype', 'name')]),
+ ),
+ migrations.AddField(
+ model_name='team',
+ name='projects',
+ field=models.ManyToManyField(related_name='teams', to='main.Project', blank=True),
+ ),
+ migrations.AddField(
+ model_name='systemjob',
+ name='system_job_template',
+ field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.SystemJobTemplate', null=True),
+ ),
+ migrations.AddField(
+ model_name='projectupdate',
+ name='credential',
+ field=models.ForeignKey(related_name='projectupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='projectupdate',
+ name='project',
+ field=models.ForeignKey(related_name='project_updates', editable=False, to='main.Project'),
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='credential',
+ field=models.ForeignKey(related_name='projects', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='permission',
+ name='project',
+ field=models.ForeignKey(related_name='permissions', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='main.Project', null=True),
+ ),
+ migrations.AddField(
+ model_name='organization',
+ name='projects',
+ field=models.ManyToManyField(related_name='organizations', to='main.Project', blank=True),
+ ),
+ migrations.AddField(
+ model_name='jobtemplate',
+ name='cloud_credential',
+ field=models.ForeignKey(related_name='jobtemplates_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='jobtemplate',
+ name='credential',
+ field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='jobtemplate',
+ name='inventory',
+ field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
+ ),
+ migrations.AddField(
+ model_name='jobtemplate',
+ name='project',
+ field=models.ForeignKey(related_name='jobtemplates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True),
+ ),
+ migrations.AddField(
+ model_name='jobhostsummary',
+ name='job',
+ field=models.ForeignKey(related_name='job_host_summaries', editable=False, to='main.Job'),
+ ),
+ migrations.AddField(
+ model_name='jobevent',
+ name='job',
+ field=models.ForeignKey(related_name='job_events', editable=False, to='main.Job'),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='cloud_credential',
+ field=models.ForeignKey(related_name='jobs_as_cloud_credential+', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='credential',
+ field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='hosts',
+ field=models.ManyToManyField(related_name='jobs', editable=False, through='main.JobHostSummary', to='main.Host'),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='inventory',
+ field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='job_template',
+ field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.JobTemplate', null=True),
+ ),
+ migrations.AddField(
+ model_name='job',
+ name='project',
+ field=models.ForeignKey(related_name='jobs', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Project', null=True),
+ ),
+ migrations.AddField(
+ model_name='inventoryupdate',
+ name='credential',
+ field=models.ForeignKey(related_name='inventoryupdates', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='inventoryupdate',
+ name='inventory_source',
+ field=models.ForeignKey(related_name='inventory_updates', editable=False, to='main.InventorySource'),
+ ),
+ migrations.AddField(
+ model_name='inventoryupdate',
+ name='source_script',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.CustomInventoryScript', null=True),
+ ),
+ migrations.AddField(
+ model_name='inventorysource',
+ name='credential',
+ field=models.ForeignKey(related_name='inventorysources', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='inventorysource',
+ name='group',
+ field=awx.main.fields.AutoOneToOneField(related_name='inventory_source', null=True, default=None, editable=False, to='main.Group'),
+ ),
+ migrations.AddField(
+ model_name='inventorysource',
+ name='inventory',
+ field=models.ForeignKey(related_name='inventory_sources', default=None, editable=False, to='main.Inventory', null=True),
+ ),
+ migrations.AddField(
+ model_name='inventorysource',
+ name='source_script',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='main.CustomInventoryScript', null=True),
+ ),
+ migrations.AlterUniqueTogether(
+ name='inventory',
+ unique_together=set([('name', 'organization')]),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='inventory_sources',
+ field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this host.', related_name='hosts', editable=False, to='main.InventorySource'),
+ ),
+ migrations.AddField(
+ model_name='host',
+ name='last_job',
+ field=models.ForeignKey(related_name='hosts_as_last_job+', on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to='main.Job', null=True),
+ ),
+ migrations.AddField(
+ model_name='group',
+ name='inventory_sources',
+ field=models.ManyToManyField(help_text='Inventory source(s) that created or modified this group.', related_name='groups', editable=False, to='main.InventorySource'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='custominventoryscript',
+ unique_together=set([('name', 'organization')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='credential',
+ unique_together=set([('user', 'team', 'kind', 'name')]),
+ ),
+ migrations.AddField(
+ model_name='adhoccommandevent',
+ name='ad_hoc_command',
+ field=models.ForeignKey(related_name='ad_hoc_command_events', editable=False, to='main.AdHocCommand'),
+ ),
+ migrations.AddField(
+ model_name='adhoccommand',
+ name='credential',
+ field=models.ForeignKey(related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, default=None, to='main.Credential', null=True),
+ ),
+ migrations.AddField(
+ model_name='adhoccommand',
+ name='hosts',
+ field=models.ManyToManyField(related_name='ad_hoc_commands', editable=False, through='main.AdHocCommandEvent', to='main.Host'),
+ ),
+ migrations.AddField(
+ model_name='adhoccommand',
+ name='inventory',
+ field=models.ForeignKey(related_name='ad_hoc_commands', on_delete=django.db.models.deletion.SET_NULL, to='main.Inventory', null=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='ad_hoc_command',
+ field=models.ManyToManyField(to='main.AdHocCommand', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='inventory_source',
+ field=models.ManyToManyField(to='main.InventorySource', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='inventory_update',
+ field=models.ManyToManyField(to='main.InventoryUpdate', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='job',
+ field=models.ManyToManyField(to='main.Job', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='job_template',
+ field=models.ManyToManyField(to='main.JobTemplate', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='project',
+ field=models.ManyToManyField(to='main.Project', blank=True),
+ ),
+ migrations.AddField(
+ model_name='activitystream',
+ name='project_update',
+ field=models.ManyToManyField(to='main.ProjectUpdate', blank=True),
+ ),
+ migrations.AlterUniqueTogether(
+ name='team',
+ unique_together=set([('organization', 'name')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='jobhostsummary',
+ unique_together=set([('job', 'host_name')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='host',
+ unique_together=set([('name', 'inventory')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='group',
+ unique_together=set([('name', 'inventory')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='adhoccommandevent',
+ unique_together=set([('ad_hoc_command', 'host_name')]),
+ ),
+ ]
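
For orientation, the run of CreateModel/AddField/AlterUniqueTogether operations above is exactly what `makemigrations` emits from ordinary model declarations. A minimal sketch of the kind of declaration that yields a CreateModel plus an AlterUniqueTogether like the `host` entries above; the `ExampleHost` model is illustrative, not part of this PR:

```python
# Illustrative only: a Meta.unique_together declaration like this is what
# produces the AlterUniqueTogether(name='host', ...) operation above.
from django.db import models

class ExampleHost(models.Model):
    name = models.CharField(max_length=512)
    inventory = models.ForeignKey('main.Inventory', related_name='example_hosts')

    class Meta:
        app_label = 'main'
        unique_together = [('name', 'inventory')]
```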
diff --git a/awx/main/migrations/0002_v300_changes.py b/awx/main/migrations/0002_v300_changes.py
new file mode 100644
index 0000000000..b4b87e24c9
--- /dev/null
+++ b/awx/main/migrations/0002_v300_changes.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+from django.conf import settings
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('main', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='TowerSettings',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('created', models.DateTimeField(default=None, editable=False)),
+ ('modified', models.DateTimeField(default=None, editable=False)),
+ ('key', models.CharField(unique=True, max_length=255)),
+ ('description', models.TextField()),
+ ('category', models.CharField(max_length=128)),
+ ('value', models.TextField()),
+ ('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])),
+ ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
+ ],
+ ),
+ ]
diff --git a/awx/main/migrations/__init__.py b/awx/main/migrations/__init__.py
index e484e62be1..709b95a6a6 100644
--- a/awx/main/migrations/__init__.py
+++ b/awx/main/migrations/__init__.py
@@ -1,2 +1,2 @@
-# Copyright (c) 2015 Ansible, Inc.
+# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py
index e47328844c..664269a188 100644
--- a/awx/main/models/ad_hoc_commands.py
+++ b/awx/main/models/ad_hoc_commands.py
@@ -30,9 +30,6 @@ __all__ = ['AdHocCommand', 'AdHocCommandEvent']
class AdHocCommand(UnifiedJob):
- MODULE_NAME_CHOICES = [(x,x) for x in tower_settings.AD_HOC_COMMANDS]
- MODULE_NAME_DEFAULT = 'command' if 'command' in tower_settings.AD_HOC_COMMANDS else None
-
class Meta(object):
app_label = 'main'
@@ -61,9 +58,8 @@ class AdHocCommand(UnifiedJob):
)
module_name = models.CharField(
max_length=1024,
- default=MODULE_NAME_DEFAULT,
- choices=MODULE_NAME_CHOICES,
- blank=bool(MODULE_NAME_DEFAULT),
+ default='',
+ blank=True,
)
module_args = models.TextField(
blank=True,
@@ -88,6 +84,12 @@ class AdHocCommand(UnifiedJob):
through='AdHocCommandEvent',
)
+ def clean_inventory(self):
+ inv = self.inventory
+ if not inv or not inv.active:
+ raise ValidationError('Inventory is no longer available.')
+ return inv
+
def clean_credential(self):
cred = self.credential
if cred and cred.kind != 'ssh':
diff --git a/awx/main/models/base.py b/awx/main/models/base.py
index 27df355652..61515d7d18 100644
--- a/awx/main/models/base.py
+++ b/awx/main/models/base.py
@@ -9,7 +9,6 @@ import shlex
import yaml
# Django
-from django.conf import settings
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
@@ -59,14 +58,14 @@ PERMISSION_TYPE_CHOICES = [
CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'openstack', 'custom']
-VERBOSITY_CHOICES = getattr(settings, 'VERBOSITY_CHOICES', [
+VERBOSITY_CHOICES = [
(0, '0 (Normal)'),
(1, '1 (Verbose)'),
(2, '2 (More Verbose)'),
(3, '3 (Debug)'),
(4, '4 (Connection Debug)'),
(5, '5 (WinRM Debug)'),
-])
+]
class VarsDictProperty(object):
@@ -157,16 +156,6 @@ class BaseModel(models.Model):
self.save(update_fields=update_fields)
return update_fields
- def save(self, *args, **kwargs):
- # For compatibility with Django 1.4.x, attempt to handle any calls to
- # save that pass update_fields.
- try:
- super(BaseModel, self).save(*args, **kwargs)
- except TypeError:
- if 'update_fields' not in kwargs:
- raise
- kwargs.pop('update_fields')
- super(BaseModel, self).save(*args, **kwargs)
class CreatedModifiedModel(BaseModel):
'''
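
The deleted `save()` override existed only to tolerate Django 1.4, which did not accept `update_fields`; on the Django releases this branch supports the argument is handled natively, so callers can rely on it directly. A hedged sketch (the instance lookup is illustrative):

```python
# update_fields is supported natively now, so no compatibility wrapper is
# needed: this issues an UPDATE limited to the named column.
from awx.main.models import Host

host = Host.objects.get(pk=1)   # illustrative lookup
host.description = 'updated'
host.save(update_fields=['description'])
```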
diff --git a/awx/main/models/configuration.py b/awx/main/models/configuration.py
index 6b35ea4d77..3bd43f5649 100644
--- a/awx/main/models/configuration.py
+++ b/awx/main/models/configuration.py
@@ -6,10 +6,13 @@ import json
# Django
from django.db import models
+from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
+
# Tower
from awx.main.models.base import CreatedModifiedModel
+
class TowerSettings(CreatedModifiedModel):
class Meta:
@@ -53,10 +56,21 @@ class TowerSettings(CreatedModifiedModel):
elif self.value_type == 'list':
converted_type = [x.strip() for x in self.value.split(',')]
elif self.value_type == 'bool':
- converted_type = self.value in [True, "true", "True", 1, "1", "yes"]
+ converted_type = smart_text(self.value).lower() in ('true', 'yes', '1')
elif self.value_type == 'string':
converted_type = self.value
else:
t = __builtins__[self.value_type]
converted_type = t(self.value)
return converted_type
+
+ @value_converted.setter
+ def value_converted(self, value):
+ if self.value_type == 'json':
+ self.value = json.dumps(value)
+ elif self.value_type == 'list':
+ self.value = ','.join(value)
+ elif self.value_type == 'bool':
+ self.value = smart_text(bool(value))
+ else:
+ self.value = smart_text(value)
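
The new setter is the inverse of the existing `value_converted` getter, so callers can assign native Python values and have them serialized into the text `value` column. A round-trip sketch (the field values are illustrative):

```python
# Illustrative round trip through the new setter and the existing getter.
s = TowerSettings(key='AD_HOC_COMMANDS', description='', category='jobs',
                  value_type='list')
s.value_converted = ['command', 'shell']          # setter joins with commas
assert s.value == 'command,shell'
assert s.value_converted == ['command', 'shell']  # getter splits it back
```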
diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py
index 58875a9630..37b1dafc4b 100644
--- a/awx/main/models/inventory.py
+++ b/awx/main/models/inventory.py
@@ -539,7 +539,7 @@ class Group(CommonModelNameNotUnique):
def mark_actual():
all_group_hosts = Group.hosts.through.objects.select_related("host", "group").filter(group__inventory=self.inventory)
group_hosts = {'groups': {}, 'hosts': {}}
- all_group_parents = Group.parents.through.objects.select_related("parent", "group").filter(from_group__inventory=self.inventory)
+ all_group_parents = Group.parents.through.objects.select_related("from_group", "to_group").filter(from_group__inventory=self.inventory)
group_children = {}
group_parents = {}
marked_hosts = []
diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py
index 0462902c75..c22b907082 100644
--- a/awx/main/models/organization.py
+++ b/awx/main/models/organization.py
@@ -317,3 +317,10 @@ def user_mark_inactive(user, save=True):
user.save()
User.add_to_class('mark_inactive', user_mark_inactive)
+
+
+# Add get_absolute_url method to User model if not present.
+if not hasattr(User, 'get_absolute_url'):
+ def user_get_absolute_url(user):
+ return reverse('api:user_detail', args=(user.pk,))
+ User.add_to_class('get_absolute_url', user_get_absolute_url)
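
With the `add_to_class` patch above, plain `User` instances resolve to their API detail endpoint like the other AWX models. A usage sketch (the username and resulting path are illustrative):

```python
user = User.objects.get(username='admin')  # illustrative lookup
user.get_absolute_url()                    # e.g. '/api/v1/users/1/'
```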
diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py
index b90e29cd85..2fa6512ca0 100644
--- a/awx/main/models/projects.py
+++ b/awx/main/models/projects.py
@@ -11,7 +11,7 @@ import urlparse
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
-from django.utils.encoding import smart_str
+from django.utils.encoding import smart_str, smart_text
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.utils.timezone import now, make_aware, get_default_timezone
@@ -181,7 +181,7 @@ class ProjectOptions(models.Model):
# Filter files in a tasks subdirectory.
if 'tasks' in playbook.split(os.sep):
continue
- results.append(playbook)
+ results.append(smart_text(playbook))
return sorted(results, key=lambda x: smart_str(x).lower())
diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py
index fbde23d07a..8af6da3be3 100644
--- a/awx/main/models/schedules.py
+++ b/awx/main/models/schedules.py
@@ -47,7 +47,7 @@ class ScheduleManager(ScheduleFilterMethods, models.Manager):
use_for_related_objects = True
- def get_query_set(self):
+ def get_queryset(self):
return ScheduleQuerySet(self.model, using=self._db)
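
This rename tracks Django's manager API: since 1.6 the hook is spelled `get_queryset()`, and the old `get_query_set()` spelling is removed entirely in 1.8, so overriding managers must use the new name. A sketch of the same pattern with a hypothetical manager:

```python
from django.db import models

class ActiveManager(models.Manager):
    # Hypothetical example; the hook must be named get_queryset() on
    # Django >= 1.8 for lookups to be routed through the custom manager.
    def get_queryset(self):
        return super(ActiveManager, self).get_queryset().filter(active=True)
```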
diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py
index 2a7875d321..86ab0b3143 100644
--- a/awx/main/models/unified_jobs.py
+++ b/awx/main/models/unified_jobs.py
@@ -8,13 +8,13 @@ import logging
import re
import os
import os.path
+from collections import OrderedDict
from StringIO import StringIO
# Django
from django.conf import settings
from django.db import models
from django.core.exceptions import NON_FIELD_ERRORS
-from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
@@ -77,7 +77,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique):
('updating', _('Updating')), # Same as running.
]
- ALL_STATUS_CHOICES = SortedDict(PROJECT_STATUS_CHOICES + INVENTORY_SOURCE_STATUS_CHOICES + JOB_TEMPLATE_STATUS_CHOICES + DEPRECATED_STATUS_CHOICES).items()
+ ALL_STATUS_CHOICES = OrderedDict(PROJECT_STATUS_CHOICES + INVENTORY_SOURCE_STATUS_CHOICES + JOB_TEMPLATE_STATUS_CHOICES + DEPRECATED_STATUS_CHOICES).items()
class Meta:
app_label = 'main'
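
`SortedDict` is gone from recent Django, and `collections.OrderedDict` is a drop-in here: building it from the concatenated choice lists keeps each status at its first-seen position while collapsing duplicates shared between the lists. A small illustration:

```python
from collections import OrderedDict

a = [('new', 'New'), ('failed', 'Failed')]
b = [('failed', 'Failed'), ('ok', 'OK')]
# Duplicate keys keep their first position, so a status shared between
# the lists appears only once in the merged choices.
OrderedDict(a + b).items()
# [('new', 'New'), ('failed', 'Failed'), ('ok', 'OK')]
```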
diff --git a/awx/main/migrations/0001_v12b1_initial.py b/awx/main/south_migrations/0001_v12b1_initial.py
similarity index 100%
rename from awx/main/migrations/0001_v12b1_initial.py
rename to awx/main/south_migrations/0001_v12b1_initial.py
diff --git a/awx/main/migrations/0002_v12b2_changes.py b/awx/main/south_migrations/0002_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0002_v12b2_changes.py
rename to awx/main/south_migrations/0002_v12b2_changes.py
diff --git a/awx/main/migrations/0003_v12b2_changes.py b/awx/main/south_migrations/0003_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0003_v12b2_changes.py
rename to awx/main/south_migrations/0003_v12b2_changes.py
diff --git a/awx/main/migrations/0004_v12b2_changes.py b/awx/main/south_migrations/0004_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0004_v12b2_changes.py
rename to awx/main/south_migrations/0004_v12b2_changes.py
diff --git a/awx/main/migrations/0005_v12b2_changes.py b/awx/main/south_migrations/0005_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0005_v12b2_changes.py
rename to awx/main/south_migrations/0005_v12b2_changes.py
diff --git a/awx/main/migrations/0006_v12b2_changes.py b/awx/main/south_migrations/0006_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0006_v12b2_changes.py
rename to awx/main/south_migrations/0006_v12b2_changes.py
diff --git a/awx/main/migrations/0007_v12b2_changes.py b/awx/main/south_migrations/0007_v12b2_changes.py
similarity index 100%
rename from awx/main/migrations/0007_v12b2_changes.py
rename to awx/main/south_migrations/0007_v12b2_changes.py
diff --git a/awx/main/migrations/0008_v12changes.py b/awx/main/south_migrations/0008_v12changes.py
similarity index 100%
rename from awx/main/migrations/0008_v12changes.py
rename to awx/main/south_migrations/0008_v12changes.py
diff --git a/awx/main/migrations/0009_v13_changes.py b/awx/main/south_migrations/0009_v13_changes.py
similarity index 100%
rename from awx/main/migrations/0009_v13_changes.py
rename to awx/main/south_migrations/0009_v13_changes.py
diff --git a/awx/main/migrations/0010_v13_changes.py b/awx/main/south_migrations/0010_v13_changes.py
similarity index 100%
rename from awx/main/migrations/0010_v13_changes.py
rename to awx/main/south_migrations/0010_v13_changes.py
diff --git a/awx/main/migrations/0011_v13_encrypt_passwords.py b/awx/main/south_migrations/0011_v13_encrypt_passwords.py
similarity index 100%
rename from awx/main/migrations/0011_v13_encrypt_passwords.py
rename to awx/main/south_migrations/0011_v13_encrypt_passwords.py
diff --git a/awx/main/migrations/0012_v13_changes.py b/awx/main/south_migrations/0012_v13_changes.py
similarity index 100%
rename from awx/main/migrations/0012_v13_changes.py
rename to awx/main/south_migrations/0012_v13_changes.py
diff --git a/awx/main/migrations/0013_v13_changes.py b/awx/main/south_migrations/0013_v13_changes.py
similarity index 100%
rename from awx/main/migrations/0013_v13_changes.py
rename to awx/main/south_migrations/0013_v13_changes.py
diff --git a/awx/main/migrations/0014_v13_changes.py b/awx/main/south_migrations/0014_v13_changes.py
similarity index 100%
rename from awx/main/migrations/0014_v13_changes.py
rename to awx/main/south_migrations/0014_v13_changes.py
diff --git a/awx/main/migrations/0015_v14_changes.py b/awx/main/south_migrations/0015_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0015_v14_changes.py
rename to awx/main/south_migrations/0015_v14_changes.py
diff --git a/awx/main/migrations/0016_v14_changes.py b/awx/main/south_migrations/0016_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0016_v14_changes.py
rename to awx/main/south_migrations/0016_v14_changes.py
diff --git a/awx/main/migrations/0017_v14_changes.py b/awx/main/south_migrations/0017_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0017_v14_changes.py
rename to awx/main/south_migrations/0017_v14_changes.py
diff --git a/awx/main/migrations/0018_v14_changes.py b/awx/main/south_migrations/0018_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0018_v14_changes.py
rename to awx/main/south_migrations/0018_v14_changes.py
diff --git a/awx/main/migrations/0019_v14_changes.py b/awx/main/south_migrations/0019_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0019_v14_changes.py
rename to awx/main/south_migrations/0019_v14_changes.py
diff --git a/awx/main/migrations/0020_v14_changes.py b/awx/main/south_migrations/0020_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0020_v14_changes.py
rename to awx/main/south_migrations/0020_v14_changes.py
diff --git a/awx/main/migrations/0021_v14_changes.py b/awx/main/south_migrations/0021_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0021_v14_changes.py
rename to awx/main/south_migrations/0021_v14_changes.py
diff --git a/awx/main/migrations/0022_v14_changes.py b/awx/main/south_migrations/0022_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0022_v14_changes.py
rename to awx/main/south_migrations/0022_v14_changes.py
diff --git a/awx/main/migrations/0023_v14_changes.py b/awx/main/south_migrations/0023_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0023_v14_changes.py
rename to awx/main/south_migrations/0023_v14_changes.py
diff --git a/awx/main/migrations/0024_v14_changes.py b/awx/main/south_migrations/0024_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0024_v14_changes.py
rename to awx/main/south_migrations/0024_v14_changes.py
diff --git a/awx/main/migrations/0025_v14_changes.py b/awx/main/south_migrations/0025_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0025_v14_changes.py
rename to awx/main/south_migrations/0025_v14_changes.py
diff --git a/awx/main/migrations/0026_v14_changes.py b/awx/main/south_migrations/0026_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0026_v14_changes.py
rename to awx/main/south_migrations/0026_v14_changes.py
diff --git a/awx/main/migrations/0027_v14_changes.py b/awx/main/south_migrations/0027_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0027_v14_changes.py
rename to awx/main/south_migrations/0027_v14_changes.py
diff --git a/awx/main/migrations/0028_v14_changes.py b/awx/main/south_migrations/0028_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0028_v14_changes.py
rename to awx/main/south_migrations/0028_v14_changes.py
diff --git a/awx/main/migrations/0029_v14_changes.py b/awx/main/south_migrations/0029_v14_changes.py
similarity index 100%
rename from awx/main/migrations/0029_v14_changes.py
rename to awx/main/south_migrations/0029_v14_changes.py
diff --git a/awx/main/migrations/0030_v145_changes.py b/awx/main/south_migrations/0030_v145_changes.py
similarity index 100%
rename from awx/main/migrations/0030_v145_changes.py
rename to awx/main/south_migrations/0030_v145_changes.py
diff --git a/awx/main/migrations/0031_v145_changes.py b/awx/main/south_migrations/0031_v145_changes.py
similarity index 100%
rename from awx/main/migrations/0031_v145_changes.py
rename to awx/main/south_migrations/0031_v145_changes.py
diff --git a/awx/main/migrations/0032_v145_changes.py b/awx/main/south_migrations/0032_v145_changes.py
similarity index 100%
rename from awx/main/migrations/0032_v145_changes.py
rename to awx/main/south_migrations/0032_v145_changes.py
diff --git a/awx/main/migrations/0033_auto__chg_field_jobevent_created__chg_field_jobevent_modified.py b/awx/main/south_migrations/0033_auto__chg_field_jobevent_created__chg_field_jobevent_modified.py
similarity index 100%
rename from awx/main/migrations/0033_auto__chg_field_jobevent_created__chg_field_jobevent_modified.py
rename to awx/main/south_migrations/0033_auto__chg_field_jobevent_created__chg_field_jobevent_modified.py
diff --git a/awx/main/migrations/0034_v148_changes.py b/awx/main/south_migrations/0034_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0034_v148_changes.py
rename to awx/main/south_migrations/0034_v148_changes.py
diff --git a/awx/main/migrations/0035_v148_changes.py b/awx/main/south_migrations/0035_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0035_v148_changes.py
rename to awx/main/south_migrations/0035_v148_changes.py
diff --git a/awx/main/migrations/0036_v148_changes.py b/awx/main/south_migrations/0036_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0036_v148_changes.py
rename to awx/main/south_migrations/0036_v148_changes.py
diff --git a/awx/main/migrations/0037_v148_changes.py b/awx/main/south_migrations/0037_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0037_v148_changes.py
rename to awx/main/south_migrations/0037_v148_changes.py
diff --git a/awx/main/migrations/0038_v148_changes.py b/awx/main/south_migrations/0038_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0038_v148_changes.py
rename to awx/main/south_migrations/0038_v148_changes.py
diff --git a/awx/main/migrations/0039_v148_changes.py b/awx/main/south_migrations/0039_v148_changes.py
similarity index 100%
rename from awx/main/migrations/0039_v148_changes.py
rename to awx/main/south_migrations/0039_v148_changes.py
diff --git a/awx/main/migrations/0040_v200_changes.py b/awx/main/south_migrations/0040_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0040_v200_changes.py
rename to awx/main/south_migrations/0040_v200_changes.py
diff --git a/awx/main/migrations/0041_v200_changes.py b/awx/main/south_migrations/0041_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0041_v200_changes.py
rename to awx/main/south_migrations/0041_v200_changes.py
diff --git a/awx/main/migrations/0042_v200_changes.py b/awx/main/south_migrations/0042_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0042_v200_changes.py
rename to awx/main/south_migrations/0042_v200_changes.py
diff --git a/awx/main/migrations/0043_v1411_changes.py b/awx/main/south_migrations/0043_v1411_changes.py
similarity index 100%
rename from awx/main/migrations/0043_v1411_changes.py
rename to awx/main/south_migrations/0043_v1411_changes.py
diff --git a/awx/main/migrations/0044_v1411_changes.py b/awx/main/south_migrations/0044_v1411_changes.py
similarity index 100%
rename from awx/main/migrations/0044_v1411_changes.py
rename to awx/main/south_migrations/0044_v1411_changes.py
diff --git a/awx/main/migrations/0045_v1411_changes.py b/awx/main/south_migrations/0045_v1411_changes.py
similarity index 100%
rename from awx/main/migrations/0045_v1411_changes.py
rename to awx/main/south_migrations/0045_v1411_changes.py
diff --git a/awx/main/migrations/0046_v200_changes.py b/awx/main/south_migrations/0046_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0046_v200_changes.py
rename to awx/main/south_migrations/0046_v200_changes.py
diff --git a/awx/main/migrations/0047_v200_changes.py b/awx/main/south_migrations/0047_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0047_v200_changes.py
rename to awx/main/south_migrations/0047_v200_changes.py
diff --git a/awx/main/migrations/0048_v200_changes.py b/awx/main/south_migrations/0048_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0048_v200_changes.py
rename to awx/main/south_migrations/0048_v200_changes.py
diff --git a/awx/main/migrations/0049_v200_changes.py b/awx/main/south_migrations/0049_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0049_v200_changes.py
rename to awx/main/south_migrations/0049_v200_changes.py
diff --git a/awx/main/migrations/0050_v200_changes.py b/awx/main/south_migrations/0050_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0050_v200_changes.py
rename to awx/main/south_migrations/0050_v200_changes.py
diff --git a/awx/main/migrations/0051_v200_changes.py b/awx/main/south_migrations/0051_v200_changes.py
similarity index 100%
rename from awx/main/migrations/0051_v200_changes.py
rename to awx/main/south_migrations/0051_v200_changes.py
diff --git a/awx/main/migrations/0052_v210_changes.py b/awx/main/south_migrations/0052_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0052_v210_changes.py
rename to awx/main/south_migrations/0052_v210_changes.py
diff --git a/awx/main/migrations/0053_v210_changes.py b/awx/main/south_migrations/0053_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0053_v210_changes.py
rename to awx/main/south_migrations/0053_v210_changes.py
diff --git a/awx/main/migrations/0054_v210_changes.py b/awx/main/south_migrations/0054_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0054_v210_changes.py
rename to awx/main/south_migrations/0054_v210_changes.py
diff --git a/awx/main/migrations/0055_v210_changes.py b/awx/main/south_migrations/0055_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0055_v210_changes.py
rename to awx/main/south_migrations/0055_v210_changes.py
diff --git a/awx/main/migrations/0056_v210_changes.py b/awx/main/south_migrations/0056_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0056_v210_changes.py
rename to awx/main/south_migrations/0056_v210_changes.py
diff --git a/awx/main/migrations/0057_v210_changes.py b/awx/main/south_migrations/0057_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0057_v210_changes.py
rename to awx/main/south_migrations/0057_v210_changes.py
diff --git a/awx/main/migrations/0058_v210_changes.py b/awx/main/south_migrations/0058_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0058_v210_changes.py
rename to awx/main/south_migrations/0058_v210_changes.py
diff --git a/awx/main/migrations/0059_v210_changes.py b/awx/main/south_migrations/0059_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0059_v210_changes.py
rename to awx/main/south_migrations/0059_v210_changes.py
diff --git a/awx/main/migrations/0060_v210_changes.py b/awx/main/south_migrations/0060_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0060_v210_changes.py
rename to awx/main/south_migrations/0060_v210_changes.py
diff --git a/awx/main/migrations/0061_v210_changes.py b/awx/main/south_migrations/0061_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0061_v210_changes.py
rename to awx/main/south_migrations/0061_v210_changes.py
diff --git a/awx/main/migrations/0062_v210_changes.py b/awx/main/south_migrations/0062_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0062_v210_changes.py
rename to awx/main/south_migrations/0062_v210_changes.py
diff --git a/awx/main/migrations/0063_v210_changes.py b/awx/main/south_migrations/0063_v210_changes.py
similarity index 100%
rename from awx/main/migrations/0063_v210_changes.py
rename to awx/main/south_migrations/0063_v210_changes.py
diff --git a/awx/main/migrations/0064_v220_changes.py b/awx/main/south_migrations/0064_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0064_v220_changes.py
rename to awx/main/south_migrations/0064_v220_changes.py
diff --git a/awx/main/migrations/0065_v220_changes.py b/awx/main/south_migrations/0065_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0065_v220_changes.py
rename to awx/main/south_migrations/0065_v220_changes.py
diff --git a/awx/main/migrations/0066_v220_changes.py b/awx/main/south_migrations/0066_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0066_v220_changes.py
rename to awx/main/south_migrations/0066_v220_changes.py
diff --git a/awx/main/migrations/0067_v220_changes.py b/awx/main/south_migrations/0067_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0067_v220_changes.py
rename to awx/main/south_migrations/0067_v220_changes.py
diff --git a/awx/main/migrations/0068_v220_changes.py b/awx/main/south_migrations/0068_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0068_v220_changes.py
rename to awx/main/south_migrations/0068_v220_changes.py
diff --git a/awx/main/migrations/0069_v220_changes.py b/awx/main/south_migrations/0069_v220_changes.py
similarity index 100%
rename from awx/main/migrations/0069_v220_changes.py
rename to awx/main/south_migrations/0069_v220_changes.py
diff --git a/awx/main/migrations/0070_v221_changes.py b/awx/main/south_migrations/0070_v221_changes.py
similarity index 100%
rename from awx/main/migrations/0070_v221_changes.py
rename to awx/main/south_migrations/0070_v221_changes.py
diff --git a/awx/main/migrations/0071_v240_changes.py b/awx/main/south_migrations/0071_v240_changes.py
similarity index 100%
rename from awx/main/migrations/0071_v240_changes.py
rename to awx/main/south_migrations/0071_v240_changes.py
diff --git a/awx/main/migrations/0072_v240_changes.py b/awx/main/south_migrations/0072_v240_changes.py
similarity index 100%
rename from awx/main/migrations/0072_v240_changes.py
rename to awx/main/south_migrations/0072_v240_changes.py
diff --git a/awx/main/migrations/0073_v240_changes.py b/awx/main/south_migrations/0073_v240_changes.py
similarity index 100%
rename from awx/main/migrations/0073_v240_changes.py
rename to awx/main/south_migrations/0073_v240_changes.py
diff --git a/awx/main/migrations/0074_v240_changes.py b/awx/main/south_migrations/0074_v240_changes.py
similarity index 100%
rename from awx/main/migrations/0074_v240_changes.py
rename to awx/main/south_migrations/0074_v240_changes.py
diff --git a/awx/main/migrations/0075_v300_changes.py b/awx/main/south_migrations/0075_v300_changes.py
similarity index 100%
rename from awx/main/migrations/0075_v300_changes.py
rename to awx/main/south_migrations/0075_v300_changes.py
diff --git a/awx/main/south_migrations/__init__.py b/awx/main/south_migrations/__init__.py
new file mode 100644
index 0000000000..e484e62be1
--- /dev/null
+++ b/awx/main/south_migrations/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
diff --git a/awx/main/tasks.py b/awx/main/tasks.py
index 0ac7776547..4c4ed1ecfb 100644
--- a/awx/main/tasks.py
+++ b/awx/main/tasks.py
@@ -3,6 +3,7 @@
# Python
import codecs
+from collections import OrderedDict
import ConfigParser
import cStringIO
import json
@@ -37,7 +38,6 @@ from celery import Task, task
# Django
from django.conf import settings
from django.db import transaction, DatabaseError
-from django.utils.datastructures import SortedDict
from django.utils.timezone import now
# AWX
@@ -423,7 +423,7 @@ class BaseTask(Task):
prompts, and values are password lookup keys (keys that are returned
from build_passwords).
'''
- return SortedDict()
+ return OrderedDict()
@task_timer
def run_pexpect(self, instance, args, cwd, env, passwords, stdout_handle,
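
Same substitution as above: the hook's documented contract is an ordered mapping of pexpect patterns to keys returned from `build_passwords()`, and `OrderedDict` preserves the matching order the way `SortedDict` did. A hedged sketch of a subclass adding a prompt, assuming it lives in this module's scope; the pattern and key are illustrative, not taken from this PR:

```python
class ExampleTask(BaseTask):
    def get_password_prompts(self):
        # Insertion order determines matching priority for the prompts.
        d = super(ExampleTask, self).get_password_prompts()
        d[r'Enter passphrase for .*:\s*?$'] = 'ssh_key_unlock'
        return d
```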
diff --git a/awx/main/test_main.py b/awx/main/test_main.py
new file mode 100644
index 0000000000..b74dd89556
--- /dev/null
+++ b/awx/main/test_main.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
+
+from awx.main.tests import * # noqa
diff --git a/awx/main/tests/__init__.py b/awx/main/tests/__init__.py
index e69de29bb2..e484e62be1 100644
--- a/awx/main/tests/__init__.py
+++ b/awx/main/tests/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
diff --git a/awx/main/tests/base.py b/awx/main/tests/base.py
index bdea0523a8..93bac00948 100644
--- a/awx/main/tests/base.py
+++ b/awx/main/tests/base.py
@@ -25,6 +25,7 @@ from django.conf import settings, UserSettingsHolder
from django.contrib.auth.models import User
from django.test.client import Client
from django.test.utils import override_settings
+from django.utils.encoding import force_text
# AWX
from awx.main.models import * # noqa
@@ -132,6 +133,7 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
if settings.CALLBACK_CONSUMER_PORT:
callback_port = random.randint(55700, 55799)
settings.CALLBACK_CONSUMER_PORT = 'tcp://127.0.0.1:%d' % callback_port
+ os.environ['CALLBACK_CONSUMER_PORT'] = settings.CALLBACK_CONSUMER_PORT
callback_queue_path = '/tmp/callback_receiver_test_%d.ipc' % callback_port
self._temp_paths.append(callback_queue_path)
settings.CALLBACK_QUEUE_PORT = 'ipc://%s' % callback_queue_path
@@ -425,8 +427,6 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
return_response_object=False, client_kwargs=None):
assert method is not None
method_name = method.lower()
- #if method_name not in ('options', 'head', 'get', 'delete'):
- # assert data is not None
client_kwargs = client_kwargs or {}
if accept:
client_kwargs['HTTP_ACCEPT'] = accept
@@ -457,7 +457,7 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
client = Client(**client_kwargs)
method = getattr(client, method_name)
response = None
- if data is not None:
+ if method_name not in ('options', 'head', 'get', 'delete'):
data_type = data_type or 'json'
if data_type == 'json':
response = method(url, json.dumps(data), 'application/json')
@@ -469,9 +469,9 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
response = method(url)
self.assertFalse(response.status_code == 500 and expect != 500,
- 'Failed (500): %s' % response.content)
+ 'Failed (500): %s' % force_text(response.content))
if expect is not None:
- assert response.status_code == expect, "expected status %s, got %s for url=%s as auth=%s: %s" % (expect, response.status_code, url, auth, response.content)
+ assert response.status_code == expect, u"expected status %s, got %s for url=%s as auth=%s: %s" % (expect, response.status_code, url, auth, force_text(response.content))
if method_name == 'head':
self.assertFalse(response.content)
if return_response_object:
@@ -479,16 +479,16 @@ class BaseTestMixin(QueueTestMixin, MockCommonlySlowTestMixin):
if response.status_code not in [204, 405] and method_name != 'head' and response.content:
# no JSON responses in these at least for now, 409 should probably return some (FIXME)
if response['Content-Type'].startswith('application/json'):
- obj = json.loads(response.content)
+ obj = json.loads(force_text(response.content))
elif response['Content-Type'].startswith('application/yaml'):
- obj = yaml.safe_load(response.content)
+ obj = yaml.safe_load(force_text(response.content))
elif response['Content-Type'].startswith('text/plain'):
obj = {
- 'content': response.content
+ 'content': force_text(response.content)
}
elif response['Content-Type'].startswith('text/html'):
obj = {
- 'content': response.content
+ 'content': force_text(response.content)
}
else:
                 self.fail('Unsupported response content type %s' % response['Content-Type'])
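
The `force_text()` wrapping matters under Python 2: `response.content` is a UTF-8 bytestring, and interpolating it directly into a unicode format string triggers an implicit ASCII decode. A minimal illustration (the payload is made up):

```python
from django.utils.encoding import force_text

content = '{"name": "d\xc3\xa9mo"}'        # UTF-8 bytestring, as from the test client
u'Failed (500): %s' % force_text(content)  # decode first; avoids UnicodeDecodeError
```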
diff --git a/awx/main/tests/old/activity_stream.py b/awx/main/tests/old/activity_stream.py
index 9798695954..80582d8b6b 100644
--- a/awx/main/tests/old/activity_stream.py
+++ b/awx/main/tests/old/activity_stream.py
@@ -31,6 +31,7 @@ class ActivityStreamTest(BaseTest):
self.options(url, expect=200)
self.head(url, expect=200)
response = self.get(url, expect=200)
+            #print response
self.check_pagination_and_size(response, 1, previous=None, next=None)
def test_basic_fields(self):
diff --git a/awx/main/tests/old/ad_hoc.py b/awx/main/tests/old/ad_hoc.py
index 41993f885b..104c67d1fa 100644
--- a/awx/main/tests/old/ad_hoc.py
+++ b/awx/main/tests/old/ad_hoc.py
@@ -18,6 +18,7 @@ from crum import impersonate
# AWX
from awx.main.utils import * # noqa
from awx.main.models import * # noqa
+from awx.main.conf import tower_settings
from awx.main.tests.base import BaseJobExecutionTest
from awx.main.tests.data.ssh import (
TEST_SSH_KEY_DATA,
@@ -746,11 +747,15 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
# Try to relaunch ad hoc command when module has been removed from
# allowed list of modules.
- with self.settings(AD_HOC_COMMANDS=[]):
+ try:
+ ad_hoc_commands = tower_settings.AD_HOC_COMMANDS
+ tower_settings.AD_HOC_COMMANDS = []
with self.current_user('admin'):
response = self.get(url, expect=200)
self.assertEqual(response['passwords_needed_to_start'], [])
response = self.post(url, {}, expect=400)
+ finally:
+ tower_settings.AD_HOC_COMMANDS = ad_hoc_commands
# Try to relaunch after the inventory has been marked inactive.
self.inventory.mark_inactive()
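
The try/finally dance replaces `self.settings(...)`, which only overrides Django settings, not the TowerSettings-backed registry. A hypothetical helper (not part of this PR) packaging the same save/override/restore pattern:

```python
from contextlib import contextmanager
from awx.main.conf import tower_settings

@contextmanager
def override_tower_setting(name, value):
    """Save, override, and restore a registry-backed setting."""
    original = getattr(tower_settings, name)
    setattr(tower_settings, name, value)
    try:
        yield
    finally:
        setattr(tower_settings, name, original)

# Usage:
#   with override_tower_setting('AD_HOC_COMMANDS', []):
#       ...
```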
diff --git a/awx/main/tests/old/commands/commands_monolithic.py b/awx/main/tests/old/commands/commands_monolithic.py
index af4916e42d..d5e00818ca 100644
--- a/awx/main/tests/old/commands/commands_monolithic.py
+++ b/awx/main/tests/old/commands/commands_monolithic.py
@@ -14,7 +14,6 @@ import urlparse
import unittest2 as unittest
# Django
-import django
from django.conf import settings
from django.contrib.auth.models import User
from django.core.management import call_command
@@ -1066,8 +1065,6 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
self.assertNotEqual(new_inv.total_groups, 0)
self.assertElapsedLessThan(30)
- @unittest.skipIf(hasattr(django.db.backend, 'sqlite3'),
- 'Skip this test if we are on sqlite')
def test_splunk_inventory(self):
new_inv = self.organizations[0].inventories.create(name='splunk')
self.assertEqual(new_inv.hosts.count(), 0)
diff --git a/awx/main/tests/old/inventory.py b/awx/main/tests/old/inventory.py
index 9503b4232c..5c48f30bb6 100644
--- a/awx/main/tests/old/inventory.py
+++ b/awx/main/tests/old/inventory.py
@@ -535,7 +535,7 @@ class InventoryTest(BaseTest):
vars_a = dict(asdf=7777, dog='droopy', cat='battlecat', unstructured=dict(a=[1,1,1],b=dict(x=1,y=2)))
vars_b = dict(asdf=8888, dog='snoopy', cat='cheshire', unstructured=dict(a=[2,2,2],b=dict(x=3,y=4)))
vars_c = dict(asdf=9999, dog='pluto', cat='five', unstructured=dict(a=[3,3,3],b=dict(z=5)))
- group = Group.objects.get(id=1)
+ group = Group.objects.order_by('pk')[0]
vdata1_url = reverse('api:group_variable_data', args=(group.pk,))
@@ -1330,6 +1330,18 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.delete(inv_up_url, expect=204)
self.get(inv_up_url, expect=404)
+ def print_group_tree(self, group, depth=0):
+ print (' ' * depth) + '+ ' + group.name
+ for host in group.hosts.order_by('name'):
+ print (' ' * depth) + ' - ' + host.name
+ for child in group.children.order_by('name'):
+ self.print_group_tree(child, depth + 1)
+
+ def print_inventory_tree(self, inventory):
+ # Print out group/host tree for debugging.
+ for group in inventory.root_groups.order_by('name'):
+ self.print_group_tree(group)
+
def test_put_inventory_source_detail_with_regions(self):
creds_url = reverse('api:credential_list')
inv_src_url1 = reverse('api:inventory_source_detail',
@@ -1749,6 +1761,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
# its own child).
self.assertTrue(self.group in self.inventory.root_groups)
# Verify that returned groups are nested:
+ #self.print_inventory_tree(self.inventory)
child_names = self.group.children.values_list('name', flat=True)
for name in child_names:
self.assertFalse(name.startswith('us-'))
@@ -1766,7 +1779,10 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.assertTrue('tags' in child_names)
self.assertTrue('images' in child_names)
self.assertFalse('tag_none' in child_names)
- self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True))
+ # Only check for tag_none as a child of tags if there is a tag_none group;
+ # the test inventory *may* have tags set for all hosts.
+ if self.inventory.groups.filter(name='tag_none').exists():
+ self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True))
self.assertFalse('instances' in child_names)
# Make sure we clean up the cache path when finished (when one is not
# provided explicitly via source_vars).
@@ -1816,7 +1832,10 @@ class InventoryUpdatesTest(BaseTransactionTest):
self.assertTrue(self.group.children.get(name='security_groups').children.filter(active=True).count())
self.assertTrue('tags' in child_names)
self.assertTrue(self.group.children.get(name='tags').children.filter(active=True).count())
- self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True))
+ # Only check for tag_none as a child of tags if there is a tag_none group;
+ # the test inventory *may* have tags set for all hosts.
+ if self.inventory.groups.filter(name='tag_none').exists():
+ self.assertTrue('tag_none' in self.group.children.get(name='tags').children.values_list('name', flat=True))
self.assertTrue('images' in child_names)
self.assertTrue(self.group.children.get(name='images').children.filter(active=True).count())
self.assertTrue('instances' in child_names)
@@ -1840,21 +1859,9 @@ class InventoryUpdatesTest(BaseTransactionTest):
# Replacement text should not be left in inventory source name.
self.assertFalse(InventorySource.objects.filter(name__icontains='__replace_').exists())
# Inventory update name should be based on inventory/group names and need not have the inventory source pk.
- print InventoryUpdate.objects.values_list('name', 'inventory_source__name')
+ #print InventoryUpdate.objects.values_list('name', 'inventory_source__name')
for inventory_update in InventoryUpdate.objects.all():
self.assertFalse(inventory_update.name.endswith(inventory_update.inventory_source.name), inventory_update.name)
- return
- # Print out group/host tree for debugging.
- print
-
- def draw_tree(g, d=0):
- print (' ' * d) + '+ ' + g.name
- for h in g.hosts.order_by('name'):
- print (' ' * d) + ' - ' + h.name
- for c in g.children.order_by('name'):
- draw_tree(c, d + 1)
- for g in self.inventory.root_groups.order_by('name'):
- draw_tree(g)
def test_update_from_rax(self):
source_username = getattr(settings, 'TEST_RACKSPACE_USERNAME', '')
diff --git a/awx/main/tests/old/jobs/jobs_monolithic.py b/awx/main/tests/old/jobs/jobs_monolithic.py
index 72283f29f9..1d36972245 100644
--- a/awx/main/tests/old/jobs/jobs_monolithic.py
+++ b/awx/main/tests/old/jobs/jobs_monolithic.py
@@ -15,7 +15,7 @@ import django.test
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
-from django.utils.encoding import smart_str
+from django.utils.encoding import smart_text
# Requests
import requests
@@ -216,26 +216,26 @@ class JobTemplateTest(BaseJobTestMixin, django.test.TestCase):
# due to being an org admin for that project and no credential assigned to that template
with self.current_user(self.user_bob):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 3)
# Chuck has permission to see all Eng Job Templates as Lead Engineer
# Note: Since chuck is an org admin he can also see the support scan template
with self.current_user(self.user_chuck):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 3)
# Doug is in engineering but can only run scan jobs so he can only see the one Job Template
with self.current_user(self.user_doug):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 1)
# Juan can't see any job templates in Engineering because he lacks the inventory read permission
with self.current_user(self.user_juan):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 0)
# We give Juan inventory permission and he can see both Job Templates because he already has deploy permission
@@ -248,19 +248,19 @@ class JobTemplateTest(BaseJobTestMixin, django.test.TestCase):
)
with self.current_user(self.user_juan):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 2)
# Randall is on the ops testers team that has permission to run a single check playbook on ops west
with self.current_user(self.user_randall):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 1)
# Holly is on the ops east team and can see all of that team's job templates
with self.current_user(self.user_holly):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 3)
# Chuck is temporarily assigned to the ops east team to help them run some playbooks
@@ -268,7 +268,7 @@ class JobTemplateTest(BaseJobTestMixin, django.test.TestCase):
self.team_ops_east.users.add(self.user_chuck)
with self.current_user(self.user_chuck):
resp = self.get(url, expect=200)
- print [x['name'] for x in resp['results']]
+ #print [x['name'] for x in resp['results']]
self.assertEquals(resp['count'], 6)
@@ -313,7 +313,7 @@ class JobTemplateTest(BaseJobTestMixin, django.test.TestCase):
self.assertEqual(jt.inventory.pk, data['inventory'])
self.assertEqual(jt.credential, None)
self.assertEqual(jt.project.pk, data['project'])
- self.assertEqual(smart_str(jt.playbook), data['playbook'])
+ self.assertEqual(smart_text(jt.playbook), data['playbook'])
# Test that all required fields are really required.
data['name'] = 'another new job template'
@@ -903,8 +903,10 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
# Set a limit on the job template to verify the callback job limit is
# set to the intersection of this limit and the host name.
- job_template.limit = 'bakers:slicers:packagers'
- job_template.save(update_fields=['limit'])
+ # job_template.limit = 'bakers:slicers:packagers'
+ # job_template.save(update_fields=['limit'])
+ JobTemplate.objects.filter(pk=job_template.pk).update(limit='bakers:slicers:packagers')
+ job_template = JobTemplate.objects.get(pk=job_template.pk)
# Try when hostname is also an IP address, even if a different one is
# specified via ansible_ssh_host.
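
The callback-test change above swaps job_template.save(update_fields=['limit']) for a queryset-level update() followed by a re-fetch. A plausible motivation (an assumption on my part; the diff does not say) is that QuerySet.update() issues a single SQL UPDATE and bypasses save() and its pre_save/post_save signals, and since update() never touches in-memory instances, the object must be reloaded to observe the new value. A minimal sketch of the pattern:

```python
# Minimal sketch of the save()-vs-update() pattern used above, assuming a
# configured Django environment where the AWX models are importable.
from awx.main.models import JobTemplate

jt = JobTemplate.objects.get(name='example')

# save(update_fields=...) runs the full save() path, including signals:
jt.limit = 'bakers:slicers:packagers'
jt.save(update_fields=['limit'])

# QuerySet.update() issues one UPDATE statement and skips save()/signals,
# but leaves in-memory instances stale, hence the re-fetch:
JobTemplate.objects.filter(pk=jt.pk).update(limit='bakers:slicers:packagers')
jt = JobTemplate.objects.get(pk=jt.pk)
```
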
diff --git a/awx/main/tests/old/projects.py b/awx/main/tests/old/projects.py
index b7378d14ef..b0a32ad6a5 100644
--- a/awx/main/tests/old/projects.py
+++ b/awx/main/tests/old/projects.py
@@ -287,10 +287,7 @@ class ProjectsTest(BaseTransactionTest):
# can list playbooks for projects
proj_playbooks = reverse('api:project_playbooks', args=(self.projects[2].pk,))
got = self.get(proj_playbooks, expect=200, auth=self.get_super_credentials())
- got_new = []
- for g in got:
- got_new.append(g.encode('utf-8'))
- self.assertEqual(got_new, self.projects[2].playbooks)
+ self.assertEqual(got, self.projects[2].playbooks)
# can list member organizations for projects
proj_orgs = reverse('api:project_organizations_list', args=(self.projects[0].pk,))
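
The two test changes above point the same way: smart_str becomes smart_text in jobs_monolithic.py, and projects.py stops hand-encoding playbook names to UTF-8 before comparing. Presumably (not stated in the diff) this is because Django 1.8 / DRF 3 consistently hand back text (unicode on Python 2), so byte-level coercion is no longer needed. A short sketch of the helpers involved:

```python
# Django's text-coercion helpers, as used in the Python 2 era this patch
# targets: smart_text/force_text return unicode, smart_str returns bytes.
from django.utils.encoding import smart_text, force_text

assert smart_text(b'check.yml') == u'check.yml'   # bytes -> text
assert force_text(42) == u'42'                    # non-strings coerced too
```
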
diff --git a/awx/main/tests/old/scripts.py b/awx/main/tests/old/scripts.py
index cdf6758921..1b5295d0bd 100644
--- a/awx/main/tests/old/scripts.py
+++ b/awx/main/tests/old/scripts.py
@@ -288,8 +288,7 @@ class InventoryScriptTest(BaseScriptTest):
# Valid host, but not part of the specified inventory.
inventory = self.inventories[0]
self.assertTrue(inventory.active)
- host = Host.objects.get(id=12)
- self.assertTrue(host.active)
+ host = Host.objects.filter(active=True).exclude(inventory=inventory)[0]
os.environ['INVENTORY_ID'] = str(inventory.pk)
rc, stdout, stderr = self.run_inventory_script(host=host.name)
self.assertNotEqual(rc, 0, stderr)
diff --git a/awx/main/tests/old/settings.py b/awx/main/tests/old/settings.py
index a727213454..3f08bd1a7a 100644
--- a/awx/main/tests/old/settings.py
+++ b/awx/main/tests/old/settings.py
@@ -83,6 +83,9 @@ class SettingsTest(BaseTest):
self.set_setting('TEST_SETTING_INT', 2)
setting_int = self.get_individual_setting('TEST_SETTING_INT')
self.assertEqual(setting_int['value'], 2)
+ self.set_setting('TEST_SETTING_INT', 3)
+ setting_int = self.get_individual_setting('TEST_SETTING_INT')
+ self.assertEqual(setting_int['value'], 3)
self.post(settings_reset, data={"key": 'TEST_SETTING_INT'}, expect=204)
setting_int = self.get_individual_setting('TEST_SETTING_INT')
self.assertEqual(setting_int['value'], TEST_TOWER_SETTINGS_MANIFEST['TEST_SETTING_INT']['default'])
diff --git a/awx/main/tests/old/users.py b/awx/main/tests/old/users.py
index 91720212ff..c4ec3a87f1 100644
--- a/awx/main/tests/old/users.py
+++ b/awx/main/tests/old/users.py
@@ -947,8 +947,8 @@ class LdapTest(BaseTest):
return user
def test_ldap_auth(self):
- self.use_test_setting('USER_SEARCH')
- self.use_test_setting('ALWAYS_UPDATE_USER')
+ for name in ('USER_SEARCH', 'ALWAYS_UPDATE_USER', 'GROUP_TYPE', 'GROUP_SEARCH'):
+ self.use_test_setting(name)
self.assertEqual(User.objects.filter(username=self.ldap_username).count(), 0)
# Test logging in, user should be created with no flags or fields set.
user = self.check_login()
diff --git a/awx/main/utils.py b/awx/main/utils.py
index 110df9a5fd..5bd00c2da6 100644
--- a/awx/main/utils.py
+++ b/awx/main/utils.py
@@ -354,8 +354,7 @@ def get_type_for_model(model):
'''
Return type name for a given model class.
'''
- from rest_framework.compat import get_concrete_model
- opts = get_concrete_model(model)._meta
+ opts = model._meta.concrete_model._meta
return camelcase_to_underscore(opts.object_name)
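
DRF 3 dropped rest_framework.compat.get_concrete_model, and the rewrite above reaches for Django's own Meta API instead: model._meta.concrete_model resolves proxy and deferred classes to their table-backed model, which is all the old compat helper did. A usage sketch (assuming the AWX models are importable):

```python
# Sketch of what the rewritten helper resolves; JobTemplate is a concrete
# model, so concrete_model is the class itself.
from awx.main.models import JobTemplate
from awx.main.utils import get_type_for_model

assert JobTemplate._meta.concrete_model is JobTemplate
assert get_type_for_model(JobTemplate) == 'job_template'
```
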
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index e338ef14b6..a94bc45d44 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -18,6 +18,15 @@ for setting in dir(global_settings):
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+def is_testing(argv=None):
+ '''Return True if running django or py.test unit tests.'''
+ argv = sys.argv if argv is None else argv
+ if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
+ return True
+ elif len(argv) >= 2 and argv[1] == 'test':
+ return True
+ return False
+
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SQL_DEBUG = DEBUG
@@ -32,9 +41,11 @@ DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),
- # Test database cannot be :memory: for celery/inventory tests to work.
- 'TEST_NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
'ATOMIC_REQUESTS': True,
+ 'TEST': {
+ # Test database cannot be :memory: for celery/inventory tests.
+ 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ },
}
}
@@ -170,7 +181,6 @@ INSTALLED_APPS = (
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
- 'south',
'rest_framework',
'django_extensions',
'djcelery',
@@ -188,9 +198,8 @@ INSTALLED_APPS = (
INTERNAL_IPS = ('127.0.0.1',)
REST_FRAMEWORK = {
- 'DEFAULT_PAGINATION_SERIALIZER_CLASS': 'awx.api.pagination.PaginationSerializer',
- 'PAGINATE_BY': 25,
- 'PAGINATE_BY_PARAM': 'page_size',
+ 'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
+ 'PAGE_SIZE': 25,
'DEFAULT_AUTHENTICATION_CLASSES': (
'awx.api.authentication.TokenAuthentication',
'rest_framework.authentication.BasicAuthentication',
@@ -213,6 +222,7 @@ REST_FRAMEWORK = {
'rest_framework.renderers.JSONRenderer',
'awx.api.renderers.BrowsableAPIRenderer',
),
+ 'DEFAULT_METADATA_CLASS': 'awx.api.metadata.Metadata',
'EXCEPTION_HANDLER': 'awx.api.views.api_exception_handler',
'VIEW_NAME_FUNCTION': 'awx.api.generics.get_view_name',
'VIEW_DESCRIPTION_FUNCTION': 'awx.api.generics.get_view_description',
@@ -309,8 +319,9 @@ DEBUG_TOOLBAR_CONFIG = {
# Use Django-devserver if installed.
try:
- import devserver
- INSTALLED_APPS += (devserver.__name__,)
+ import devserver # noqa
+ # FIXME: devserver has issues with Django 1.8?
+ # INSTALLED_APPS += (devserver.__name__,)
except ImportError:
pass
@@ -329,9 +340,6 @@ DEVSERVER_MODULES = (
# Set default ports for live server tests.
os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199')
-# Skip migrations when running tests.
-SOUTH_TESTS_MIGRATE = False
-
# Initialize Django-Celery.
djcelery.setup_loader()
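
The REST_FRAMEWORK changes in this file track DRF 3.1's pagination redesign: the serializer-based PAGINATE_BY / PAGINATE_BY_PARAM settings were removed in favor of a pagination class named by DEFAULT_PAGINATION_CLASS. The referenced awx.api.pagination.Pagination is not part of this diff; the sketch below is a hypothetical stand-in showing the idiomatic DRF 3 equivalent of the old settings:

```python
# Hypothetical sketch only -- the real awx.api.pagination.Pagination is not
# shown in this patch. PageNumberPagination is the stock DRF 3 base class.
from rest_framework.pagination import PageNumberPagination

class Pagination(PageNumberPagination):
    page_size = 25                        # replaces PAGINATE_BY
    page_size_query_param = 'page_size'   # replaces PAGINATE_BY_PARAM
    max_page_size = 200                   # assumed cap, not from the diff
```
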
diff --git a/awx/settings/development.py b/awx/settings/development.py
index 2ceb74623d..a214ab4670 100644
--- a/awx/settings/development.py
+++ b/awx/settings/development.py
@@ -83,7 +83,8 @@ include(optional('/etc/tower/conf.d/*.py'), scope=locals())
# only the defaults.
try:
include(optional('local_*.py'), scope=locals())
- include('postprocess.py', scope=locals())
+ if not is_testing(sys.argv):
+ include('postprocess.py', scope=locals())
except ImportError:
traceback.print_exc()
sys.exit(1)
diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose
index 794d3ce073..67ab89e87e 100644
--- a/awx/settings/local_settings.py.docker_compose
+++ b/awx/settings/local_settings.py.docker_compose
@@ -30,23 +30,18 @@ DATABASES = {
}
}
-def is_testing(argv):
- if "py.test" in argv[0] or "py/test.py" in argv[0]:
- return True
- elif argv[1] == "test":
- return True
- return False
-
# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
-if len(sys.argv) >= 2 and is_testing(sys.argv):
+if is_testing(sys.argv):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),
- # Test database cannot be :memory: for celery/inventory tests.
- 'TEST_NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ 'TEST': {
+ # Test database cannot be :memory: for celery/inventory tests.
+ 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ },
}
}
diff --git a/awx/settings/local_settings.py.example b/awx/settings/local_settings.py.example
index 52648798fe..cb85724366 100644
--- a/awx/settings/local_settings.py.example
+++ b/awx/settings/local_settings.py.example
@@ -30,23 +30,18 @@ DATABASES = {
}
}
-def is_testing(argv):
- if "py.test" in argv[0] or "py/test.py" in argv[0]:
- return True
- elif argv[1] == "test":
- return True
- return False
-
# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
-if len(sys.argv) >= 2 and is_testing(sys.argv):
+if is_testing(sys.argv):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),
- # Test database cannot be :memory: for celery/inventory tests.
- 'TEST_NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ 'TEST': {
+ # Test database cannot be :memory: for celery/inventory tests.
+ 'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3'),
+ },
}
}
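
Both local-settings templates now call the single is_testing() helper defined in awx/settings/defaults.py, which also folds in the len(argv) >= 2 guard the old inline copies needed. Its behavior, per that definition:

```python
# Usage sketch for the shared helper; importing a settings module has side
# effects, so treat this as illustrative rather than a literal test.
from awx.settings.defaults import is_testing

assert is_testing(['/usr/bin/py.test'])            # py.test entry point
assert is_testing(['manage.py', 'test'])           # manage.py test
assert not is_testing(['manage.py', 'runserver'])  # anything else
```
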
diff --git a/awx/settings/production.py b/awx/settings/production.py
index 397f48070a..766029f87a 100644
--- a/awx/settings/production.py
+++ b/awx/settings/production.py
@@ -21,9 +21,6 @@ SQL_DEBUG = DEBUG
# Clear database settings to force production environment to define them.
DATABASES = {}
-# Enable South to look for migrations in .pyc files.
-SOUTH_USE_PYC = True
-
# Clear the secret key to force production environment to define it.
SECRET_KEY = None
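
With South gone from INSTALLED_APPS and its SOUTH_* settings removed here and in defaults.py, schema management falls to Django 1.8's built-in migration framework. On an existing database whose tables were created under South, the initial native migration has to be recorded as already applied, which is what --fake-initial (new in Django 1.8) is for. A hedged sketch, assuming DJANGO_SETTINGS_MODULE is configured:

```python
# Applying native Django migrations over a schema originally built by South.
# fake_initial marks the initial migration as applied when its tables exist.
import django
from django.core.management import call_command

django.setup()
call_command('migrate', interactive=False, fake_initial=True)
```
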
diff --git a/awx/static/api/api.css b/awx/static/api/api.css
new file mode 100644
index 0000000000..b168ebd0d1
--- /dev/null
+++ b/awx/static/api/api.css
@@ -0,0 +1,207 @@
+/*************************************************
+ * Copyright (c) 2016 Ansible, Inc.
+ *
+ * All Rights Reserved
+ *************************************************/
+
+html,
+body {
+ height: 100%;
+}
+body {
+ font-family: 'Open Sans', sans-serif;
+ font-weight: 400;
+ color: #171717;
+ background-color: #f6f6f6;
+ padding-top: 50px;
+ min-height: 100%;
+ position: relative;
+}
+
+body .navbar {
+ background-color: #FFFFFF;
+ border-color: #E8E8E8;
+}
+body .navbar .navbar-brand {
+ color: #707070;
+ padding: 0;
+ font-size: 14px;
+}
+body .navbar .navbar-brand:focus,
+body .navbar .navbar-brand:hover {
+ color: #707070;
+}
+body .navbar .navbar-brand img {
+ display: inline-block;
+ max-width: 150px;
+ max-height: 50px;
+}
+body .navbar .navbar-brand > span {
+ display: inline-block;
+ line-height: 50px;
+ vertical-align: middle;
+ margin-left: -10px;
+}
+body .navbar .navbar-title {
+ float: left;
+ height: 50px;
+ color: #707070;
+ padding: 0;
+ font-size: 14px;
+ display: none;
+}
+body .navbar .navbar-title > span {
+ display: inline-block;
+ line-height: 50px;
+ vertical-align: middle;
+}
+body.show-title .navbar .navbar-title {
+ display: inline-block;
+}
+body .navbar .navbar-nav > li > a {
+ color: #848992;
+}
+body .navbar .navbar-nav > li > a:focus,
+body .navbar .navbar-nav > li > a:hover {
+ color: #848992;
+}
+
+body .page-header {
+ margin-top: 20px;
+}
+body .page-header h1 {
+ font-size: 30px;
+ line-height: 33px;
+}
+body .page-header .toggle-description span.glyphicon {
+ font-size: 14px;
+ vertical-align: top;
+ padding: 2px;
+}
+body ul.breadcrumb,
+body .description,
+body .request-info .prettyprint,
+body .response-info .prettyprint,
+body .well {
+ background-color: #FFFFFF;
+ border-radius: 5px;
+ padding: 10px;
+ border: 1px solid #E8E8E8;
+ margin-top: 20px;
+ box-shadow: none;
+}
+body .description {
+ margin-top: 0;
+ margin-bottom: 20px;
+ padding-bottom: 0;
+ display: none;
+}
+body .request-info .prettyprint {
+ margin-top: 0;
+}
+body .resource-description,
+body .response-info {
+ margin-bottom: 0;
+}
+body .well.tab-content {
+ padding: 20px;
+}
+body .tab-content > .tab-pane {
+ display: block;
+}
+body .well.tab-content .form-actions {
+ float: right;
+}
+body .form-group {
+ margin-bottom: 10px;
+}
+body .form-actions button {
+ margin-left: 5px;
+}
+
+body .wrapper {
+ min-height: 100%;
+ height: auto !important;
+ height: 100%;
+ margin: 0 auto -40px;
+}
+body #push {
+ height: 40px;
+}
+body #footer {
+ width: 100%;
+ overflow: hidden;
+ margin-bottom: 0;
+ height: 40px;
+ color: #848992;
+}
+body #footer .footer-logo {
+ text-align: left;
+}
+body #footer .footer-logo a {
+ display: inline-block;
+}
+body #footer .footer-logo img {
+ height: 40px;
+ width: 250px;
+}
+body #footer .footer-copyright {
+ text-align: right;
+ font-size: 12px;
+ padding-right: 35px;
+ padding-top: 10px;
+}
+body #footer .footer-copyright a {
+ color: #848992;
+}
+
+@media screen and (min-width: 768px) {
+ body .navbar-right {
+ margin-right: 0;
+ }
+}
+
+@media screen and (max-width: 767px) {
+ body .navbar .container .navbar-header,
+ body .navbar .container-fluid .navbar-header {
+ margin-left: -20px;
+ }
+ body.show-title .navbar .navbar-title {
+ display: none;
+ }
+ body .navbar .navbar-toggle {
+ background-color: #337ab7;
+ border-color: #2e6da4;
+ }
+ body .navbar .navbar-toggle:active,
+ body .navbar .navbar-toggle:hover {
+ background-color: #286090;
+ border-color: #204d74;
+ }
+ body .navbar .navbar-toggle .icon-bar {
+ background-color: #fff;
+ }
+ body .navbar .tooltip {
+ visibility: hidden;
+ }
+ body .page-header h1 {
+ font-size: 24px;
+ }
+ body .wrapper {
+ margin: 0 auto -65px;
+ }
+ body #push {
+ height: 65px;
+ }
+ body #footer {
+ height: 65px;
+ }
+ body #footer .footer-logo {
+ text-align: center;
+ }
+ body #footer .footer-copyright {
+ text-align: center;
+ padding-right: 15px;
+ padding-top: 0;
+ }
+}
diff --git a/awx/static/api/api.js b/awx/static/api/api.js
new file mode 100644
index 0000000000..bce915a247
--- /dev/null
+++ b/awx/static/api/api.js
@@ -0,0 +1,89 @@
+/*************************************************
+ * Copyright (c) 2016 Ansible, Inc.
+ *
+ * All Rights Reserved
+ *************************************************/
+
+$(function() {
+
+ // Make links from relative URLs to resources.
+ $('span.str').each(function() {
+ var s = $(this).html();
+ if (s.match(/^\"\/.+\/\"$/) || s.match(/^\"\/.+\/\?.*\"$/)) {
+ $(this).html('"' + s.replace(/\"/g, '') + ' "');
+ }
+ });
+
+ // Make links for all inventory script hosts.
+ $('.request-info .pln').filter(function() {
+ return $(this).text() === 'script';
+ }).each(function() {
+ $('.response-info span.str').filter(function() {
+ return $(this).text() === '"hosts"';
+ }).each(function() {
+ $(this).nextUntil('span.pun:contains("]")').filter('span.str').each(function() {
+ if ($(this).text().match(/^\".+\"$/)) {
+ var s = $(this).text().replace(/\"/g, '');
+ $(this).html('"' + s + ' "');
+ }
+ else if ($(this).text() !== '"') {
+ var s = $(this).text();
+ $(this).html('' + s + ' ');
+ }
+ });
+ });
+ });
+
+ // Add classes/icons for dynamically showing/hiding help.
+ if ($('.description').html()) {
+ $('.description').addClass('prettyprint').parent().css('float', 'none');
+ $('.hidden a.hide-description').prependTo('.description');
+ $('a.hide-description').click(function() {
+ $('.description').slideUp('fast');
+ return false;
+ });
+ $('.hidden a.toggle-description').appendTo('.page-header h1');
+ $('a.toggle-description').click(function() {
+ $('.description').slideToggle('fast');
+ return false;
+ });
+ }
+
+ $('[data-toggle="tooltip"]').tooltip();
+
+ if ($(window).scrollTop() >= 115) {
+ $('body').addClass('show-title');
+ }
+ $(window).scroll(function() {
+ if ($(window).scrollTop() >= 115) {
+ $('body').addClass('show-title');
+ }
+ else {
+ $('body').removeClass('show-title');
+ }
+ });
+
+ $('a.resize').click(function() {
+ if ($(this).find('span.glyphicon-resize-full').length) {
+ $(this).find('span.glyphicon').addClass('glyphicon-resize-small').removeClass('glyphicon-resize-full');
+ $('.container').addClass('container-fluid').removeClass('container');
+ document.cookie = 'api_width=wide; path=/api/';
+ }
+ else {
+ $(this).find('span.glyphicon').addClass('glyphicon-resize-full').removeClass('glyphicon-resize-small');
+ $('.container-fluid').addClass('container').removeClass('container-fluid');
+ document.cookie = 'api_width=fixed; path=/api/';
+ }
+ return false;
+ });
+
+ function getCookie(name) {
+ var value = "; " + document.cookie;
+ var parts = value.split("; " + name + "=");
+ if (parts.length == 2) return parts.pop().split(";").shift();
+ }
+ if (getCookie('api_width') == 'wide') {
+ $('a.resize').click();
+ }
+
+});
diff --git a/awx/static/img/favicon.ico b/awx/static/img/favicon.ico
deleted file mode 100644
index f53629b961..0000000000
Binary files a/awx/static/img/favicon.ico and /dev/null differ
diff --git a/awx/static/img/tower_console_bug.png b/awx/static/img/tower_console_bug.png
deleted file mode 100644
index 90737558ae..0000000000
Binary files a/awx/static/img/tower_console_bug.png and /dev/null differ
diff --git a/awx/static/img/tower_console_logo.png b/awx/static/img/tower_console_logo.png
deleted file mode 100644
index c93291a1c9..0000000000
Binary files a/awx/static/img/tower_console_logo.png and /dev/null differ
diff --git a/awx/templates/rest_framework/api.html b/awx/templates/rest_framework/api.html
index 2129ca2bc8..bdd3123dac 100644
--- a/awx/templates/rest_framework/api.html
+++ b/awx/templates/rest_framework/api.html
@@ -1,254 +1,69 @@
{% extends 'rest_framework/base.html' %}
-{% load i18n %}
+{% load i18n staticfiles %}
{% block title %}{{ name }} · {% trans 'Ansible Tower REST API' %}{% endblock %}
+{% block bootstrap_theme %}
+
+
+{% endblock %}
+
{% block style %}
+
{{ block.super }}
-
-
{% endblock %}
-{% block branding %}
- {% block branding_title %}{% trans 'REST API' %}{% endblock %}
-{% endblock %}
-
-{% block userlinks %}
- {% if user.is_authenticated %}
-
- {{ user }}
-
- {% endif %}
-{% endblock %}
-
-{% block footer %}
-
+{% block navbar %}
+
{% endblock %}
{% block script %}
+
+
{{ block.super }}
-
+
{% endblock %}
diff --git a/awx/templates/rest_framework/base.html b/awx/templates/rest_framework/base.html
index c34c4a9e36..8cb5ab97fb 100644
--- a/awx/templates/rest_framework/base.html
+++ b/awx/templates/rest_framework/base.html
@@ -1,235 +1,268 @@
{# Copy of base.html from rest_framework with minor Ansible Tower change. #}
-{% load url from future %}
+{% load staticfiles %}
{% load rest_framework %}
+{% load i18n %}
-
- {% block head %}
-
- {% block meta %}
-
-
- {% endblock %}
-
- {% block title %}Django REST framework{% endblock %}
-
- {% block style %}
- {% block bootstrap_theme %}
-
-
- {% endblock %}
-
-
- {% endblock %}
+
+ {% block head %}
+ {% block meta %}
+
+
{% endblock %}
-
-
+ {% block title %}Django REST framework{% endblock %}
-
+ {% block style %}
+ {% block bootstrap_theme %}
+
+
+ {% endblock %}
+
+
+ {% endblock %}
+
+ {% endblock %}
+
+
+{% block body %}
+
+
+
{% block navbar %}
-
-
-
+
+
+
+ {% block branding %}
+
+ Django REST framework {{ version }}
+
+ {% endblock %}
+
+
+ {% block userlinks %}
+ {% if user.is_authenticated %}
+ {% optional_logout request user %}
+ {% else %}
+ {% optional_login request %}
+ {% endif %}
+ {% endblock %}
+
-
+
{% endblock %}
- {% block breadcrumbs %}
-
- {% for breadcrumb_name, breadcrumb_url in breadcrumblist %}
-
- {{ breadcrumb_name }} {% if not forloop.last %}› {% endif %}
-
- {% endfor %}
-
- {% endblock %}
+
+ {% block breadcrumbs %}
+
+ {% endblock %}
-
-
+
+
{% if 'GET' in allowed_methods %}
-
{% endif %}
{% if options_form %}
-
- {% csrf_token %}
-
- OPTIONS
-
+
+ OPTIONS
+
{% endif %}
{% if delete_form %}
-
- {% csrf_token %}
-
- DELETE
-
+
+ DELETE
+
{% endif %}
-
-
- {% block description %}
- {{ description }}
- {% endblock %}
-
-
{{ request.method }} {{ request.get_full_path }}
+ {% if filter_form %}
+
+
+ {% trans "Filters" %}
+
+ {% endif %}
+
+
+
+
+ {% block description %}
+ {{ description }}
+ {% endblock %}
+
+
+ {% if paginator %}
+
+ {% get_pagination_html paginator %}
+
+ {% endif %}
+
+
+
{{ request.method }} {{ request.get_full_path }}
+
+
-
{{ content }}
+{{ content }}{% endautoescape %}
-
+
- {% if display_edit_forms %}
+ {% if display_edit_forms %}
- {% if post_form or raw_data_post_form %}
-
- {% if post_form %}
-
- {% endif %}
-
- {% if post_form %}
-
- {% endif %}
-
- {% with form=raw_data_post_form %}
-
-
- {% include "rest_framework/raw_data_form.html" %}
-
- POST
-
-
-
- {% endwith %}
-
-
-
+ {% if post_form or raw_data_post_form %}
+
+ {% if post_form %}
+
{% endif %}
- {% if put_form or raw_data_put_form or raw_data_patch_form %}
-
- {% if put_form %}
-
- {% endif %}
-
- {% if put_form %}
-
- {% endif %}
-
- {% with form=raw_data_put_or_patch_form %}
-
-
- {% include "rest_framework/raw_data_form.html" %}
-
- {% if raw_data_put_form %}
- PUT
- {% endif %}
- {% if raw_data_patch_form %}
- PATCH
- {% endif %}
-
-
-
- {% endwith %}
-
+
+ {% if post_form %}
+
+ {% with form=post_form %}
+
+
+ {% csrf_token %}
+ {{ post_form }}
+
+ POST
+
+
+
+ {% endwith %}
+ {% endif %}
+
+
+ {% with form=raw_data_post_form %}
+
+
+ {% include "rest_framework/raw_data_form.html" %}
+
+ POST
+
+
+
+ {% endwith %}
+
- {% endif %}
+
{% endif %}
-
-
+ {% if put_form or raw_data_put_form or raw_data_patch_form %}
+
+ {% if put_form %}
+
+ {% endif %}
-
-
+
+ {% if put_form %}
+
+ {% endif %}
-
+
+ {% with form=raw_data_put_or_patch_form %}
+
+
+ {% include "rest_framework/raw_data_form.html" %}
+
+ {% if raw_data_put_form %}
+ PUT
+ {% endif %}
+ {% if raw_data_patch_form %}
+ PATCH
+ {% endif %}
+
+
+
+ {% endwith %}
+
+
+
+ {% endif %}
+ {% endif %}
+
+
+ {# div#push added for Ansible Tower. #}
+
+
-
-
-
-
- {% block footer %}
- {% endblock %}
-
- {% block script %}
-
+ {% block script %}
+
+
+
- {% endblock %}
-
+
+ {% endblock %}
+
+ {% if filter_form %}
+ {{ filter_form }}
+ {% endif %}
+
+
+{% endblock %}
diff --git a/awx/ui/context_processors.py b/awx/ui/context_processors.py
index 6c197b5a2f..d3aeee34a6 100644
--- a/awx/ui/context_processors.py
+++ b/awx/ui/context_processors.py
@@ -15,4 +15,5 @@ def settings(request):
def version(request):
return {
'version': get_awx_version(),
+ 'tower_version': get_awx_version(),
}
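
The version context processor now publishes the same value under a second key, presumably so the reworked API templates can refer to tower_version directly. Called standalone:

```python
# The request argument is unused by the processor above, so None suffices
# for an illustrative call; in the app Django invokes it per request.
from awx.ui.context_processors import version

ctx = version(None)
assert ctx['version'] == ctx['tower_version']
```
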
diff --git a/awx/wsgi.py b/awx/wsgi.py
index fe86ad27e7..4b2666a409 100644
--- a/awx/wsgi.py
+++ b/awx/wsgi.py
@@ -5,7 +5,7 @@ import logging
from awx import __version__ as tower_version
# Prepare the AWX environment.
-from awx import prepare_env
+from awx import prepare_env, MODE
prepare_env()
from django.core.wsgi import get_wsgi_application # NOQA
@@ -19,14 +19,15 @@ For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
-logger = logging.getLogger('awx.main.models.jobs')
-try:
- fd = open("/var/lib/awx/.tower_version", "r")
- if fd.read().strip() != tower_version:
- raise Exception()
-except Exception:
- logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
- raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
+if MODE == 'production':
+ logger = logging.getLogger('awx.main.models.jobs')
+ try:
+ fd = open("/var/lib/awx/.tower_version", "r")
+ if fd.read().strip() != tower_version:
+ raise Exception()
+ except Exception:
+ logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
+ raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.")
# Return the default Django WSGI application.
application = get_wsgi_application()
diff --git a/docs/licenses/South.txt b/docs/licenses/South.txt
deleted file mode 100644
index 33666dddf7..0000000000
--- a/docs/licenses/South.txt
+++ /dev/null
@@ -1,73 +0,0 @@
-Apache License
-Version 2.0, January 2004
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
-"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
-
- (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
-
- You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
\ No newline at end of file
diff --git a/docs/licenses/django-rest-framework-yaml.txt b/docs/licenses/django-rest-framework-yaml.txt
new file mode 100644
index 0000000000..73434ad61f
--- /dev/null
+++ b/docs/licenses/django-rest-framework-yaml.txt
@@ -0,0 +1,13 @@
+Copyright (c) 2014, José Padilla
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 085340d2f6..48857bc6d2 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -15,17 +15,18 @@ cmd2==0.6.8
cryptography==0.9.3
d2to1==0.2.11
defusedxml==0.4.1
-Django==1.6.7
+Django==1.8.8
django-auth-ldap==1.2.6
-django-celery==3.1.10
+django-celery==3.1.17
django-crum==0.6.1
-django-extensions==1.3.3
-django-polymorphic==0.5.3
+django-extensions==1.5.9
+django-polymorphic==0.7.2
django-radius==1.0.0
-djangorestframework==2.3.13
+djangorestframework==3.3.2
+djangorestframework-yaml==1.0.2
django-split-settings==0.1.1
django-statsd-mozilla==0.3.16
-django-taggit==0.11.2
+django-taggit==0.17.6
git+https://github.com/matburt/dm.xmlsec.binding.git@master#egg=dm.xmlsec.binding
dogpile.cache==0.5.6
dogpile.core==0.4.1
@@ -35,7 +36,7 @@ gevent==1.1rc3
gevent-websocket==0.9.3
git+https://github.com/chrismeyersfsu/django-jsonfield.git@tower_0.9.12#egg=django-jsonfield
git+https://github.com/chrismeyersfsu/django-qsstats-magic.git@tower_0.7.2#egg=django-qsstats-magic
-git+https://github.com/chrismeyersfsu/django-rest-framework-mongoengine.git@0c79515257a33a0ce61500b65fa497398628a03d#egg=django-rest-framework-mongoengine
+git+https://github.com/umutbozkurt/django-rest-framework-mongoengine.git@5dfa1df79f81765d36c0de31dc1c2f390e42d428#egg=django-rest-framework-mongoengine
git+https://github.com/chrismeyersfsu/gevent-socketio.git@tower_0.3.6#egg=gevent-socketio
git+https://github.com/chrismeyersfsu/python-ipy.git@fix-127_localhost#egg=IPy
git+https://github.com/chrismeyersfsu/python-keystoneclient.git@1.3.0#egg=python-keystoneclient
@@ -52,7 +53,7 @@ jsonpatch==1.11
jsonpointer==1.9
jsonschema==2.5.1
keyring==4.1
-kombu==3.0.21
+kombu==3.0.30
lxml==3.4.4
M2Crypto==0.22.3
Markdown==2.4.1
@@ -113,7 +114,6 @@ requests==2.5.1
requests-oauthlib==0.5.0
simplejson==3.6.0
six==1.9.0
-South==1.0.2
statsd==3.2.1
stevedore==1.3.0
suds==0.4
diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt
index bd7d5f54c8..60c9779367 100644
--- a/requirements/requirements_dev.txt
+++ b/requirements/requirements_dev.txt
@@ -1,6 +1,6 @@
-r requirements.txt
django-devserver
-django-debug-toolbar==1.3.2
+django-debug-toolbar==1.4
unittest2
pep8
flake8
diff --git a/setup.cfg b/setup.cfg
index 97fc100a1b..97ce43f2e8 100755
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,8 +14,8 @@
# W391 - Blank line at end of file
# W293 - Blank line contains whitespace
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293
-exclude=.tox,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data
+exclude=.tox,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data
[flake8]
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E302,E303,E501,W291,W391,W293,E731
-exclude=.tox,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker,awx/settings/local_settings.py
+exclude=.tox,awx/lib/site-packages,awx/plugins/inventory,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data,node_modules/,awx/projects/,tools/docker,awx/settings/local_settings.py