Mirror of https://github.com/ansible/awx.git (synced 2026-01-17 04:31:21 -03:30)

Commit e8e6f50573: Merge branch 'devel' into 169-v1
@@ -24,6 +24,7 @@ Have questions about this document or anything not covered here? Come chat with
* [Start a shell](#start-the-shell)
* [Create a superuser](#create-a-superuser)
* [Load the data](#load-the-data)
* [Building API Documentation](#build-documentation)
* [Accessing the AWX web interface](#accessing-the-awx-web-interface)
* [Purging containers and images](#purging-containers-and-images)
* [What should I work on?](#what-should-i-work-on)
@@ -261,6 +262,20 @@ You can optionally load some demo data. This will create a demo project, invento

> This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate
this container, and thus the database, if the database schema changes in an upstream commit.
##### Building API Documentation

AWX includes support for building [Swagger/OpenAPI
documentation](https://swagger.io). To build the documentation locally, run:

```bash
(container)/awx_devel$ make swagger
```

This will write a file named `swagger.json` that contains the API specification
in OpenAPI format. A variety of online tools are available for translating
this data into more consumable formats (such as HTML). http://editor.swagger.io
is an example of one such service.
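
As a quick illustration (not part of the repository), a minimal sketch of inspecting the generated file, assuming `swagger.json` sits in the current directory:

```python
import json

# Load the spec produced by `make swagger` and list every path with
# the HTTP methods it documents.
with open('swagger.json') as f:
    spec = json.load(f)

for path, methods in sorted(spec.get('paths', {}).items()):
    print(path, '->', ', '.join(m.upper() for m in methods))
```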

### Accessing the AWX web interface

You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/).
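
As a hedged example (assuming the `requests` library; the development server uses a self-signed certificate, hence `verify=False`), a minimal smoke test that the API is reachable:

```python
import requests

# Fetch the API root; it lists the available API versions.
response = requests.get('https://localhost:8043/api/', verify=False)
print(response.status_code)
print(response.json())
```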

Makefile (16 changes)
@@ -23,7 +23,7 @@ COMPOSE_HOST ?= $(shell hostname)

VENV_BASE ?= /venv
SCL_PREFIX ?=
-CELERY_SCHEDULE_FILE ?= /celerybeat-schedule
+CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db

DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
# Python packages to install only from source (not from binary wheels)
@@ -216,13 +216,11 @@ init:
. $(VENV_BASE)/awx/bin/activate; \
fi; \
$(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \
-$(MANAGEMENT_COMMAND) register_queue --queuename=tower --hostnames=$(COMPOSE_HOST);\
+$(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;\
if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \
$(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \
$(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \
$(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat > /root/.ssh/authorized_keys'; \
elif [ "$(AWX_GROUP_QUEUES)" != "tower" ]; then \
$(MANAGEMENT_COMMAND) register_queue --queuename=$(firstword $(subst $(comma), ,$(AWX_GROUP_QUEUES))) --hostnames=$(COMPOSE_HOST); \
fi;

# Refresh development environment after pulling new code.
@@ -299,7 +297,7 @@ uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
-uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:kill -1 `cat /tmp/celery_pid`"
+uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"

daphne:
@if [ "$(VENV_BASE)" ]; then \
@@ -326,7 +324,7 @@ celeryd:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
-celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
+celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid

# Run to start the zeromq callback receiver
receiver:
@@ -365,6 +363,12 @@ pyflakes: reports
pylint: reports
@(set -o pipefail && $@ | reports/$@.report)

swagger: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
(set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)

check: flake8 pep8 # pyflakes pylint

TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

@@ -5,6 +5,7 @@
import inspect
import logging
import time
import six

# Django
from django.conf import settings

@@ -26,6 +27,10 @@ from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny

# cryptography
from cryptography.fernet import InvalidToken

# AWX
from awx.api.filters import FieldLookupBackend
@@ -33,9 +38,9 @@ from awx.main.models import * # noqa
from awx.main.access import access_registry
from awx.main.utils import * # noqa
from awx.main.utils.db import get_all_field_names
-from awx.api.serializers import ResourceAccessListElementSerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning, get_request_version
-from awx.api.metadata import SublistAttachDetatchMetadata
+from awx.api.metadata import SublistAttachDetatchMetadata, Metadata

__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
@@ -47,7 +52,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ResourceAccessList',
           'ParentMixin',
           'DeleteLastUnattachLabelMixin',
-          'SubListAttachDetachAPIView',]
+          'SubListAttachDetachAPIView',
+          'CopyAPIView']

logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')
@@ -91,8 +97,17 @@ def get_view_description(cls, request, html=False):
    return mark_safe(desc)


def get_default_schema():
    if settings.SETTINGS_MODULE == 'awx.settings.development':
        from awx.api.swagger import AutoSchema
        return AutoSchema()
    else:
        return views.APIView.schema


class APIView(views.APIView):

    schema = get_default_schema()
    versioning_class = URLPathVersioning

    def initialize_request(self, request, *args, **kwargs):
@@ -176,27 +191,14 @@ class APIView(views.APIView):
        and in the browsable API.
        """
        func = self.settings.VIEW_DESCRIPTION_FUNCTION
-       return func(self.__class__, self._request, html)
+       return func(self.__class__, getattr(self, '_request', None), html)

    def get_description_context(self):
        return {
            'view': self,
            'docstring': type(self).__doc__ or '',
            'new_in_13': getattr(self, 'new_in_13', False),
            'new_in_14': getattr(self, 'new_in_14', False),
            'new_in_145': getattr(self, 'new_in_145', False),
            'new_in_148': getattr(self, 'new_in_148', False),
            'new_in_200': getattr(self, 'new_in_200', False),
            'new_in_210': getattr(self, 'new_in_210', False),
            'new_in_220': getattr(self, 'new_in_220', False),
            'new_in_230': getattr(self, 'new_in_230', False),
            'new_in_240': getattr(self, 'new_in_240', False),
            'new_in_300': getattr(self, 'new_in_300', False),
            'new_in_310': getattr(self, 'new_in_310', False),
            'new_in_320': getattr(self, 'new_in_320', False),
            'new_in_330': getattr(self, 'new_in_330', False),
            'new_in_api_v2': getattr(self, 'new_in_api_v2', False),
            'deprecated': getattr(self, 'deprecated', False),
            'swagger_method': getattr(self.request, 'swagger_method', None),
        }

    def get_description(self, request, html=False):
@@ -214,7 +216,7 @@ class APIView(views.APIView):
        context['deprecated'] = True

        description = render_to_string(template_list, context)
-       if context.get('deprecated'):
+       if context.get('deprecated') and context.get('swagger_method') is None:
            # render deprecation messages at the very top
            description = '\n'.join([render_to_string('api/_deprecated.md', context), description])
        return description
@@ -747,3 +749,152 @@ class ResourceAccessList(ParentMixin, ListAPIView):
        for r in roles:
            ancestors.update(set(r.ancestors.all()))
        return User.objects.filter(roles__in=list(ancestors)).distinct()


def trigger_delayed_deep_copy(*args, **kwargs):
    from awx.main.tasks import deep_copy_model_obj
    connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))


class CopyAPIView(GenericAPIView):

    serializer_class = CopySerializer
    permission_classes = (AllowAny,)
    copy_return_serializer_class = None
    new_in_330 = True
    new_in_api_v2 = True

    def _get_copy_return_serializer(self, *args, **kwargs):
        if not self.copy_return_serializer_class:
            return self.get_serializer(*args, **kwargs)
        serializer_class_store = self.serializer_class
        self.serializer_class = self.copy_return_serializer_class
        ret = self.get_serializer(*args, **kwargs)
        self.serializer_class = serializer_class_store
        return ret
    @staticmethod
    def _decrypt_model_field_if_needed(obj, field_name, field_val):
        if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
            return field_val
        if isinstance(field_val, dict):
            for sub_field in field_val:
                if isinstance(sub_field, six.string_types) \
                        and isinstance(field_val[sub_field], six.string_types):
                    try:
                        field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
                    except InvalidToken:
                        # Catching the corner case with v1 credential fields
                        field_val[sub_field] = decrypt_field(obj, sub_field)
        elif isinstance(field_val, six.string_types):
            field_val = decrypt_field(obj, field_name)
        return field_val

    def _build_create_dict(self, obj):
        ret = {}
        if self.copy_return_serializer_class:
            all_fields = Metadata().get_serializer_info(
                self._get_copy_return_serializer(), method='POST'
            )
            for field_name, field_info in all_fields.items():
                if not hasattr(obj, field_name) or field_info.get('read_only', True):
                    continue
                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field_name, getattr(obj, field_name)
                )
        return ret
    @staticmethod
    def copy_model_obj(old_parent, new_parent, model, obj, creater, copy_name='', create_kwargs=None):
        fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))
        fields_to_discard = set(getattr(model, 'FIELDS_TO_DISCARD_AT_COPY', []))
        m2m_to_preserve = {}
        o2m_to_preserve = {}
        create_kwargs = create_kwargs or {}
        for field_name in fields_to_discard:
            create_kwargs.pop(field_name, None)
        for field in model._meta.get_fields():
            try:
                field_val = getattr(obj, field.name)
            except AttributeError:
                continue
            # Adjust copy blacklist fields here.
            if field.name in fields_to_discard or field.name in [
                'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
            ] or field.name.endswith('_role'):
                create_kwargs.pop(field.name, None)
                continue
            if field.one_to_many:
                if field.name in fields_to_preserve:
                    o2m_to_preserve[field.name] = field_val
            elif field.many_to_many:
                if field.name in fields_to_preserve and not old_parent:
                    m2m_to_preserve[field.name] = field_val
            elif field.many_to_one and not field_val:
                create_kwargs.pop(field.name, None)
            elif field.many_to_one and field_val == old_parent:
                create_kwargs[field.name] = new_parent
            elif field.name == 'name' and not old_parent:
                create_kwargs[field.name] = copy_name or field_val + ' copy'
            elif field.name in fields_to_preserve:
                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field.name, field_val
                )
        new_obj = model.objects.create(**create_kwargs)
        # Need to save separately because django-crum get_current_user would
        # not work properly in a non-request-response-cycle context.
        new_obj.created_by = creater
        new_obj.save()
        for m2m in m2m_to_preserve:
            for related_obj in m2m_to_preserve[m2m].all():
                getattr(new_obj, m2m).add(related_obj)
        if not old_parent:
            sub_objects = []
            for o2m in o2m_to_preserve:
                for sub_obj in o2m_to_preserve[o2m].all():
                    sub_model = type(sub_obj)
                    sub_objects.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))
            return new_obj, sub_objects
        ret = {obj: new_obj}
        for o2m in o2m_to_preserve:
            for sub_obj in o2m_to_preserve[o2m].all():
                ret.update(CopyAPIView.copy_model_obj(obj, new_obj, type(sub_obj), sub_obj, creater))
        return ret
    def get(self, request, *args, **kwargs):
        obj = self.get_object()
        create_kwargs = self._build_create_dict(obj)
        for key in create_kwargs:
            create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
        return Response({'can_copy': request.user.can_access(self.model, 'add', create_kwargs)})

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        create_kwargs = self._build_create_dict(obj)
        create_kwargs_check = {}
        for key in create_kwargs:
            create_kwargs_check[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
        if not request.user.can_access(self.model, 'add', create_kwargs_check):
            raise PermissionDenied()
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        new_obj, sub_objs = CopyAPIView.copy_model_obj(
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
            copy_name=serializer.validated_data.get('name', '')
        )
        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role:
            new_obj.admin_role.members.add(request.user)
        if sub_objs:
            permission_check_func = None
            if hasattr(type(self), 'deep_copy_permission_check_func'):
                permission_check_func = (
                    type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
                )
            trigger_delayed_deep_copy(
                self.model.__module__, self.model.__name__,
                obj.pk, new_obj.pk, request.user.pk, sub_objs,
                permission_check_func=permission_check_func
            )
        serializer = self._get_copy_return_serializer(new_obj)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
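
As a usage sketch (assumed endpoint shape; the exact URL depends on the resource, e.g. `/api/v2/job_templates/<pk>/copy/` per the URL patterns added below, and the credentials here are hypothetical), exercising the copy endpoint might look like:

```python
import requests

BASE = 'https://localhost:8043/api/v2'
AUTH = ('admin', 'password')  # hypothetical credentials

# GET tells us whether the current user may copy the object.
check = requests.get(BASE + '/job_templates/5/copy/', auth=AUTH, verify=False)
print(check.json())  # e.g. {'can_copy': True}

# POST performs the copy; a new name is required and must differ
# from the original (CopySerializer enforces this).
copy = requests.post(BASE + '/job_templates/5/copy/',
                     json={'name': 'Demo Job Template copy'},
                     auth=AUTH, verify=False)
print(copy.status_code)  # 201 on success
```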

@@ -190,23 +190,6 @@ class Metadata(metadata.SimpleMetadata):
        finally:
            delattr(view, '_request')

        # Add version number in which view was added to Tower.
        added_in_version = '1.2'
        for version in ('3.2.0', '3.1.0', '3.0.0', '2.4.0', '2.3.0', '2.2.0',
                        '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'):
            if getattr(view, 'new_in_%s' % version.replace('.', ''), False):
                added_in_version = version
                break
        metadata['added_in_version'] = added_in_version

        # Add API version number in which view was added to Tower.
        added_in_api_version = 'v1'
        for version in ('v2',):
            if getattr(view, 'new_in_api_%s' % version, False):
                added_in_api_version = version
                break
        metadata['added_in_api_version'] = added_in_api_version

        # Add type(s) handled by this view/serializer.
        if hasattr(view, 'get_serializer'):
            serializer = view.get_serializer()
@@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader):
        key = self.construct_object(key_node, deep=deep)
        try:
            hash(key)
-       except TypeError, exc:
+       except TypeError as exc:
            raise yaml.constructor.ConstructorError(
                "while constructing a mapping", node.start_mark,
                "found unacceptable key (%s)" % exc, key_node.start_mark
@@ -130,6 +130,22 @@ def reverse_gfk(content_object, request):
    }


class CopySerializer(serializers.Serializer):

    name = serializers.CharField()

    def validate(self, attrs):
        name = attrs.get('name')
        view = self.context.get('view', None)
        obj = view.get_object()
        if name == obj.name:
            raise serializers.ValidationError(_(
                'The original object is already named {}, a copy from'
                ' it cannot have the same name.'.format(name)
            ))
        return attrs


class BaseSerializerMetaclass(serializers.SerializerMetaclass):
    '''
    Custom metaclass to enable attribute inheritance from Meta objects on
@@ -1003,6 +1019,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
            notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}),
            access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk}),
        ))
        if obj.organization:
            res['organization'] = self.reverse('api:organization_detail',

@@ -1156,6 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables):
            access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
            instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
        ))
        if obj.insights_credential:
            res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
@@ -1173,7 +1191,7 @@ class InventorySerializer(BaseSerializerWithVariables):
        if host_filter:
            try:
                SmartFilter().query_from_string(host_filter)
-           except RuntimeError, e:
+           except RuntimeError as e:
                raise models.base.ValidationError(e)
        return host_filter
@@ -1513,6 +1531,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
        res = super(CustomInventoryScriptSerializer, self).get_related(obj)
        res.update(dict(
            object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}),
        ))

        if obj.organization:

@@ -2070,6 +2089,7 @@ class CredentialSerializer(BaseSerializer):
            object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}),
            owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}),
            owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}),
        ))

        # TODO: remove when API v1 is removed

@@ -2547,6 +2567,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
            labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
            object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
            instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}),
        ))
        if obj.host_config_key:
            res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk})
@@ -2968,7 +2989,14 @@ class SystemJobSerializer(UnifiedJobSerializer):
        return res

    def get_result_stdout(self, obj):
-       return obj.result_stdout
+       try:
+           return obj.result_stdout
+       except StdoutMaxBytesExceeded as e:
+           return _(
+               "Standard Output too large to display ({text_size} bytes), "
+               "only download supported for sizes over {supported_size} bytes").format(
+                   text_size=e.total, supported_size=e.supported
+           )


class SystemJobCancelSerializer(SystemJobSerializer):
@@ -3107,6 +3135,12 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
            ret['extra_data'] = obj.display_extra_data()
        return ret

    def get_summary_fields(self, obj):
        summary_fields = super(LaunchConfigurationBaseSerializer, self).get_summary_fields(obj)
        # Credential would be an empty dictionary in this case
        summary_fields.pop('credential', None)
        return summary_fields

    def validate(self, attrs):
        attrs = super(LaunchConfigurationBaseSerializer, self).validate(attrs)
@@ -3782,6 +3816,7 @@ class NotificationTemplateSerializer(BaseSerializer):
        res.update(dict(
            test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}),
            notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}),
            copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}),
        ))
        if obj.organization:
            res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
@@ -3887,6 +3922,7 @@ class SchedulePreviewSerializer(BaseSerializer):
    # - BYYEARDAY
    # - BYWEEKNO
    # - Multiple DTSTART or RRULE elements
    # - Can't contain both COUNT and UNTIL
    # - COUNT > 999
    def validate_rrule(self, value):
        rrule_value = value

@@ -3921,6 +3957,8 @@ class SchedulePreviewSerializer(BaseSerializer):
            raise serializers.ValidationError(_("BYYEARDAY not supported."))
        if 'byweekno' in rrule_value.lower():
            raise serializers.ValidationError(_("BYWEEKNO not supported."))
        if 'COUNT' in rrule_value and 'UNTIL' in rrule_value:
            raise serializers.ValidationError(_("RRULE may not contain both COUNT and UNTIL"))
        if match_count:
            count_val = match_count.groups()[0].strip().split("=")
            if int(count_val[1]) > 999:
@@ -3946,6 +3984,15 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
        ))
        if obj.unified_job_template:
            res['unified_job_template'] = obj.unified_job_template.get_absolute_url(self.context.get('request'))
            try:
                if obj.unified_job_template.project:
                    res['project'] = obj.unified_job_template.project.get_absolute_url(self.context.get('request'))
            except ObjectDoesNotExist:
                pass
        if obj.inventory:
            res['inventory'] = obj.inventory.get_absolute_url(self.context.get('request'))
        elif obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None):
            res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request'))
        return res

    def validate_unified_job_template(self, value):
@@ -3968,8 +4015,10 @@ class InstanceSerializer(BaseSerializer):

    class Meta:
        model = Instance
-       fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified",
-                 "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running")
        read_only_fields = ('uuid', 'hostname', 'version')
+       fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment',
+                 "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running",
+                 "cpu", "memory", "cpu_capacity", "mem_capacity", "enabled")

    def get_related(self, obj):
        res = super(InstanceSerializer, self).get_related(obj)
@@ -4002,7 +4051,8 @@ class InstanceGroupSerializer(BaseSerializer):
        model = InstanceGroup
        fields = ("id", "type", "url", "related", "name", "created", "modified",
                  "capacity", "committed_capacity", "consumed_capacity",
-                 "percent_capacity_remaining", "jobs_running", "instances", "controller")
+                 "percent_capacity_remaining", "jobs_running", "instances", "controller",
+                 "policy_instance_percentage", "policy_instance_minimum", "policy_instance_list")

    def get_related(self, obj):
        res = super(InstanceGroupSerializer, self).get_related(obj)

awx/api/swagger.py (new file, 103 lines)

@@ -0,0 +1,103 @@
import json
import warnings

from coreapi.document import Object, Link

from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.renderers import CoreJSONRenderer
from rest_framework.response import Response
from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
from rest_framework.views import APIView

from rest_framework_swagger import renderers


class AutoSchema(DRFAuthSchema):

    def get_link(self, path, method, base_url):
        link = super(AutoSchema, self).get_link(path, method, base_url)
        try:
            serializer = self.view.get_serializer()
        except Exception:
            serializer = None
            warnings.warn('{}.get_serializer() raised an exception during '
                          'schema generation. Serializer fields will not be '
                          'generated for {} {}.'
                          .format(self.view.__class__.__name__, method, path))

        link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)

        # auto-generate a topic/tag for the serializer based on its model
        if hasattr(self.view, 'swagger_topic'):
            link.__dict__['topic'] = str(self.view.swagger_topic).title()
        elif serializer and hasattr(serializer, 'Meta'):
            link.__dict__['topic'] = str(
                serializer.Meta.model._meta.verbose_name_plural
            ).title()
        elif hasattr(self.view, 'model'):
            link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
        else:
            warnings.warn('Could not determine a Swagger tag for path {}'.format(path))
        return link

    def get_description(self, path, method):
        self.view._request = self.view.request
        setattr(self.view.request, 'swagger_method', method)
        description = super(AutoSchema, self).get_description(path, method)
        return description


class SwaggerSchemaView(APIView):
    _ignore_model_permissions = True
    exclude_from_schema = True
    permission_classes = [AllowAny]
    renderer_classes = [
        CoreJSONRenderer,
        renderers.OpenAPIRenderer,
        renderers.SwaggerUIRenderer
    ]

    def get(self, request):
        generator = SchemaGenerator(
            title='Ansible Tower API',
            patterns=None,
            urlconf=None
        )
        schema = generator.get_schema(request=request)
        # python core-api doesn't support the deprecation yet, so track it
        # ourselves and return it in a response header
        _deprecated = []

        # By default, DRF OpenAPI serialization places all endpoints in
        # a single node based on their root path (/api). Instead, we want to
        # group them by topic/tag so that they're categorized in the rendered
        # output
        document = schema._data.pop('api')
        for path, node in document.items():
            if isinstance(node, Object):
                for action in node.values():
                    topic = getattr(action, 'topic', None)
                    if topic:
                        schema._data.setdefault(topic, Object())
                        schema._data[topic]._data[path] = node

                    if isinstance(action, Object):
                        for link in action.links.values():
                            if link.deprecated:
                                _deprecated.append(link.url)
            elif isinstance(node, Link):
                topic = getattr(node, 'topic', None)
                if topic:
                    schema._data.setdefault(topic, Object())
                    schema._data[topic]._data[path] = node

        if not schema:
            raise exceptions.ValidationError(
                'The schema generator did not return a schema Document'
            )

        return Response(
            schema,
            headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
        )
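
A hedged usage sketch (the `/api/swagger/` route is only registered in development settings, as shown in the URL configuration below; content negotiation via the `Accept` header is an assumption here):

```python
import json

import requests

# Ask the development server for the generated schema and list any
# endpoints the commit tracks as deprecated via the response header.
resp = requests.get('https://localhost:8043/api/swagger/',
                    headers={'Accept': 'application/coreapi+json'},
                    verify=False)
deprecated = json.loads(resp.headers.get('X-Deprecated-Paths', '[]'))
print(resp.status_code, deprecated)
```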

@@ -1,14 +0,0 @@
{% if not version_label_flag or version_label_flag == 'true' %}
{% if new_in_13 %}> _Added in AWX 1.3_{% endif %}
{% if new_in_14 %}> _Added in AWX 1.4_{% endif %}
{% if new_in_145 %}> _Added in Ansible Tower 1.4.5_{% endif %}
{% if new_in_148 %}> _Added in Ansible Tower 1.4.8_{% endif %}
{% if new_in_200 %}> _Added in Ansible Tower 2.0.0_{% endif %}
{% if new_in_220 %}> _Added in Ansible Tower 2.2.0_{% endif %}
{% if new_in_230 %}> _Added in Ansible Tower 2.3.0_{% endif %}
{% if new_in_240 %}> _Added in Ansible Tower 2.4.0_{% endif %}
{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %}
{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %}
{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %}
{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %}
{% endif %}

awx/api/templates/api/ad_hoc_command_relaunch.md (new file, 3 lines)

@@ -0,0 +1,3 @@
Relaunch an Ad Hoc Command:

Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint.
@@ -1,4 +1,5 @@
Site configuration settings and general information.
{% ifmeth GET %}
# Site configuration settings and general information

Make a GET request to this resource to retrieve the configuration containing
the following fields (some fields may not be visible to all users):

@@ -11,6 +12,10 @@ the following fields (some fields may not be visible to all users):
* `license_info`: Information about the current license.
* `version`: Version of Ansible Tower package installed.
* `eula`: The current End-User License Agreement
{% endifmeth %}

{% ifmeth POST %}
# Install or update an existing license

(_New in Ansible Tower 2.0.0_) Make a POST request to this resource as a super
user to install or update the existing license. The license data itself can

@@ -18,3 +23,11 @@ be POSTed as a normal json data structure.

(_New in Ansible Tower 2.1.1_) The POST must include a `eula_accepted` boolean
element indicating acceptance of the End-User License Agreement.
{% endifmeth %}

{% ifmeth DELETE %}
# Delete an existing license

(_New in Ansible Tower 2.0.0_) Make a DELETE request to this resource as a super
user to delete the existing license
{% endifmeth %}
@@ -1,3 +1 @@
{{ docstring }}

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1,5 @@
{% ifmeth POST %}
# Generate an Auth Token
Make a POST request to this resource with `username` and `password` fields to
obtain an authentication token to use for subsequent requests.

@@ -32,6 +34,10 @@ agent that originally obtained it.
Each request that uses the token for authentication will refresh its expiration
timestamp and keep it from expiring. A token only expires when it is not used
for the configured timeout interval (default 1800 seconds).
{% endifmeth %}

-A DELETE request with the token set will cause the token to be invalidated and
-no further requests can be made with it.
+{% ifmeth DELETE %}
+# Delete an Auth Token
+A DELETE request with the token header set will cause the token to be
+invalidated and no further requests can be made with it.
+{% endifmeth %}
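
For illustration (a minimal sketch assuming the v1 authtoken endpoint path, hypothetical credentials, and the `requests` library), obtaining and then invalidating a token might look like:

```python
import requests

BASE = 'https://localhost:8043/api/v1'

# POST username/password to obtain a token.
resp = requests.post(BASE + '/authtoken/',
                     json={'username': 'admin', 'password': 'password'},
                     verify=False)
token = resp.json()['token']
headers = {'Authorization': 'Token {}'.format(token)}

# Use the token on subsequent requests; each use refreshes its expiration.
print(requests.get(BASE + '/me/', headers=headers, verify=False).status_code)

# A DELETE with the token header set invalidates it.
requests.delete(BASE + '/authtoken/', headers=headers, verify=False)
```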

@@ -1,9 +1,13 @@
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title }} Variable Data:

-Make a GET request to this resource to retrieve all variables defined for this
+Make a GET request to this resource to retrieve all variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}

{% ifmeth PUT PATCH %}
# Update {{ model_verbose_name|title }} Variable Data:

-Make a PUT request to this resource to update variables defined for this
+Make a PUT or PATCH request to this resource to update variables defined for a
{{ model_verbose_name }}.
{% endifmeth %}
@@ -38,5 +38,3 @@ Data about failed and successful hosts by inventory will be given as:
        "id": 2,
        "name": "Test Inventory"
    },

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1,5 @@
# View Statistics for Job Runs

Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

## Parameters and Filtering

@@ -33,5 +35,3 @@ Data will be returned in the following format:

Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of events during that time period

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1 @@
Make a GET request to this resource to retrieve aggregate statistics for Tower.

-{% include "api/_new_in_awx.md" %}
@@ -1,4 +1,4 @@
-# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} directly or indirectly belonging to this
@@ -1,9 +1,7 @@
-# List Potential Child Groups for this {{ parent_model_verbose_name|title }}:
+# List Potential Child Groups for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} available to be added as children of the
current {{ parent_model_verbose_name }}.

{% include "api/_list_common.md" %}

-{% include "api/_new_in_awx.md" %}
@@ -1,4 +1,4 @@
-# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of all
{{ model_verbose_name_plural }} of which the selected
@@ -1,3 +1,5 @@
# List Fact Scans for a Host Specific Host Scan

Make a GET request to this resource to retrieve system tracking data for a particular scan

You may filter by datetime:

@@ -7,5 +9,3 @@ You may filter by datetime:
and module

`?datetime=2015-06-01&module=ansible`

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1,5 @@
# List Fact Scans for a Host by Module and Date

Make a GET request to this resource to retrieve system tracking scans by module and date/time

You may filter scan runs using the `from` and `to` properties:

@@ -7,5 +9,3 @@ You may filter scan runs using the `from` and `to` properties:
You may also filter by module

`?module=packages`

-{% include "api/_new_in_awx.md" %}

awx/api/templates/api/host_insights.md (new file, 1 line)

@@ -0,0 +1 @@
# List Red Hat Insights for a Host
@@ -29,5 +29,3 @@ Response code from this action will be:
- 202 if some inventory source updates were successful, but some failed
- 400 if all of the inventory source updates failed
- 400 if there are no inventory sources in the inventory

-{% include "api/_new_in_awx.md" %}
@@ -1,7 +1,9 @@
-# List Root {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+{% ifmeth GET %}
+# List Root {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of root (top-level)
{{ model_verbose_name_plural }} associated with this
{{ parent_model_verbose_name }}.

{% include "api/_list_common.md" %}
{% endifmeth %}
@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running inventory
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.

-{% include "api/_new_in_awx.md" %}

@@ -9,5 +9,3 @@ from its inventory source. The response will include the following field:
Make a POST request to this resource to update the inventory source. If
successful, the response status code will be 202. If the inventory source is
not defined or cannot be updated, a 405 status code will be returned.

-{% include "api/_new_in_awx.md" %}
@@ -1,4 +1,4 @@
-# Group Tree for this {{ model_verbose_name|title }}:
+# Group Tree for {{ model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a hierarchical view of groups
associated with the selected {{ model_verbose_name }}.

@@ -11,5 +11,3 @@ also containing a list of its children.
Each group data structure includes the following fields:

{% include "api/_result_fields_common.md" %}

-{% include "api/_new_in_awx.md" %}
@@ -1,10 +1,15 @@
-# Cancel Job
+{% ifmeth GET %}
+# Determine if a Job can be cancelled

Make a GET request to this resource to determine if the job can be cancelled.
The response will include the following field:

* `can_cancel`: Indicates whether this job can be canceled (boolean, read-only)
{% endifmeth %}

{% ifmeth POST %}
# Cancel a Job
Make a POST request to this resource to cancel a pending or running job. The
response status code will be 202 if successful, or 405 if the job cannot be
canceled.
{% endifmeth %}
@@ -23,5 +23,3 @@ Will show only failed plays. Alternatively `false` may be used.
    ?play__icontains=test

Will filter plays matching the substring `test`

-{% include "api/_new_in_awx.md" %}

@@ -25,5 +25,3 @@ Will show only failed plays. Alternatively `false` may be used.
    ?task__icontains=test

Will filter tasks matching the substring `test`

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1,3 @@
-Relaunch a job:
+Relaunch a Job:

Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint.
@@ -1,4 +1,5 @@
-# Start Job
+{% ifmeth GET %}
+# Determine if a Job can be started

Make a GET request to this resource to determine if the job can be started and
whether any passwords are required to start the job. The response will include

@@ -7,10 +8,14 @@ the following fields:
* `can_start`: Flag indicating if this job can be started (boolean, read-only)
* `passwords_needed_to_start`: Password names required to start the job (array,
read-only)
{% endifmeth %}

{% ifmeth POST %}
# Start a Job
Make a POST request to this resource to start the job. If any passwords are
required, they must be passed via POST data.

If successful, the response status code will be 202. If any required passwords
are not provided, a 400 status code will be returned. If the job cannot be
started, a 405 status code will be returned.
{% endifmeth %}
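
A hedged sketch of that flow (assuming a job start route like `/api/v2/jobs/<pk>/start/` and hypothetical credentials):

```python
import requests

AUTH = ('admin', 'password')  # hypothetical credentials
url = 'https://localhost:8043/api/v2/jobs/42/start/'

# GET first: can the job start, and which passwords does it need?
info = requests.get(url, auth=AUTH, verify=False).json()
if info.get('can_start'):
    # Supply any required passwords named in passwords_needed_to_start.
    payload = {name: 'secret' for name in info.get('passwords_needed_to_start', [])}
    started = requests.post(url, json=payload, auth=AUTH, verify=False)
    print(started.status_code)  # 202 on success, 400/405 otherwise
```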

@@ -1,13 +1,7 @@
-{% with 'false' as version_label_flag %}
{% include "api/sub_list_create_api_view.md" %}
-{% endwith %}

Labels not associated with any other resources are deleted. A label can become disassociated with a resource as a result of 3 events.

1. A label is explicitly disassociated with a related job template
2. A job is deleted with labels
3. A cleanup job deletes a job with labels

-{% with 'true' as version_label_flag %}
-{% include "api/_new_in_awx.md" %}
-{% endwith %}
@@ -1,8 +1,8 @@
{% ifmeth GET %}
# List {{ model_verbose_name_plural|title }}:

Make a GET request to this resource to retrieve the list of
{{ model_verbose_name_plural }}.

{% include "api/_list_common.md" %}

-{% include "api/_new_in_awx.md" %}
+{% endifmeth %}
@@ -1,6 +1,6 @@
{% include "api/list_api_view.md" %}

-# Create {{ model_verbose_name_plural|title }}:
+# Create {{ model_verbose_name|title|anora }}:

Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }}:

@@ -8,5 +8,3 @@ fields to create a new {{ model_verbose_name }}:
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_create_fields %}
{% endwith %}

-{% include "api/_new_in_awx.md" %}
@@ -1,4 +1,4 @@
# Retrieve {{ model_verbose_name|title }} Playbooks:

Make GET request to this resource to retrieve a list of playbooks available
-for this {{ model_verbose_name }}.
+for {{ model_verbose_name|anora }}.
@@ -9,5 +9,3 @@ cancelled. The response will include the following field:
Make a POST request to this resource to cancel a pending or running project
update. The response status code will be 202 if successful, or 405 if the
update cannot be canceled.

-{% include "api/_new_in_awx.md" %}

@@ -8,5 +8,3 @@ from its SCM source. The response will include the following field:

Make a POST request to this resource to update the project. If the project
cannot be updated, a 405 status code will be returned.

-{% include "api/_new_in_awx.md" %}
@@ -2,11 +2,9 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

-# Retrieve {{ model_verbose_name|title }}:
+# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}

-{% include "api/_new_in_awx.md" %}
@@ -2,15 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

-# Retrieve {{ model_verbose_name|title }}:
+{% ifmeth GET %}
+# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
+{% endifmeth %}

-# Delete {{ model_verbose_name|title }}:
+{% ifmeth DELETE %}
+# Delete {{ model_verbose_name|title|anora }}:

Make a DELETE request to this resource to delete this {{ model_verbose_name }}.

-{% include "api/_new_in_awx.md" %}
+{% endifmeth %}
@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

-# Retrieve {{ model_verbose_name|title }}:
+{% ifmeth GET %}
+# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
+{% endifmeth %}

-# Update {{ model_verbose_name|title }}:
+{% ifmeth PUT PATCH %}
+# Update {{ model_verbose_name|title|anora }}:

Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:

@@ -17,9 +20,12 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
+{% endifmeth %}

+{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
+{% endifmeth %}

+{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.

-{% include "api/_new_in_awx.md" %}
+{% endifmeth %}
@@ -2,14 +2,17 @@
### Note: starting from api v2, this resource object can be accessed via its named URL.
{% endif %}

-# Retrieve {{ model_verbose_name|title }}:
+{% ifmeth GET %}
+# Retrieve {{ model_verbose_name|title|anora }}:

Make GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:

{% include "api/_result_fields_common.md" %}
+{% endifmeth %}

-# Update {{ model_verbose_name|title }}:
+{% ifmeth PUT PATCH %}
+# Update {{ model_verbose_name|title|anora }}:

Make a PUT or PATCH request to this resource to update this
{{ model_verbose_name }}. The following fields may be modified:

@@ -17,13 +20,18 @@ Make a PUT or PATCH request to this resource to update this
{% with write_only=1 %}
{% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %}
{% endwith %}
+{% endifmeth %}

+{% ifmeth PUT %}
For a PUT request, include **all** fields in the request.
+{% endifmeth %}

+{% ifmeth PATCH %}
For a PATCH request, include only the fields that are being modified.
+{% endifmeth %}

-# Delete {{ model_verbose_name|title }}:
+{% ifmeth DELETE %}
+# Delete {{ model_verbose_name|title|anora }}:

Make a DELETE request to this resource to delete this {{ model_verbose_name }}.

-{% include "api/_new_in_awx.md" %}
+{% endifmeth %}

awx/api/templates/api/setting_logging_test.md (new file, 1 line)

@@ -0,0 +1 @@
# Test Logging Configuration
@@ -1,9 +1,9 @@
-# List {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+{% ifmeth GET %}
+# List {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a GET request to this resource to retrieve a list of
{{ model_verbose_name_plural }} associated with the selected
{{ parent_model_verbose_name }}.

{% include "api/_list_common.md" %}

-{% include "api/_new_in_awx.md" %}
+{% endifmeth %}
@@ -1,6 +1,6 @@
{% include "api/sub_list_api_view.md" %}

-# Create {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# Create {{ model_verbose_name|title|anora }} for {{ parent_model_verbose_name|title|anora }}:

Make a POST request to this resource with the following {{ model_verbose_name }}
fields to create a new {{ model_verbose_name }} associated with this

@@ -25,7 +25,7 @@ delete the associated {{ model_verbose_name }}.
    }

{% else %}
-# Add {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}:
+# Add {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}:

Make a POST request to this resource with only an `id` field to associate an
existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}.

@@ -37,5 +37,3 @@ remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }}
{% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}.
{% endif %}
{% endif %}

-{% include "api/_new_in_awx.md" %}
@@ -1,12 +1,16 @@
-# List Roles for this Team:
+# List Roles for a Team:

{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected team.

{% include "api/_list_common.md" %}
{% endifmeth %}

{% ifmeth POST %}
# Associate Roles with this Team:

Make a POST request to this resource to add or remove a role from this team. The following fields may be modified:

* `id`: The Role ID to add to the team. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}
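
For example (a sketch assuming a team roles route like `/api/v2/teams/<pk>/roles/`, a known role ID, and hypothetical credentials), associating and then removing a role:

```python
import requests

AUTH = ('admin', 'password')  # hypothetical credentials
url = 'https://localhost:8043/api/v2/teams/7/roles/'

# Associate role 99 with team 7.
requests.post(url, json={'id': 99}, auth=AUTH, verify=False)

# Remove it again; providing any value for `disassociate` triggers removal.
requests.post(url, json={'id': 99, 'disassociate': True}, auth=AUTH, verify=False)
```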

@@ -25,5 +25,3 @@ dark background.
Files over {{ settings.STDOUT_MAX_BYTES_DISPLAY|filesizeformat }} (configurable)
will not display in the browser. Use the `txt_download` or `ansi_download`
formats to download the file directly to view it.

-{% include "api/_new_in_awx.md" %}
@@ -1,3 +1,5 @@
# Retrieve Information about the current User

Make a GET request to retrieve user information about the current user.

One result should be returned containing the following fields:
@@ -1,12 +1,16 @@
-# List Roles for this User:
+# List Roles for a User:

{% ifmeth GET %}
Make a GET request to this resource to retrieve a list of roles associated with the selected user.

{% include "api/_list_common.md" %}
{% endifmeth %}

{% ifmeth POST %}
# Associate Roles with this User:

Make a POST request to this resource to add or remove a role from this user. The following fields may be modified:

* `id`: The Role ID to add to the user. (int, required)
* `disassociate`: Provide if you want to remove the role. (any value, optional)
{% endifmeth %}
@@ -11,6 +11,7 @@ from awx.api.views import (
    CredentialObjectRolesList,
    CredentialOwnerUsersList,
    CredentialOwnerTeamsList,
    CredentialCopy,
)

@@ -22,6 +23,7 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'),
    url(r'^(?P<pk>[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'),
]

__all__ = ['urls']
@@ -20,6 +20,7 @@ from awx.api.views import (
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryInstanceGroupsList,
    InventoryCopy,
)

@@ -40,6 +41,7 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
]

__all__ = ['urls']
@@ -7,6 +7,7 @@ from awx.api.views import (
    InventoryScriptList,
    InventoryScriptDetail,
    InventoryScriptObjectRolesList,
    InventoryScriptCopy,
)

@@ -14,6 +15,7 @@ urls = [
    url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'),
    url(r'^(?P<pk>[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', InventoryScriptCopy.as_view(), name='inventory_script_copy'),
]

__all__ = ['urls']
@@ -19,6 +19,7 @@ from awx.api.views import (
    JobTemplateAccessList,
    JobTemplateObjectRolesList,
    JobTemplateLabelList,
    JobTemplateCopy,
)

@@ -41,6 +42,7 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'),
]

__all__ = ['urls']
@@ -8,6 +8,7 @@ from awx.api.views import (
    NotificationTemplateDetail,
    NotificationTemplateTest,
    NotificationTemplateNotificationList,
    NotificationTemplateCopy,
)

@@ -16,6 +17,7 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'),
    url(r'^(?P<pk>[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'),
    url(r'^(?P<pk>[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'),
]

__all__ = ['urls']
@ -19,10 +19,11 @@ from awx.api.views import (
|
||||
ProjectNotificationTemplatesSuccessList,
|
||||
ProjectObjectRolesList,
|
||||
ProjectAccessList,
|
||||
ProjectCopy,
|
||||
)
|
||||
|
||||
|
||||
urls = [
|
||||
urls = [
|
||||
url(r'^$', ProjectList.as_view(), name='project_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'),
|
||||
@ -39,6 +40,7 @@ urls = [
|
||||
name='project_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),
|
||||
]
|
||||
|
||||
__all__ = ['urls']
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
# All Rights Reserved.
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from django.conf import settings
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from awx.api.views import (
|
||||
@ -123,5 +124,10 @@ app_name = 'api'
|
||||
urlpatterns = [
|
||||
url(r'^$', ApiRootView.as_view(), name='api_root_view'),
|
||||
url(r'^(?P<version>(v2))/', include(v2_urls)),
|
||||
url(r'^(?P<version>(v1|v2))/', include(v1_urls))
|
||||
url(r'^(?P<version>(v1|v2))/', include(v1_urls)),
|
||||
]
|
||||
if settings.SETTINGS_MODULE == 'awx.settings.development':
|
||||
from awx.api.swagger import SwaggerSchemaView
|
||||
urlpatterns += [
|
||||
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
|
||||
]
|
||||
|
||||
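Together these hunks expose the new `.../copy/` endpoints for credentials, inventories, inventory scripts, job templates, notification templates, and projects, and register the development-only `/api/swagger/` view. A minimal sketch of exercising one of the new endpoints follows; the host, credentials, template ID, and payload shape are illustrative assumptions, not values taken from this diff:

```python
import requests

# Hypothetical local development instance and credentials.
AWX = 'https://localhost:8043'
AUTH = ('admin', 'password')

# POST to the new copy endpoint for job template 42; the server performs
# the copy (deep copies of related objects run in a background task) and
# returns the new object.
resp = requests.post(
    '{}/api/v2/job_templates/42/copy/'.format(AWX),
    auth=AUTH,
    json={'name': 'Copy of My Template'},  # assumed payload shape
    verify=False,  # dev instances use a self-signed certificate
)
print(resp.status_code, resp.json().get('id'))
```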
awx/api/views.py: 370 changes (file diff suppressed because it is too large)
@ -275,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder):
            setting_ids[setting.key] = setting.id
            try:
                value = decrypt_field(setting, 'value')
            except ValueError, e:
            except ValueError as e:
                #TODO: Remove in Tower 3.3
                logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
                value = old_decrypt_field(setting, 'value')
@ -44,7 +44,6 @@ class SettingCategoryList(ListAPIView):
    model = Setting  # Not exactly, but needed for the view.
    serializer_class = SettingCategorySerializer
    filter_backends = []
    new_in_310 = True
    view_name = _('Setting Categories')

    def get_queryset(self):

@ -69,7 +68,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
    model = Setting  # Not exactly, but needed for the view.
    serializer_class = SettingSingletonSerializer
    filter_backends = []
    new_in_310 = True
    view_name = _('Setting Detail')

    def get_queryset(self):

@ -170,7 +168,6 @@ class SettingLoggingTest(GenericAPIView):
    serializer_class = SettingSingletonSerializer
    permission_classes = (IsSuperUser,)
    filter_backends = []
    new_in_320 = True

    def post(self, request, *args, **kwargs):
        defaults = dict()
@ -29,6 +29,8 @@ import threading
import uuid
import memcache

from six.moves import xrange

__all__ = ['event_context']
@ -424,6 +424,18 @@ class InstanceAccess(BaseAccess):
        return Instance.objects.filter(
            rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()

    def can_attach(self, obj, sub_obj, relationship, data,
                   skip_sub_obj_read_check=False):
        if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
            return self.user.is_superuser
        return super(InstanceAccess, self).can_attach(
            obj, sub_obj, relationship, data,
            skip_sub_obj_read_check=skip_sub_obj_read_check)

    def can_unattach(self, obj, sub_obj, relationship, data=None):
        if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup):
            return self.user.is_superuser
        return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, data)

    def can_add(self, data):
        return False

@ -444,13 +456,13 @@ class InstanceGroupAccess(BaseAccess):
            organization__in=Organization.accessible_pk_qs(self.user, 'admin_role'))

    def can_add(self, data):
        return False
        return self.user.is_superuser

    def can_change(self, obj, data):
        return False
        return self.user.is_superuser

    def can_delete(self, obj):
        return False
        return self.user.is_superuser


class UserAccess(BaseAccess):
@ -47,7 +47,7 @@ def open_fifo_write(path, data):
    This blocks the thread until an external process (such as ssh-agent)
    reads data from the pipe.
    '''
    os.mkfifo(path, 0600)
    os.mkfifo(path, 0o600)
    thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
@ -356,7 +356,7 @@ class SmartFilterField(models.TextField):
        value = urllib.unquote(value)
        try:
            SmartFilter().query_from_string(value)
        except RuntimeError, e:
        except RuntimeError as e:
            raise models.base.ValidationError(e)
        return super(SmartFilterField, self).get_prep_value(value)
@ -695,11 +695,10 @@ class CredentialTypeInjectorField(JSONSchemaField):
            'properties': {
                'file': {
                    'type': 'object',
                    'properties': {
                        'template': {'type': 'string'},
                    'patternProperties': {
                        '^template(\.[a-zA-Z_]+[a-zA-Z0-9_]*)?$': {'type': 'string'},
                    },
                    'additionalProperties': False,
                    'required': ['template'],
                },
                'env': {
                    'type': 'object',

@ -749,8 +748,22 @@ class CredentialTypeInjectorField(JSONSchemaField):

        class TowerNamespace:
            filename = None

        valid_namespace['tower'] = TowerNamespace()

        # ensure either single file or multi-file syntax is used (but not both)
        template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')]
        if 'template' in template_names and len(template_names) > 1:
            raise django_exceptions.ValidationError(
                _('Must use multi-file syntax when injecting multiple files'),
                code='invalid',
                params={'value': value},
            )
        if 'template' not in template_names:
            valid_namespace['tower'].filename = TowerNamespace()
            for template_name in template_names:
                template_name = template_name.split('.')[1]
                setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE')
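The revised schema accepts either the single-file key `template` or any number of `template.NAME` keys, and the validation above rejects mixing the two. A hedged sketch of both injector forms; the `template` key and `tower.filename` namespace come from the hunk above, while the credential field and environment variable names are illustrative:

```python
# Single-file syntax: the rendered file's path is exposed as {{ tower.filename }}.
single_file_injector = {
    'file': {'template': '{{ api_token }}'},
    'env': {'MY_TOKEN_FILE': '{{ tower.filename }}'},
}

# Multi-file syntax: each template.NAME key renders its own file, exposed
# as {{ tower.filename.NAME }}; mixing this with a bare 'template' key
# raises the ValidationError above.
multi_file_injector = {
    'file': {
        'template.cert': '{{ ssl_cert }}',
        'template.key': '{{ ssl_key }}',
    },
    'env': {
        'MY_CERT_FILE': '{{ tower.filename.cert }}',
        'MY_KEY_FILE': '{{ tower.filename.key }}',
    },
}
```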
        for type_, injector in value.items():
            for key, tmpl in injector.items():
                try:
@ -17,7 +17,7 @@ class Command(BaseCommand):

    def handle(self, *args, **kwargs):
        if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False):
            print settings.AWX_ISOLATED_PUBLIC_KEY
            print(settings.AWX_ISOLATED_PUBLIC_KEY)
            return

        key = rsa.generate_private_key(

@ -41,4 +41,4 @@ class Command(BaseCommand):
            ) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
        )
        pemfile.save()
        print pemfile.value
        print(pemfile.value)
@ -17,6 +17,10 @@ class Command(BaseCommand):
                            help='Comma-Delimited Hosts to add to the Queue')
        parser.add_argument('--controller', dest='controller', type=str,
                            default='', help='The controlling group (makes this an isolated group)')
        parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0,
                            help='The percentage of active instances that will be assigned to this group')
        parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
                            help='The minimum number of instances that will be retained for this group from available instances')

    def handle(self, **options):
        queuename = options.get('queuename')

@ -38,7 +42,9 @@ class Command(BaseCommand):
            changed = True
        else:
            print("Creating instance group {}".format(queuename))
            ig = InstanceGroup(name=queuename)
            ig = InstanceGroup(name=queuename,
                               policy_instance_percentage=options.get('instance_percent'),
                               policy_instance_minimum=options.get('instance_minimum'))
            if control_ig:
                ig.controller = control_ig
            ig.save()

@ -60,5 +66,7 @@ class Command(BaseCommand):
                sys.exit(1)
            else:
                print("Instance already registered {}".format(instance[0].hostname))
        ig.policy_instance_list = instance_list
        ig.save()
        if changed:
            print('(changed: True)')
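With the two new flags, a queue can be registered with percentage- or minimum-based policies instead of a fixed host list. A hedged sketch using Django's `call_command` (the option values are illustrative; option names follow the `dest` values declared above):

```python
from django.core.management import call_command

# Roughly equivalent to:
#   awx-manage register_queue --queuename=tower --instance_percent=100
call_command('register_queue', queuename='tower',
             instance_percent=100, instance_minimum=0)
```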
@ -41,10 +41,9 @@ class Command(BaseCommand):
            run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
            args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
        try:
            print ' '.join(args)
            print(' '.join(args))
            subprocess.check_call(args)
        except subprocess.CalledProcessError as e:
            sys.exit(e.returncode)
        finally:
            shutil.rmtree(path)
@ -2,12 +2,9 @@
# All Rights Reserved.

import sys
from datetime import timedelta
import logging

from django.db import models
from django.utils.timezone import now
from django.db.models import Sum
from django.conf import settings

from awx.main.utils.filters import SmartFilter

@ -93,11 +90,6 @@ class InstanceManager(models.Manager):
        """Return count of active Tower nodes for licensing."""
        return self.all().count()

    def total_capacity(self):
        sumval = self.filter(modified__gte=now() - timedelta(seconds=settings.AWX_ACTIVE_NODE_TIME)) \
                     .aggregate(total_capacity=Sum('capacity'))['total_capacity']
        return max(50, sumval)

    def my_role(self):
        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
        return "tower"
awx/main/migrations/0020_v330_instancegroup_policies.py: new file, 62 lines
@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from decimal import Decimal
import awx.main.fields


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0019_v330_custom_virtualenv'),
    ]

    operations = [
        migrations.AddField(
            model_name='instancegroup',
            name='policy_instance_list',
            field=awx.main.fields.JSONField(default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group',
                                            blank=True),
        ),
        migrations.AddField(
            model_name='instancegroup',
            name='policy_instance_minimum',
            field=models.IntegerField(default=0, help_text='Static minimum number of Instances to automatically assign to this group'),
        ),
        migrations.AddField(
            model_name='instancegroup',
            name='policy_instance_percentage',
            field=models.IntegerField(default=0, help_text='Percentage of Instances to automatically assign to this group'),
        ),
        migrations.AddField(
            model_name='instance',
            name='capacity_adjustment',
            field=models.DecimalField(decimal_places=2, default=Decimal('1.0'), max_digits=3),
        ),
        migrations.AddField(
            model_name='instance',
            name='cpu',
            field=models.IntegerField(default=0, editable=False)
        ),
        migrations.AddField(
            model_name='instance',
            name='memory',
            field=models.BigIntegerField(default=0, editable=False)
        ),
        migrations.AddField(
            model_name='instance',
            name='cpu_capacity',
            field=models.IntegerField(default=0, editable=False)
        ),
        migrations.AddField(
            model_name='instance',
            name='mem_capacity',
            field=models.IntegerField(default=0, editable=False)
        ),
        migrations.AddField(
            model_name='instance',
            name='enabled',
            field=models.BooleanField(default=True)
        )
    ]
@ -184,7 +184,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
        # NOTE: We sorta have to assume the host count matches and that forks default to 5
        from awx.main.models.inventory import Host
        count_hosts = Host.objects.filter(enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
        return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

    def copy(self):
        data = {}
@ -594,7 +594,7 @@ class CredentialType(CommonModelNameNotUnique):
            return

        class TowerNamespace:
            filename = None
            pass

        tower_namespace = TowerNamespace()

@ -622,17 +622,25 @@ class CredentialType(CommonModelNameNotUnique):
            if len(value):
                namespace[field_name] = value

        file_tmpl = self.injectors.get('file', {}).get('template')
        if file_tmpl is not None:
            # If a file template is provided, render the file and update the
            # special `tower` template namespace so the filename can be
            # referenced in other injectors
        file_tmpls = self.injectors.get('file', {})
        # If any file templates are provided, render the files and update the
        # special `tower` template namespace so the filename can be
        # referenced in other injectors
        for file_label, file_tmpl in file_tmpls.items():
            data = Template(file_tmpl).render(**namespace)
            _, path = tempfile.mkstemp(dir=private_data_dir)
            with open(path, 'w') as f:
                f.write(data)
            os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
            namespace['tower'].filename = path

            # determine if filename indicates single file or many
            if file_label.find('.') == -1:
                tower_namespace.filename = path
            else:
                if not hasattr(tower_namespace, 'filename'):
                    tower_namespace.filename = TowerNamespace()
                file_label = file_label.split('.')[1]
                setattr(tower_namespace.filename, file_label, path)

        for env_var, tmpl in self.injectors.get('env', {}).items():
            if env_var.startswith('ANSIBLE_') or env_var in self.ENV_BLACKLIST:
@ -1,8 +1,10 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

from django.db import models
from django.db.models.signals import post_save
from decimal import Decimal

from django.db import models, connection
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from django.conf import settings

@ -10,12 +12,15 @@ from django.utils.timezone import now, timedelta

from solo.models import SingletonModel

from awx import __version__ as awx_application_version
from awx.api.versioning import reverse
from awx.main.managers import InstanceManager, InstanceGroupManager
from awx.main.fields import JSONField
from awx.main.models.inventory import InventoryUpdate
from awx.main.models.jobs import Job
from awx.main.models.projects import ProjectUpdate
from awx.main.models.unified_jobs import UnifiedJob
from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity

__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',)
@ -38,6 +43,30 @@ class Instance(models.Model):
        default=100,
        editable=False,
    )
    capacity_adjustment = models.DecimalField(
        default=Decimal(1.0),
        max_digits=3,
        decimal_places=2,
    )
    enabled = models.BooleanField(
        default=True
    )
    cpu = models.IntegerField(
        default=0,
        editable=False,
    )
    memory = models.BigIntegerField(
        default=0,
        editable=False,
    )
    cpu_capacity = models.IntegerField(
        default=0,
        editable=False,
    )
    mem_capacity = models.IntegerField(
        default=0,
        editable=False,
    )

    class Meta:
        app_label = 'main'

@ -63,6 +92,23 @@ class Instance(models.Model):
            grace_period = settings.AWX_ISOLATED_PERIODIC_CHECK * 2
        return self.modified < ref_time - timedelta(seconds=grace_period)

    def is_controller(self):
        return Instance.objects.filter(rampart_groups__controller__instances=self).exists()

    def refresh_capacity(self):
        cpu = get_cpu_capacity()
        mem = get_mem_capacity()
        self.capacity = get_system_task_capacity(self.capacity_adjustment)
        self.cpu = cpu[0]
        self.memory = mem[0]
        self.cpu_capacity = cpu[1]
        self.mem_capacity = mem[1]
        self.version = awx_application_version
        self.save(update_fields=['capacity', 'version', 'modified', 'cpu',
                                 'memory', 'cpu_capacity', 'mem_capacity'])


class InstanceGroup(models.Model):
    """A model representing a Queue/Group of AWX Instances."""
@ -85,6 +131,19 @@ class InstanceGroup(models.Model):
        default=None,
        null=True
    )
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_("Percentage of Instances to automatically assign to this group")
    )
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group")
    )
    policy_instance_list = JSONField(
        default=[],
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group")
    )

    def get_absolute_url(self, request=None):
        return reverse('api:instance_group_detail', kwargs={'pk': self.pk}, request=request)

@ -119,6 +178,32 @@ class JobOrigin(models.Model):
        app_label = 'main'


@receiver(post_save, sender=InstanceGroup)
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
    if created:
        from awx.main.tasks import apply_cluster_membership_policies
        connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())


@receiver(post_save, sender=Instance)
def on_instance_saved(sender, instance, created=False, raw=False, **kwargs):
    if created:
        from awx.main.tasks import apply_cluster_membership_policies
        connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())


@receiver(post_delete, sender=InstanceGroup)
def on_instance_group_deleted(sender, instance, using, **kwargs):
    from awx.main.tasks import apply_cluster_membership_policies
    connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())


@receiver(post_delete, sender=Instance)
def on_instance_deleted(sender, instance, using, **kwargs):
    from awx.main.tasks import apply_cluster_membership_policies
    connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
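The four receivers above schedule a policy recalculation whenever an instance or group appears or disappears. A minimal sketch of the effect (the group name and percentage are illustrative):

```python
# Inside a transaction; the post_save receiver registers an on_commit hook.
ig = InstanceGroup.objects.create(name='fast-nodes', policy_instance_percentage=50)
# ...after COMMIT, apply_cluster_membership_policies.apply_async() runs and
# fills 'fast-nodes' with roughly half of the non-pinned instances.
```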
# Unfortunately, the signal can't just be connected against UnifiedJob; it
# turns out that creating a model's subclass doesn't fire the signal for the
# superclass model.
@ -50,6 +50,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
    an inventory source contains lists and hosts.
    '''

    FIELDS_TO_PRESERVE_AT_COPY = ['hosts', 'groups', 'instance_groups']
    KIND_CHOICES = [
        ('', _('Hosts have a direct link to this inventory.')),
        ('smart', _('Hosts for inventory generated using the host_filter property.')),

@ -505,6 +506,10 @@ class Host(CommonModelNameNotUnique):
    A managed node
    '''

    FIELDS_TO_PRESERVE_AT_COPY = [
        'name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables'
    ]

    class Meta:
        app_label = 'main'
        unique_together = (("name", "inventory"),)  # FIXME: Add ('instance_id', 'inventory') after migration.

@ -692,6 +697,10 @@ class Group(CommonModelNameNotUnique):
    groups.
    '''

    FIELDS_TO_PRESERVE_AT_COPY = [
        'name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables'
    ]

    class Meta:
        app_label = 'main'
        unique_together = (("name", "inventory"),)

@ -1602,7 +1611,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin,

    @property
    def task_impact(self):
        return 50
        return 1

    # InventoryUpdate credential required
    # Custom and SCM InventoryUpdate credential not required
@ -220,6 +220,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    '''
    FIELDS_TO_PRESERVE_AT_COPY = [
        'labels', 'instance_groups', 'credentials', 'survey_spec'
    ]
    FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]

    class Meta:

@ -620,10 +624,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
        # NOTE: We sorta have to assume the host count matches and that forks default to 5
        from awx.main.models.inventory import Host
        if self.launch_type == 'callback':
            count_hosts = 1
            count_hosts = 2
        else:
            count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count()
        return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1
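The new formula shrinks job impact from a multiple of ten to hosts-plus-one, consistent with the per-instance capacity fields added elsewhere in this commit. A worked example (numbers are illustrative):

```python
count_hosts, forks = 20, 0            # 20 matching hosts, forks left at the default
effective_forks = 5 if forks == 0 else forks
old_impact = min(count_hosts, effective_forks) * 10   # 50
new_impact = min(count_hosts, effective_forks) + 1    # 6
```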
    @property
    def successful_hosts(self):
@ -1190,7 +1194,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):

    @property
    def task_impact(self):
        return 150
        return 5

    @property
    def preferred_instance_groups(self):
@ -229,6 +229,8 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
    '''

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
    FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
    FIELDS_TO_DISCARD_AT_COPY = ['local_path']

    class Meta:
        app_label = 'main'

@ -492,7 +494,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage

    @property
    def task_impact(self):
        return 0 if self.job_type == 'run' else 20
        return 0 if self.job_type == 'run' else 1

    @property
    def result_stdout(self):
@ -127,7 +127,7 @@ class Schedule(CommonModel, LaunchTimeConfig):
        https://github.com/dateutil/dateutil/pull/619
        """
        kwargs['forceset'] = True
        kwargs['tzinfos'] = {}
        kwargs['tzinfos'] = {x: dateutil.tz.tzutc() for x in dateutil.parser.parserinfo().UTCZONE}
        match = cls.TZID_REGEX.match(rrule)
        if match is not None:
            rrule = cls.TZID_REGEX.sub("DTSTART\g<stamp>TZI\g<rrule>", rrule)

@ -150,14 +150,13 @@ class Schedule(CommonModel, LaunchTimeConfig):
                # > UTC time.
                raise ValueError('RRULE UNTIL values must be specified in UTC')

        try:
            first_event = x[0]
            if first_event < now() - datetime.timedelta(days=365 * 5):
                # For older DTSTART values, if there are more than 1000 recurrences...
                if len(x[:1001]) > 1000:
                    raise ValueError('RRULE values that yield more than 1000 events are not allowed.')
        except IndexError:
            pass
        if 'MINUTELY' in rrule or 'HOURLY' in rrule:
            try:
                first_event = x[0]
                if first_event < now() - datetime.timedelta(days=365 * 5):
                    raise ValueError('RRULE values with more than 1000 events are not allowed.')
            except IndexError:
                pass
        return x

    def __unicode__(self):
@ -432,7 +432,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
        copy_m2m_relationships(self, unified_jt, fields)
        return unified_jt

    def _accept_or_ignore_job_kwargs(self, _exclude_errors=None, **kwargs):
    def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
        '''
        Override in subclass if template accepts _any_ prompted params
        '''
@ -110,6 +110,13 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):


class WorkflowJobTemplateNode(WorkflowNodeBase):
    FIELDS_TO_PRESERVE_AT_COPY = [
        'unified_job_template', 'workflow_job_template', 'success_nodes', 'failure_nodes',
        'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
        'char_prompts'
    ]
    REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords']

    workflow_job_template = models.ForeignKey(
        'WorkflowJobTemplate',
        related_name='workflow_job_template_nodes',

@ -283,6 +290,9 @@ class WorkflowJobOptions(BaseModel):

class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
    FIELDS_TO_PRESERVE_AT_COPY = [
        'labels', 'instance_groups', 'workflow_job_template_nodes', 'credentials', 'survey_spec'
    ]

    class Meta:
        app_label = 'main'

@ -353,7 +363,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
        workflow_job.copy_nodes_from_original(original=self)
        return workflow_job

    def _accept_or_ignore_job_kwargs(self, **kwargs):
    def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
        prompted_fields = {}
        rejected_fields = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(kwargs.get('extra_vars', {}))

@ -394,11 +404,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
            node_list.append(node.pk)
        return node_list

    def user_copy(self, user):
        new_wfjt = self.copy_unified_jt()
        new_wfjt.copy_nodes_from_original(original=self, user=user)
        return new_wfjt


class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin):
    class Meta:
@ -5,6 +5,8 @@
import logging
import os

from six.moves import xrange

# Django
from django.conf import settings

@ -46,6 +48,6 @@ class CallbackQueueDispatcher(object):
                              delivery_mode="persistent" if settings.PERSISTENT_CALLBACK_MESSAGES else "transient",
                              routing_key=self.connection_queue)
                return
            except Exception, e:
            except Exception as e:
                self.logger.info('Publish Job Event Exception: %r, retry=%d', e,
                                 retry_count, exc_info=True)
@ -21,12 +21,12 @@ class LogErrorsTask(Task):
        super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)


@shared_task
@shared_task(base=LogErrorsTask)
def run_job_launch(job_id):
    TaskManager().schedule()


@shared_task
@shared_task(base=LogErrorsTask)
def run_job_complete(job_id):
    TaskManager().schedule()
@ -577,5 +577,5 @@ def delete_inventory_for_org(sender, instance, **kwargs):
    for inventory in inventories:
        try:
            inventory.schedule_deletion(user_id=getattr(user, 'id', None))
        except RuntimeError, e:
        except RuntimeError as e:
            logger.debug(e)
@ -17,19 +17,19 @@ class Migration(DataMigration):
                obj1 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object1_id) + ")")
                if hasattr(activity_stream_object, activity_stream_object.object1):
                    getattr(activity_stream_object, activity_stream_object.object1).add(obj1)
            except ObjectDoesNotExist, e:
            except ObjectDoesNotExist as e:
                print("Object 1 for AS id=%s does not exist. (Object Type: %s, id: %s" % (str(activity_stream_object.id),
                                                                                          activity_stream_object.object1_type,
                                                                                          str(activity_stream_object.object1_id)))
                continue
            if activity_stream_object.operation in ('associate', 'disassociate'):
                try:
                    obj_type = "orm." + activity_stream_object.object2_type.split(".")[-1]
                    obj_type = "orm." + activity_stream_object.object2_type.split(".")[-1]
                    if obj_type == 'orm.User':
                        obj_type = 'orm["auth.User"]'
                    obj2 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object2_id) + ")")
                    getattr(activity_stream_object, activity_stream_object.object2).add(obj2)
                except ObjectDoesNotExist, e:
                except ObjectDoesNotExist as e:
                    print("Object 2 for AS id=%s does not exist. (Object Type: %s, id: %s" % (str(activity_stream_object.id),
                                                                                              activity_stream_object.object2_type,
                                                                                              str(activity_stream_object.object2_id)))
@ -2,10 +2,11 @@
# All Rights Reserved.

# Python
from collections import OrderedDict
from collections import OrderedDict, namedtuple
import ConfigParser
import cStringIO
import functools
import importlib
import json
import logging
import os

@ -25,12 +26,13 @@ except Exception:
    psutil = None

# Celery
from celery import Task, shared_task
from celery.signals import celeryd_init, worker_process_init, worker_shutdown
from celery import Task, shared_task, Celery
from celery.signals import celeryd_init, worker_process_init, worker_shutdown, worker_ready, celeryd_after_setup

# Django
from django.conf import settings
from django.db import transaction, DatabaseError, IntegrityError
from django.db.models.fields.related import ForeignKey
from django.utils.timezone import now, timedelta
from django.utils.encoding import smart_str
from django.core.mail import send_mail

@ -53,16 +55,17 @@ from awx.main.queue import CallbackQueueDispatcher
from awx.main.expect import run, isolated_manager
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
                            check_proot_installed, build_proot_temp_dir, get_licenser,
                            wrap_args_with_proot, get_system_task_capacity, OutputEventFilter,
                            ignore_inventory_computed_fields, ignore_inventory_group_removal,
                            get_type_for_model, extract_ansible_vars)
                            wrap_args_with_proot, OutputEventFilter, ignore_inventory_computed_fields,
                            ignore_inventory_group_removal, get_type_for_model, extract_ansible_vars)
from awx.main.utils.reload import restart_local_services, stop_local_services
from awx.main.utils.pglock import advisory_lock
from awx.main.utils.ha import update_celery_worker_routes, register_celery_worker_queues
from awx.main.utils.handlers import configure_external_logger
from awx.main.consumers import emit_channel_notification
from awx.conf import settings_registry

__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
           'RunAdHocCommand', 'handle_work_error', 'handle_work_success',
           'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies',
           'update_inventory_computed_fields', 'update_host_smart_inventory_memberships',
           'send_notifications', 'run_administrative_checks', 'purge_old_stdout_files']
@ -130,6 +133,56 @@ def inform_cluster_of_shutdown(*args, **kwargs):
        logger.exception('Encountered problem with normal shutdown signal.')


@shared_task(bind=True, queue='tower_instance_router', base=LogErrorsTask)
def apply_cluster_membership_policies(self):
    with advisory_lock('cluster_policy_lock', wait=True):
        considered_instances = Instance.objects.all().order_by('id')
        total_instances = considered_instances.count()
        filtered_instances = []
        actual_groups = []
        actual_instances = []
        Group = namedtuple('Group', ['obj', 'instances'])
        Node = namedtuple('Instance', ['obj', 'groups'])
        # Process policy instance list first, these will represent manually managed instances
        # that will not go through automatic policy determination
        for ig in InstanceGroup.objects.all():
            logger.info("Considering group {}".format(ig.name))
            ig.instances.clear()
            group_actual = Group(obj=ig, instances=[])
            for i in ig.policy_instance_list:
                inst = Instance.objects.filter(hostname=i)
                if not inst.exists():
                    continue
                inst = inst[0]
                logger.info("Policy List, adding {} to {}".format(inst.hostname, ig.name))
                group_actual.instances.append(inst.id)
                ig.instances.add(inst)
                filtered_instances.append(inst)
            actual_groups.append(group_actual)
        # Process Instance minimum policies next, since it represents a concrete lower bound to the
        # number of instances to make available to instance groups
        actual_instances = [Node(obj=i, groups=[]) for i in filter(lambda x: x not in filtered_instances, considered_instances)]
        logger.info("Total instances not directly associated: {}".format(total_instances))
        for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)):
            for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)):
                if len(g.instances) >= g.obj.policy_instance_minimum:
                    break
                logger.info("Policy minimum, adding {} to {}".format(i.obj.hostname, g.obj.name))
                g.obj.instances.add(i.obj)
                g.instances.append(i.obj.id)
                i.groups.append(g.obj.id)
        # Finally process instance policy percentages
        for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)):
            for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)):
                if 100 * float(len(g.instances)) / len(actual_instances) >= g.obj.policy_instance_percentage:
                    break
                logger.info("Policy percentage, adding {} to {}".format(i.obj.hostname, g.obj.name))
                g.instances.append(i.obj.id)
                g.obj.instances.add(i.obj)
                i.groups.append(g.obj.id)
        handle_ha_toplogy_changes()
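The task applies policies in three passes: pinned `policy_instance_list` entries first, then per-group minimums, then percentages, each time favoring the least-populated groups and instances. A simplified, self-contained sketch of the three passes using plain dicts (the real task works on ORM objects and re-sorts least-filled first, which is omitted here; all names are illustrative):

```python
instances = ['node1', 'node2', 'node3', 'node4']
groups = {
    'pinned':  {'list': ['node1'], 'min': 0, 'pct': 0,  'members': []},
    'minimum': {'list': [],        'min': 1, 'pct': 0,  'members': []},
    'percent': {'list': [],        'min': 0, 'pct': 50, 'members': []},
}

# Pass 1: exact-match policy lists; matched instances leave the shared pool.
pool = list(instances)
for g in groups.values():
    for host in g['list']:
        if host in pool:
            g['members'].append(host)
            pool.remove(host)

# Pass 2: satisfy each group's minimum from the remaining pool
# (pool members may belong to several groups at once).
for g in groups.values():
    for host in pool:
        if len(g['members']) >= g['min']:
            break
        if host not in g['members']:
            g['members'].append(host)

# Pass 3: top groups up to their percentage of the remaining pool.
for g in groups.values():
    for host in pool:
        if 100.0 * len(g['members']) / len(pool) >= g['pct']:
            break
        if host not in g['members']:
            g['members'].append(host)

# pinned -> ['node1']; minimum -> ['node2']; percent -> two of node2..node4
```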
@shared_task(queue='tower_broadcast_all', bind=True, base=LogErrorsTask)
def handle_setting_changes(self, setting_keys):
    orig_len = len(setting_keys)
@ -147,6 +200,45 @@ def handle_setting_changes(self, setting_keys):
            break


@shared_task(bind=True, queue='tower_broadcast_all', base=LogErrorsTask)
def handle_ha_toplogy_changes(self):
    instance = Instance.objects.me()
    logger.debug("Reconfigure celeryd queues task on host {}".format(self.request.hostname))
    awx_app = Celery('awx')
    awx_app.config_from_object('django.conf:settings', namespace='CELERY')
    (instance, removed_queues, added_queues) = register_celery_worker_queues(awx_app, self.request.hostname)
    logger.info("Workers on tower node '{}' removed from queues {} and added to queues {}"
                .format(instance.hostname, removed_queues, added_queues))
    updated_routes = update_celery_worker_routes(instance, settings)
    logger.info("Worker on tower node '{}' updated celery routes {} all routes are now {}"
                .format(instance.hostname, updated_routes, self.app.conf.CELERY_TASK_ROUTES))


@worker_ready.connect
def handle_ha_toplogy_worker_ready(sender, **kwargs):
    logger.debug("Configure celeryd queues task on host {}".format(sender.hostname))
    (instance, removed_queues, added_queues) = register_celery_worker_queues(sender.app, sender.hostname)
    logger.info("Workers on tower node '{}' unsubscribed from queues {} and subscribed to queues {}"
                .format(instance.hostname, removed_queues, added_queues))


@celeryd_init.connect
def handle_update_celery_routes(sender=None, conf=None, **kwargs):
    conf = conf if conf else sender.app.conf
    logger.debug("Registering celery routes for {}".format(sender))
    instance = Instance.objects.me()
    added_routes = update_celery_worker_routes(instance, conf)
    logger.info("Workers on tower node '{}' added routes {} all routes are now {}"
                .format(instance.hostname, added_routes, conf.CELERY_TASK_ROUTES))


@celeryd_after_setup.connect
def handle_update_celery_hostname(sender, instance, **kwargs):
    tower_instance = Instance.objects.me()
    instance.hostname = 'celery@{}'.format(tower_instance.hostname)
    logger.warn("Set hostname to {}".format(instance.hostname))


@shared_task(queue='tower', base=LogErrorsTask)
def send_notifications(notification_list, job_id=None):
    if not isinstance(notification_list, list):
@ -215,6 +307,7 @@ def cluster_node_heartbeat(self):
    instance_list = list(Instance.objects.filter(rampart_groups__controller__isnull=True).distinct())
    this_inst = None
    lost_instances = []

    for inst in list(instance_list):
        if inst.hostname == settings.CLUSTER_HOST_ID:
            this_inst = inst

@ -224,11 +317,15 @@ def cluster_node_heartbeat(self):
            instance_list.remove(inst)
    if this_inst:
        startup_event = this_inst.is_lost(ref_time=nowtime)
        if this_inst.capacity == 0:
        if this_inst.capacity == 0 and this_inst.enabled:
            logger.warning('Rejoining the cluster as instance {}.'.format(this_inst.hostname))
            this_inst.capacity = get_system_task_capacity()
            this_inst.version = awx_application_version
            this_inst.save(update_fields=['capacity', 'version', 'modified'])
        if this_inst.enabled:
            this_inst.refresh_capacity()
            handle_ha_toplogy_changes.apply_async()
        elif this_inst.capacity != 0 and not this_inst.enabled:
            this_inst.capacity = 0
            this_inst.save(update_fields=['capacity'])
            handle_ha_toplogy_changes.apply_async()
        if startup_event:
            return
    else:

@ -237,7 +334,7 @@ def cluster_node_heartbeat(self):
    for other_inst in instance_list:
        if other_inst.version == "":
            continue
        if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version) and not settings.DEBUG:
        if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG:
            logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(other_inst.hostname,
                                                                                                       other_inst.version,
                                                                                                       this_inst.hostname,

@ -254,6 +351,10 @@ def cluster_node_heartbeat(self):
            other_inst.save(update_fields=['capacity'])
            logger.error("Host {} last checked in at {}, marked as lost.".format(
                other_inst.hostname, other_inst.modified))
            if settings.AWX_AUTO_DEPROVISION_INSTANCES:
                deprovision_hostname = other_inst.hostname
                other_inst.delete()
                logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname))
        except DatabaseError as e:
            if 'did not affect any rows' in str(e):
                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
@ -1036,7 +1137,7 @@ class RunJob(BaseTask):
        # job and visible inside the proot environment (when enabled).
        cp_dir = os.path.join(kwargs['private_data_dir'], 'cp')
        if not os.path.exists(cp_dir):
            os.mkdir(cp_dir, 0700)
            os.mkdir(cp_dir, 0o700)
        env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r')

        # Allow the inventory script to include host variables inline via ['_meta']['hostvars'].

@ -1723,7 +1824,7 @@ class RunInventoryUpdate(BaseTask):
            cp.set(section, 'ssl_verify', "false")

        cloudforms_opts = dict(inventory_update.source_vars_dict.items())
        for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix']:
        for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix', 'prefer_ipv4']:
            if opt in cloudforms_opts:
                cp.set(section, opt, cloudforms_opts[opt])
@ -2160,6 +2261,62 @@ class RunSystemJob(BaseTask):
        return settings.BASE_DIR


def _reconstruct_relationships(copy_mapping):
    for old_obj, new_obj in copy_mapping.items():
        model = type(old_obj)
        for field_name in getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []):
            field = model._meta.get_field(field_name)
            if isinstance(field, ForeignKey):
                if getattr(new_obj, field_name, None):
                    continue
                related_obj = getattr(old_obj, field_name)
                related_obj = copy_mapping.get(related_obj, related_obj)
                setattr(new_obj, field_name, related_obj)
            elif field.many_to_many:
                for related_obj in getattr(old_obj, field_name).all():
                    getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj))
        new_obj.save()


@shared_task(bind=True, queue='tower', base=LogErrorsTask)
def deep_copy_model_obj(
    self, model_module, model_name, obj_pk, new_obj_pk,
    user_pk, sub_obj_list, permission_check_func=None
):
    logger.info('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
    from awx.api.generics import CopyAPIView
    model = getattr(importlib.import_module(model_module), model_name, None)
    if model is None:
        return
    try:
        obj = model.objects.get(pk=obj_pk)
        new_obj = model.objects.get(pk=new_obj_pk)
        creater = User.objects.get(pk=user_pk)
    except ObjectDoesNotExist:
        logger.warning("Object or user no longer exists.")
        return
    with transaction.atomic():
        copy_mapping = {}
        for sub_obj_setup in sub_obj_list:
            sub_model = getattr(importlib.import_module(sub_obj_setup[0]),
                                sub_obj_setup[1], None)
            if sub_model is None:
                continue
            try:
                sub_obj = sub_model.objects.get(pk=sub_obj_setup[2])
            except ObjectDoesNotExist:
                continue
            copy_mapping.update(CopyAPIView.copy_model_obj(
                obj, new_obj, sub_model, sub_obj, creater
            ))
        _reconstruct_relationships(copy_mapping)
        if permission_check_func:
            permission_check_func = getattr(getattr(
                importlib.import_module(permission_check_func[0]), permission_check_func[1]
            ), permission_check_func[2])
            permission_check_func(creater, copy_mapping.values())


celery_app.register_task(RunJob())
celery_app.register_task(RunProjectUpdate())
celery_app.register_task(RunInventoryUpdate())
awx/main/templatetags/__init__.py: new file, 0 lines
awx/main/templatetags/swagger.py: new file, 50 lines
@ -0,0 +1,50 @@
import re
from django.utils.encoding import force_unicode
from django import template

register = template.Library()

CONSONANT_SOUND = re.compile(r'''one(?![ir])''', re.IGNORECASE | re.VERBOSE)  # noqa
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(?!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)  # noqa


@register.filter
def anora(text):
    # https://pypi.python.org/pypi/anora
    # < 10 lines of BSD-3 code, not worth a dependency
    text = force_unicode(text)
    anora = 'an' if not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text) else 'a'
    return anora + ' ' + text
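A quick sanity check of the filter from Python (in templates it is applied as `{{ value|anora }}`; the example words are illustrative):

```python
anora('inventory')  # -> 'an inventory'
anora('host')       # -> 'a host'
anora('one-off')    # -> 'a one-off' (the lookahead treats "one" as a consonant sound)
```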
@register.tag(name='ifmeth')
def ifmeth(parser, token):
    """
    Used to mark template blocks for Swagger/OpenAPI output.
    If the specified method matches the *current* method in Swagger/OpenAPI
    generation, show the block. Otherwise, the block is omitted.

    {% ifmeth GET %}
    Make a GET request to...
    {% endifmeth %}

    {% ifmeth PUT PATCH %}
    Make a PUT or PATCH request to...
    {% endifmeth %}
    """
    allowed_methods = [m.upper() for m in token.split_contents()[1:]]
    nodelist = parser.parse(('endifmeth',))
    parser.delete_first_token()
    return MethodFilterNode(allowed_methods, nodelist)


class MethodFilterNode(template.Node):
    def __init__(self, allowed_methods, nodelist):
        self.allowed_methods = allowed_methods
        self.nodelist = nodelist

    def render(self, context):
        swagger_method = context.get('swagger_method')
        if not swagger_method or swagger_method.upper() in self.allowed_methods:
            return self.nodelist.render(context)
        return ''
awx/main/tests/docs/__init__.py: new file, 0 lines
awx/main/tests/docs/conftest.py: new file, 13 lines
@ -0,0 +1,13 @@
from awx.main.tests.functional.conftest import *  # noqa


def pytest_addoption(parser):
    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")


def pytest_generate_tests(metafunc):
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.release
    if 'release' in metafunc.fixturenames and option_value is not None:
        metafunc.parametrize("release", [option_value])
awx/main/tests/docs/test_swagger_generation.py: new file, 171 lines
@ -0,0 +1,171 @@
import datetime
import json
import re

from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.functional import Promise
from django.utils.encoding import force_text

from coreapi.compat import force_bytes
from openapi_codec.encode import generate_swagger_object
import pytest

from awx.api.versioning import drf_reverse


class i18nEncoder(DjangoJSONEncoder):
    def default(self, obj):
        if isinstance(obj, Promise):
            return force_text(obj)
        return super(i18nEncoder, self).default(obj)


@pytest.mark.django_db
class TestSwaggerGeneration():
    """
    This class is used to generate a Swagger/OpenAPI document for the awx
    API. A _prepare fixture generates a JSON blob containing OpenAPI data;
    individual tests have the ability to modify the payload.

    Finally, the JSON content is written to a file, `swagger.json`, in the
    current working directory.

    $ py.test test_swagger_generation.py --release 3.3.0

    To customize the `info.description` in the generated OpenAPI document,
    modify the text in `awx.api.templates.swagger.description.md`
    """
    JSON = {}

    @pytest.fixture(autouse=True, scope='function')
    def _prepare(self, get, admin):
        if not self.__class__.JSON:
            url = drf_reverse('api:swagger_view') + '?format=openapi'
            response = get(url, user=admin)
            data = generate_swagger_object(response.data)
            if response.has_header('X-Deprecated-Paths'):
                data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
            data.update(response.accepted_renderer.get_customizations() or {})

            data['host'] = None
            data['modified'] = datetime.datetime.utcnow().isoformat()
            data['schemes'] = ['https']
            data['consumes'] = ['application/json']

            revised_paths = {}
            deprecated_paths = data.pop('deprecated_paths', [])
            for path, node in data['paths'].items():
                # change {version} in paths to the actual default API version (e.g., v2)
                revised_paths[path.replace(
                    '{version}',
                    settings.REST_FRAMEWORK['DEFAULT_VERSION']
                )] = node
                for method in node:
                    if path in deprecated_paths:
                        node[method]['deprecated'] = True
                    if 'description' in node[method]:
                        # Pop off the first line and use that as the summary
                        lines = node[method]['description'].splitlines()
                        node[method]['summary'] = lines.pop(0).strip('#:')
                        node[method]['description'] = '\n'.join(lines)

                    # remove the required `version` parameter
                    for param in node[method].get('parameters'):
                        if param['in'] == 'path' and param['name'] == 'version':
                            node[method]['parameters'].remove(param)
            data['paths'] = revised_paths
            self.__class__.JSON = data

    def test_sanity(self, release):
        JSON = self.__class__.JSON
        JSON['info']['version'] = release

        # Make some basic assertions about the rendered JSON so we can
        # be sure it doesn't break across DRF upgrades and view/serializer
        # changes.
        assert len(JSON['paths'])

        # The number of API endpoints changes over time, but let's just check
        # for a reasonable number here; if this test starts failing, raise/lower the bounds
        paths = JSON['paths']
        assert 250 < len(paths) < 300
        assert paths['/api/'].keys() == ['get']
        assert paths['/api/v2/'].keys() == ['get']
        assert sorted(
            paths['/api/v2/credentials/'].keys()
        ) == ['get', 'post']
        assert sorted(
            paths['/api/v2/credentials/{id}/'].keys()
        ) == ['delete', 'get', 'patch', 'put']
        assert paths['/api/v2/settings/'].keys() == ['get']
        assert paths['/api/v2/settings/{category_slug}/'].keys() == [
            'get', 'put', 'patch', 'delete'
        ]

        # Test deprecated paths
        assert paths['/api/v2/jobs/{id}/extra_credentials/']['get']['deprecated'] is True

    @pytest.mark.parametrize('path', [
        '/api/',
        '/api/v2/',
        '/api/v2/ping/',
        '/api/v2/config/',
    ])
    def test_basic_paths(self, path, get, admin):
        # hit a couple important endpoints so we always have example data
        get(path, user=admin, expect=200)

    def test_autogen_response_examples(self, swagger_autogen):
        for pattern, node in TestSwaggerGeneration.JSON['paths'].items():
            pattern = pattern.replace('{id}', '[0-9]+')
            pattern = pattern.replace('{category_slug}', '[a-zA-Z0-9\-]+')
            for path, result in swagger_autogen.items():
                if re.match('^{}$'.format(pattern), path):
                    for key, value in result.items():
                        method, status_code = key
                        content_type, resp, request_data = value
                        if method in node:
                            status_code = str(status_code)
                            if content_type:
                                produces = node[method].setdefault('produces', [])
                                if content_type not in produces:
                                    produces.append(content_type)
                            if request_data and status_code.startswith('2'):
                                # DRF builds a schema based on the serializer
                                # fields. This is _pretty good_, but if we
                                # have _actual_ JSON examples, those are even
                                # better and we should use them instead
                                for param in node[method].get('parameters'):
                                    if param['in'] == 'body':
                                        node[method]['parameters'].remove(param)
                                node[method].setdefault('parameters', []).append({
                                    'name': 'data',
                                    'in': 'body',
                                    'schema': {'example': request_data},
                                })

                            # Build response examples
                            if resp:
                                if content_type.startswith('text/html'):
                                    continue
                                if content_type == 'application/json':
                                    resp = json.loads(resp)
                                node[method]['responses'].setdefault(status_code, {}).setdefault(
                                    'examples', {}
                                )[content_type] = resp

    @classmethod
    def teardown_class(cls):
        with open('swagger.json', 'w') as f:
            data = force_bytes(
                json.dumps(cls.JSON, cls=i18nEncoder, indent=2)
            )
            # replace ISO dates w/ the same value so we don't generate
            # needless diffs
            data = re.sub(
                '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]+Z',
                '2018-02-01T08:00:00.000000Z',
                data
            )
            f.write(data)
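Once the test class tears down, `swagger.json` is on disk and can be post-processed. A minimal sketch of listing the documented endpoints from the generated file:

```python
import json

with open('swagger.json') as f:
    spec = json.load(f)

# Print every documented path and its HTTP methods.
for path, node in sorted(spec['paths'].items()):
    print(path, sorted(node.keys()))
```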
@ -35,8 +35,9 @@ def mk_instance(persisted=True, hostname='instance.example.org'):
    return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, hostname=hostname)[0]


def mk_instance_group(name='tower', instance=None):
    ig, status = InstanceGroup.objects.get_or_create(name=name)
def mk_instance_group(name='tower', instance=None, minimum=0, percentage=0):
    ig, status = InstanceGroup.objects.get_or_create(name=name, policy_instance_minimum=minimum,
                                                     policy_instance_percentage=percentage)
    if instance is not None:
        if type(instance) == list:
            for i in instance:

@ -135,8 +135,8 @@ def create_instance(name, instance_groups=None):
    return mk_instance(hostname=name)


def create_instance_group(name, instances=None):
    return mk_instance_group(name=name, instance=instances)
def create_instance_group(name, instances=None, minimum=0, percentage=0):
    return mk_instance_group(name=name, instance=instances, minimum=minimum, percentage=percentage)


def create_survey_spec(variables=None, default_type='integer', required=True, min=None, max=None):
@ -27,6 +27,12 @@ def test_non_job_extra_vars_prohibited(post, project, admin_user):
    assert 'not allowed on launch' in str(r.data['extra_data'][0])


@pytest.mark.django_db
def test_wfjt_schedule_accepted(post, workflow_job_template, admin_user):
    url = reverse('api:workflow_job_template_schedules_list', kwargs={'pk': workflow_job_template.id})
    post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE}, admin_user, expect=201)


@pytest.mark.django_db
def test_valid_survey_answer(post, admin_user, project, inventory, survey_spec_factory):
    job_template = JobTemplate.objects.create(

@ -60,6 +66,7 @@ def test_valid_survey_answer(post, admin_user, project, inventory, survey_spec_f
    ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYWEEKNO=20", "BYWEEKNO not supported"),
    ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"),  # noqa
    ("DTSTART:20300308T050000Z RRULE:FREQ=REGULARLY;INTERVAL=1", "rrule parsing failed validation: invalid 'FREQ': REGULARLY"),  # noqa
    ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"),  # noqa
    ("DTSTART;TZID=America/New_York:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"),
    ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"),
    ("DTSTART:19700101T000000Z RRULE:FREQ=MINUTELY;INTERVAL=1", "more than 1000 events are not allowed"),  # noqa

@ -274,3 +281,10 @@ def test_dst_rollback_duplicates(post, admin_user):
        '2030-11-03 02:30:00-05:00',
        '2030-11-03 03:30:00-05:00',
    ]


@pytest.mark.django_db
def test_zoneinfo(get, admin_user):
    url = reverse('api:schedule_zoneinfo')
    r = get(url, admin_user, expect=200)
    assert {'name': 'America/New_York'} in r.data
@@ -158,6 +158,24 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
    assert response.data['result_stdout'].splitlines() == ['Testing %d' % i for i in range(3)]


+@pytest.mark.django_db
+def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
+    job = SystemJob()
+    job.save()
+    total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1
+    large_stdout = 'X' * total_bytes
+    SystemJobEvent(system_job=job, stdout=large_stdout, start_line=0).save()
+    url = reverse('api:system_job_detail', kwargs={'pk': job.pk})
+    response = get(url, user=admin, expect=200)
+    assert response.data['result_stdout'] == (
+        'Standard Output too large to display ({actual} bytes), only download '
+        'supported for sizes over {max} bytes'.format(
+            actual=total_bytes,
+            max=settings.STDOUT_MAX_BYTES_DISPLAY
+        )
+    )


@pytest.mark.django_db
@pytest.mark.parametrize('Parent, Child, relation, view', [
    [Job, JobEvent, 'job', 'api:job_stdout'],
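The placeholder message in the new test is pure string formatting; a standalone sketch with a made-up limit (AWX reads the real one from `settings.STDOUT_MAX_BYTES_DISPLAY`):

```python
STDOUT_MAX_BYTES_DISPLAY = 1048576  # assumed value, for illustration only
total_bytes = STDOUT_MAX_BYTES_DISPLAY + 1
print('Standard Output too large to display ({actual} bytes), only download '
      'supported for sizes over {max} bytes'.format(
          actual=total_bytes, max=STDOUT_MAX_BYTES_DISPLAY))
```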
@@ -1,4 +1,3 @@
-
# Python
import pytest
import mock
@@ -6,6 +5,7 @@ import json
import os
+import six
from datetime import timedelta
from six.moves import xrange

# Django
from django.core.urlresolvers import resolve
@@ -33,7 +33,8 @@ from awx.main.models.inventory import (
    Group,
    Inventory,
    InventoryUpdate,
-    InventorySource
+    InventorySource,
+    CustomInventoryScript
)
from awx.main.models.organization import (
    Organization,
@@ -47,6 +48,13 @@ from awx.main.models.notifications import (
from awx.main.models.workflow import WorkflowJobTemplate
from awx.main.models.ad_hoc_commands import AdHocCommand

+__SWAGGER_REQUESTS__ = {}


+@pytest.fixture(scope="session")
+def swagger_autogen(requests=__SWAGGER_REQUESTS__):
+    return requests


@pytest.fixture(autouse=True)
def clear_cache():
@@ -490,6 +498,13 @@ def inventory_update(inventory_source):
    return InventoryUpdate.objects.create(inventory_source=inventory_source)


+@pytest.fixture
+def inventory_script(organization):
+    return CustomInventoryScript.objects.create(name='test inv script',
+                                                organization=organization,
+                                                script='#!/usr/bin/python')


@pytest.fixture
def host(group, inventory):
    return group.hosts.create(name='single-host', inventory=inventory)
@@ -547,6 +562,9 @@ def _request(verb):
        assert response.status_code == expect
        if hasattr(response, 'render'):
            response.render()
+        __SWAGGER_REQUESTS__.setdefault(request.path, {})[
+            (request.method.lower(), response.status_code)
+        ] = (response.get('Content-Type', None), response.content, kwargs.get('data'))
        return response
    return rf
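For context, the capture in `_request` accumulates one entry per path and per (method, status) pair, which the swagger generator can later replay. A standalone sketch of the captured shape (values made up):

```python
__SWAGGER_REQUESTS__ = {}

# path -> {(method, status): (content_type, response_body, request_data)}
__SWAGGER_REQUESTS__.setdefault('/api/v2/jobs/', {})[('get', 200)] = (
    'application/json', b'{"count": 0}', None
)
print(__SWAGGER_REQUESTS__)
```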
@@ -1,6 +1,8 @@
import pytest

+from datetime import timedelta
+from six.moves import xrange

from django.utils import timezone

from awx.main.models import Fact
@@ -19,7 +21,7 @@ def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now
            continue
        facts_known.append(f)
    fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to)
-        return (facts_known, fact_objs)
+    return (facts_known, fact_objs)


@pytest.mark.django_db
@@ -27,7 +29,7 @@ def test_all(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
    epoch = timezone.now()
    ts_from = epoch - timedelta(days=1)
    ts_to = epoch + timedelta(days=10)

    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch)
    assert 9 == len(facts_known)
    assert 9 == len(fact_objs)
@@ -53,7 +55,7 @@ def test_empty_db(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
    epoch = timezone.now()
    ts_from = epoch - timedelta(days=1)
    ts_to = epoch + timedelta(days=10)

    fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to)

    assert 0 == len(fact_objs)
@@ -64,7 +66,7 @@ def test_no_results(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save):
    epoch = timezone.now()
    ts_from = epoch - timedelta(days=100)
    ts_to = epoch - timedelta(days=50)

    (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch)
    assert 0 == len(fact_objs)
@@ -146,15 +146,16 @@ def test_tzinfo_naive_until(job_template, dtstart, until):


@pytest.mark.django_db
-def test_mismatched_until_timezone(job_template):
-    rrule = 'DTSTART;TZID=America/New_York:20180601T120000 RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000' + 'Z'  # noqa the Z isn't allowed, because we have a TZID=America/New_York
+def test_until_must_be_utc(job_template):
+    rrule = 'DTSTART;TZID=America/New_York:20180601T120000 RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000'  # noqa the Z is required
    s = Schedule(
        name='Some Schedule',
        rrule=rrule,
        unified_job_template=job_template
    )
-    with pytest.raises(ValueError):
+    with pytest.raises(ValueError) as e:
        s.save()
+    assert 'RRULE UNTIL values must be specified in UTC' in str(e)
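To restate the renamed test's rule: even when DTSTART carries a TZID, the UNTIL value must be expressed in UTC. Illustrative rrule strings (my labels, mirroring the test data):

```python
# Presumably accepted under the new rule: UNTIL carries the trailing Z.
good = ('DTSTART;TZID=America/New_York:20180601T120000 '
        'RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000Z')
# Rejected with "RRULE UNTIL values must be specified in UTC": no Z.
bad = ('DTSTART;TZID=America/New_York:20180601T120000 '
       'RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000')
```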
@pytest.mark.django_db
|
||||
@ -171,7 +172,7 @@ def test_utc_until_in_the_past(job_template):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 03, 05, tzinfo=pytz.utc))
|
||||
@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 3, 5, tzinfo=pytz.utc))
|
||||
def test_dst_phantom_hour(job_template):
|
||||
# The DST period in the United States begins at 02:00 (2 am) local time, so
|
||||
# the hour from 2:00:00 to 2:59:59 does not exist in the night of the
|
||||
|
||||
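As a quick illustration of the phantom hour (my own example; it uses pytz, which these models already import):

```python
from datetime import datetime
import pytz

ny = pytz.timezone('America/New_York')
# 2030-03-10 is the spring-forward date; 02:30 does not exist on the wall clock.
naive = datetime(2030, 3, 10, 2, 30)
print(ny.normalize(ny.localize(naive)))  # 2030-03-10 03:30:00-04:00
```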
@@ -191,25 +191,6 @@ class TestWorkflowJobTemplate:
        assert (test_view.is_valid_relation(nodes[2], node_assoc_1) ==
                {'Error': 'Cannot associate failure_nodes when always_nodes have been associated.'})

-    def test_wfjt_copy(self, wfjt, job_template, inventory, admin_user):
-        old_nodes = wfjt.workflow_job_template_nodes.all()
-        node1 = old_nodes[1]
-        node1.unified_job_template = job_template
-        node1.save()
-        node2 = old_nodes[2]
-        node2.inventory = inventory
-        node2.save()
-        new_wfjt = wfjt.user_copy(admin_user)
-        for fd in ['description', 'survey_spec', 'survey_enabled', 'extra_vars']:
-            assert getattr(wfjt, fd) == getattr(new_wfjt, fd)
-        assert new_wfjt.organization == wfjt.organization
-        assert len(new_wfjt.workflow_job_template_nodes.all()) == 3
-        nodes = new_wfjt.workflow_job_template_nodes.all()
-        assert nodes[0].success_nodes.all()[0] == nodes[1]
-        assert nodes[1].failure_nodes.all()[0] == nodes[2]
-        assert nodes[1].unified_job_template == job_template
-        assert nodes[2].inventory == inventory

    def test_wfjt_unique_together_with_org(self, organization):
        wfjt1 = WorkflowJobTemplate(name='foo', organization=organization)
        wfjt1.save()
@@ -2,6 +2,8 @@ import pytest
import mock
from datetime import timedelta
from awx.main.scheduler import TaskManager
+from awx.main.models import InstanceGroup
+from awx.main.tasks import apply_cluster_membership_policies


@pytest.mark.django_db
@@ -151,3 +153,34 @@ def test_failover_group_run(instance_factory, default_instance_group, mocker,
    tm.schedule()
    mock_job.assert_has_calls([mock.call(j1, ig1, []), mock.call(j1_1, ig2, [])])
    assert mock_job.call_count == 2


+@pytest.mark.django_db
+def test_instance_group_basic_policies(instance_factory, instance_group_factory):
+    i0 = instance_factory("i0")
+    i1 = instance_factory("i1")
+    i2 = instance_factory("i2")
+    i3 = instance_factory("i3")
+    i4 = instance_factory("i4")
+    ig0 = instance_group_factory("ig0")
+    ig1 = instance_group_factory("ig1", minimum=2)
+    ig2 = instance_group_factory("ig2", percentage=50)
+    ig3 = instance_group_factory("ig3", percentage=50)
+    ig0.policy_instance_list.append(i0.hostname)
+    ig0.save()
+    apply_cluster_membership_policies()
+    ig0 = InstanceGroup.objects.get(id=ig0.id)
+    ig1 = InstanceGroup.objects.get(id=ig1.id)
+    ig2 = InstanceGroup.objects.get(id=ig2.id)
+    ig3 = InstanceGroup.objects.get(id=ig3.id)
+    assert len(ig0.instances.all()) == 1
+    assert i0 in ig0.instances.all()
+    assert len(InstanceGroup.objects.get(id=ig1.id).instances.all()) == 2
+    assert i1 in ig1.instances.all()
+    assert i2 in ig1.instances.all()
+    assert len(InstanceGroup.objects.get(id=ig2.id).instances.all()) == 2
+    assert i3 in ig2.instances.all()
+    assert i4 in ig2.instances.all()
+    assert len(InstanceGroup.objects.get(id=ig3.id).instances.all()) == 2
+    assert i1 in ig3.instances.all()
+    assert i2 in ig3.instances.all()
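The policy tests above (and the fuller set later in this diff) pin down a round-robin style of distribution. A rough, self-contained sketch of that idea; this is only an illustration of the behavior being asserted, not AWX's actual `apply_cluster_membership_policies` implementation, and it ignores minimums and manual policy lists:

```python
def round_robin(instances, group_counts):
    """Hand out instances to groups in order, wrapping around when
    groups collectively want more instances than exist."""
    assignments = {name: [] for name, _ in group_counts}
    idx = 0
    for name, count in group_counts:
        for _ in range(count):
            assignments[name].append(instances[idx % len(instances)])
            idx += 1
    return assignments

# Mirrors test_policy_instance_distribution_uneven below: 3 instances, 4 groups.
print(round_robin(['i1', 'i2', 'i3'],
                  [('ig1', 1), ('ig2', 1), ('ig3', 1), ('ig4', 1)]))
# {'ig1': ['i1'], 'ig2': ['i2'], 'ig3': ['i3'], 'ig4': ['i1']}
```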
awx/main/tests/functional/test_copy.py (new file, 214 lines)
@@ -0,0 +1,214 @@
import pytest
import mock

from awx.api.versioning import reverse
from awx.main.utils import decrypt_field
from awx.main.models.workflow import WorkflowJobTemplateNode
from awx.main.models.jobs import JobTemplate
from awx.main.tasks import deep_copy_model_obj


@pytest.mark.django_db
def test_job_template_copy(post, get, project, inventory, machine_credential, vault_credential,
                           credential, alice, job_template_with_survey_passwords, admin):
    job_template_with_survey_passwords.project = project
    job_template_with_survey_passwords.inventory = inventory
    job_template_with_survey_passwords.save()
    job_template_with_survey_passwords.credentials.add(credential)
    job_template_with_survey_passwords.credentials.add(machine_credential)
    job_template_with_survey_passwords.credentials.add(vault_credential)
    job_template_with_survey_passwords.admin_role.members.add(alice)
    assert get(
        reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
        alice, expect=200
    ).data['can_copy'] is False
    assert get(
        reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
        admin, expect=200
    ).data['can_copy'] is True
    jt_copy_pk = post(
        reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}),
        {'name': 'new jt name'}, admin, expect=201
    ).data['id']
    jt_copy = type(job_template_with_survey_passwords).objects.get(pk=jt_copy_pk)
    assert jt_copy.created_by == admin
    assert jt_copy.name == 'new jt name'
    assert jt_copy.project == project
    assert jt_copy.inventory == inventory
    assert jt_copy.playbook == job_template_with_survey_passwords.playbook
    assert jt_copy.credentials.count() == 3
    assert credential in jt_copy.credentials.all()
    assert vault_credential in jt_copy.credentials.all()
    assert machine_credential in jt_copy.credentials.all()
    assert job_template_with_survey_passwords.survey_spec == jt_copy.survey_spec


@pytest.mark.django_db
def test_project_copy(post, get, project, organization, scm_credential, alice):
    project.credential = scm_credential
    project.save()
    project.admin_role.members.add(alice)
    assert get(
        reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200
    ).data['can_copy'] is False
    project.organization.admin_role.members.add(alice)
    assert get(
        reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200
    ).data['can_copy'] is True
    project_copy_pk = post(
        reverse('api:project_copy', kwargs={'pk': project.pk}),
        {'name': 'copied project'}, alice, expect=201
    ).data['id']
    project_copy = type(project).objects.get(pk=project_copy_pk)
    assert project_copy.created_by == alice
    assert project_copy.name == 'copied project'
    assert project_copy.organization == organization
    assert project_copy.credential == scm_credential


@pytest.mark.django_db
def test_inventory_copy(inventory, group_factory, post, get, alice, organization):
    group_1_1 = group_factory('g_1_1')
    group_2_1 = group_factory('g_2_1')
    group_2_2 = group_factory('g_2_2')
    group_2_1.parents.add(group_1_1)
    group_2_2.parents.add(group_1_1)
    group_2_2.parents.add(group_2_1)
    host = group_1_1.hosts.create(name='host', inventory=inventory)
    group_2_1.hosts.add(host)
    inventory.admin_role.members.add(alice)
    assert get(
        reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200
    ).data['can_copy'] is False
    inventory.organization.admin_role.members.add(alice)
    assert get(
        reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200
    ).data['can_copy'] is True
    with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock:
        inv_copy_pk = post(
            reverse('api:inventory_copy', kwargs={'pk': inventory.pk}),
            {'name': 'new inv name'}, alice, expect=201
        ).data['id']
        inventory_copy = type(inventory).objects.get(pk=inv_copy_pk)
        args, kwargs = deep_copy_mock.call_args
    deep_copy_model_obj(*args, **kwargs)
    group_1_1_copy = inventory_copy.groups.get(name='g_1_1')
    group_2_1_copy = inventory_copy.groups.get(name='g_2_1')
    group_2_2_copy = inventory_copy.groups.get(name='g_2_2')
    host_copy = inventory_copy.hosts.get(name='host')
    assert inventory_copy.organization == organization
    assert inventory_copy.created_by == alice
    assert inventory_copy.name == 'new inv name'
    assert set(group_1_1_copy.parents.all()) == set()
    assert set(group_2_1_copy.parents.all()) == set([group_1_1_copy])
    assert set(group_2_2_copy.parents.all()) == set([group_1_1_copy, group_2_1_copy])
    assert set(group_1_1_copy.hosts.all()) == set([host_copy])
    assert set(group_2_1_copy.hosts.all()) == set([host_copy])
    assert set(group_2_2_copy.hosts.all()) == set()


@pytest.mark.django_db
def test_workflow_job_template_copy(workflow_job_template, post, get, admin, organization):
    workflow_job_template.organization = organization
    workflow_job_template.save()
    jts = [JobTemplate.objects.create(name='test-jt-{}'.format(i)) for i in range(0, 5)]
    nodes = [
        WorkflowJobTemplateNode.objects.create(
            workflow_job_template=workflow_job_template, unified_job_template=jts[i]
        ) for i in range(0, 5)
    ]
    nodes[0].success_nodes.add(nodes[1])
    nodes[1].success_nodes.add(nodes[2])
    nodes[0].failure_nodes.add(nodes[3])
    nodes[3].failure_nodes.add(nodes[4])
    with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock:
        wfjt_copy_id = post(
            reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}),
            {'name': 'new wfjt name'}, admin, expect=201
        ).data['id']
        wfjt_copy = type(workflow_job_template).objects.get(pk=wfjt_copy_id)
        args, kwargs = deep_copy_mock.call_args
    deep_copy_model_obj(*args, **kwargs)
    assert wfjt_copy.organization == organization
    assert wfjt_copy.created_by == admin
    assert wfjt_copy.name == 'new wfjt name'
    copied_node_list = [x for x in wfjt_copy.workflow_job_template_nodes.all()]
    copied_node_list.sort(key=lambda x: int(x.unified_job_template.name[-1]))
    for node, success_count, failure_count, always_count in zip(
            copied_node_list,
            [1, 1, 0, 0, 0],
            [1, 0, 0, 1, 0],
            [0, 0, 0, 0, 0]
    ):
        assert node.success_nodes.count() == success_count
        assert node.failure_nodes.count() == failure_count
        assert node.always_nodes.count() == always_count
    assert copied_node_list[1] in copied_node_list[0].success_nodes.all()
    assert copied_node_list[2] in copied_node_list[1].success_nodes.all()
    assert copied_node_list[3] in copied_node_list[0].failure_nodes.all()
    assert copied_node_list[4] in copied_node_list[3].failure_nodes.all()


@pytest.mark.django_db
def test_credential_copy(post, get, machine_credential, credentialtype_ssh, admin):
    assert get(
        reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), admin, expect=200
    ).data['can_copy'] is True
    credential_copy_pk = post(
        reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}),
        {'name': 'copied credential'}, admin, expect=201
    ).data['id']
    credential_copy = type(machine_credential).objects.get(pk=credential_copy_pk)
    assert credential_copy.created_by == admin
    assert credential_copy.name == 'copied credential'
    assert credential_copy.credential_type == credentialtype_ssh
    assert credential_copy.inputs['username'] == machine_credential.inputs['username']
    assert (decrypt_field(credential_copy, 'password') ==
            decrypt_field(machine_credential, 'password'))


@pytest.mark.django_db
def test_notification_template_copy(post, get, notification_template_with_encrypt,
                                    organization, alice):
    # notification_template_with_encrypt.admin_role.members.add(alice)
    assert get(
        reverse(
            'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
        ), alice, expect=200
    ).data['can_copy'] is False
    notification_template_with_encrypt.organization.admin_role.members.add(alice)
    assert get(
        reverse(
            'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
        ), alice, expect=200
    ).data['can_copy'] is True
    nt_copy_pk = post(
        reverse(
            'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk}
        ), {'name': 'copied nt'}, alice, expect=201
    ).data['id']
    notification_template_copy = type(notification_template_with_encrypt).objects.get(pk=nt_copy_pk)
    assert notification_template_copy.created_by == alice
    assert notification_template_copy.name == 'copied nt'
    assert notification_template_copy.organization == organization
    assert (decrypt_field(notification_template_with_encrypt, 'notification_configuration', 'token') ==
            decrypt_field(notification_template_copy, 'notification_configuration', 'token'))


@pytest.mark.django_db
def test_inventory_script_copy(post, get, inventory_script, organization, alice):
    assert get(
        reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200
    ).data['can_copy'] is False
    inventory_script.organization.admin_role.members.add(alice)
    assert get(
        reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200
    ).data['can_copy'] is True
    is_copy_pk = post(
        reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}),
        {'name': 'copied inv script'}, alice, expect=201
    ).data['id']
    inventory_script_copy = type(inventory_script).objects.get(pk=is_copy_pk)
    assert inventory_script_copy.created_by == alice
    assert inventory_script_copy.name == 'copied inv script'
    assert inventory_script_copy.organization == organization
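All of the deep-copy tests above share one pattern: patch the asynchronous dispatch (`trigger_delayed_deep_copy`), let the API view run, then replay the captured call synchronously via `deep_copy_model_obj` so assertions can execute in-process. A self-contained sketch of the pattern with stand-in functions (not AWX code; it uses `unittest.mock`, the modern equivalent of the `mock` import above):

```python
from unittest import mock

def trigger_later(*args, **kwargs):
    """Stand-in for the async dispatch, e.g. trigger_delayed_deep_copy."""

def do_work(*args, **kwargs):
    """Stand-in for the synchronous worker, e.g. deep_copy_model_obj."""
    return (args, kwargs)

with mock.patch('__main__.trigger_later') as dispatched:
    # Normally the code under test calls the dispatch; we simulate that here.
    trigger_later('inventory', 42, name='new inv name')
args, kwargs = dispatched.call_args
print(do_work(*args, **kwargs))  # run the deferred work synchronously
```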
@@ -107,8 +107,11 @@ def test_cred_type_input_schema_validity(input_, valid):
    ({}, True),
    ({'invalid-injector': {}}, False),
    ({'file': 123}, False),
-    ({'file': {}}, False),
+    ({'file': {}}, True),
    ({'file': {'template': '{{username}}'}}, True),
+    ({'file': {'template.username': '{{username}}'}}, True),
+    ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
+    ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
    ({'file': {'foo': 'bar'}}, False),
    ({'env': 123}, False),
    ({'env': {}}, True),
@@ -1,7 +1,8 @@
import pytest
+import mock

-from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate
-from awx.main.models import Instance
+from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate, Instance
+from awx.main.tasks import apply_cluster_membership_policies
from awx.api.versioning import reverse
@@ -30,6 +31,130 @@ def test_instance_dup(org_admin, organization, project, instance_factory, instan
    assert api_num_instances_oa == (actual_num_instances - 1)


+@pytest.mark.django_db
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
+def test_policy_instance_few_instances(mock, instance_factory, instance_group_factory):
+    i1 = instance_factory("i1")
+    ig_1 = instance_group_factory("ig1", percentage=25)
+    ig_2 = instance_group_factory("ig2", percentage=25)
+    ig_3 = instance_group_factory("ig3", percentage=25)
+    ig_4 = instance_group_factory("ig4", percentage=25)
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 1
+    assert i1 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i1 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i1 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i1 in ig_4.instances.all()
+    i2 = instance_factory("i2")
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 1
+    assert i1 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i2 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i1 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i2 in ig_4.instances.all()


+@pytest.mark.django_db
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
+def test_policy_instance_distribution_uneven(mock, instance_factory, instance_group_factory):
+    i1 = instance_factory("i1")
+    i2 = instance_factory("i2")
+    i3 = instance_factory("i3")
+    ig_1 = instance_group_factory("ig1", percentage=25)
+    ig_2 = instance_group_factory("ig2", percentage=25)
+    ig_3 = instance_group_factory("ig3", percentage=25)
+    ig_4 = instance_group_factory("ig4", percentage=25)
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 1
+    assert i1 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i2 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i3 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i1 in ig_4.instances.all()


+@pytest.mark.django_db
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
+def test_policy_instance_distribution_even(mock, instance_factory, instance_group_factory):
+    i1 = instance_factory("i1")
+    i2 = instance_factory("i2")
+    i3 = instance_factory("i3")
+    i4 = instance_factory("i4")
+    ig_1 = instance_group_factory("ig1", percentage=25)
+    ig_2 = instance_group_factory("ig2", percentage=25)
+    ig_3 = instance_group_factory("ig3", percentage=25)
+    ig_4 = instance_group_factory("ig4", percentage=25)
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 1
+    assert i1 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i2 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i3 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i4 in ig_4.instances.all()
+    ig_1.policy_instance_minimum = 2
+    ig_1.save()
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 2
+    assert i1 in ig_1.instances.all()
+    assert i2 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i3 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i4 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i1 in ig_4.instances.all()


+@pytest.mark.django_db
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
+def test_policy_instance_distribution_simultaneous(mock, instance_factory, instance_group_factory):
+    i1 = instance_factory("i1")
+    i2 = instance_factory("i2")
+    i3 = instance_factory("i3")
+    i4 = instance_factory("i4")
+    ig_1 = instance_group_factory("ig1", percentage=25, minimum=2)
+    ig_2 = instance_group_factory("ig2", percentage=25)
+    ig_3 = instance_group_factory("ig3", percentage=25)
+    ig_4 = instance_group_factory("ig4", percentage=25)
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 2
+    assert i1 in ig_1.instances.all()
+    assert i2 in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i3 in ig_2.instances.all()
+    assert len(ig_3.instances.all()) == 1
+    assert i4 in ig_3.instances.all()
+    assert len(ig_4.instances.all()) == 1
+    assert i1 in ig_4.instances.all()


+@pytest.mark.django_db
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
+def test_policy_instance_list_manually_managed(mock, instance_factory, instance_group_factory):
+    i1 = instance_factory("i1")
+    i2 = instance_factory("i2")
+    ig_1 = instance_group_factory("ig1", percentage=100, minimum=2)
+    ig_2 = instance_group_factory("ig2")
+    ig_2.policy_instance_list = [i2.hostname]
+    ig_2.save()
+    apply_cluster_membership_policies()
+    assert len(ig_1.instances.all()) == 1
+    assert i1 in ig_1.instances.all()
+    assert i2 not in ig_1.instances.all()
+    assert len(ig_2.instances.all()) == 1
+    assert i2 in ig_2.instances.all()


@pytest.mark.django_db
def test_basic_instance_group_membership(instance_group_factory, default_instance_group, job_factory):
    j = job_factory()
@@ -1,12 +1,11 @@
-from awx.main.models import (
-    Job,
-    Instance
-)
-from django.test.utils import override_settings
import pytest

import mock
import json

+from awx.main.models import Job, Instance
+from awx.main.tasks import cluster_node_heartbeat
+from django.test.utils import override_settings


@pytest.mark.django_db
def test_orphan_unified_job_creation(instance, inventory):
@@ -20,13 +19,19 @@ def test_orphan_unified_job_creation(instance, inventory):


@pytest.mark.django_db
+@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2,8))
+@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62))
+@mock.patch('awx.main.tasks.handle_ha_toplogy_changes.apply_async', lambda: True)
def test_job_capacity_and_with_inactive_node():
-    Instance.objects.create(hostname='test-1', capacity=50)
-    assert Instance.objects.total_capacity() == 50
-    Instance.objects.create(hostname='test-2', capacity=50)
-    assert Instance.objects.total_capacity() == 100
-    with override_settings(AWX_ACTIVE_NODE_TIME=0):
-        assert Instance.objects.total_capacity() < 100
+    i = Instance.objects.create(hostname='test-1')
+    i.refresh_capacity()
+    assert i.capacity == 62
+    i.enabled = False
+    i.save()
+    with override_settings(CLUSTER_HOST_ID=i.hostname):
+        cluster_node_heartbeat()
+    i = Instance.objects.get(id=i.id)
+    assert i.capacity == 0


@pytest.mark.django_db