diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c7258dad63..9318918864 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -24,6 +24,7 @@ Have questions about this document or anything not covered here? Come chat with * [Start a shell](#start-the-shell) * [Create a superuser](#create-a-superuser) * [Load the data](#load-the-data) + * [Building API Documentation](#building-api-documentation) * [Accessing the AWX web interface](#accessing-the-awx-web-interface) * [Purging containers and images](#purging-containers-and-images) * [What should I work on?](#what-should-i-work-on) @@ -261,6 +262,20 @@ You can optionally load some demo data. This will create a demo project, invento > This information will persist in the database running in the `tools_postgres_1` container, until the container is removed. You may periodically need to recreate this container, and thus the database, if the database schema changes in an upstream commit. +##### Building API Documentation + +AWX includes support for building [Swagger/OpenAPI +documentation](https://swagger.io). To build the documentation locally, run: + +```bash +(container)/awx_devel$ make swagger +``` + +This will write a file named `swagger.json` that contains the API specification +in OpenAPI format. A variety of online tools are available for translating +this data into more consumable formats (such as HTML). http://editor.swagger.io +is an example of one such service. + ### Accessing the AWX web interface You can now log into the AWX web interface at [https://localhost:8043](https://localhost:8043), and access the API directly at [https://localhost:8043/api/](https://localhost:8043/api/). 
diff --git a/Makefile b/Makefile index 58479e0f19..fe221fffe4 100644 --- a/Makefile +++ b/Makefile @@ -23,7 +23,7 @@ COMPOSE_HOST ?= $(shell hostname) VENV_BASE ?= /venv SCL_PREFIX ?= -CELERY_SCHEDULE_FILE ?= /celerybeat-schedule +CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering # Python packages to install only from source (not from binary wheels) @@ -216,13 +216,11 @@ init: . $(VENV_BASE)/awx/bin/activate; \ fi; \ $(MANAGEMENT_COMMAND) provision_instance --hostname=$(COMPOSE_HOST); \ - $(MANAGEMENT_COMMAND) register_queue --queuename=tower --hostnames=$(COMPOSE_HOST);\ + $(MANAGEMENT_COMMAND) register_queue --queuename=tower --instance_percent=100;\ if [ "$(AWX_GROUP_QUEUES)" == "tower,thepentagon" ]; then \ $(MANAGEMENT_COMMAND) provision_instance --hostname=isolated; \ $(MANAGEMENT_COMMAND) register_queue --queuename='thepentagon' --hostnames=isolated --controller=tower; \ $(MANAGEMENT_COMMAND) generate_isolated_key | ssh -o "StrictHostKeyChecking no" root@isolated 'cat > /root/.ssh/authorized_keys'; \ - elif [ "$(AWX_GROUP_QUEUES)" != "tower" ]; then \ - $(MANAGEMENT_COMMAND) register_queue --queuename=$(firstword $(subst $(comma), ,$(AWX_GROUP_QUEUES))) --hostnames=$(COMPOSE_HOST); \ fi; # Refresh development environment after pulling new code. @@ -299,7 +297,7 @@ uwsgi: collectstatic @if [ "$(VENV_BASE)" ]; then \ . 
$(VENV_BASE)/awx/bin/activate; \ fi; \ - uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:kill -1 `cat /tmp/celery_pid`" + uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'" daphne: @if [ "$(VENV_BASE)" ]; then \ @@ -326,7 +324,7 @@ celeryd: @if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi; \ - celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_scheduler,tower_broadcast_all,$(COMPOSE_HOST),$(AWX_GROUP_QUEUES) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid + celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -Q tower_broadcast_all -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid # Run to start the zeromq callback receiver receiver: @@ -365,6 +363,12 @@ pyflakes: reports pylint: reports @(set -o pipefail && $@ | reports/$@.report) +swagger: reports + @if [ "$(VENV_BASE)" ]; then \ + . 
$(VENV_BASE)/awx/bin/activate; \ + fi; \ + (set -o pipefail && py.test awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report) + check: flake8 pep8 # pyflakes pylint TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests diff --git a/awx/api/generics.py b/awx/api/generics.py index 73c8ddd1db..41724c9440 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -5,6 +5,7 @@ import inspect import logging import time +import six # Django from django.conf import settings @@ -26,6 +27,10 @@ from rest_framework import generics from rest_framework.response import Response from rest_framework import status from rest_framework import views +from rest_framework.permissions import AllowAny + +# cryptography +from cryptography.fernet import InvalidToken # AWX from awx.api.filters import FieldLookupBackend @@ -33,9 +38,9 @@ from awx.main.models import * # noqa from awx.main.access import access_registry from awx.main.utils import * # noqa from awx.main.utils.db import get_all_field_names -from awx.api.serializers import ResourceAccessListElementSerializer +from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer from awx.api.versioning import URLPathVersioning, get_request_version -from awx.api.metadata import SublistAttachDetatchMetadata +from awx.api.metadata import SublistAttachDetatchMetadata, Metadata __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView', 'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView', @@ -47,7 +52,8 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView', 'ResourceAccessList', 'ParentMixin', 'DeleteLastUnattachLabelMixin', - 'SubListAttachDetachAPIView',] + 'SubListAttachDetachAPIView', + 'CopyAPIView'] logger = logging.getLogger('awx.api.generics') analytics_logger = logging.getLogger('awx.analytics.performance') @@ -91,8 +97,17 @@ def get_view_description(cls, 
request, html=False): return mark_safe(desc) +def get_default_schema(): + if settings.SETTINGS_MODULE == 'awx.settings.development': + from awx.api.swagger import AutoSchema + return AutoSchema() + else: + return views.APIView.schema + + class APIView(views.APIView): + schema = get_default_schema() versioning_class = URLPathVersioning def initialize_request(self, request, *args, **kwargs): @@ -176,27 +191,14 @@ class APIView(views.APIView): and in the browsable API. """ func = self.settings.VIEW_DESCRIPTION_FUNCTION - return func(self.__class__, self._request, html) + return func(self.__class__, getattr(self, '_request', None), html) def get_description_context(self): return { 'view': self, 'docstring': type(self).__doc__ or '', - 'new_in_13': getattr(self, 'new_in_13', False), - 'new_in_14': getattr(self, 'new_in_14', False), - 'new_in_145': getattr(self, 'new_in_145', False), - 'new_in_148': getattr(self, 'new_in_148', False), - 'new_in_200': getattr(self, 'new_in_200', False), - 'new_in_210': getattr(self, 'new_in_210', False), - 'new_in_220': getattr(self, 'new_in_220', False), - 'new_in_230': getattr(self, 'new_in_230', False), - 'new_in_240': getattr(self, 'new_in_240', False), - 'new_in_300': getattr(self, 'new_in_300', False), - 'new_in_310': getattr(self, 'new_in_310', False), - 'new_in_320': getattr(self, 'new_in_320', False), - 'new_in_330': getattr(self, 'new_in_330', False), - 'new_in_api_v2': getattr(self, 'new_in_api_v2', False), 'deprecated': getattr(self, 'deprecated', False), + 'swagger_method': getattr(self.request, 'swagger_method', None), } def get_description(self, request, html=False): @@ -214,7 +216,7 @@ class APIView(views.APIView): context['deprecated'] = True description = render_to_string(template_list, context) - if context.get('deprecated'): + if context.get('deprecated') and context.get('swagger_method') is None: # render deprecation messages at the very top description = '\n'.join([render_to_string('api/_deprecated.md', context), 
description]) return description @@ -747,3 +749,152 @@ class ResourceAccessList(ParentMixin, ListAPIView): for r in roles: ancestors.update(set(r.ancestors.all())) return User.objects.filter(roles__in=list(ancestors)).distinct() + + +def trigger_delayed_deep_copy(*args, **kwargs): + from awx.main.tasks import deep_copy_model_obj + connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs)) + + +class CopyAPIView(GenericAPIView): + + serializer_class = CopySerializer + permission_classes = (AllowAny,) + copy_return_serializer_class = None + new_in_330 = True + new_in_api_v2 = True + + def _get_copy_return_serializer(self, *args, **kwargs): + if not self.copy_return_serializer_class: + return self.get_serializer(*args, **kwargs) + serializer_class_store = self.serializer_class + self.serializer_class = self.copy_return_serializer_class + ret = self.get_serializer(*args, **kwargs) + self.serializer_class = serializer_class_store + return ret + + @staticmethod + def _decrypt_model_field_if_needed(obj, field_name, field_val): + if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []): + return field_val + if isinstance(field_val, dict): + for sub_field in field_val: + if isinstance(sub_field, six.string_types) \ + and isinstance(field_val[sub_field], six.string_types): + try: + field_val[sub_field] = decrypt_field(obj, field_name, sub_field) + except InvalidToken: + # Catching the corner case with v1 credential fields + field_val[sub_field] = decrypt_field(obj, sub_field) + elif isinstance(field_val, six.string_types): + field_val = decrypt_field(obj, field_name) + return field_val + + def _build_create_dict(self, obj): + ret = {} + if self.copy_return_serializer_class: + all_fields = Metadata().get_serializer_info( + self._get_copy_return_serializer(), method='POST' + ) + for field_name, field_info in all_fields.items(): + if not hasattr(obj, field_name) or field_info.get('read_only', True): + continue + ret[field_name] = 
CopyAPIView._decrypt_model_field_if_needed( + obj, field_name, getattr(obj, field_name) + ) + return ret + + @staticmethod + def copy_model_obj(old_parent, new_parent, model, obj, creater, copy_name='', create_kwargs=None): + fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', [])) + fields_to_discard = set(getattr(model, 'FIELDS_TO_DISCARD_AT_COPY', [])) + m2m_to_preserve = {} + o2m_to_preserve = {} + create_kwargs = create_kwargs or {} + for field_name in fields_to_discard: + create_kwargs.pop(field_name, None) + for field in model._meta.get_fields(): + try: + field_val = getattr(obj, field.name) + except AttributeError: + continue + # Adjust copy blacklist fields here. + if field.name in fields_to_discard or field.name in [ + 'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by' + ] or field.name.endswith('_role'): + create_kwargs.pop(field.name, None) + continue + if field.one_to_many: + if field.name in fields_to_preserve: + o2m_to_preserve[field.name] = field_val + elif field.many_to_many: + if field.name in fields_to_preserve and not old_parent: + m2m_to_preserve[field.name] = field_val + elif field.many_to_one and not field_val: + create_kwargs.pop(field.name, None) + elif field.many_to_one and field_val == old_parent: + create_kwargs[field.name] = new_parent + elif field.name == 'name' and not old_parent: + create_kwargs[field.name] = copy_name or field_val + ' copy' + elif field.name in fields_to_preserve: + create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed( + obj, field.name, field_val + ) + new_obj = model.objects.create(**create_kwargs) + # Need to save separately because Django-crum get_current_user would + not work properly in non-request-response-cycle context. 
+ new_obj.created_by = creater + new_obj.save() + for m2m in m2m_to_preserve: + for related_obj in m2m_to_preserve[m2m].all(): + getattr(new_obj, m2m).add(related_obj) + if not old_parent: + sub_objects = [] + for o2m in o2m_to_preserve: + for sub_obj in o2m_to_preserve[o2m].all(): + sub_model = type(sub_obj) + sub_objects.append((sub_model.__module__, sub_model.__name__, sub_obj.pk)) + return new_obj, sub_objects + ret = {obj: new_obj} + for o2m in o2m_to_preserve: + for sub_obj in o2m_to_preserve[o2m].all(): + ret.update(CopyAPIView.copy_model_obj(obj, new_obj, type(sub_obj), sub_obj, creater)) + return ret + + def get(self, request, *args, **kwargs): + obj = self.get_object() + create_kwargs = self._build_create_dict(obj) + for key in create_kwargs: + create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key] + return Response({'can_copy': request.user.can_access(self.model, 'add', create_kwargs)}) + + def post(self, request, *args, **kwargs): + obj = self.get_object() + create_kwargs = self._build_create_dict(obj) + create_kwargs_check = {} + for key in create_kwargs: + create_kwargs_check[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key] + if not request.user.can_access(self.model, 'add', create_kwargs_check): + raise PermissionDenied() + serializer = self.get_serializer(data=request.data) + if not serializer.is_valid(): + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + new_obj, sub_objs = CopyAPIView.copy_model_obj( + None, None, self.model, obj, request.user, create_kwargs=create_kwargs, + copy_name=serializer.validated_data.get('name', '') + ) + if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role: + new_obj.admin_role.members.add(request.user) + if sub_objs: + permission_check_func = None + if hasattr(type(self), 'deep_copy_permission_check_func'): + permission_check_func = ( + type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func' + ) + 
trigger_delayed_deep_copy( + self.model.__module__, self.model.__name__, + obj.pk, new_obj.pk, request.user.pk, sub_objs, + permission_check_func=permission_check_func + ) + serializer = self._get_copy_return_serializer(new_obj) + return Response(serializer.data, status=status.HTTP_201_CREATED) diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 7beb3bd5ad..bc44deb6f0 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -190,23 +190,6 @@ class Metadata(metadata.SimpleMetadata): finally: delattr(view, '_request') - # Add version number in which view was added to Tower. - added_in_version = '1.2' - for version in ('3.2.0', '3.1.0', '3.0.0', '2.4.0', '2.3.0', '2.2.0', - '2.1.0', '2.0.0', '1.4.8', '1.4.5', '1.4', '1.3'): - if getattr(view, 'new_in_%s' % version.replace('.', ''), False): - added_in_version = version - break - metadata['added_in_version'] = added_in_version - - # Add API version number in which view was added to Tower. - added_in_api_version = 'v1' - for version in ('v2',): - if getattr(view, 'new_in_api_%s' % version, False): - added_in_api_version = version - break - metadata['added_in_api_version'] = added_in_api_version - # Add type(s) handled by this view/serializer. 
if hasattr(view, 'get_serializer'): serializer = view.get_serializer() diff --git a/awx/api/parsers.py b/awx/api/parsers.py index 8115763ae9..1eb005eaeb 100644 --- a/awx/api/parsers.py +++ b/awx/api/parsers.py @@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader): key = self.construct_object(key_node, deep=deep) try: hash(key) - except TypeError, exc: + except TypeError as exc: raise yaml.constructor.ConstructorError( "while constructing a mapping", node.start_mark, "found unacceptable key (%s)" % exc, key_node.start_mark diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d1cdcd8a90..71bafdb518 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -130,6 +130,22 @@ def reverse_gfk(content_object, request): } +class CopySerializer(serializers.Serializer): + + name = serializers.CharField() + + def validate(self, attrs): + name = attrs.get('name') + view = self.context.get('view', None) + obj = view.get_object() + if name == obj.name: + raise serializers.ValidationError(_( + 'The original object is already named {}, a copy from' + ' it cannot have the same name.'.format(name) + )) + return attrs + + class BaseSerializerMetaclass(serializers.SerializerMetaclass): ''' Custom metaclass to enable attribute inheritance from Meta objects on @@ -1003,6 +1019,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): notification_templates_error = self.reverse('api:project_notification_templates_error_list', kwargs={'pk': obj.pk}), access_list = self.reverse('api:project_access_list', kwargs={'pk': obj.pk}), object_roles = self.reverse('api:project_object_roles_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:project_copy', kwargs={'pk': obj.pk}), )) if obj.organization: res['organization'] = self.reverse('api:organization_detail', @@ -1156,6 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables): access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}), object_roles = 
self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:inventory_instance_groups_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}), )) if obj.insights_credential: res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk}) @@ -1173,7 +1191,7 @@ class InventorySerializer(BaseSerializerWithVariables): if host_filter: try: SmartFilter().query_from_string(host_filter) - except RuntimeError, e: + except RuntimeError as e: raise models.base.ValidationError(e) return host_filter @@ -1513,6 +1531,7 @@ class CustomInventoryScriptSerializer(BaseSerializer): res = super(CustomInventoryScriptSerializer, self).get_related(obj) res.update(dict( object_roles = self.reverse('api:inventory_script_object_roles_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:inventory_script_copy', kwargs={'pk': obj.pk}), )) if obj.organization: @@ -2070,6 +2089,7 @@ class CredentialSerializer(BaseSerializer): object_roles = self.reverse('api:credential_object_roles_list', kwargs={'pk': obj.pk}), owner_users = self.reverse('api:credential_owner_users_list', kwargs={'pk': obj.pk}), owner_teams = self.reverse('api:credential_owner_teams_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:credential_copy', kwargs={'pk': obj.pk}), )) # TODO: remove when API v1 is removed @@ -2547,6 +2567,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}), object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}), )) if obj.host_config_key: res['callback'] = self.reverse('api:job_template_callback', kwargs={'pk': obj.pk}) @@ -2968,7 +2989,14 @@ class 
SystemJobSerializer(UnifiedJobSerializer): return res def get_result_stdout(self, obj): - return obj.result_stdout + try: + return obj.result_stdout + except StdoutMaxBytesExceeded as e: + return _( + "Standard Output too large to display ({text_size} bytes), " + "only download supported for sizes over {supported_size} bytes").format( + text_size=e.total, supported_size=e.supported + ) class SystemJobCancelSerializer(SystemJobSerializer): @@ -3107,6 +3135,12 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): ret['extra_data'] = obj.display_extra_data() return ret + def get_summary_fields(self, obj): + summary_fields = super(LaunchConfigurationBaseSerializer, self).get_summary_fields(obj) + # Credential would be an empty dictionary in this case + summary_fields.pop('credential', None) + return summary_fields + def validate(self, attrs): attrs = super(LaunchConfigurationBaseSerializer, self).validate(attrs) @@ -3782,6 +3816,7 @@ class NotificationTemplateSerializer(BaseSerializer): res.update(dict( test = self.reverse('api:notification_template_test', kwargs={'pk': obj.pk}), notifications = self.reverse('api:notification_template_notification_list', kwargs={'pk': obj.pk}), + copy = self.reverse('api:notification_template_copy', kwargs={'pk': obj.pk}), )) if obj.organization: res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk}) @@ -3887,6 +3922,7 @@ class SchedulePreviewSerializer(BaseSerializer): # - BYYEARDAY # - BYWEEKNO # - Multiple DTSTART or RRULE elements + # - Can't contain both COUNT and UNTIL # - COUNT > 999 def validate_rrule(self, value): rrule_value = value @@ -3921,6 +3957,8 @@ class SchedulePreviewSerializer(BaseSerializer): raise serializers.ValidationError(_("BYYEARDAY not supported.")) if 'byweekno' in rrule_value.lower(): raise serializers.ValidationError(_("BYWEEKNO not supported.")) + if 'COUNT' in rrule_value and 'UNTIL' in rrule_value: + raise serializers.ValidationError(_("RRULE may not 
contain both COUNT and UNTIL")) if match_count: count_val = match_count.groups()[0].strip().split("=") if int(count_val[1]) > 999: @@ -3946,6 +3984,15 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria )) if obj.unified_job_template: res['unified_job_template'] = obj.unified_job_template.get_absolute_url(self.context.get('request')) + try: + if obj.unified_job_template.project: + res['project'] = obj.unified_job_template.project.get_absolute_url(self.context.get('request')) + except ObjectDoesNotExist: + pass + if obj.inventory: + res['inventory'] = obj.inventory.get_absolute_url(self.context.get('request')) + elif obj.unified_job_template and getattr(obj.unified_job_template, 'inventory', None): + res['inventory'] = obj.unified_job_template.inventory.get_absolute_url(self.context.get('request')) return res def validate_unified_job_template(self, value): @@ -3968,8 +4015,10 @@ class InstanceSerializer(BaseSerializer): class Meta: model = Instance - fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", - "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running") + read_only_fields = ('uuid', 'hostname', 'version') + fields = ("id", "type", "url", "related", "uuid", "hostname", "created", "modified", 'capacity_adjustment', + "version", "capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running", + "cpu", "memory", "cpu_capacity", "mem_capacity", "enabled") def get_related(self, obj): res = super(InstanceSerializer, self).get_related(obj) @@ -4002,7 +4051,8 @@ class InstanceGroupSerializer(BaseSerializer): model = InstanceGroup fields = ("id", "type", "url", "related", "name", "created", "modified", "capacity", "committed_capacity", "consumed_capacity", - "percent_capacity_remaining", "jobs_running", "instances", "controller") + "percent_capacity_remaining", "jobs_running", "instances", "controller", + "policy_instance_percentage", 
"policy_instance_minimum", "policy_instance_list") def get_related(self, obj): res = super(InstanceGroupSerializer, self).get_related(obj) diff --git a/awx/api/swagger.py b/awx/api/swagger.py new file mode 100644 index 0000000000..b67f2d4a26 --- /dev/null +++ b/awx/api/swagger.py @@ -0,0 +1,103 @@ +import json +import warnings + +from coreapi.document import Object, Link + +from rest_framework import exceptions +from rest_framework.permissions import AllowAny +from rest_framework.renderers import CoreJSONRenderer +from rest_framework.response import Response +from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema +from rest_framework.views import APIView + +from rest_framework_swagger import renderers + + +class AutoSchema(DRFAuthSchema): + + def get_link(self, path, method, base_url): + link = super(AutoSchema, self).get_link(path, method, base_url) + try: + serializer = self.view.get_serializer() + except Exception: + serializer = None + warnings.warn('{}.get_serializer() raised an exception during ' + 'schema generation. Serializer fields will not be ' + 'generated for {} {}.' 
+ .format(self.view.__class__.__name__, method, path)) + + link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False) + + # auto-generate a topic/tag for the serializer based on its model + if hasattr(self.view, 'swagger_topic'): + link.__dict__['topic'] = str(self.view.swagger_topic).title() + elif serializer and hasattr(serializer, 'Meta'): + link.__dict__['topic'] = str( + serializer.Meta.model._meta.verbose_name_plural + ).title() + elif hasattr(self.view, 'model'): + link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title() + else: + warnings.warn('Could not determine a Swagger tag for path {}'.format(path)) + return link + + def get_description(self, path, method): + self.view._request = self.view.request + setattr(self.view.request, 'swagger_method', method) + description = super(AutoSchema, self).get_description(path, method) + return description + + +class SwaggerSchemaView(APIView): + _ignore_model_permissions = True + exclude_from_schema = True + permission_classes = [AllowAny] + renderer_classes = [ + CoreJSONRenderer, + renderers.OpenAPIRenderer, + renderers.SwaggerUIRenderer + ] + + def get(self, request): + generator = SchemaGenerator( + title='Ansible Tower API', + patterns=None, + urlconf=None + ) + schema = generator.get_schema(request=request) + # python core-api doesn't support the deprecation yet, so track it + # ourselves and return it in a response header + _deprecated = [] + + # By default, DRF OpenAPI serialization places all endpoints in + # a single node based on their root path (/api). 
Instead, we want to + # group them by topic/tag so that they're categorized in the rendered + # output + document = schema._data.pop('api') + for path, node in document.items(): + if isinstance(node, Object): + for action in node.values(): + topic = getattr(action, 'topic', None) + if topic: + schema._data.setdefault(topic, Object()) + schema._data[topic]._data[path] = node + + if isinstance(action, Object): + for link in action.links.values(): + if link.deprecated: + _deprecated.append(link.url) + elif isinstance(node, Link): + topic = getattr(node, 'topic', None) + if topic: + schema._data.setdefault(topic, Object()) + schema._data[topic]._data[path] = node + + if not schema: + raise exceptions.ValidationError( + 'The schema generator did not return a schema Document' + ) + + return Response( + schema, + headers={'X-Deprecated-Paths': json.dumps(_deprecated)} + ) diff --git a/awx/api/templates/api/_new_in_awx.md b/awx/api/templates/api/_new_in_awx.md deleted file mode 100644 index b3e4552517..0000000000 --- a/awx/api/templates/api/_new_in_awx.md +++ /dev/null @@ -1,14 +0,0 @@ -{% if not version_label_flag or version_label_flag == 'true' %} -{% if new_in_13 %}> _Added in AWX 1.3_{% endif %} -{% if new_in_14 %}> _Added in AWX 1.4_{% endif %} -{% if new_in_145 %}> _Added in Ansible Tower 1.4.5_{% endif %} -{% if new_in_148 %}> _Added in Ansible Tower 1.4.8_{% endif %} -{% if new_in_200 %}> _Added in Ansible Tower 2.0.0_{% endif %} -{% if new_in_220 %}> _Added in Ansible Tower 2.2.0_{% endif %} -{% if new_in_230 %}> _Added in Ansible Tower 2.3.0_{% endif %} -{% if new_in_240 %}> _Added in Ansible Tower 2.4.0_{% endif %} -{% if new_in_300 %}> _Added in Ansible Tower 3.0.0_{% endif %} -{% if new_in_310 %}> _New in Ansible Tower 3.1.0_{% endif %} -{% if new_in_320 %}> _New in Ansible Tower 3.2.0_{% endif %} -{% if new_in_330 %}> _New in Ansible Tower 3.3.0_{% endif %} -{% endif %} diff --git a/awx/api/templates/api/ad_hoc_command_relaunch.md 
b/awx/api/templates/api/ad_hoc_command_relaunch.md new file mode 100644 index 0000000000..fdddd4b6ba --- /dev/null +++ b/awx/api/templates/api/ad_hoc_command_relaunch.md @@ -0,0 +1,3 @@ +Relaunch an Ad Hoc Command: + +Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint. diff --git a/awx/api/templates/api/api_v1_config_view.md b/awx/api/templates/api/api_v1_config_view.md index d99b97d553..d037ff4408 100644 --- a/awx/api/templates/api/api_v1_config_view.md +++ b/awx/api/templates/api/api_v1_config_view.md @@ -1,4 +1,5 @@ -Site configuration settings and general information. +{% ifmeth GET %} +# Site configuration settings and general information Make a GET request to this resource to retrieve the configuration containing the following fields (some fields may not be visible to all users): @@ -11,6 +12,10 @@ the following fields (some fields may not be visible to all users): * `license_info`: Information about the current license. * `version`: Version of Ansible Tower package installed. * `eula`: The current End-User License Agreement +{% endifmeth %} + +{% ifmeth POST %} +# Install or update an existing license (_New in Ansible Tower 2.0.0_) Make a POST request to this resource as a super user to install or update the existing license. The license data itself can @@ -18,3 +23,11 @@ be POSTed as a normal json data structure. (_New in Ansible Tower 2.1.1_) The POST must include a `eula_accepted` boolean element indicating acceptance of the End-User License Agreement. 
+{% endifmeth %} + +{% ifmeth DELETE %} +# Delete an existing license + +(_New in Ansible Tower 2.0.0_) Make a DELETE request to this resource as a super +user to delete the existing license +{% endifmeth %} diff --git a/awx/api/templates/api/api_view.md b/awx/api/templates/api/api_view.md index d716841f26..1fb0d77840 100644 --- a/awx/api/templates/api/api_view.md +++ b/awx/api/templates/api/api_view.md @@ -1,3 +1 @@ {{ docstring }} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/auth_token_view.md b/awx/api/templates/api/auth_token_view.md index 69078842d4..5df4892370 100644 --- a/awx/api/templates/api/auth_token_view.md +++ b/awx/api/templates/api/auth_token_view.md @@ -1,3 +1,5 @@ +{% ifmeth POST %} +# Generate an Auth Token Make a POST request to this resource with `username` and `password` fields to obtain an authentication token to use for subsequent requests. @@ -32,6 +34,10 @@ agent that originally obtained it. Each request that uses the token for authentication will refresh its expiration timestamp and keep it from expiring. A token only expires when it is not used for the configured timeout interval (default 1800 seconds). +{% endifmeth %} -A DELETE request with the token set will cause the token to be invalidated and -no further requests can be made with it. +{% ifmeth DELETE %} +# Delete an Auth Token +A DELETE request with the token header set will cause the token to be +invalidated and no further requests can be made with it. 
+{% endifmeth %} diff --git a/awx/api/templates/api/base_variable_data.md b/awx/api/templates/api/base_variable_data.md index 19994530e0..7fcb717c3d 100644 --- a/awx/api/templates/api/base_variable_data.md +++ b/awx/api/templates/api/base_variable_data.md @@ -1,9 +1,13 @@ +{% ifmeth GET %} # Retrieve {{ model_verbose_name|title }} Variable Data: -Make a GET request to this resource to retrieve all variables defined for this +Make a GET request to this resource to retrieve all variables defined for a {{ model_verbose_name }}. +{% endifmeth %} +{% ifmeth PUT PATCH %} # Update {{ model_verbose_name|title }} Variable Data: -Make a PUT request to this resource to update variables defined for this +Make a PUT or PATCH request to this resource to update variables defined for a {{ model_verbose_name }}. +{% endifmeth %} diff --git a/awx/api/templates/api/dashboard_inventory_graph_view.md b/awx/api/templates/api/dashboard_inventory_graph_view.md index ab31a13887..7353bf5006 100644 --- a/awx/api/templates/api/dashboard_inventory_graph_view.md +++ b/awx/api/templates/api/dashboard_inventory_graph_view.md @@ -38,5 +38,3 @@ Data about failed and successfull hosts by inventory will be given as: "id": 2, "name": "Test Inventory" }, - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/dashboard_jobs_graph_view.md b/awx/api/templates/api/dashboard_jobs_graph_view.md index cab9a25a2c..2e510b2a56 100644 --- a/awx/api/templates/api/dashboard_jobs_graph_view.md +++ b/awx/api/templates/api/dashboard_jobs_graph_view.md @@ -1,3 +1,5 @@ +# View Statistics for Job Runs + Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing. 
## Parmeters and Filtering @@ -33,5 +35,3 @@ Data will be returned in the following format: Each element contains an epoch timestamp represented in seconds and a numerical value indicating the number of events during that time period - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/dashboard_view.md b/awx/api/templates/api/dashboard_view.md index 16e16dbcc0..112c639e13 100644 --- a/awx/api/templates/api/dashboard_view.md +++ b/awx/api/templates/api/dashboard_view.md @@ -1,3 +1 @@ Make a GET request to this resource to retrieve aggregate statistics for Tower. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/group_all_hosts_list.md b/awx/api/templates/api/group_all_hosts_list.md index 4c021634c7..1d8e594c7d 100644 --- a/awx/api/templates/api/group_all_hosts_list.md +++ b/awx/api/templates/api/group_all_hosts_list.md @@ -1,4 +1,4 @@ -# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a list of all {{ model_verbose_name_plural }} directly or indirectly belonging to this diff --git a/awx/api/templates/api/group_potential_children_list.md b/awx/api/templates/api/group_potential_children_list.md index a22c10f3d9..a67d8ef8ff 100644 --- a/awx/api/templates/api/group_potential_children_list.md +++ b/awx/api/templates/api/group_potential_children_list.md @@ -1,9 +1,7 @@ -# List Potential Child Groups for this {{ parent_model_verbose_name|title }}: +# List Potential Child Groups for {{ parent_model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a list of {{ model_verbose_name_plural }} available to be added as children of the current {{ parent_model_verbose_name }}. 
{% include "api/_list_common.md" %} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/host_all_groups_list.md b/awx/api/templates/api/host_all_groups_list.md index c4275e0158..b53ddba15b 100644 --- a/awx/api/templates/api/host_all_groups_list.md +++ b/awx/api/templates/api/host_all_groups_list.md @@ -1,4 +1,4 @@ -# List All {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +# List All {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a list of all {{ model_verbose_name_plural }} of which the selected diff --git a/awx/api/templates/api/host_fact_compare_view.md b/awx/api/templates/api/host_fact_compare_view.md index a9b21079e9..871bc75068 100644 --- a/awx/api/templates/api/host_fact_compare_view.md +++ b/awx/api/templates/api/host_fact_compare_view.md @@ -1,3 +1,5 @@ +# List Fact Scans for a Host Specific Host Scan + Make a GET request to this resource to retrieve system tracking data for a particular scan You may filter by datetime: @@ -7,5 +9,3 @@ You may filter by datetime: and module `?datetime=2015-06-01&module=ansible` - -{% include "api/_new_in_awx.md" %} \ No newline at end of file diff --git a/awx/api/templates/api/host_fact_versions_list.md b/awx/api/templates/api/host_fact_versions_list.md index dd6e7a1afb..e2948505a7 100644 --- a/awx/api/templates/api/host_fact_versions_list.md +++ b/awx/api/templates/api/host_fact_versions_list.md @@ -1,3 +1,5 @@ +# List Fact Scans for a Host by Module and Date + Make a GET request to this resource to retrieve system tracking scans by module and date/time You may filter scan runs using the `from` and `to` properties: @@ -7,5 +9,3 @@ You may filter scan runs using the `from` and `to` properties: You may also filter by module `?module=packages` - -{% include "api/_new_in_awx.md" %} \ No newline at end of file diff --git a/awx/api/templates/api/host_insights.md 
b/awx/api/templates/api/host_insights.md new file mode 100644 index 0000000000..a474be953a --- /dev/null +++ b/awx/api/templates/api/host_insights.md @@ -0,0 +1 @@ +# List Red Hat Insights for a Host diff --git a/awx/api/templates/api/inventory_inventory_sources_update.md b/awx/api/templates/api/inventory_inventory_sources_update.md index 709b53afbf..edf17a27ce 100644 --- a/awx/api/templates/api/inventory_inventory_sources_update.md +++ b/awx/api/templates/api/inventory_inventory_sources_update.md @@ -29,5 +29,3 @@ Response code from this action will be: - 202 if some inventory source updates were successful, but some failed - 400 if all of the inventory source updates failed - 400 if there are no inventory sources in the inventory - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/inventory_root_groups_list.md b/awx/api/templates/api/inventory_root_groups_list.md index 17c95e03ba..41df816c10 100644 --- a/awx/api/templates/api/inventory_root_groups_list.md +++ b/awx/api/templates/api/inventory_root_groups_list.md @@ -1,7 +1,9 @@ -# List Root {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +{% ifmeth GET %} +# List Root {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a list of root (top-level) {{ model_verbose_name_plural }} associated with this {{ parent_model_verbose_name }}. {% include "api/_list_common.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/inventory_source_cancel.md b/awx/api/templates/api/inventory_source_cancel.md index 945ca93011..f0a05788bb 100644 --- a/awx/api/templates/api/inventory_source_cancel.md +++ b/awx/api/templates/api/inventory_source_cancel.md @@ -9,5 +9,3 @@ cancelled. The response will include the following field: Make a POST request to this resource to cancel a pending or running inventory update. 
The response status code will be 202 if successful, or 405 if the update cannot be canceled. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/inventory_source_update_view.md b/awx/api/templates/api/inventory_source_update_view.md index 1d71a794b7..34290acc40 100644 --- a/awx/api/templates/api/inventory_source_update_view.md +++ b/awx/api/templates/api/inventory_source_update_view.md @@ -9,5 +9,3 @@ from its inventory source. The response will include the following field: Make a POST request to this resource to update the inventory source. If successful, the response status code will be 202. If the inventory source is not defined or cannot be updated, a 405 status code will be returned. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/inventory_tree_view.md b/awx/api/templates/api/inventory_tree_view.md index 9818b56880..74af6b2f89 100644 --- a/awx/api/templates/api/inventory_tree_view.md +++ b/awx/api/templates/api/inventory_tree_view.md @@ -1,4 +1,4 @@ -# Group Tree for this {{ model_verbose_name|title }}: +# Group Tree for {{ model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a hierarchical view of groups associated with the selected {{ model_verbose_name }}. @@ -11,5 +11,3 @@ also containing a list of its children. Each group data structure includes the following fields: {% include "api/_result_fields_common.md" %} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/job_cancel.md b/awx/api/templates/api/job_cancel.md index f0acece331..9afb6d5031 100644 --- a/awx/api/templates/api/job_cancel.md +++ b/awx/api/templates/api/job_cancel.md @@ -1,10 +1,15 @@ -# Cancel Job +{% ifmeth GET %} +# Determine if a Job can be cancelled Make a GET request to this resource to determine if the job can be cancelled. 
The response will include the following field: * `can_cancel`: Indicates whether this job can be canceled (boolean, read-only) +{% endifmeth %} +{% ifmeth POST %} +# Cancel a Job Make a POST request to this resource to cancel a pending or running job. The response status code will be 202 if successful, or 405 if the job cannot be canceled. +{% endifmeth %} diff --git a/awx/api/templates/api/job_job_plays_list.md b/awx/api/templates/api/job_job_plays_list.md index 22194615a0..e546f8d35c 100644 --- a/awx/api/templates/api/job_job_plays_list.md +++ b/awx/api/templates/api/job_job_plays_list.md @@ -23,5 +23,3 @@ Will show only failed plays. Alternatively `false` may be used. ?play__icontains=test Will filter plays matching the substring `test` - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/job_job_tasks_list.md b/awx/api/templates/api/job_job_tasks_list.md index dca4ed10fe..892e8bb99e 100644 --- a/awx/api/templates/api/job_job_tasks_list.md +++ b/awx/api/templates/api/job_job_tasks_list.md @@ -25,5 +25,3 @@ Will show only failed plays. Alternatively `false` may be used. ?task__icontains=test Will filter tasks matching the substring `test` - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/job_relaunch.md b/awx/api/templates/api/job_relaunch.md index 7e9ea316ce..e0946435ff 100644 --- a/awx/api/templates/api/job_relaunch.md +++ b/awx/api/templates/api/job_relaunch.md @@ -1,3 +1,3 @@ -Relaunch a job: +Relaunch a Job: -Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint. \ No newline at end of file +Make a POST request to this resource to launch a job. If any passwords or variables are required then they should be passed in via POST data. 
In order to determine what values are required in order to launch a job based on this job template you may make a GET request to this endpoint. diff --git a/awx/api/templates/api/job_start.md b/awx/api/templates/api/job_start.md index 4b15ebc76b..43104dd2bc 100644 --- a/awx/api/templates/api/job_start.md +++ b/awx/api/templates/api/job_start.md @@ -1,4 +1,5 @@ -# Start Job +{% ifmeth GET %} +# Determine if a Job can be started Make a GET request to this resource to determine if the job can be started and whether any passwords are required to start the job. The response will include @@ -7,10 +8,14 @@ the following fields: * `can_start`: Flag indicating if this job can be started (boolean, read-only) * `passwords_needed_to_start`: Password names required to start the job (array, read-only) +{% endifmeth %} +{% ifmeth POST %} +# Start a Job Make a POST request to this resource to start the job. If any passwords are required, they must be passed via POST data. If successful, the response status code will be 202. If any required passwords are not provided, a 400 status code will be returned. If the job cannot be started, a 405 status code will be returned. +{% endifmeth %} diff --git a/awx/api/templates/api/job_template_label_list.md b/awx/api/templates/api/job_template_label_list.md index fa2163141b..763db77282 100644 --- a/awx/api/templates/api/job_template_label_list.md +++ b/awx/api/templates/api/job_template_label_list.md @@ -1,13 +1,7 @@ -{% with 'false' as version_label_flag %} {% include "api/sub_list_create_api_view.md" %} -{% endwith %} Labels not associated with any other resources are deleted. A label can become disassociated with a resource as a result of 3 events. 1. A label is explicitly disassociated with a related job template 2. A job is deleted with labels 3. 
A cleanup job deletes a job with labels - -{% with 'true' as version_label_flag %} -{% include "api/_new_in_awx.md" %} -{% endwith %} diff --git a/awx/api/templates/api/list_api_view.md b/awx/api/templates/api/list_api_view.md index c45b46c40f..598de2ec42 100644 --- a/awx/api/templates/api/list_api_view.md +++ b/awx/api/templates/api/list_api_view.md @@ -1,8 +1,8 @@ +{% ifmeth GET %} # List {{ model_verbose_name_plural|title }}: Make a GET request to this resource to retrieve the list of {{ model_verbose_name_plural }}. {% include "api/_list_common.md" %} - -{% include "api/_new_in_awx.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/list_create_api_view.md b/awx/api/templates/api/list_create_api_view.md index 400eeade18..6dc204b49f 100644 --- a/awx/api/templates/api/list_create_api_view.md +++ b/awx/api/templates/api/list_create_api_view.md @@ -1,6 +1,6 @@ {% include "api/list_api_view.md" %} -# Create {{ model_verbose_name_plural|title }}: +# Create {{ model_verbose_name|title|anora }}: Make a POST request to this resource with the following {{ model_verbose_name }} fields to create a new {{ model_verbose_name }}: @@ -8,5 +8,3 @@ fields to create a new {{ model_verbose_name }}: {% with write_only=1 %} {% include "api/_result_fields_common.md" with serializer_fields=serializer_create_fields %} {% endwith %} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/project_playbooks.md b/awx/api/templates/api/project_playbooks.md index 7b319258d0..2969381466 100644 --- a/awx/api/templates/api/project_playbooks.md +++ b/awx/api/templates/api/project_playbooks.md @@ -1,4 +1,4 @@ # Retrieve {{ model_verbose_name|title }} Playbooks: Make GET request to this resource to retrieve a list of playbooks available -for this {{ model_verbose_name }}. +for {{ model_verbose_name|anora }}. 
diff --git a/awx/api/templates/api/project_update_cancel.md b/awx/api/templates/api/project_update_cancel.md index 287c15d169..63e34c6170 100644 --- a/awx/api/templates/api/project_update_cancel.md +++ b/awx/api/templates/api/project_update_cancel.md @@ -9,5 +9,3 @@ cancelled. The response will include the following field: Make a POST request to this resource to cancel a pending or running project update. The response status code will be 202 if successful, or 405 if the update cannot be canceled. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/project_update_view.md b/awx/api/templates/api/project_update_view.md index 086c6e6ee1..9ff267f9e5 100644 --- a/awx/api/templates/api/project_update_view.md +++ b/awx/api/templates/api/project_update_view.md @@ -8,5 +8,3 @@ from its SCM source. The response will include the following field: Make a POST request to this resource to update the project. If the project cannot be updated, a 405 status code will be returned. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/retrieve_api_view.md b/awx/api/templates/api/retrieve_api_view.md index 64b7fec852..822233a00e 100644 --- a/awx/api/templates/api/retrieve_api_view.md +++ b/awx/api/templates/api/retrieve_api_view.md @@ -2,11 +2,9 @@ ### Note: starting from api v2, this resource object can be accessed via its named URL. 
{% endif %} -# Retrieve {{ model_verbose_name|title }}: +# Retrieve {{ model_verbose_name|title|anora }}: Make GET request to this resource to retrieve a single {{ model_verbose_name }} record containing the following fields: {% include "api/_result_fields_common.md" %} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/retrieve_destroy_api_view.md b/awx/api/templates/api/retrieve_destroy_api_view.md index 6872c59d4b..e8a6d7cf96 100644 --- a/awx/api/templates/api/retrieve_destroy_api_view.md +++ b/awx/api/templates/api/retrieve_destroy_api_view.md @@ -2,15 +2,17 @@ ### Note: starting from api v2, this resource object can be accessed via its named URL. {% endif %} -# Retrieve {{ model_verbose_name|title }}: +{% ifmeth GET %} +# Retrieve {{ model_verbose_name|title|anora }}: Make GET request to this resource to retrieve a single {{ model_verbose_name }} record containing the following fields: {% include "api/_result_fields_common.md" %} +{% endifmeth %} -# Delete {{ model_verbose_name|title }}: +{% ifmeth DELETE %} +# Delete {{ model_verbose_name|title|anora }}: Make a DELETE request to this resource to delete this {{ model_verbose_name }}. - -{% include "api/_new_in_awx.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/retrieve_update_api_view.md b/awx/api/templates/api/retrieve_update_api_view.md index 21e4255bf1..8f3bce0d06 100644 --- a/awx/api/templates/api/retrieve_update_api_view.md +++ b/awx/api/templates/api/retrieve_update_api_view.md @@ -2,14 +2,17 @@ ### Note: starting from api v2, this resource object can be accessed via its named URL. 
{% endif %} -# Retrieve {{ model_verbose_name|title }}: +{% ifmeth GET %} +# Retrieve {{ model_verbose_name|title|anora }}: Make GET request to this resource to retrieve a single {{ model_verbose_name }} record containing the following fields: {% include "api/_result_fields_common.md" %} +{% endifmeth %} -# Update {{ model_verbose_name|title }}: +{% ifmeth PUT PATCH %} +# Update {{ model_verbose_name|title|anora }}: Make a PUT or PATCH request to this resource to update this {{ model_verbose_name }}. The following fields may be modified: @@ -17,9 +20,12 @@ Make a PUT or PATCH request to this resource to update this {% with write_only=1 %} {% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %} {% endwith %} +{% endifmeth %} +{% ifmeth PUT %} For a PUT request, include **all** fields in the request. +{% endifmeth %} +{% ifmeth PATCH %} For a PATCH request, include only the fields that are being modified. - -{% include "api/_new_in_awx.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/retrieve_update_destroy_api_view.md b/awx/api/templates/api/retrieve_update_destroy_api_view.md index bfc99bb293..261f2cb667 100644 --- a/awx/api/templates/api/retrieve_update_destroy_api_view.md +++ b/awx/api/templates/api/retrieve_update_destroy_api_view.md @@ -2,14 +2,17 @@ ### Note: starting from api v2, this resource object can be accessed via its named URL. {% endif %} -# Retrieve {{ model_verbose_name|title }}: +{% ifmeth GET %} +# Retrieve {{ model_verbose_name|title|anora }}: Make GET request to this resource to retrieve a single {{ model_verbose_name }} record containing the following fields: {% include "api/_result_fields_common.md" %} +{% endifmeth %} -# Update {{ model_verbose_name|title }}: +{% ifmeth PUT PATCH %} +# Update {{ model_verbose_name|title|anora }}: Make a PUT or PATCH request to this resource to update this {{ model_verbose_name }}. 
The following fields may be modified: @@ -17,13 +20,18 @@ Make a PUT or PATCH request to this resource to update this {% with write_only=1 %} {% include "api/_result_fields_common.md" with serializer_fields=serializer_update_fields %} {% endwith %} +{% endifmeth %} +{% ifmeth PUT %} For a PUT request, include **all** fields in the request. +{% endifmeth %} +{% ifmeth PATCH %} For a PATCH request, include only the fields that are being modified. +{% endifmeth %} -# Delete {{ model_verbose_name|title }}: +{% ifmeth DELETE %} +# Delete {{ model_verbose_name|title|anora }}: Make a DELETE request to this resource to delete this {{ model_verbose_name }}. - -{% include "api/_new_in_awx.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/setting_logging_test.md b/awx/api/templates/api/setting_logging_test.md new file mode 100644 index 0000000000..149fac28ae --- /dev/null +++ b/awx/api/templates/api/setting_logging_test.md @@ -0,0 +1 @@ +# Test Logging Configuration diff --git a/awx/api/templates/api/sub_list_api_view.md b/awx/api/templates/api/sub_list_api_view.md index 9993819bc3..e42d2f02b2 100644 --- a/awx/api/templates/api/sub_list_api_view.md +++ b/awx/api/templates/api/sub_list_api_view.md @@ -1,9 +1,9 @@ -# List {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +{% ifmeth GET %} +# List {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}: Make a GET request to this resource to retrieve a list of {{ model_verbose_name_plural }} associated with the selected {{ parent_model_verbose_name }}. 
{% include "api/_list_common.md" %} - -{% include "api/_new_in_awx.md" %} +{% endifmeth %} diff --git a/awx/api/templates/api/sub_list_create_api_view.md b/awx/api/templates/api/sub_list_create_api_view.md index 74b91b5084..17856295be 100644 --- a/awx/api/templates/api/sub_list_create_api_view.md +++ b/awx/api/templates/api/sub_list_create_api_view.md @@ -1,6 +1,6 @@ {% include "api/sub_list_api_view.md" %} -# Create {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +# Create {{ model_verbose_name|title|anora }} for {{ parent_model_verbose_name|title|anora }}: Make a POST request to this resource with the following {{ model_verbose_name }} fields to create a new {{ model_verbose_name }} associated with this @@ -25,7 +25,7 @@ delete the associated {{ model_verbose_name }}. } {% else %} -# Add {{ model_verbose_name_plural|title }} for this {{ parent_model_verbose_name|title }}: +# Add {{ model_verbose_name_plural|title }} for {{ parent_model_verbose_name|title|anora }}: Make a POST request to this resource with only an `id` field to associate an existing {{ model_verbose_name }} with this {{ parent_model_verbose_name }}. @@ -37,5 +37,3 @@ remove the {{ model_verbose_name }} from this {{ parent_model_verbose_name }} {% if model_verbose_name != "label" %} without deleting the {{ model_verbose_name }}{% endif %}. {% endif %} {% endif %} - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/team_roles_list.md b/awx/api/templates/api/team_roles_list.md index bf5bc24917..8aa39a76cb 100644 --- a/awx/api/templates/api/team_roles_list.md +++ b/awx/api/templates/api/team_roles_list.md @@ -1,12 +1,16 @@ -# List Roles for this Team: +# List Roles for a Team: +{% ifmeth GET %} Make a GET request to this resource to retrieve a list of roles associated with the selected team. 
{% include "api/_list_common.md" %} +{% endifmeth %} +{% ifmeth POST %} # Associate Roles with this Team: Make a POST request to this resource to add or remove a role from this team. The following fields may be modified: * `id`: The Role ID to add to the team. (int, required) * `disassociate`: Provide if you want to remove the role. (any value, optional) +{% endifmeth %} diff --git a/awx/api/templates/api/unified_job_stdout.md b/awx/api/templates/api/unified_job_stdout.md index d86c6e2378..717110202b 100644 --- a/awx/api/templates/api/unified_job_stdout.md +++ b/awx/api/templates/api/unified_job_stdout.md @@ -25,5 +25,3 @@ dark background. Files over {{ settings.STDOUT_MAX_BYTES_DISPLAY|filesizeformat }} (configurable) will not display in the browser. Use the `txt_download` or `ansi_download` formats to download the file directly to view it. - -{% include "api/_new_in_awx.md" %} diff --git a/awx/api/templates/api/user_me_list.md b/awx/api/templates/api/user_me_list.md index 50bea61bd8..3935d23e09 100644 --- a/awx/api/templates/api/user_me_list.md +++ b/awx/api/templates/api/user_me_list.md @@ -1,3 +1,5 @@ +# Retrieve Information about the current User + Make a GET request to retrieve user information about the current user. One result should be returned containing the following fields: diff --git a/awx/api/templates/api/user_roles_list.md b/awx/api/templates/api/user_roles_list.md index 06c06cf1b3..d8ee253418 100644 --- a/awx/api/templates/api/user_roles_list.md +++ b/awx/api/templates/api/user_roles_list.md @@ -1,12 +1,16 @@ -# List Roles for this User: +# List Roles for a User: +{% ifmeth GET %} Make a GET request to this resource to retrieve a list of roles associated with the selected user. {% include "api/_list_common.md" %} +{% endifmeth %} +{% ifmeth POST %} # Associate Roles with this User: Make a POST request to this resource to add or remove a role from this user. The following fields may be modified: * `id`: The Role ID to add to the user. 
(int, required) * `disassociate`: Provide if you want to remove the role. (any value, optional) +{% endifmeth %} diff --git a/awx/api/urls/credential.py b/awx/api/urls/credential.py index b8480ab4e8..c444da9090 100644 --- a/awx/api/urls/credential.py +++ b/awx/api/urls/credential.py @@ -11,6 +11,7 @@ from awx.api.views import ( CredentialObjectRolesList, CredentialOwnerUsersList, CredentialOwnerTeamsList, + CredentialCopy, ) @@ -22,6 +23,7 @@ urls = [ url(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), url(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), url(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), + url(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory.py b/awx/api/urls/inventory.py index 0d8e2ca8d5..c2f67ab457 100644 --- a/awx/api/urls/inventory.py +++ b/awx/api/urls/inventory.py @@ -20,6 +20,7 @@ from awx.api.views import ( InventoryAccessList, InventoryObjectRolesList, InventoryInstanceGroupsList, + InventoryCopy, ) @@ -40,6 +41,7 @@ urls = [ url(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), url(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), url(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), + url(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory_script.py b/awx/api/urls/inventory_script.py index 088ccf21ca..03852e78cb 100644 --- a/awx/api/urls/inventory_script.py +++ b/awx/api/urls/inventory_script.py @@ -7,6 +7,7 @@ from awx.api.views import ( InventoryScriptList, InventoryScriptDetail, InventoryScriptObjectRolesList, + InventoryScriptCopy, ) @@ -14,6 +15,7 @@ urls 
= [ url(r'^$', InventoryScriptList.as_view(), name='inventory_script_list'), url(r'^(?P[0-9]+)/$', InventoryScriptDetail.as_view(), name='inventory_script_detail'), url(r'^(?P[0-9]+)/object_roles/$', InventoryScriptObjectRolesList.as_view(), name='inventory_script_object_roles_list'), + url(r'^(?P[0-9]+)/copy/$', InventoryScriptCopy.as_view(), name='inventory_script_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 32b11444be..b11dbf4fea 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -19,6 +19,7 @@ from awx.api.views import ( JobTemplateAccessList, JobTemplateObjectRolesList, JobTemplateLabelList, + JobTemplateCopy, ) @@ -41,6 +42,7 @@ urls = [ url(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), url(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), url(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), + url(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/notification_template.py b/awx/api/urls/notification_template.py index eba6be5ef3..8473878922 100644 --- a/awx/api/urls/notification_template.py +++ b/awx/api/urls/notification_template.py @@ -8,6 +8,7 @@ from awx.api.views import ( NotificationTemplateDetail, NotificationTemplateTest, NotificationTemplateNotificationList, + NotificationTemplateCopy, ) @@ -16,6 +17,7 @@ urls = [ url(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), url(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), url(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), + url(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), ] 
__all__ = ['urls'] diff --git a/awx/api/urls/project.py b/awx/api/urls/project.py index 629ec1ce05..263014e6e2 100644 --- a/awx/api/urls/project.py +++ b/awx/api/urls/project.py @@ -19,10 +19,11 @@ from awx.api.views import ( ProjectNotificationTemplatesSuccessList, ProjectObjectRolesList, ProjectAccessList, + ProjectCopy, ) -urls = [ +urls = [ url(r'^$', ProjectList.as_view(), name='project_list'), url(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), url(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), @@ -39,6 +40,7 @@ urls = [ name='project_notification_templates_success_list'), url(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), url(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), + url(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 15af2b4dca..387857fd1f 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -2,6 +2,7 @@ # All Rights Reserved. 
from __future__ import absolute_import, unicode_literals +from django.conf import settings from django.conf.urls import include, url from awx.api.views import ( @@ -123,5 +124,10 @@ app_name = 'api' urlpatterns = [ url(r'^$', ApiRootView.as_view(), name='api_root_view'), url(r'^(?P<version>(v2))/', include(v2_urls)), - url(r'^(?P<version>(v1|v2))/', include(v1_urls)) + url(r'^(?P<version>(v1|v2))/', include(v1_urls)), ] +if settings.SETTINGS_MODULE == 'awx.settings.development': + from awx.api.swagger import SwaggerSchemaView + urlpatterns += [ + url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'), + ] diff --git a/awx/api/views.py b/awx/api/views.py index ac835a30d0..23b8d0e769 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -16,7 +16,7 @@ import six # Django from django.conf import settings -from django.core.exceptions import FieldError +from django.core.exceptions import FieldError, ObjectDoesNotExist from django.db.models import Q, Count, F from django.db import IntegrityError, transaction from django.shortcuts import get_object_or_404 @@ -57,7 +57,7 @@ import pytz from wsgiref.util import FileWrapper # AWX -from awx.main.tasks import send_notifications +from awx.main.tasks import send_notifications, handle_ha_toplogy_changes from awx.main.access import get_user_queryset from awx.main.ha import is_ha_environment from awx.api.authentication import TokenGetAuthentication @@ -148,15 +148,51 @@ class UnifiedJobDeletionMixin(object): return Response(status=status.HTTP_204_NO_CONTENT) +class InstanceGroupMembershipMixin(object): + ''' + Manages signaling celery to reload its queue configuration on Instance Group membership changes + ''' + def attach(self, request, *args, **kwargs): + response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs) + sub_id, res = self.attach_validate(request) + if status.is_success(response.status_code): + if self.parent_model is Instance: + ig_obj = get_object_or_400(self.model, pk=sub_id) + inst_name =
ig_obj.hostname + else: + ig_obj = self.get_parent_object() + inst_name = get_object_or_400(self.model, pk=sub_id).hostname + if inst_name not in ig_obj.policy_instance_list: + ig_obj.policy_instance_list.append(inst_name) + ig_obj.save() + return response + + def unattach(self, request, *args, **kwargs): + response = super(InstanceGroupMembershipMixin, self).unattach(request, *args, **kwargs) + sub_id, res = self.attach_validate(request) + if status.is_success(response.status_code): + if self.parent_model is Instance: + ig_obj = get_object_or_400(self.model, pk=sub_id) + inst_name = self.get_parent_object().hostname + else: + ig_obj = self.get_parent_object() + inst_name = get_object_or_400(self.model, pk=sub_id).hostname + if inst_name in ig_obj.policy_instance_list: + ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name)) + ig_obj.save() + return response + + class ApiRootView(APIView): authentication_classes = [] permission_classes = (AllowAny,) view_name = _('REST API') versioning_class = None + swagger_topic = 'Versioning' def get(self, request, format=None): - ''' list supported API versions ''' + ''' List supported API versions ''' v1 = reverse('api:api_v1_root_view', kwargs={'version': 'v1'}) v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'}) @@ -175,9 +211,10 @@ class ApiVersionRootView(APIView): authentication_classes = [] permission_classes = (AllowAny,) + swagger_topic = 'Versioning' def get(self, request, format=None): - ''' list top level resources ''' + ''' List top level resources ''' data = OrderedDict() data['authtoken'] = reverse('api:auth_token_view', request=request) data['ping'] = reverse('api:api_v1_ping_view', request=request) @@ -228,8 +265,6 @@ class ApiV1RootView(ApiVersionRootView): class ApiV2RootView(ApiVersionRootView): view_name = _('Version 2') - new_in_320 = True - new_in_api_v2 = True class ApiV1PingView(APIView): @@ -239,10 +274,10 @@ class ApiV1PingView(APIView): permission_classes = 
(AllowAny,) authentication_classes = () view_name = _('Ping') - new_in_210 = True + swagger_topic = 'System Configuration' def get(self, request, format=None): - """Return some basic information about this instance. + """Return some basic information about this instance Everything returned here should be considered public / insecure, as this requires no auth and is intended for use by the installer process. @@ -270,6 +305,7 @@ class ApiV1ConfigView(APIView): permission_classes = (IsAuthenticated,) view_name = _('Configuration') + swagger_topic = 'System Configuration' def check_permissions(self, request): super(ApiV1ConfigView, self).check_permissions(request) @@ -277,7 +313,7 @@ class ApiV1ConfigView(APIView): self.permission_denied(request) # Raises PermissionDenied exception. def get(self, request, format=None): - '''Return various sitewide configuration settings.''' + '''Return various sitewide configuration settings''' if request.user.is_superuser or request.user.is_system_auditor: license_data = get_license(show_key=True) @@ -371,7 +407,7 @@ class ApiV1ConfigView(APIView): class DashboardView(APIView): view_name = _("Dashboard") - new_in_14 = True + swagger_topic = 'Dashboard' def get(self, request, format=None): ''' Show Dashboard Details ''' @@ -470,7 +506,7 @@ class DashboardView(APIView): class DashboardJobsGraphView(APIView): view_name = _("Dashboard Jobs Graphs") - new_in_200 = True + swagger_topic = 'Jobs' def get(self, request, format=None): period = request.query_params.get('period', 'month') @@ -522,15 +558,27 @@ class InstanceList(ListAPIView): view_name = _("Instances") model = Instance serializer_class = InstanceSerializer - new_in_320 = True -class InstanceDetail(RetrieveAPIView): +class InstanceDetail(RetrieveUpdateAPIView): view_name = _("Instance Detail") model = Instance serializer_class = InstanceSerializer - new_in_320 = True + + + def update(self, request, *args, **kwargs): + r = super(InstanceDetail, self).update(request, *args, 
**kwargs) + if status.is_success(r.status_code): + obj = self.get_object() + if obj.enabled: + obj.refresh_capacity() + else: + obj.capacity = 0 + obj.save() + handle_ha_toplogy_changes.apply_async() + r.data = InstanceSerializer(obj, context=self.get_serializer_context()).to_representation(obj) + return r class InstanceUnifiedJobsList(SubListAPIView): @@ -539,7 +587,6 @@ class InstanceUnifiedJobsList(SubListAPIView): model = UnifiedJob serializer_class = UnifiedJobSerializer parent_model = Instance - new_in_320 = True def get_queryset(self): po = self.get_parent_object() @@ -548,30 +595,27 @@ class InstanceUnifiedJobsList(SubListAPIView): return qs -class InstanceInstanceGroupsList(SubListAPIView): +class InstanceInstanceGroupsList(InstanceGroupMembershipMixin, SubListCreateAttachDetachAPIView): view_name = _("Instance's Instance Groups") model = InstanceGroup serializer_class = InstanceGroupSerializer parent_model = Instance - new_in_320 = True relationship = 'rampart_groups' -class InstanceGroupList(ListAPIView): +class InstanceGroupList(ListCreateAPIView): view_name = _("Instance Groups") model = InstanceGroup serializer_class = InstanceGroupSerializer - new_in_320 = True -class InstanceGroupDetail(RetrieveAPIView): +class InstanceGroupDetail(RetrieveUpdateDestroyAPIView): view_name = _("Instance Group Detail") model = InstanceGroup serializer_class = InstanceGroupSerializer - new_in_320 = True class InstanceGroupUnifiedJobsList(SubListAPIView): @@ -581,16 +625,14 @@ class InstanceGroupUnifiedJobsList(SubListAPIView): serializer_class = UnifiedJobSerializer parent_model = InstanceGroup relationship = "unifiedjob_set" - new_in_320 = True -class InstanceGroupInstanceList(SubListAPIView): +class InstanceGroupInstanceList(InstanceGroupMembershipMixin, SubListAttachDetachAPIView): view_name = _("Instance Group's Instances") model = Instance serializer_class = InstanceSerializer parent_model = InstanceGroup - new_in_320 = True relationship = "instances" @@ -599,14 
+641,12 @@ class ScheduleList(ListAPIView): view_name = _("Schedules") model = Schedule serializer_class = ScheduleSerializer - new_in_148 = True class ScheduleDetail(RetrieveUpdateDestroyAPIView): model = Schedule serializer_class = ScheduleSerializer - new_in_148 = True class SchedulePreview(GenericAPIView): @@ -614,7 +654,6 @@ class SchedulePreview(GenericAPIView): model = Schedule view_name = _('Schedule Recurrence Rule Preview') serializer_class = SchedulePreviewSerializer - new_in_api_v2 = True def post(self, request): serializer = self.get_serializer(data=request.data) @@ -641,9 +680,14 @@ class SchedulePreview(GenericAPIView): class ScheduleZoneInfo(APIView): + swagger_topic = 'System Configuration' + def get(self, request): from dateutil.zoneinfo import get_zonefile_instance - return Response(sorted(get_zonefile_instance().zones.keys())) + return Response([ + {'name': zone} + for zone in sorted(get_zonefile_instance().zones) + ]) class LaunchConfigCredentialsBase(SubListAttachDetachAPIView): @@ -682,8 +726,6 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView): class ScheduleCredentialsList(LaunchConfigCredentialsBase): parent_model = Schedule - new_in_330 = True - new_in_api_v2 = True class ScheduleUnifiedJobsList(SubListAPIView): @@ -693,14 +735,14 @@ class ScheduleUnifiedJobsList(SubListAPIView): parent_model = Schedule relationship = 'unifiedjob_set' view_name = _('Schedule Jobs List') - new_in_148 = True class AuthView(APIView): + ''' List enabled single-sign-on endpoints ''' authentication_classes = [] permission_classes = (AllowAny,) - new_in_240 = True + swagger_topic = 'System Configuration' def get(self, request): from rest_framework.reverse import reverse @@ -744,6 +786,7 @@ class AuthTokenView(APIView): permission_classes = (AllowAny,) serializer_class = AuthTokenSerializer model = AuthToken + swagger_topic = 'Authentication' def get_serializer(self, *args, **kwargs): serializer = self.serializer_class(*args, **kwargs) @@ -933,7 
+976,7 @@ class OrganizationDetail(RetrieveUpdateDestroyAPIView): def get_serializer_context(self, *args, **kwargs): full_context = super(OrganizationDetail, self).get_serializer_context(*args, **kwargs) - if not hasattr(self, 'kwargs'): + if not hasattr(self, 'kwargs') or 'pk' not in self.kwargs: return full_context org_id = int(self.kwargs['pk']) @@ -1020,7 +1063,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAttachDetachAPIView): parent_model = Organization relationship = 'workflows' parent_key = 'organization' - new_in_310 = True class OrganizationTeamsList(SubListCreateAttachDetachAPIView): @@ -1038,7 +1080,6 @@ class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIV serializer_class = ActivityStreamSerializer parent_model = Organization relationship = 'activitystream_set' - new_in_145 = True class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView): @@ -1056,7 +1097,6 @@ class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView) serializer_class = NotificationTemplateSerializer parent_model = Organization relationship = 'notification_templates_any' - new_in_300 = True class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView): @@ -1065,7 +1105,6 @@ class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIVie serializer_class = NotificationTemplateSerializer parent_model = Organization relationship = 'notification_templates_error' - new_in_300 = True class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView): @@ -1074,7 +1113,6 @@ class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIV serializer_class = NotificationTemplateSerializer parent_model = Organization relationship = 'notification_templates_success' - new_in_300 = True class OrganizationInstanceGroupsList(SubListAttachDetachAPIView): @@ -1083,14 +1121,12 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView): 
serializer_class = InstanceGroupSerializer parent_model = Organization relationship = 'instance_groups' - new_in_320 = True class OrganizationAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = Organization - new_in_300 = True class OrganizationObjectRolesList(SubListAPIView): @@ -1098,7 +1134,6 @@ class OrganizationObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Organization - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -1138,7 +1173,6 @@ class TeamRolesList(SubListAttachDetachAPIView): metadata_class = RoleMetadata parent_model = Team relationship='member_role.children' - new_in_300 = True def get_queryset(self): team = get_object_or_404(Team, pk=self.kwargs['pk']) @@ -1176,7 +1210,6 @@ class TeamObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Team - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -1208,7 +1241,6 @@ class TeamActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Team relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -1224,7 +1256,6 @@ class TeamAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = Team - new_in_300 = True class ProjectList(ListCreateAPIView): @@ -1297,7 +1328,6 @@ class ProjectSchedulesList(SubListCreateAPIView): parent_model = Project relationship = 'schedules' parent_key = 'unified_job_template' - new_in_148 = True class ProjectScmInventorySources(SubListAPIView): @@ -1308,7 +1338,6 @@ class ProjectScmInventorySources(SubListAPIView): parent_model = Project relationship = 'scm_inventory_sources' parent_key = 'source_project' - new_in_320 = True class ProjectActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -1317,7 +1346,6 @@ class 
ProjectActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Project relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -1336,7 +1364,6 @@ class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): serializer_class = NotificationTemplateSerializer parent_model = Project relationship = 'notification_templates_any' - new_in_300 = True class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView): @@ -1345,7 +1372,6 @@ class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView): serializer_class = NotificationTemplateSerializer parent_model = Project relationship = 'notification_templates_error' - new_in_300 = True class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView): @@ -1354,7 +1380,6 @@ class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView): serializer_class = NotificationTemplateSerializer parent_model = Project relationship = 'notification_templates_success' - new_in_300 = True class ProjectUpdatesList(SubListAPIView): @@ -1363,7 +1388,6 @@ class ProjectUpdatesList(SubListAPIView): serializer_class = ProjectUpdateSerializer parent_model = Project relationship = 'project_updates' - new_in_13 = True class ProjectUpdateView(RetrieveAPIView): @@ -1371,7 +1395,6 @@ class ProjectUpdateView(RetrieveAPIView): model = Project serializer_class = ProjectUpdateViewSerializer permission_classes = (ProjectUpdatePermission,) - new_in_13 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -1395,14 +1418,12 @@ class ProjectUpdateList(ListAPIView): model = ProjectUpdate serializer_class = ProjectUpdateListSerializer - new_in_13 = True class ProjectUpdateDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView): model = ProjectUpdate serializer_class = ProjectUpdateSerializer - new_in_13 = True class 
ProjectUpdateEventsList(SubListAPIView): @@ -1449,7 +1470,6 @@ class ProjectUpdateCancel(RetrieveAPIView): model = ProjectUpdate obj_permission_type = 'cancel' serializer_class = ProjectUpdateCancelSerializer - new_in_13 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -1466,7 +1486,6 @@ class ProjectUpdateNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = ProjectUpdate relationship = 'notifications' - new_in_300 = True class ProjectUpdateScmInventoryUpdates(SubListCreateAPIView): @@ -1477,14 +1496,12 @@ class ProjectUpdateScmInventoryUpdates(SubListCreateAPIView): parent_model = ProjectUpdate relationship = 'scm_inventory_updates' parent_key = 'source_project_update' - new_in_320 = True class ProjectAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = Project - new_in_300 = True class ProjectObjectRolesList(SubListAPIView): @@ -1492,7 +1509,6 @@ class ProjectObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Project - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -1500,6 +1516,12 @@ class ProjectObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) +class ProjectCopy(CopyAPIView): + + model = Project + copy_return_serializer_class = ProjectSerializer + + class UserList(ListCreateAPIView): model = User @@ -1553,7 +1575,6 @@ class UserRolesList(SubListAttachDetachAPIView): parent_model = User relationship='roles' permission_classes = (IsAuthenticated,) - new_in_300 = True def get_queryset(self): u = get_object_or_404(User, pk=self.kwargs['pk']) @@ -1646,7 +1667,6 @@ class UserActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = User relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -1697,23 
+1717,18 @@ class UserAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = User - new_in_300 = True class CredentialTypeList(ListCreateAPIView): model = CredentialType serializer_class = CredentialTypeSerializer - new_in_320 = True - new_in_api_v2 = True class CredentialTypeDetail(RetrieveUpdateDestroyAPIView): model = CredentialType serializer_class = CredentialTypeSerializer - new_in_320 = True - new_in_api_v2 = True def destroy(self, request, *args, **kwargs): instance = self.get_object() @@ -1730,8 +1745,6 @@ class CredentialTypeCredentialList(SubListAPIView): parent_model = CredentialType relationship = 'credentials' serializer_class = CredentialSerializer - new_in_320 = True - new_in_api_v2 = True class CredentialTypeActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -1740,8 +1753,6 @@ class CredentialTypeActivityStreamList(ActivityStreamEnforcementMixin, SubListAP serializer_class = ActivityStreamSerializer parent_model = CredentialType relationship = 'activitystream_set' - new_in_320 = True - new_in_api_v2 = True # remove in 3.3 @@ -1769,7 +1780,6 @@ class CredentialOwnerUsersList(SubListAPIView): serializer_class = UserSerializer parent_model = Credential relationship = 'admin_role.members' - new_in_300 = True class CredentialOwnerTeamsList(SubListAPIView): @@ -1777,7 +1787,6 @@ class CredentialOwnerTeamsList(SubListAPIView): model = Team serializer_class = TeamSerializer parent_model = Credential - new_in_300 = True def get_queryset(self): credential = get_object_or_404(self.parent_model, pk=self.kwargs['pk']) @@ -1858,14 +1867,12 @@ class CredentialActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIVie serializer_class = ActivityStreamSerializer parent_model = Credential relationship = 'activitystream_set' - new_in_145 = True class CredentialAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = Credential - new_in_300 = True class 
CredentialObjectRolesList(SubListAPIView): @@ -1873,7 +1880,6 @@ class CredentialObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Credential - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -1881,18 +1887,22 @@ class CredentialObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) +class CredentialCopy(CopyAPIView): + + model = Credential + copy_return_serializer_class = CredentialSerializer + + class InventoryScriptList(ListCreateAPIView): model = CustomInventoryScript serializer_class = CustomInventoryScriptSerializer - new_in_210 = True class InventoryScriptDetail(RetrieveUpdateDestroyAPIView): model = CustomInventoryScript serializer_class = CustomInventoryScriptSerializer - new_in_210 = True def destroy(self, request, *args, **kwargs): instance = self.get_object() @@ -1910,7 +1920,6 @@ class InventoryScriptObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = CustomInventoryScript - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -1918,6 +1927,12 @@ class InventoryScriptObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) +class InventoryScriptCopy(CopyAPIView): + + model = CustomInventoryScript + copy_return_serializer_class = CustomInventoryScriptSerializer + + class InventoryList(ListCreateAPIView): model = Inventory @@ -1982,7 +1997,7 @@ class InventoryDetail(ControlledByScmMixin, RetrieveUpdateDestroyAPIView): try: obj.schedule_deletion(getattr(request.user, 'id', None)) return Response(status=status.HTTP_202_ACCEPTED) - except RuntimeError, e: + except RuntimeError as e: return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST) @@ -1992,7 +2007,6 @@ class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView serializer_class = ActivityStreamSerializer parent_model = 
Inventory relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -2007,14 +2021,12 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView): serializer_class = InstanceGroupSerializer parent_model = Inventory relationship = 'instance_groups' - new_in_320 = True class InventoryAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = Inventory - new_in_300 = True class InventoryObjectRolesList(SubListAPIView): @@ -2022,7 +2034,6 @@ class InventoryObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Inventory - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -2036,7 +2047,6 @@ class InventoryJobTemplateList(SubListAPIView): serializer_class = JobTemplateSerializer parent_model = Inventory relationship = 'jobtemplates' - new_in_300 = True def get_queryset(self): parent = self.get_parent_object() @@ -2045,6 +2055,12 @@ class InventoryJobTemplateList(SubListAPIView): return qs.filter(inventory=parent) +class InventoryCopy(CopyAPIView): + + model = Inventory + copy_return_serializer_class = InventorySerializer + + class HostRelatedSearchMixin(object): @property @@ -2088,8 +2104,6 @@ class HostAnsibleFactsDetail(RetrieveAPIView): model = Host serializer_class = AnsibleFactsSerializer - new_in_320 = True - new_in_api_v2 = True class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIView): @@ -2150,7 +2164,6 @@ class HostInventorySourcesList(SubListAPIView): serializer_class = InventorySourceSerializer parent_model = Host relationship = 'inventory_sources' - new_in_148 = True class HostSmartInventoriesList(SubListAPIView): @@ -2158,7 +2171,6 @@ class HostSmartInventoriesList(SubListAPIView): serializer_class = InventorySerializer parent_model = Host relationship = 'smart_inventories' - new_in_320 = True class HostActivityStreamList(ActivityStreamEnforcementMixin, 
SubListAPIView): @@ -2167,7 +2179,6 @@ class HostActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Host relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -2181,7 +2192,6 @@ class HostFactVersionsList(SystemTrackingEnforcementMixin, ParentMixin, ListAPIV model = Fact serializer_class = FactVersionSerializer parent_model = Host - new_in_220 = True def get_queryset(self): from_spec = self.request.query_params.get('from', None) @@ -2205,7 +2215,6 @@ class HostFactVersionsList(SystemTrackingEnforcementMixin, ParentMixin, ListAPIV class HostFactCompareView(SystemTrackingEnforcementMixin, SubDetailAPIView): model = Fact - new_in_220 = True parent_model = Host serializer_class = FactSerializer @@ -2226,8 +2235,6 @@ class HostInsights(GenericAPIView): model = Host serializer_class = EmptySerializer - new_in_320 = True - new_in_api_v2 = True def _extract_insights_creds(self, credential): return (credential.inputs['username'], decrypt_field(credential, 'password')) @@ -2246,7 +2253,7 @@ class HostInsights(GenericAPIView): except requests.exceptions.Timeout: return (dict(error=_('Request to {} timed out.').format(url)), status.HTTP_504_GATEWAY_TIMEOUT) except requests.exceptions.RequestException as e: - return (dict(error=_('Unkown exception {} while trying to GET {}').format(e, url)), status.HTTP_502_BAD_GATEWAY) + return (dict(error=_('Unknown exception {} while trying to GET {}').format(e, url)), status.HTTP_502_BAD_GATEWAY) if res.status_code == 401: return (dict(error=_('Unauthorized access. 
Please check your Insights Credential username and password.')), status.HTTP_502_BAD_GATEWAY) @@ -2349,7 +2356,6 @@ class GroupPotentialChildrenList(SubListAPIView): model = Group serializer_class = GroupSerializer parent_model = Group - new_in_14 = True def get_queryset(self): parent = self.get_parent_object() @@ -2414,7 +2420,6 @@ class GroupInventorySourcesList(SubListAPIView): serializer_class = InventorySourceSerializer parent_model = Group relationship = 'inventory_sources' - new_in_148 = True class GroupActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -2423,7 +2428,6 @@ class GroupActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Group relationship = 'activitystream_set' - new_in_145 = True def get_queryset(self): parent = self.get_parent_object() @@ -2530,7 +2534,6 @@ class InventoryTreeView(RetrieveAPIView): model = Inventory serializer_class = GroupTreeSerializer filter_backends = () - new_in_13 = True def _populate_group_children(self, group_data, all_group_data_map, group_children_map): if 'children' in group_data: @@ -2568,7 +2571,6 @@ class InventoryInventorySourcesList(SubListCreateAPIView): always_allow_superuser = False relationship = 'inventory_sources' parent_key = 'inventory' - new_in_320 = True class InventoryInventorySourcesUpdate(RetrieveAPIView): @@ -2578,7 +2580,6 @@ class InventoryInventorySourcesUpdate(RetrieveAPIView): obj_permission_type = 'start' serializer_class = InventorySourceUpdateSerializer permission_classes = (InventoryInventorySourcesUpdatePermission,) - new_in_320 = True def retrieve(self, request, *args, **kwargs): inventory = self.get_object() @@ -2626,12 +2627,11 @@ class InventorySourceList(ListCreateAPIView): model = InventorySource serializer_class = InventorySourceSerializer always_allow_superuser = False - new_in_320 = True @property def allowed_methods(self): methods = super(InventorySourceList, self).allowed_methods - 
if get_request_version(self.request) == 1: + if get_request_version(getattr(self, 'request', None)) == 1: methods.remove('POST') return methods @@ -2640,7 +2640,6 @@ class InventorySourceDetail(RetrieveUpdateDestroyAPIView): model = InventorySource serializer_class = InventorySourceSerializer - new_in_14 = True def destroy(self, request, *args, **kwargs): obj = self.get_object() @@ -2661,7 +2660,6 @@ class InventorySourceSchedulesList(SubListCreateAPIView): parent_model = InventorySource relationship = 'schedules' parent_key = 'unified_job_template' - new_in_148 = True class InventorySourceActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -2670,7 +2668,6 @@ class InventorySourceActivityStreamList(ActivityStreamEnforcementMixin, SubListA serializer_class = ActivityStreamSerializer parent_model = InventorySource relationship = 'activitystream_set' - new_in_145 = True class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): @@ -2679,7 +2676,6 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi serializer_class = NotificationTemplateSerializer parent_model = InventorySource relationship = 'notification_templates_any' - new_in_300 = True def post(self, request, *args, **kwargs): parent = self.get_parent_object() @@ -2706,7 +2702,6 @@ class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView): serializer_class = HostSerializer parent_model = InventorySource relationship = 'hosts' - new_in_148 = True check_sub_obj_permission = False capabilities_prefetch = ['inventory.admin'] @@ -2717,7 +2712,6 @@ class InventorySourceGroupsList(SubListDestroyAPIView): serializer_class = GroupSerializer parent_model = InventorySource relationship = 'groups' - new_in_148 = True check_sub_obj_permission = False @@ -2727,7 +2721,6 @@ class InventorySourceUpdatesList(SubListAPIView): serializer_class = InventoryUpdateSerializer parent_model = InventorySource relationship = 'inventory_updates' - 
new_in_14 = True class InventorySourceUpdateView(RetrieveAPIView): @@ -2735,7 +2728,6 @@ class InventorySourceUpdateView(RetrieveAPIView): model = InventorySource obj_permission_type = 'start' serializer_class = InventorySourceUpdateSerializer - new_in_14 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -2763,7 +2755,6 @@ class InventoryUpdateDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView): model = InventoryUpdate serializer_class = InventoryUpdateSerializer - new_in_14 = True class InventoryUpdateCancel(RetrieveAPIView): @@ -2771,7 +2762,6 @@ class InventoryUpdateCancel(RetrieveAPIView): model = InventoryUpdate obj_permission_type = 'cancel' serializer_class = InventoryUpdateCancelSerializer - new_in_14 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -2788,7 +2778,6 @@ class InventoryUpdateNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = InventoryUpdate relationship = 'notifications' - new_in_300 = True class JobTemplateList(ListCreateAPIView): @@ -2998,7 +2987,6 @@ class JobTemplateSchedulesList(SubListCreateAPIView): parent_model = JobTemplate relationship = 'schedules' parent_key = 'unified_job_template' - new_in_148 = True class JobTemplateSurveySpec(GenericAPIView): @@ -3006,7 +2994,6 @@ class JobTemplateSurveySpec(GenericAPIView): model = JobTemplate obj_permission_type = 'admin' serializer_class = EmptySerializer - new_in_210 = True def get(self, request, *args, **kwargs): obj = self.get_object() @@ -3113,7 +3100,6 @@ class JobTemplateSurveySpec(GenericAPIView): class WorkflowJobTemplateSurveySpec(WorkflowsEnforcementMixin, JobTemplateSurveySpec): model = WorkflowJobTemplate - new_in_310 = True class JobTemplateActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -3122,7 +3108,6 @@ class JobTemplateActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIVi serializer_class = ActivityStreamSerializer parent_model = JobTemplate 
relationship = 'activitystream_set' - new_in_145 = True class JobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): @@ -3131,7 +3116,6 @@ class JobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): serializer_class = NotificationTemplateSerializer parent_model = JobTemplate relationship = 'notification_templates_any' - new_in_300 = True class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView): @@ -3140,7 +3124,6 @@ class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView serializer_class = NotificationTemplateSerializer parent_model = JobTemplate relationship = 'notification_templates_error' - new_in_300 = True class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView): @@ -3149,7 +3132,6 @@ class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIVi serializer_class = NotificationTemplateSerializer parent_model = JobTemplate relationship = 'notification_templates_success' - new_in_300 = True class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView): @@ -3158,8 +3140,6 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView): serializer_class = CredentialSerializer parent_model = JobTemplate relationship = 'credentials' - new_in_330 = True - new_in_api_v2 = True def get_queryset(self): # Return the full list of credentials @@ -3183,8 +3163,6 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView): class JobTemplateExtraCredentialsList(JobTemplateCredentialsList): deprecated = True - new_in_320 = True - new_in_330 = False def get_queryset(self): sublist_qs = super(JobTemplateExtraCredentialsList, self).get_queryset() @@ -3204,7 +3182,6 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta serializer_class = LabelSerializer parent_model = JobTemplate relationship = 'labels' - new_in_300 = True def post(self, request, *args, **kwargs): # If a label already exists in 
the database, attach it instead of erroring out @@ -3393,14 +3370,12 @@ class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView): serializer_class = InstanceGroupSerializer parent_model = JobTemplate relationship = 'instance_groups' - new_in_320 = True class JobTemplateAccessList(ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = JobTemplate - new_in_300 = True class JobTemplateObjectRolesList(SubListAPIView): @@ -3408,7 +3383,6 @@ class JobTemplateObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = JobTemplate - new_in_300 = True def get_queryset(self): po = self.get_parent_object() @@ -3416,18 +3390,22 @@ class JobTemplateObjectRolesList(SubListAPIView): return Role.objects.filter(content_type=content_type, object_id=po.pk) +class JobTemplateCopy(CopyAPIView): + + model = JobTemplate + copy_return_serializer_class = JobTemplateSerializer + + class WorkflowJobNodeList(WorkflowsEnforcementMixin, ListAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeListSerializer - new_in_310 = True class WorkflowJobNodeDetail(WorkflowsEnforcementMixin, RetrieveAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeDetailSerializer - new_in_310 = True class WorkflowJobNodeCredentialsList(SubListAPIView): @@ -3436,29 +3414,23 @@ class WorkflowJobNodeCredentialsList(SubListAPIView): serializer_class = CredentialSerializer parent_model = WorkflowJobNode relationship = 'credentials' - new_in_330 = True - new_in_api_v2 = True class WorkflowJobTemplateNodeList(WorkflowsEnforcementMixin, ListCreateAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeSerializer - new_in_310 = True class WorkflowJobTemplateNodeDetail(WorkflowsEnforcementMixin, RetrieveUpdateDestroyAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeDetailSerializer - new_in_310 = True class 
WorkflowJobTemplateNodeCredentialsList(LaunchConfigCredentialsBase): parent_model = WorkflowJobTemplateNode - new_in_330 = True - new_in_api_v2 = True class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView): @@ -3469,7 +3441,6 @@ class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, Enforce parent_model = WorkflowJobTemplateNode relationship = '' enforce_parent_relationship = 'workflow_job_template' - new_in_310 = True ''' Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by @@ -3539,7 +3510,6 @@ class WorkflowJobNodeChildrenBaseList(WorkflowsEnforcementMixin, SubListAPIView) serializer_class = WorkflowJobNodeListSerializer parent_model = WorkflowJobNode relationship = '' - new_in_310 = True # #Limit the set of WorkflowJobeNodes to the related nodes of specified by @@ -3568,7 +3538,6 @@ class WorkflowJobTemplateList(WorkflowsEnforcementMixin, ListCreateAPIView): model = WorkflowJobTemplate serializer_class = WorkflowJobTemplateListSerializer always_allow_superuser = False - new_in_310 = True class WorkflowJobTemplateDetail(WorkflowsEnforcementMixin, RetrieveUpdateDestroyAPIView): @@ -3576,14 +3545,12 @@ class WorkflowJobTemplateDetail(WorkflowsEnforcementMixin, RetrieveUpdateDestroy model = WorkflowJobTemplate serializer_class = WorkflowJobTemplateSerializer always_allow_superuser = False - new_in_310 = True -class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, GenericAPIView): +class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView): model = WorkflowJobTemplate - serializer_class = EmptySerializer - new_in_310 = True + copy_return_serializer_class = WorkflowJobTemplateSerializer def get(self, request, *args, **kwargs): obj = self.get_object() @@ -3599,22 +3566,24 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, GenericAPIView): data.update(messages) return Response(data) - def post(self, request, *args, 
**kwargs): - obj = self.get_object() - if not request.user.can_access(self.model, 'copy', obj): - raise PermissionDenied() - new_obj = obj.user_copy(request.user) - if request.user not in new_obj.admin_role: - new_obj.admin_role.members.add(request.user) - data = OrderedDict() - data.update(WorkflowJobTemplateSerializer( - new_obj, context=self.get_serializer_context()).to_representation(new_obj)) - return Response(data, status=status.HTTP_201_CREATED) + @staticmethod + def deep_copy_permission_check_func(user, new_objs): + for obj in new_objs: + for field_name in obj._get_workflow_job_field_names(): + item = getattr(obj, field_name, None) + if item is None: + continue + if field_name in ['inventory']: + if not user.can_access(item.__class__, 'use', item): + setattr(obj, field_name, None) + if field_name in ['unified_job_template']: + if not user.can_access(item.__class__, 'start', item, validate_license=False): + setattr(obj, field_name, None) + obj.save() class WorkflowJobTemplateLabelList(WorkflowsEnforcementMixin, JobTemplateLabelList): parent_model = WorkflowJobTemplate - new_in_310 = True class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView): @@ -3623,7 +3592,6 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView): model = WorkflowJobTemplate obj_permission_type = 'start' serializer_class = WorkflowJobLaunchSerializer - new_in_310 = True always_allow_superuser = False def update_raw_data(self, data): @@ -3663,7 +3631,6 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView): model = WorkflowJob obj_permission_type = 'start' serializer_class = EmptySerializer - new_in_310 = True def check_object_permissions(self, request, obj): if request.method == 'POST' and obj: @@ -3692,7 +3659,6 @@ class WorkflowJobTemplateWorkflowNodesList(WorkflowsEnforcementMixin, SubListCre parent_model = WorkflowJobTemplate relationship = 'workflow_job_template_nodes' parent_key = 'workflow_job_template' - new_in_310 = 
True def get_queryset(self): return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id') @@ -3705,7 +3671,6 @@ class WorkflowJobTemplateJobsList(WorkflowsEnforcementMixin, SubListAPIView): parent_model = WorkflowJobTemplate relationship = 'workflow_jobs' parent_key = 'workflow_job_template' - new_in_310 = True class WorkflowJobTemplateSchedulesList(WorkflowsEnforcementMixin, SubListCreateAPIView): @@ -3717,7 +3682,6 @@ class WorkflowJobTemplateSchedulesList(WorkflowsEnforcementMixin, SubListCreateA parent_model = WorkflowJobTemplate relationship = 'schedules' parent_key = 'unified_job_template' - new_in_310 = True class WorkflowJobTemplateNotificationTemplatesAnyList(WorkflowsEnforcementMixin, SubListCreateAttachDetachAPIView): @@ -3726,7 +3690,6 @@ class WorkflowJobTemplateNotificationTemplatesAnyList(WorkflowsEnforcementMixin, serializer_class = NotificationTemplateSerializer parent_model = WorkflowJobTemplate relationship = 'notification_templates_any' - new_in_310 = True class WorkflowJobTemplateNotificationTemplatesErrorList(WorkflowsEnforcementMixin, SubListCreateAttachDetachAPIView): @@ -3735,7 +3698,6 @@ class WorkflowJobTemplateNotificationTemplatesErrorList(WorkflowsEnforcementMixi serializer_class = NotificationTemplateSerializer parent_model = WorkflowJobTemplate relationship = 'notification_templates_error' - new_in_310 = True class WorkflowJobTemplateNotificationTemplatesSuccessList(WorkflowsEnforcementMixin, SubListCreateAttachDetachAPIView): @@ -3744,14 +3706,12 @@ class WorkflowJobTemplateNotificationTemplatesSuccessList(WorkflowsEnforcementMi serializer_class = NotificationTemplateSerializer parent_model = WorkflowJobTemplate relationship = 'notification_templates_success' - new_in_310 = True class WorkflowJobTemplateAccessList(WorkflowsEnforcementMixin, ResourceAccessList): model = User # needs to be User for AccessLists's parent_model = WorkflowJobTemplate - new_in_310 = True class 
WorkflowJobTemplateObjectRolesList(WorkflowsEnforcementMixin, SubListAPIView): @@ -3759,7 +3719,6 @@ class WorkflowJobTemplateObjectRolesList(WorkflowsEnforcementMixin, SubListAPIVi model = Role serializer_class = RoleSerializer parent_model = WorkflowJobTemplate - new_in_310 = True def get_queryset(self): po = self.get_parent_object() @@ -3773,7 +3732,6 @@ class WorkflowJobTemplateActivityStreamList(WorkflowsEnforcementMixin, ActivityS serializer_class = ActivityStreamSerializer parent_model = WorkflowJobTemplate relationship = 'activitystream_set' - new_in_310 = True def get_queryset(self): parent = self.get_parent_object() @@ -3787,14 +3745,12 @@ class WorkflowJobList(WorkflowsEnforcementMixin, ListCreateAPIView): model = WorkflowJob serializer_class = WorkflowJobListSerializer - new_in_310 = True class WorkflowJobDetail(WorkflowsEnforcementMixin, UnifiedJobDeletionMixin, RetrieveDestroyAPIView): model = WorkflowJob serializer_class = WorkflowJobSerializer - new_in_310 = True class WorkflowJobWorkflowNodesList(WorkflowsEnforcementMixin, SubListAPIView): @@ -3805,7 +3761,6 @@ class WorkflowJobWorkflowNodesList(WorkflowsEnforcementMixin, SubListAPIView): parent_model = WorkflowJob relationship = 'workflow_job_nodes' parent_key = 'workflow_job' - new_in_310 = True def get_queryset(self): return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id') @@ -3816,7 +3771,6 @@ class WorkflowJobCancel(WorkflowsEnforcementMixin, RetrieveAPIView): model = WorkflowJob obj_permission_type = 'cancel' serializer_class = WorkflowJobCancelSerializer - new_in_310 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -3835,7 +3789,6 @@ class WorkflowJobNotificationsList(WorkflowsEnforcementMixin, SubListAPIView): serializer_class = NotificationSerializer parent_model = WorkflowJob relationship = 'notifications' - new_in_310 = True class WorkflowJobActivityStreamList(WorkflowsEnforcementMixin, ActivityStreamEnforcementMixin, SubListAPIView): @@ 
-3844,14 +3797,12 @@ class WorkflowJobActivityStreamList(WorkflowsEnforcementMixin, ActivityStreamEnf serializer_class = ActivityStreamSerializer parent_model = WorkflowJob relationship = 'activitystream_set' - new_in_310 = True class SystemJobTemplateList(ListAPIView): model = SystemJobTemplate serializer_class = SystemJobTemplateSerializer - new_in_210 = True def get(self, request, *args, **kwargs): if not request.user.is_superuser and not request.user.is_system_auditor: @@ -3863,7 +3814,6 @@ class SystemJobTemplateDetail(RetrieveAPIView): model = SystemJobTemplate serializer_class = SystemJobTemplateSerializer - new_in_210 = True class SystemJobTemplateLaunch(GenericAPIView): @@ -3871,7 +3821,6 @@ class SystemJobTemplateLaunch(GenericAPIView): model = SystemJobTemplate obj_permission_type = 'start' serializer_class = EmptySerializer - new_in_210 = True def get(self, request, *args, **kwargs): return Response({}) @@ -3896,7 +3845,6 @@ class SystemJobTemplateSchedulesList(SubListCreateAPIView): parent_model = SystemJobTemplate relationship = 'schedules' parent_key = 'unified_job_template' - new_in_210 = True class SystemJobTemplateJobsList(SubListAPIView): @@ -3906,7 +3854,6 @@ class SystemJobTemplateJobsList(SubListAPIView): parent_model = SystemJobTemplate relationship = 'jobs' parent_key = 'system_job_template' - new_in_210 = True class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): @@ -3915,7 +3862,6 @@ class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPI serializer_class = NotificationTemplateSerializer parent_model = SystemJobTemplate relationship = 'notification_templates_any' - new_in_300 = True class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView): @@ -3924,7 +3870,6 @@ class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachA serializer_class = NotificationTemplateSerializer parent_model = SystemJobTemplate relationship = 
'notification_templates_error' - new_in_300 = True class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView): @@ -3933,7 +3878,6 @@ class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetac serializer_class = NotificationTemplateSerializer parent_model = SystemJobTemplate relationship = 'notification_templates_success' - new_in_300 = True class JobList(ListCreateAPIView): @@ -3945,7 +3889,7 @@ class JobList(ListCreateAPIView): @property def allowed_methods(self): methods = super(JobList, self).allowed_methods - if get_request_version(self.request) > 1: + if get_request_version(getattr(self, 'request', None)) > 1: methods.remove('POST') return methods @@ -3977,15 +3921,11 @@ class JobCredentialsList(SubListAPIView): serializer_class = CredentialSerializer parent_model = Job relationship = 'credentials' - new_in_api_v2 = True - new_in_330 = True class JobExtraCredentialsList(JobCredentialsList): deprecated = True - new_in_320 = True - new_in_330 = False def get_queryset(self): sublist_qs = super(JobExtraCredentialsList, self).get_queryset() @@ -4000,12 +3940,10 @@ class JobLabelList(SubListAPIView): parent_model = Job relationship = 'labels' parent_key = 'job' - new_in_300 = True class WorkflowJobLabelList(WorkflowsEnforcementMixin, JobLabelList): parent_model = WorkflowJob - new_in_310 = True class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -4014,7 +3952,6 @@ class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Job relationship = 'activitystream_set' - new_in_145 = True # TODO: remove endpoint in 3.3 @@ -4135,13 +4072,20 @@ class JobCreateSchedule(RetrieveAPIView): model = Job obj_permission_type = 'start' serializer_class = JobCreateScheduleSerializer - new_in_330 = True def post(self, request, *args, **kwargs): obj = self.get_object() if not obj.can_schedule: - return Response({"error": 
_('Information needed to schedule this job is missing.')}, + if getattr(obj, 'passwords_needed_to_start', None): + return Response({"error": _('Cannot create schedule because job requires credential passwords.')}, + status=status.HTTP_400_BAD_REQUEST) + try: + obj.launch_config + except ObjectDoesNotExist: + return Response({"error": _('Cannot create schedule because job was launched by legacy method.')}, + status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _('Cannot create schedule because a related resource is missing.')}, status=status.HTTP_400_BAD_REQUEST) config = obj.launch_config @@ -4182,7 +4126,6 @@ class JobNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = Job relationship = 'notifications' - new_in_300 = True class BaseJobHostSummariesList(SubListAPIView): @@ -4299,7 +4242,6 @@ class AdHocCommandList(ListCreateAPIView): model = AdHocCommand serializer_class = AdHocCommandListSerializer - new_in_220 = True always_allow_superuser = False @csrf_exempt @@ -4374,7 +4316,6 @@ class AdHocCommandDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView): model = AdHocCommand serializer_class = AdHocCommandSerializer - new_in_220 = True class AdHocCommandCancel(RetrieveAPIView): @@ -4382,7 +4323,6 @@ class AdHocCommandCancel(RetrieveAPIView): model = AdHocCommand obj_permission_type = 'cancel' serializer_class = AdHocCommandCancelSerializer - new_in_220 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -4398,7 +4338,6 @@ class AdHocCommandRelaunch(GenericAPIView): model = AdHocCommand obj_permission_type = 'start' serializer_class = AdHocCommandRelaunchSerializer - new_in_220 = True # FIXME: Figure out why OPTIONS request still shows all fields. 
@@ -4455,14 +4394,12 @@ class AdHocCommandEventList(ListAPIView): model = AdHocCommandEvent serializer_class = AdHocCommandEventSerializer - new_in_220 = True class AdHocCommandEventDetail(RetrieveAPIView): model = AdHocCommandEvent serializer_class = AdHocCommandEventSerializer - new_in_220 = True class BaseAdHocCommandEventsList(SubListAPIView): @@ -4472,13 +4409,11 @@ class BaseAdHocCommandEventsList(SubListAPIView): parent_model = None # Subclasses must define this attribute. relationship = 'ad_hoc_command_events' view_name = _('Ad Hoc Command Events List') - new_in_220 = True class HostAdHocCommandEventsList(BaseAdHocCommandEventsList): parent_model = Host - new_in_220 = True #class GroupJobEventsList(BaseJobEventsList): @@ -4488,7 +4423,6 @@ class HostAdHocCommandEventsList(BaseAdHocCommandEventsList): class AdHocCommandAdHocCommandEventsList(BaseAdHocCommandEventsList): parent_model = AdHocCommand - new_in_220 = True class AdHocCommandActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): @@ -4497,7 +4431,6 @@ class AdHocCommandActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIV serializer_class = ActivityStreamSerializer parent_model = AdHocCommand relationship = 'activitystream_set' - new_in_220 = True class AdHocCommandNotificationsList(SubListAPIView): @@ -4506,14 +4439,12 @@ class AdHocCommandNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = AdHocCommand relationship = 'notifications' - new_in_300 = True class SystemJobList(ListCreateAPIView): model = SystemJob serializer_class = SystemJobListSerializer - new_in_210 = True def get(self, request, *args, **kwargs): if not request.user.is_superuser and not request.user.is_system_auditor: @@ -4525,7 +4456,6 @@ class SystemJobDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView): model = SystemJob serializer_class = SystemJobSerializer - new_in_210 = True class SystemJobCancel(RetrieveAPIView): @@ -4533,7 +4463,6 @@ class 
SystemJobCancel(RetrieveAPIView): model = SystemJob obj_permission_type = 'cancel' serializer_class = SystemJobCancelSerializer - new_in_210 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -4550,14 +4479,12 @@ class SystemJobNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = SystemJob relationship = 'notifications' - new_in_300 = True class UnifiedJobTemplateList(ListAPIView): model = UnifiedJobTemplate serializer_class = UnifiedJobTemplateSerializer - new_in_148 = True capabilities_prefetch = [ 'admin', 'execute', {'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use', @@ -4569,7 +4496,6 @@ class UnifiedJobList(ListAPIView): model = UnifiedJob serializer_class = UnifiedJobListSerializer - new_in_148 = True class StdoutANSIFilter(object): @@ -4607,7 +4533,6 @@ class UnifiedJobStdout(RetrieveAPIView): PlainTextRenderer, AnsiTextRenderer, renderers.JSONRenderer, DownloadTextRenderer, AnsiDownloadRenderer] filter_backends = () - new_in_148 = True def retrieve(self, request, *args, **kwargs): unified_job = self.get_object() @@ -4678,7 +4603,6 @@ class UnifiedJobStdout(RetrieveAPIView): class ProjectUpdateStdout(UnifiedJobStdout): model = ProjectUpdate - new_in_13 = True class InventoryUpdateStdout(UnifiedJobStdout): @@ -4694,21 +4618,18 @@ class JobStdout(UnifiedJobStdout): class AdHocCommandStdout(UnifiedJobStdout): model = AdHocCommand - new_in_220 = True class NotificationTemplateList(ListCreateAPIView): model = NotificationTemplate serializer_class = NotificationTemplateSerializer - new_in_300 = True class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): model = NotificationTemplate serializer_class = NotificationTemplateSerializer - new_in_300 = True def delete(self, request, *args, **kwargs): obj = self.get_object() @@ -4721,12 +4642,12 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): class NotificationTemplateTest(GenericAPIView): + '''Test a Notification 
Template''' view_name = _('Notification Template Test') model = NotificationTemplate obj_permission_type = 'start' serializer_class = EmptySerializer - new_in_300 = True def post(self, request, *args, **kwargs): obj = self.get_object() @@ -4752,49 +4673,48 @@ class NotificationTemplateNotificationList(SubListAPIView): parent_model = NotificationTemplate relationship = 'notifications' parent_key = 'notification_template' - new_in_300 = True + + +class NotificationTemplateCopy(CopyAPIView): + + model = NotificationTemplate + copy_return_serializer_class = NotificationTemplateSerializer class NotificationList(ListAPIView): model = Notification serializer_class = NotificationSerializer - new_in_300 = True class NotificationDetail(RetrieveAPIView): model = Notification serializer_class = NotificationSerializer - new_in_300 = True class LabelList(ListCreateAPIView): model = Label serializer_class = LabelSerializer - new_in_300 = True class LabelDetail(RetrieveUpdateAPIView): model = Label serializer_class = LabelSerializer - new_in_300 = True class ActivityStreamList(ActivityStreamEnforcementMixin, SimpleListAPIView): model = ActivityStream serializer_class = ActivityStreamSerializer - new_in_145 = True class ActivityStreamDetail(ActivityStreamEnforcementMixin, RetrieveAPIView): model = ActivityStream serializer_class = ActivityStreamSerializer - new_in_145 = True class RoleList(ListAPIView): @@ -4802,7 +4722,6 @@ class RoleList(ListAPIView): model = Role serializer_class = RoleSerializer permission_classes = (IsAuthenticated,) - new_in_300 = True def get_queryset(self): result = Role.visible_roles(self.request.user) @@ -4821,7 +4740,6 @@ class RoleDetail(RetrieveAPIView): model = Role serializer_class = RoleSerializer - new_in_300 = True class RoleUsersList(SubListAttachDetachAPIView): @@ -4830,7 +4748,6 @@ class RoleUsersList(SubListAttachDetachAPIView): serializer_class = UserSerializer parent_model = Role relationship = 'members' - new_in_300 = True def 
get_queryset(self): role = self.get_parent_object() @@ -4872,7 +4789,6 @@ class RoleTeamsList(SubListAttachDetachAPIView): parent_model = Role relationship = 'member_role.parents' permission_classes = (IsAuthenticated,) - new_in_300 = True def get_queryset(self): role = self.get_parent_object() @@ -4924,7 +4840,6 @@ class RoleParentsList(SubListAPIView): parent_model = Role relationship = 'parents' permission_classes = (IsAuthenticated,) - new_in_300 = True def get_queryset(self): role = Role.objects.get(pk=self.kwargs['pk']) @@ -4938,7 +4853,6 @@ class RoleChildrenList(SubListAPIView): parent_model = Role relationship = 'children' permission_classes = (IsAuthenticated,) - new_in_300 = True def get_queryset(self): role = Role.objects.get(pk=self.kwargs['pk']) diff --git a/awx/conf/settings.py b/awx/conf/settings.py index ff2b0adc51..0af16846b7 100644 --- a/awx/conf/settings.py +++ b/awx/conf/settings.py @@ -275,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder): setting_ids[setting.key] = setting.id try: value = decrypt_field(setting, 'value') - except ValueError, e: + except ValueError as e: #TODO: Remove in Tower 3.3 logger.debug('encountered error decrypting field: %s - attempting fallback to old', e) value = old_decrypt_field(setting, 'value') diff --git a/awx/conf/views.py b/awx/conf/views.py index 524abf476a..189dd387dc 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -44,7 +44,6 @@ class SettingCategoryList(ListAPIView): model = Setting # Not exactly, but needed for the view. serializer_class = SettingCategorySerializer filter_backends = [] - new_in_310 = True view_name = _('Setting Categories') def get_queryset(self): @@ -69,7 +68,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView): model = Setting # Not exactly, but needed for the view. 
serializer_class = SettingSingletonSerializer filter_backends = [] - new_in_310 = True view_name = _('Setting Detail') def get_queryset(self): @@ -170,7 +168,6 @@ class SettingLoggingTest(GenericAPIView): serializer_class = SettingSingletonSerializer permission_classes = (IsSuperUser,) filter_backends = [] - new_in_320 = True def post(self, request, *args, **kwargs): defaults = dict() diff --git a/awx/lib/awx_display_callback/events.py b/awx/lib/awx_display_callback/events.py index b5ea72b21e..ca801925f5 100644 --- a/awx/lib/awx_display_callback/events.py +++ b/awx/lib/awx_display_callback/events.py @@ -29,6 +29,8 @@ import threading import uuid import memcache +from six.moves import xrange + __all__ = ['event_context'] diff --git a/awx/main/access.py b/awx/main/access.py index 174396e59b..5ce76a52f8 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -424,6 +424,18 @@ class InstanceAccess(BaseAccess): return Instance.objects.filter( rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct() + + def can_attach(self, obj, sub_obj, relationship, data, + skip_sub_obj_read_check=False): + if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup): + return self.user.is_superuser + return super(InstanceAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs) + + def can_unattach(self, obj, sub_obj, relationship, data=None): + if relationship == 'rampart_groups' and isinstance(sub_obj, InstanceGroup): + return self.user.is_superuser + return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, *args, **kwargs) + def can_add(self, data): return False @@ -444,13 +456,13 @@ class InstanceGroupAccess(BaseAccess): organization__in=Organization.accessible_pk_qs(self.user, 'admin_role')) def can_add(self, data): - return False + return self.user.is_superuser def can_change(self, obj, data): - return False + return self.user.is_superuser def can_delete(self, obj): - return False + return self.user.is_superuser class 
UserAccess(BaseAccess): diff --git a/awx/main/expect/run.py b/awx/main/expect/run.py index 496c7583e0..ce685325c7 100755 --- a/awx/main/expect/run.py +++ b/awx/main/expect/run.py @@ -47,7 +47,7 @@ def open_fifo_write(path, data): This blocks the thread until an external process (such as ssh-agent) reads data from the pipe. ''' - os.mkfifo(path, 0600) + os.mkfifo(path, 0o600) thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data)) diff --git a/awx/main/fields.py b/awx/main/fields.py index 777836ebf3..fff3dd5277 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -356,7 +356,7 @@ class SmartFilterField(models.TextField): value = urllib.unquote(value) try: SmartFilter().query_from_string(value) - except RuntimeError, e: + except RuntimeError as e: raise models.base.ValidationError(e) return super(SmartFilterField, self).get_prep_value(value) @@ -695,11 +695,10 @@ class CredentialTypeInjectorField(JSONSchemaField): 'properties': { 'file': { 'type': 'object', - 'properties': { - 'template': {'type': 'string'}, + 'patternProperties': { + '^template(\.[a-zA-Z_]+[a-zA-Z0-9_]*)?$': {'type': 'string'}, }, 'additionalProperties': False, - 'required': ['template'], }, 'env': { 'type': 'object', @@ -749,8 +748,22 @@ class CredentialTypeInjectorField(JSONSchemaField): class TowerNamespace: filename = None - valid_namespace['tower'] = TowerNamespace() + + # ensure either single file or multi-file syntax is used (but not both) + template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')] + if 'template' in template_names and len(template_names) > 1: + raise django_exceptions.ValidationError( + _('Must use multi-file syntax when injecting multiple files'), + code='invalid', + params={'value': value}, + ) + if 'template' not in template_names: + valid_namespace['tower'].filename = TowerNamespace() + for template_name in template_names: + template_name = template_name.split('.')[1] + setattr(valid_namespace['tower'].filename, 
template_name, 'EXAMPLE') + for type_, injector in value.items(): for key, tmpl in injector.items(): try: diff --git a/awx/main/management/commands/generate_isolated_key.py b/awx/main/management/commands/generate_isolated_key.py index 862fefda5e..00c020a9a7 100644 --- a/awx/main/management/commands/generate_isolated_key.py +++ b/awx/main/management/commands/generate_isolated_key.py @@ -17,7 +17,7 @@ class Command(BaseCommand): def handle(self, *args, **kwargs): if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False): - print settings.AWX_ISOLATED_PUBLIC_KEY + print(settings.AWX_ISOLATED_PUBLIC_KEY) return key = rsa.generate_private_key( @@ -41,4 +41,4 @@ class Command(BaseCommand): ) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat() ) pemfile.save() - print pemfile.value + print(pemfile.value) diff --git a/awx/main/management/commands/register_queue.py b/awx/main/management/commands/register_queue.py index 548e305bcc..1e7912836d 100644 --- a/awx/main/management/commands/register_queue.py +++ b/awx/main/management/commands/register_queue.py @@ -17,6 +17,10 @@ class Command(BaseCommand): help='Comma-Delimited Hosts to add to the Queue') parser.add_argument('--controller', dest='controller', type=str, default='', help='The controlling group (makes this an isolated group)') + parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0, + help='The percentage of active instances that will be assigned to this group'), + parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0, + help='The minimum number of instance that will be retained for this group from available instances') def handle(self, **options): queuename = options.get('queuename') @@ -38,7 +42,9 @@ class Command(BaseCommand): changed = True else: print("Creating instance group {}".format(queuename)) - ig = InstanceGroup(name=queuename) + ig = InstanceGroup(name=queuename, + policy_instance_percentage=options.get('instance_percent'), + 
policy_instance_minimum=options.get('instance_minimum')) if control_ig: ig.controller = control_ig ig.save() @@ -60,5 +66,7 @@ class Command(BaseCommand): sys.exit(1) else: print("Instance already registered {}".format(instance[0].hostname)) + ig.policy_instance_list = instance_list + ig.save() if changed: print('(changed: True)') diff --git a/awx/main/management/commands/test_isolated_connection.py b/awx/main/management/commands/test_isolated_connection.py index e2bfcc2f50..23bd20bf50 100644 --- a/awx/main/management/commands/test_isolated_connection.py +++ b/awx/main/management/commands/test_isolated_connection.py @@ -41,10 +41,9 @@ class Command(BaseCommand): run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY) args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock) try: - print ' '.join(args) + print(' '.join(args)) subprocess.check_call(args) except subprocess.CalledProcessError as e: sys.exit(e.returncode) finally: shutil.rmtree(path) - diff --git a/awx/main/managers.py b/awx/main/managers.py index aa478cb027..70c402f672 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -2,12 +2,9 @@ # All Rights Reserved. 
import sys -from datetime import timedelta import logging from django.db import models -from django.utils.timezone import now -from django.db.models import Sum from django.conf import settings from awx.main.utils.filters import SmartFilter @@ -93,11 +90,6 @@ class InstanceManager(models.Manager): """Return count of active Tower nodes for licensing.""" return self.all().count() - def total_capacity(self): - sumval = self.filter(modified__gte=now() - timedelta(seconds=settings.AWX_ACTIVE_NODE_TIME)) \ - .aggregate(total_capacity=Sum('capacity'))['total_capacity'] - return max(50, sumval) - def my_role(self): # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing return "tower" diff --git a/awx/main/migrations/0020_v330_instancegroup_policies.py b/awx/main/migrations/0020_v330_instancegroup_policies.py new file mode 100644 index 0000000000..a6716352e9 --- /dev/null +++ b/awx/main/migrations/0020_v330_instancegroup_policies.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals + +from django.db import migrations, models +from decimal import Decimal +import awx.main.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0019_v330_custom_virtualenv'), + ] + + operations = [ + migrations.AddField( + model_name='instancegroup', + name='policy_instance_list', + field=awx.main.fields.JSONField(default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group', + blank=True), + ), + migrations.AddField( + model_name='instancegroup', + name='policy_instance_minimum', + field=models.IntegerField(default=0, help_text='Static minimum number of Instances to automatically assign to this group'), + ), + migrations.AddField( + model_name='instancegroup', + name='policy_instance_percentage', + field=models.IntegerField(default=0, help_text='Percentage of Instances to automatically assign to this group'), + ), + migrations.AddField( + model_name='instance', + 
name='capacity_adjustment', + field=models.DecimalField(decimal_places=2, default=Decimal('1.0'), max_digits=3), + ), + migrations.AddField( + model_name='instance', + name='cpu', + field=models.IntegerField(default=0, editable=False) + ), + migrations.AddField( + model_name='instance', + name='memory', + field=models.BigIntegerField(default=0, editable=False) + ), + migrations.AddField( + model_name='instance', + name='cpu_capacity', + field=models.IntegerField(default=0, editable=False) + ), + migrations.AddField( + model_name='instance', + name='mem_capacity', + field=models.IntegerField(default=0, editable=False) + ), + migrations.AddField( + model_name='instance', + name='enabled', + field=models.BooleanField(default=True) + ) + ] diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 56137378d6..3913a4ace7 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -184,7 +184,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): # NOTE: We sorta have to assume the host count matches and that forks default to 5 from awx.main.models.inventory import Host count_hosts = Host.objects.filter( enabled=True, inventory__ad_hoc_commands__pk=self.pk).count() - return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10 + return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1 def copy(self): data = {} diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index face2befdb..86c3930299 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -594,7 +594,7 @@ class CredentialType(CommonModelNameNotUnique): return class TowerNamespace: - filename = None + pass tower_namespace = TowerNamespace() @@ -622,17 +622,25 @@ class CredentialType(CommonModelNameNotUnique): if len(value): namespace[field_name] = value - file_tmpl = self.injectors.get('file', {}).get('template') - if file_tmpl is not None: - # If a 
file template is provided, render the file and update the - # special `tower` template namespace so the filename can be - # referenced in other injectors + file_tmpls = self.injectors.get('file', {}) + # If any file templates are provided, render the files and update the + # special `tower` template namespace so the filename can be + # referenced in other injectors + for file_label, file_tmpl in file_tmpls.items(): data = Template(file_tmpl).render(**namespace) _, path = tempfile.mkstemp(dir=private_data_dir) with open(path, 'w') as f: f.write(data) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) - namespace['tower'].filename = path + + # determine if filename indicates single file or many + if file_label.find('.') == -1: + tower_namespace.filename = path + else: + if not hasattr(tower_namespace, 'filename'): + tower_namespace.filename = TowerNamespace() + file_label = file_label.split('.')[1] + setattr(tower_namespace.filename, file_label, path) for env_var, tmpl in self.injectors.get('env', {}).items(): if env_var.startswith('ANSIBLE_') or env_var in self.ENV_BLACKLIST: diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index f2e57f7a07..bf1d7f8266 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -1,8 +1,10 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-from django.db import models -from django.db.models.signals import post_save +from decimal import Decimal + +from django.db import models, connection +from django.db.models.signals import post_save, post_delete from django.dispatch import receiver from django.utils.translation import ugettext_lazy as _ from django.conf import settings @@ -10,12 +12,15 @@ from django.utils.timezone import now, timedelta from solo.models import SingletonModel +from awx import __version__ as awx_application_version from awx.api.versioning import reverse from awx.main.managers import InstanceManager, InstanceGroupManager +from awx.main.fields import JSONField from awx.main.models.inventory import InventoryUpdate from awx.main.models.jobs import Job from awx.main.models.projects import ProjectUpdate from awx.main.models.unified_jobs import UnifiedJob +from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity __all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',) @@ -38,6 +43,30 @@ class Instance(models.Model): default=100, editable=False, ) + capacity_adjustment = models.DecimalField( + default=Decimal(1.0), + max_digits=3, + decimal_places=2, + ) + enabled = models.BooleanField( + default=True + ) + cpu = models.IntegerField( + default=0, + editable=False, + ) + memory = models.BigIntegerField( + default=0, + editable=False, + ) + cpu_capacity = models.IntegerField( + default=0, + editable=False, + ) + mem_capacity = models.IntegerField( + default=0, + editable=False, + ) class Meta: app_label = 'main' @@ -63,6 +92,23 @@ class Instance(models.Model): grace_period = settings.AWX_ISOLATED_PERIODIC_CHECK * 2 return self.modified < ref_time - timedelta(seconds=grace_period) + def is_controller(self): + return Instance.objects.filter(rampart_groups__controller__instances=self).exists() + + + def refresh_capacity(self): + cpu = get_cpu_capacity() + mem = get_mem_capacity() + self.capacity = get_system_task_capacity(self.capacity_adjustment) 
+ self.cpu = cpu[0] + self.memory = mem[0] + self.cpu_capacity = cpu[1] + self.mem_capacity = mem[1] + self.version = awx_application_version + self.save(update_fields=['capacity', 'version', 'modified', 'cpu', + 'memory', 'cpu_capacity', 'mem_capacity']) + + class InstanceGroup(models.Model): """A model representing a Queue/Group of AWX Instances.""" @@ -85,6 +131,19 @@ class InstanceGroup(models.Model): default=None, null=True ) + policy_instance_percentage = models.IntegerField( + default=0, + help_text=_("Percentage of Instances to automatically assign to this group") + ) + policy_instance_minimum = models.IntegerField( + default=0, + help_text=_("Static minimum number of Instances to automatically assign to this group") + ) + policy_instance_list = JSONField( + default=[], + blank=True, + help_text=_("List of exact-match Instances that will always be automatically assigned to this group") + ) def get_absolute_url(self, request=None): return reverse('api:instance_group_detail', kwargs={'pk': self.pk}, request=request) @@ -119,6 +178,32 @@ class JobOrigin(models.Model): app_label = 'main' +@receiver(post_save, sender=InstanceGroup) +def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs): + if created: + from awx.main.tasks import apply_cluster_membership_policies + connection.on_commit(lambda: apply_cluster_membership_policies.apply_async()) + + +@receiver(post_save, sender=Instance) +def on_instance_saved(sender, instance, created=False, raw=False, **kwargs): + if created: + from awx.main.tasks import apply_cluster_membership_policies + connection.on_commit(lambda: apply_cluster_membership_policies.apply_async()) + + +@receiver(post_delete, sender=InstanceGroup) +def on_instance_group_deleted(sender, instance, using, **kwargs): + from awx.main.tasks import apply_cluster_membership_policies + connection.on_commit(lambda: apply_cluster_membership_policies.apply_async()) + + +@receiver(post_delete, sender=Instance) +def 
on_instance_deleted(sender, instance, using, **kwargs): + from awx.main.tasks import apply_cluster_membership_policies + connection.on_commit(lambda: apply_cluster_membership_policies.apply_async()) + + # Unfortunately, the signal can't just be connected against UnifiedJob; it # turns out that creating a model's subclass doesn't fire the signal for the # superclass model. diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index fc437e236e..9878aab1d9 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -50,6 +50,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin): an inventory source contains lists and hosts. ''' + FIELDS_TO_PRESERVE_AT_COPY = ['hosts', 'groups', 'instance_groups'] KIND_CHOICES = [ ('', _('Hosts have a direct link to this inventory.')), ('smart', _('Hosts for inventory generated using the host_filter property.')), @@ -505,6 +506,10 @@ class Host(CommonModelNameNotUnique): A managed node ''' + FIELDS_TO_PRESERVE_AT_COPY = [ + 'name', 'description', 'groups', 'inventory', 'enabled', 'instance_id', 'variables' + ] + class Meta: app_label = 'main' unique_together = (("name", "inventory"),) # FIXME: Add ('instance_id', 'inventory') after migration. @@ -692,6 +697,10 @@ class Group(CommonModelNameNotUnique): groups. 
''' + FIELDS_TO_PRESERVE_AT_COPY = [ + 'name', 'description', 'inventory', 'children', 'parents', 'hosts', 'variables' + ] + class Meta: app_label = 'main' unique_together = (("name", "inventory"),) @@ -1602,7 +1611,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, @property def task_impact(self): - return 50 + return 1 # InventoryUpdate credential required # Custom and SCM InventoryUpdate credential not required diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 4d3213b12c..0d27329cdf 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -220,6 +220,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour A job template is a reusable job definition for applying a project (with playbook) to an inventory source with a given credential. ''' + FIELDS_TO_PRESERVE_AT_COPY = [ + 'labels', 'instance_groups', 'credentials', 'survey_spec' + ] + FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential'] SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')] class Meta: @@ -620,10 +624,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana # NOTE: We sorta have to assume the host count matches and that forks default to 5 from awx.main.models.inventory import Host if self.launch_type == 'callback': - count_hosts = 1 + count_hosts = 2 else: count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count() - return min(count_hosts, 5 if self.forks == 0 else self.forks) * 10 + return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1 @property def successful_hosts(self): @@ -1190,7 +1194,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin): @property def task_impact(self): - return 150 + return 5 @property def preferred_instance_groups(self): diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 5794d170d7..ef3f809a74 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py 
@@ -229,6 +229,8 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn ''' SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')] + FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials'] + FIELDS_TO_DISCARD_AT_COPY = ['local_path'] class Meta: app_label = 'main' @@ -492,7 +494,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage @property def task_impact(self): - return 0 if self.job_type == 'run' else 20 + return 0 if self.job_type == 'run' else 1 @property def result_stdout(self): diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index 59629c89a7..011ad82ed3 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -127,7 +127,7 @@ class Schedule(CommonModel, LaunchTimeConfig): https://github.com/dateutil/dateutil/pull/619 """ kwargs['forceset'] = True - kwargs['tzinfos'] = {} + kwargs['tzinfos'] = {x: dateutil.tz.tzutc() for x in dateutil.parser.parserinfo().UTCZONE} match = cls.TZID_REGEX.match(rrule) if match is not None: rrule = cls.TZID_REGEX.sub("DTSTART\gTZI\g", rrule) @@ -150,14 +150,13 @@ class Schedule(CommonModel, LaunchTimeConfig): # > UTC time. raise ValueError('RRULE UNTIL values must be specified in UTC') - try: - first_event = x[0] - if first_event < now() - datetime.timedelta(days=365 * 5): - # For older DTSTART values, if there are more than 1000 recurrences... 
- if len(x[:1001]) > 1000: - raise ValueError('RRULE values that yield more than 1000 events are not allowed.') - except IndexError: - pass + if 'MINUTELY' in rrule or 'HOURLY' in rrule: + try: + first_event = x[0] + if first_event < now() - datetime.timedelta(days=365 * 5): + raise ValueError('RRULE values with more than 1000 events are not allowed.') + except IndexError: + pass return x def __unicode__(self): diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index c2a74adfc8..16c043a991 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -432,7 +432,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio copy_m2m_relationships(self, unified_jt, fields) return unified_jt - def _accept_or_ignore_job_kwargs(self, _exclude_errors=None, **kwargs): + def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs): ''' Override in subclass if template accepts _any_ prompted params ''' diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index d47745e1b8..e3bf2640dd 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -110,6 +110,13 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig): class WorkflowJobTemplateNode(WorkflowNodeBase): + FIELDS_TO_PRESERVE_AT_COPY = [ + 'unified_job_template', 'workflow_job_template', 'success_nodes', 'failure_nodes', + 'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords', + 'char_prompts' + ] + REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords'] + workflow_job_template = models.ForeignKey( 'WorkflowJobTemplate', related_name='workflow_job_template_nodes', @@ -283,6 +290,9 @@ class WorkflowJobOptions(BaseModel): class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin): SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')] + FIELDS_TO_PRESERVE_AT_COPY = [ + 'labels', 'instance_groups', 
'workflow_job_template_nodes', 'credentials', 'survey_spec' + ] class Meta: app_label = 'main' @@ -353,7 +363,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl workflow_job.copy_nodes_from_original(original=self) return workflow_job - def _accept_or_ignore_job_kwargs(self, **kwargs): + def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs): prompted_fields = {} rejected_fields = {} accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(kwargs.get('extra_vars', {})) @@ -394,11 +404,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl node_list.append(node.pk) return node_list - def user_copy(self, user): - new_wfjt = self.copy_unified_jt() - new_wfjt.copy_nodes_from_original(original=self, user=user) - return new_wfjt - class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin): class Meta: diff --git a/awx/main/queue.py b/awx/main/queue.py index 401c73f831..3f68c68c2a 100644 --- a/awx/main/queue.py +++ b/awx/main/queue.py @@ -5,6 +5,8 @@ import logging import os +from six.moves import xrange + # Django from django.conf import settings @@ -46,6 +48,6 @@ class CallbackQueueDispatcher(object): delivery_mode="persistent" if settings.PERSISTENT_CALLBACK_MESSAGES else "transient", routing_key=self.connection_queue) return - except Exception, e: + except Exception as e: self.logger.info('Publish Job Event Exception: %r, retry=%d', e, retry_count, exc_info=True) diff --git a/awx/main/scheduler/tasks.py b/awx/main/scheduler/tasks.py index 70d4c95354..89e36f6a93 100644 --- a/awx/main/scheduler/tasks.py +++ b/awx/main/scheduler/tasks.py @@ -21,12 +21,12 @@ class LogErrorsTask(Task): super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo) -@shared_task +@shared_task(base=LogErrorsTask) def run_job_launch(job_id): TaskManager().schedule() -@shared_task +@shared_task(base=LogErrorsTask) def run_job_complete(job_id): 
TaskManager().schedule() diff --git a/awx/main/signals.py b/awx/main/signals.py index d0bad86619..534a22dd0d 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -577,5 +577,5 @@ def delete_inventory_for_org(sender, instance, **kwargs): for inventory in inventories: try: inventory.schedule_deletion(user_id=getattr(user, 'id', None)) - except RuntimeError, e: + except RuntimeError as e: logger.debug(e) diff --git a/awx/main/south_migrations/0031_v145_changes.py b/awx/main/south_migrations/0031_v145_changes.py index 0f88ddcc73..f224bc64d1 100644 --- a/awx/main/south_migrations/0031_v145_changes.py +++ b/awx/main/south_migrations/0031_v145_changes.py @@ -17,19 +17,19 @@ class Migration(DataMigration): obj1 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object1_id) + ")") if hasattr(activity_stream_object, activity_stream_object.object1): getattr(activity_stream_object, activity_stream_object.object1).add(obj1) - except ObjectDoesNotExist, e: + except ObjectDoesNotExist as e: print("Object 1 for AS id=%s does not exist. (Object Type: %s, id: %s" % (str(activity_stream_object.id), activity_stream_object.object1_type, str(activity_stream_object.object1_id))) continue if activity_stream_object.operation in ('associate', 'disassociate'): try: - obj_type = "orm." + activity_stream_object.object2_type.split(".")[-1] + obj_type = "orm." + activity_stream_object.object2_type.split(".")[-1] if obj_type == 'orm.User': obj_type = 'orm["auth.User"]' obj2 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object2_id) + ")") getattr(activity_stream_object, activity_stream_object.object2).add(obj2) - except ObjectDoesNotExist, e: + except ObjectDoesNotExist as e: print("Object 2 for AS id=%s does not exist. 
(Object Type: %s, id: %s" % (str(activity_stream_object.id), activity_stream_object.object2_type, str(activity_stream_object.object2_id))) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 1f74fb04a1..10fde0d82b 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -2,10 +2,11 @@ # All Rights Reserved. # Python -from collections import OrderedDict +from collections import OrderedDict, namedtuple import ConfigParser import cStringIO import functools +import importlib import json import logging import os @@ -25,12 +26,13 @@ except Exception: psutil = None # Celery -from celery import Task, shared_task -from celery.signals import celeryd_init, worker_process_init, worker_shutdown +from celery import Task, shared_task, Celery +from celery.signals import celeryd_init, worker_process_init, worker_shutdown, worker_ready, celeryd_after_setup # Django from django.conf import settings from django.db import transaction, DatabaseError, IntegrityError +from django.db.models.fields.related import ForeignKey from django.utils.timezone import now, timedelta from django.utils.encoding import smart_str from django.core.mail import send_mail @@ -53,16 +55,17 @@ from awx.main.queue import CallbackQueueDispatcher from awx.main.expect import run, isolated_manager from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url, check_proot_installed, build_proot_temp_dir, get_licenser, - wrap_args_with_proot, get_system_task_capacity, OutputEventFilter, - ignore_inventory_computed_fields, ignore_inventory_group_removal, - get_type_for_model, extract_ansible_vars) + wrap_args_with_proot, OutputEventFilter, ignore_inventory_computed_fields, + ignore_inventory_group_removal, get_type_for_model, extract_ansible_vars) from awx.main.utils.reload import restart_local_services, stop_local_services +from awx.main.utils.pglock import advisory_lock +from awx.main.utils.ha import update_celery_worker_routes, register_celery_worker_queues from 
awx.main.utils.handlers import configure_external_logger from awx.main.consumers import emit_channel_notification from awx.conf import settings_registry __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', - 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', + 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies', 'update_inventory_computed_fields', 'update_host_smart_inventory_memberships', 'send_notifications', 'run_administrative_checks', 'purge_old_stdout_files'] @@ -130,6 +133,56 @@ def inform_cluster_of_shutdown(*args, **kwargs): logger.exception('Encountered problem with normal shutdown signal.') +@shared_task(bind=True, queue='tower_instance_router', base=LogErrorsTask) +def apply_cluster_membership_policies(self): + with advisory_lock('cluster_policy_lock', wait=True): + considered_instances = Instance.objects.all().order_by('id') + total_instances = considered_instances.count() + filtered_instances = [] + actual_groups = [] + actual_instances = [] + Group = namedtuple('Group', ['obj', 'instances']) + Node = namedtuple('Instance', ['obj', 'groups']) + # Process policy instance list first, these will represent manually managed instances + # that will not go through automatic policy determination + for ig in InstanceGroup.objects.all(): + logger.info("Considering group {}".format(ig.name)) + ig.instances.clear() + group_actual = Group(obj=ig, instances=[]) + for i in ig.policy_instance_list: + inst = Instance.objects.filter(hostname=i) + if not inst.exists(): + continue + inst = inst[0] + logger.info("Policy List, adding {} to {}".format(inst.hostname, ig.name)) + group_actual.instances.append(inst.id) + ig.instances.add(inst) + filtered_instances.append(inst) + actual_groups.append(group_actual) + # Process Instance minimum policies next, since it represents a concrete lower bound to the + # number of instances to make available to instance groups + actual_instances = 
[Node(obj=i, groups=[]) for i in filter(lambda x: x not in filtered_instances, considered_instances)] + logger.info("Total instances not directly associated: {}".format(total_instances)) + for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)): + for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): + if len(g.instances) >= g.obj.policy_instance_minimum: + break + logger.info("Policy minimum, adding {} to {}".format(i.obj.hostname, g.obj.name)) + g.obj.instances.add(i.obj) + g.instances.append(i.obj.id) + i.groups.append(g.obj.id) + # Finally process instance policy percentages + for g in sorted(actual_groups, cmp=lambda x,y: len(x.instances) - len(y.instances)): + for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): + if 100 * float(len(g.instances)) / len(actual_instances) >= g.obj.policy_instance_percentage: + break + logger.info("Policy percentage, adding {} to {}".format(i.obj.hostname, g.obj.name)) + g.instances.append(i.obj.id) + g.obj.instances.add(i.obj) + i.groups.append(g.obj.id) + handle_ha_toplogy_changes() + + @shared_task(queue='tower_broadcast_all', bind=True, base=LogErrorsTask) def handle_setting_changes(self, setting_keys): orig_len = len(setting_keys) @@ -147,6 +200,45 @@ def handle_setting_changes(self, setting_keys): break +@shared_task(bind=True, queue='tower_broadcast_all', base=LogErrorsTask) +def handle_ha_toplogy_changes(self): + instance = Instance.objects.me() + logger.debug("Reconfigure celeryd queues task on host {}".format(self.request.hostname)) + awx_app = Celery('awx') + awx_app.config_from_object('django.conf:settings', namespace='CELERY') + (instance, removed_queues, added_queues) = register_celery_worker_queues(awx_app, self.request.hostname) + logger.info("Workers on tower node '{}' removed from queues {} and added to queues {}" + .format(instance.hostname, removed_queues, added_queues)) + updated_routes = 
update_celery_worker_routes(instance, settings) + logger.info("Worker on tower node '{}' updated celery routes {} all routes are now {}" + .format(instance.hostname, updated_routes, self.app.conf.CELERY_TASK_ROUTES)) + + +@worker_ready.connect +def handle_ha_toplogy_worker_ready(sender, **kwargs): + logger.debug("Configure celeryd queues task on host {}".format(sender.hostname)) + (instance, removed_queues, added_queues) = register_celery_worker_queues(sender.app, sender.hostname) + logger.info("Workers on tower node '{}' unsubscribed from queues {} and subscribed to queues {}" + .format(instance.hostname, removed_queues, added_queues)) + + +@celeryd_init.connect +def handle_update_celery_routes(sender=None, conf=None, **kwargs): + conf = conf if conf else sender.app.conf + logger.debug("Registering celery routes for {}".format(sender)) + instance = Instance.objects.me() + added_routes = update_celery_worker_routes(instance, conf) + logger.info("Workers on tower node '{}' added routes {} all routes are now {}" + .format(instance.hostname, added_routes, conf.CELERY_TASK_ROUTES)) + + +@celeryd_after_setup.connect +def handle_update_celery_hostname(sender, instance, **kwargs): + tower_instance = Instance.objects.me() + instance.hostname = 'celery@{}'.format(tower_instance.hostname) + logger.warn("Set hostname to {}".format(instance.hostname)) + + @shared_task(queue='tower', base=LogErrorsTask) def send_notifications(notification_list, job_id=None): if not isinstance(notification_list, list): @@ -215,6 +307,7 @@ def cluster_node_heartbeat(self): instance_list = list(Instance.objects.filter(rampart_groups__controller__isnull=True).distinct()) this_inst = None lost_instances = [] + for inst in list(instance_list): if inst.hostname == settings.CLUSTER_HOST_ID: this_inst = inst @@ -224,11 +317,15 @@ def cluster_node_heartbeat(self): instance_list.remove(inst) if this_inst: startup_event = this_inst.is_lost(ref_time=nowtime) - if this_inst.capacity == 0: + if 
this_inst.capacity == 0 and this_inst.enabled: logger.warning('Rejoining the cluster as instance {}.'.format(this_inst.hostname)) - this_inst.capacity = get_system_task_capacity() - this_inst.version = awx_application_version - this_inst.save(update_fields=['capacity', 'version', 'modified']) + if this_inst.enabled: + this_inst.refresh_capacity() + handle_ha_toplogy_changes.apply_async() + elif this_inst.capacity != 0 and not this_inst.enabled: + this_inst.capacity = 0 + this_inst.save(update_fields=['capacity']) + handle_ha_toplogy_changes.apply_async() if startup_event: return else: @@ -237,7 +334,7 @@ def cluster_node_heartbeat(self): for other_inst in instance_list: if other_inst.version == "": continue - if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version) and not settings.DEBUG: + if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG: logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(other_inst.hostname, other_inst.version, this_inst.hostname, @@ -254,6 +351,10 @@ def cluster_node_heartbeat(self): other_inst.save(update_fields=['capacity']) logger.error("Host {} last checked in at {}, marked as lost.".format( other_inst.hostname, other_inst.modified)) + if settings.AWX_AUTO_DEPROVISION_INSTANCES: + deprovision_hostname = other_inst.hostname + other_inst.delete() + logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname)) except DatabaseError as e: if 'did not affect any rows' in str(e): logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname)) @@ -1036,7 +1137,7 @@ class RunJob(BaseTask): # job and visible inside the proot environment (when enabled). 
cp_dir = os.path.join(kwargs['private_data_dir'], 'cp') if not os.path.exists(cp_dir): - os.mkdir(cp_dir, 0700) + os.mkdir(cp_dir, 0o700) env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r') # Allow the inventory script to include host variables inline via ['_meta']['hostvars']. @@ -1723,7 +1824,7 @@ class RunInventoryUpdate(BaseTask): cp.set(section, 'ssl_verify', "false") cloudforms_opts = dict(inventory_update.source_vars_dict.items()) - for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix']: + for opt in ['version', 'purge_actions', 'clean_group_keys', 'nest_tags', 'suffix', 'prefer_ipv4']: if opt in cloudforms_opts: cp.set(section, opt, cloudforms_opts[opt]) @@ -2160,6 +2261,62 @@ class RunSystemJob(BaseTask): return settings.BASE_DIR +def _reconstruct_relationships(copy_mapping): + for old_obj, new_obj in copy_mapping.items(): + model = type(old_obj) + for field_name in getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []): + field = model._meta.get_field(field_name) + if isinstance(field, ForeignKey): + if getattr(new_obj, field_name, None): + continue + related_obj = getattr(old_obj, field_name) + related_obj = copy_mapping.get(related_obj, related_obj) + setattr(new_obj, field_name, related_obj) + elif field.many_to_many: + for related_obj in getattr(old_obj, field_name).all(): + getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj)) + new_obj.save() + + +@shared_task(bind=True, queue='tower', base=LogErrorsTask) +def deep_copy_model_obj( + self, model_module, model_name, obj_pk, new_obj_pk, + user_pk, sub_obj_list, permission_check_func=None +): + logger.info('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk)) + from awx.api.generics import CopyAPIView + model = getattr(importlib.import_module(model_module), model_name, None) + if model is None: + return + try: + obj = model.objects.get(pk=obj_pk) + new_obj = model.objects.get(pk=new_obj_pk) + creater = 
User.objects.get(pk=user_pk) + except ObjectDoesNotExist: + logger.warning("Object or user no longer exists.") + return + with transaction.atomic(): + copy_mapping = {} + for sub_obj_setup in sub_obj_list: + sub_model = getattr(importlib.import_module(sub_obj_setup[0]), + sub_obj_setup[1], None) + if sub_model is None: + continue + try: + sub_obj = sub_model.objects.get(pk=sub_obj_setup[2]) + except ObjectDoesNotExist: + continue + copy_mapping.update(CopyAPIView.copy_model_obj( + obj, new_obj, sub_model, sub_obj, creater + )) + _reconstruct_relationships(copy_mapping) + if permission_check_func: + permission_check_func = getattr(getattr( + importlib.import_module(permission_check_func[0]), permission_check_func[1] + ), permission_check_func[2]) + permission_check_func(creater, copy_mapping.values()) + + celery_app.register_task(RunJob()) celery_app.register_task(RunProjectUpdate()) celery_app.register_task(RunInventoryUpdate()) diff --git a/awx/main/templatetags/__init__.py b/awx/main/templatetags/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/templatetags/swagger.py b/awx/main/templatetags/swagger.py new file mode 100644 index 0000000000..314d599710 --- /dev/null +++ b/awx/main/templatetags/swagger.py @@ -0,0 +1,50 @@ +import re +from django.utils.encoding import force_unicode +from django import template + +register = template.Library() + +CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE|re.VERBOSE) # noqa +VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE|re.VERBOSE) # noqa + + +@register.filter +def anora(text): + # https://pypi.python.org/pypi/anora + # < 10 lines of BSD-3 code, not worth a dependency + text = force_unicode(text) + anora = 'an' if not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text) else 'a' + return anora + ' ' + text + + +@register.tag(name='ifmeth') +def ifmeth(parser, token): + """ + Used to 
mark template blocks for Swagger/OpenAPI output. + If the specified method matches the *current* method in Swagger/OpenAPI + generation, show the block. Otherwise, the block is omitted. + + {% ifmeth GET %} + Make a GET request to... + {% endifmeth %} + + {% ifmeth PUT PATCH %} + Make a PUT or PATCH request to... + {% endifmeth %} + """ + allowed_methods = [m.upper() for m in token.split_contents()[1:]] + nodelist = parser.parse(('endifmeth',)) + parser.delete_first_token() + return MethodFilterNode(allowed_methods, nodelist) + + +class MethodFilterNode(template.Node): + def __init__(self, allowed_methods, nodelist): + self.allowed_methods = allowed_methods + self.nodelist = nodelist + + def render(self, context): + swagger_method = context.get('swagger_method') + if not swagger_method or swagger_method.upper() in self.allowed_methods: + return self.nodelist.render(context) + return '' diff --git a/awx/main/tests/docs/__init__.py b/awx/main/tests/docs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/main/tests/docs/conftest.py b/awx/main/tests/docs/conftest.py new file mode 100644 index 0000000000..bd0cf1c99f --- /dev/null +++ b/awx/main/tests/docs/conftest.py @@ -0,0 +1,13 @@ +from awx.main.tests.functional.conftest import * # noqa + + +def pytest_addoption(parser): + parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0") + + +def pytest_generate_tests(metafunc): + # This is called for every test. Only get/set command line arguments + # if the argument is specified in the list of test "fixturenames". 
+    option_value = metafunc.config.option.release
+    if 'release' in metafunc.fixturenames and option_value is not None:
+        metafunc.parametrize("release", [option_value])
diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py
new file mode 100644
index 0000000000..b84e4e9a52
--- /dev/null
+++ b/awx/main/tests/docs/test_swagger_generation.py
@@ -0,0 +1,171 @@
+import datetime
+import json
+import re
+
+from django.conf import settings
+from django.core.serializers.json import DjangoJSONEncoder
+from django.utils.functional import Promise
+from django.utils.encoding import force_text
+
+from coreapi.compat import force_bytes
+from openapi_codec.encode import generate_swagger_object
+import pytest
+
+from awx.api.versioning import drf_reverse
+
+
+class i18nEncoder(DjangoJSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, Promise):
+            return force_text(obj)
+        return super(i18nEncoder, self).default(obj)
+
+
+@pytest.mark.django_db
+class TestSwaggerGeneration():
+    """
+    This class is used to generate a Swagger/OpenAPI document for the awx
+    API. A _prepare fixture generates a JSON blob containing OpenAPI data,
+    individual tests have the ability to modify the payload.
+
+    Finally, the JSON content is written to a file, `swagger.json`, in the
+    current working directory.
+
+    $ py.test test_swagger_generation.py --release 3.3.0
+
+    To customize the `info.description` in the generated OpenAPI document,
+    modify the text in `awx.api.templates.swagger.description.md`
+    """
+    JSON = {}
+
+    @pytest.fixture(autouse=True, scope='function')
+    def _prepare(self, get, admin):
+        if not self.__class__.JSON:
+            url = drf_reverse('api:swagger_view') + '?format=openapi'
+            response = get(url, user=admin)
+            data = generate_swagger_object(response.data)
+            if response.has_header('X-Deprecated-Paths'):
+                data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
+            data.update(response.accepted_renderer.get_customizations() or {})
+
+            data['host'] = None
+            data['modified'] = datetime.datetime.utcnow().isoformat()
+            data['schemes'] = ['https']
+            data['consumes'] = ['application/json']
+
+            revised_paths = {}
+            deprecated_paths = data.pop('deprecated_paths', [])
+            for path, node in data['paths'].items():
+                # change {version} in paths to the actual default API version (e.g., v2)
+                revised_paths[path.replace(
+                    '{version}',
+                    settings.REST_FRAMEWORK['DEFAULT_VERSION']
+                )] = node
+                for method in node:
+                    if path in deprecated_paths:
+                        node[method]['deprecated'] = True
+                    if 'description' in node[method]:
+                        # Pop off the first line and use that as the summary
+                        lines = node[method]['description'].splitlines()
+                        node[method]['summary'] = lines.pop(0).strip('#:')
+                        node[method]['description'] = '\n'.join(lines)
+
+                    # remove the required `version` parameter
+                    for param in node[method].get('parameters'):
+                        if param['in'] == 'path' and param['name'] == 'version':
+                            node[method]['parameters'].remove(param)
+            data['paths'] = revised_paths
+            self.__class__.JSON = data
+
+    def test_sanity(self, release):
+        JSON = self.__class__.JSON
+        JSON['info']['version'] = release
+
+        # Make some basic assertions about the rendered JSON so we can
+        # be sure it doesn't break across DRF upgrades and view/serializer
+        # changes.
+ assert len(JSON['paths']) + + # The number of API endpoints changes over time, but let's just check + # for a reasonable number here; if this test starts failing, raise/lower the bounds + paths = JSON['paths'] + assert 250 < len(paths) < 300 + assert paths['/api/'].keys() == ['get'] + assert paths['/api/v2/'].keys() == ['get'] + assert sorted( + paths['/api/v2/credentials/'].keys() + ) == ['get', 'post'] + assert sorted( + paths['/api/v2/credentials/{id}/'].keys() + ) == ['delete', 'get', 'patch', 'put'] + assert paths['/api/v2/settings/'].keys() == ['get'] + assert paths['/api/v2/settings/{category_slug}/'].keys() == [ + 'get', 'put', 'patch', 'delete' + ] + + # Test deprecated paths + assert paths['/api/v2/jobs/{id}/extra_credentials/']['get']['deprecated'] is True + + @pytest.mark.parametrize('path', [ + '/api/', + '/api/v2/', + '/api/v2/ping/', + '/api/v2/config/', + ]) + def test_basic_paths(self, path, get, admin): + # hit a couple important endpoints so we always have example data + get(path, user=admin, expect=200) + + def test_autogen_response_examples(self, swagger_autogen): + for pattern, node in TestSwaggerGeneration.JSON['paths'].items(): + pattern = pattern.replace('{id}', '[0-9]+') + pattern = pattern.replace('{category_slug}', '[a-zA-Z0-9\-]+') + for path, result in swagger_autogen.items(): + if re.match('^{}$'.format(pattern), path): + for key, value in result.items(): + method, status_code = key + content_type, resp, request_data = value + if method in node: + status_code = str(status_code) + if content_type: + produces = node[method].setdefault('produces', []) + if content_type not in produces: + produces.append(content_type) + if request_data and status_code.startswith('2'): + # DRF builds a schema based on the serializer + # fields. 
This is _pretty good_, but if we + # have _actual_ JSON examples, those are even + # better and we should use them instead + for param in node[method].get('parameters'): + if param['in'] == 'body': + node[method]['parameters'].remove(param) + node[method].setdefault('parameters', []).append({ + 'name': 'data', + 'in': 'body', + 'schema': {'example': request_data}, + }) + + # Build response examples + if resp: + if content_type.startswith('text/html'): + continue + if content_type == 'application/json': + resp = json.loads(resp) + node[method]['responses'].setdefault(status_code, {}).setdefault( + 'examples', {} + )[content_type] = resp + + @classmethod + def teardown_class(cls): + with open('swagger.json', 'w') as f: + data = force_bytes( + json.dumps(cls.JSON, cls=i18nEncoder, indent=2) + ) + # replace ISO dates w/ the same value so we don't generate + # needless diffs + data = re.sub( + '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]+Z', + '2018-02-01T08:00:00.000000Z', + data + ) + f.write(data) diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index eee545336d..2f3ec0656f 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -35,8 +35,9 @@ def mk_instance(persisted=True, hostname='instance.example.org'): return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, hostname=hostname)[0] -def mk_instance_group(name='tower', instance=None): - ig, status = InstanceGroup.objects.get_or_create(name=name) +def mk_instance_group(name='tower', instance=None, minimum=0, percentage=0): + ig, status = InstanceGroup.objects.get_or_create(name=name, policy_instance_minimum=minimum, + policy_instance_percentage=percentage) if instance is not None: if type(instance) == list: for i in instance: diff --git a/awx/main/tests/factories/tower.py b/awx/main/tests/factories/tower.py index a8f20f941f..ecb395dd99 100644 --- a/awx/main/tests/factories/tower.py +++ 
b/awx/main/tests/factories/tower.py @@ -135,8 +135,8 @@ def create_instance(name, instance_groups=None): return mk_instance(hostname=name) -def create_instance_group(name, instances=None): - return mk_instance_group(name=name, instance=instances) +def create_instance_group(name, instances=None, minimum=0, percentage=0): + return mk_instance_group(name=name, instance=instances, minimum=minimum, percentage=percentage) def create_survey_spec(variables=None, default_type='integer', required=True, min=None, max=None): diff --git a/awx/main/tests/functional/api/test_schedules.py b/awx/main/tests/functional/api/test_schedules.py index c6cfc5d91a..a5e4d94c91 100644 --- a/awx/main/tests/functional/api/test_schedules.py +++ b/awx/main/tests/functional/api/test_schedules.py @@ -27,6 +27,12 @@ def test_non_job_extra_vars_prohibited(post, project, admin_user): assert 'not allowed on launch' in str(r.data['extra_data'][0]) +@pytest.mark.django_db +def test_wfjt_schedule_accepted(post, workflow_job_template, admin_user): + url = reverse('api:workflow_job_template_schedules_list', kwargs={'pk': workflow_job_template.id}) + post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE}, admin_user, expect=201) + + @pytest.mark.django_db def test_valid_survey_answer(post, admin_user, project, inventory, survey_spec_factory): job_template = JobTemplate.objects.create( @@ -60,6 +66,7 @@ def test_valid_survey_answer(post, admin_user, project, inventory, survey_spec_f ("DTSTART:20300308T050000Z RRULE:FREQ=YEARLY;INTERVAL=1;BYWEEKNO=20", "BYWEEKNO not supported"), ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"), # noqa ("DTSTART:20300308T050000Z RRULE:FREQ=REGULARLY;INTERVAL=1", "rrule parsing failed validation: invalid 'FREQ': REGULARLY"), # noqa + ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"), # noqa ("DTSTART;TZID=America/New_York:20300308T050000Z 
RRULE:FREQ=DAILY;INTERVAL=1", "rrule parsing failed validation"), ("DTSTART:20300308T050000 RRULE:FREQ=DAILY;INTERVAL=1", "DTSTART cannot be a naive datetime"), ("DTSTART:19700101T000000Z RRULE:FREQ=MINUTELY;INTERVAL=1", "more than 1000 events are not allowed"), # noqa @@ -274,3 +281,10 @@ def test_dst_rollback_duplicates(post, admin_user): '2030-11-03 02:30:00-05:00', '2030-11-03 03:30:00-05:00', ] + + +@pytest.mark.django_db +def test_zoneinfo(get, admin_user): + url = reverse('api:schedule_zoneinfo') + r = get(url, admin_user, expect=200) + assert {'name': 'America/New_York'} in r.data diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py index 99b5619a7f..6cec0ab4fe 100644 --- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py +++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py @@ -158,6 +158,24 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): assert response.data['result_stdout'].splitlines() == ['Testing %d' % i for i in range(3)] +@pytest.mark.django_db +def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): + job = SystemJob() + job.save() + total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 + large_stdout = 'X' * total_bytes + SystemJobEvent(system_job=job, stdout=large_stdout, start_line=0).save() + url = reverse('api:system_job_detail', kwargs={'pk': job.pk}) + response = get(url, user=admin, expect=200) + assert response.data['result_stdout'] == ( + 'Standard Output too large to display ({actual} bytes), only download ' + 'supported for sizes over {max} bytes'.format( + actual=total_bytes, + max=settings.STDOUT_MAX_BYTES_DISPLAY + ) + ) + + @pytest.mark.django_db @pytest.mark.parametrize('Parent, Child, relation, view', [ [Job, JobEvent, 'job', 'api:job_stdout'], diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index d245ff3c7d..b799763094 100644 --- 
a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -1,4 +1,3 @@ - # Python import pytest import mock @@ -6,6 +5,7 @@ import json import os import six from datetime import timedelta +from six.moves import xrange # Django from django.core.urlresolvers import resolve @@ -33,7 +33,8 @@ from awx.main.models.inventory import ( Group, Inventory, InventoryUpdate, - InventorySource + InventorySource, + CustomInventoryScript ) from awx.main.models.organization import ( Organization, @@ -47,6 +48,13 @@ from awx.main.models.notifications import ( from awx.main.models.workflow import WorkflowJobTemplate from awx.main.models.ad_hoc_commands import AdHocCommand +__SWAGGER_REQUESTS__ = {} + + +@pytest.fixture(scope="session") +def swagger_autogen(requests=__SWAGGER_REQUESTS__): + return requests + @pytest.fixture(autouse=True) def clear_cache(): @@ -490,6 +498,13 @@ def inventory_update(inventory_source): return InventoryUpdate.objects.create(inventory_source=inventory_source) +@pytest.fixture +def inventory_script(organization): + return CustomInventoryScript.objects.create(name='test inv script', + organization=organization, + script='#!/usr/bin/python') + + @pytest.fixture def host(group, inventory): return group.hosts.create(name='single-host', inventory=inventory) @@ -547,6 +562,9 @@ def _request(verb): assert response.status_code == expect if hasattr(response, 'render'): response.render() + __SWAGGER_REQUESTS__.setdefault(request.path, {})[ + (request.method.lower(), response.status_code) + ] = (response.get('Content-Type', None), response.content, kwargs.get('data')) return response return rf diff --git a/awx/main/tests/functional/models/fact/test_get_timeline.py b/awx/main/tests/functional/models/fact/test_get_timeline.py index 940498f913..7dbcb4bb4e 100644 --- a/awx/main/tests/functional/models/fact/test_get_timeline.py +++ b/awx/main/tests/functional/models/fact/test_get_timeline.py @@ -1,6 +1,8 @@ import pytest from datetime import 
timedelta +from six.moves import xrange + from django.utils import timezone from awx.main.models import Fact @@ -19,7 +21,7 @@ def setup_common(hosts, fact_scans, ts_from=None, ts_to=None, epoch=timezone.now continue facts_known.append(f) fact_objs = Fact.get_timeline(hosts[0].id, module=module_name, ts_from=ts_from, ts_to=ts_to) - return (facts_known, fact_objs) + return (facts_known, fact_objs) @pytest.mark.django_db @@ -27,7 +29,7 @@ def test_all(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save): epoch = timezone.now() ts_from = epoch - timedelta(days=1) ts_to = epoch + timedelta(days=10) - + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, module_name=None, epoch=epoch) assert 9 == len(facts_known) assert 9 == len(fact_objs) @@ -53,7 +55,7 @@ def test_empty_db(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save): epoch = timezone.now() ts_from = epoch - timedelta(days=1) ts_to = epoch + timedelta(days=10) - + fact_objs = Fact.get_timeline(hosts[0].id, 'ansible', ts_from, ts_to) assert 0 == len(fact_objs) @@ -64,7 +66,7 @@ def test_no_results(hosts, fact_scans, monkeypatch_jsonbfield_get_db_prep_save): epoch = timezone.now() ts_from = epoch - timedelta(days=100) ts_to = epoch - timedelta(days=50) - + (facts_known, fact_objs) = setup_common(hosts, fact_scans, ts_from, ts_to, epoch=epoch) assert 0 == len(fact_objs) diff --git a/awx/main/tests/functional/models/test_schedule.py b/awx/main/tests/functional/models/test_schedule.py index 3768058f4f..101afa8b99 100644 --- a/awx/main/tests/functional/models/test_schedule.py +++ b/awx/main/tests/functional/models/test_schedule.py @@ -146,15 +146,16 @@ def test_tzinfo_naive_until(job_template, dtstart, until): @pytest.mark.django_db -def test_mismatched_until_timezone(job_template): - rrule = 'DTSTART;TZID=America/New_York:20180601T120000 RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000' + 'Z' # noqa the Z isn't allowed, because we have a TZID=America/New_York +def 
test_until_must_be_utc(job_template): + rrule = 'DTSTART;TZID=America/New_York:20180601T120000 RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000' # noqa the Z is required s = Schedule( name='Some Schedule', rrule=rrule, unified_job_template=job_template ) - with pytest.raises(ValueError): + with pytest.raises(ValueError) as e: s.save() + assert 'RRULE UNTIL values must be specified in UTC' in str(e) @pytest.mark.django_db @@ -171,7 +172,7 @@ def test_utc_until_in_the_past(job_template): @pytest.mark.django_db -@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 03, 05, tzinfo=pytz.utc)) +@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 3, 5, tzinfo=pytz.utc)) def test_dst_phantom_hour(job_template): # The DST period in the United States begins at 02:00 (2 am) local time, so # the hour from 2:00:00 to 2:59:59 does not exist in the night of the diff --git a/awx/main/tests/functional/models/test_workflow.py b/awx/main/tests/functional/models/test_workflow.py index af6724d8b7..61882f2097 100644 --- a/awx/main/tests/functional/models/test_workflow.py +++ b/awx/main/tests/functional/models/test_workflow.py @@ -191,25 +191,6 @@ class TestWorkflowJobTemplate: assert (test_view.is_valid_relation(nodes[2], node_assoc_1) == {'Error': 'Cannot associate failure_nodes when always_nodes have been associated.'}) - def test_wfjt_copy(self, wfjt, job_template, inventory, admin_user): - old_nodes = wfjt.workflow_job_template_nodes.all() - node1 = old_nodes[1] - node1.unified_job_template = job_template - node1.save() - node2 = old_nodes[2] - node2.inventory = inventory - node2.save() - new_wfjt = wfjt.user_copy(admin_user) - for fd in ['description', 'survey_spec', 'survey_enabled', 'extra_vars']: - assert getattr(wfjt, fd) == getattr(new_wfjt, fd) - assert new_wfjt.organization == wfjt.organization - assert len(new_wfjt.workflow_job_template_nodes.all()) == 3 - nodes = new_wfjt.workflow_job_template_nodes.all() - assert 
nodes[0].success_nodes.all()[0] == nodes[1] - assert nodes[1].failure_nodes.all()[0] == nodes[2] - assert nodes[1].unified_job_template == job_template - assert nodes[2].inventory == inventory - def test_wfjt_unique_together_with_org(self, organization): wfjt1 = WorkflowJobTemplate(name='foo', organization=organization) wfjt1.save() diff --git a/awx/main/tests/functional/task_management/test_rampart_groups.py b/awx/main/tests/functional/task_management/test_rampart_groups.py index f4c6ba95bf..9b4b3eac44 100644 --- a/awx/main/tests/functional/task_management/test_rampart_groups.py +++ b/awx/main/tests/functional/task_management/test_rampart_groups.py @@ -2,6 +2,8 @@ import pytest import mock from datetime import timedelta from awx.main.scheduler import TaskManager +from awx.main.models import InstanceGroup +from awx.main.tasks import apply_cluster_membership_policies @pytest.mark.django_db @@ -151,3 +153,34 @@ def test_failover_group_run(instance_factory, default_instance_group, mocker, tm.schedule() mock_job.assert_has_calls([mock.call(j1, ig1, []), mock.call(j1_1, ig2, [])]) assert mock_job.call_count == 2 + + +@pytest.mark.django_db +def test_instance_group_basic_policies(instance_factory, instance_group_factory): + i0 = instance_factory("i0") + i1 = instance_factory("i1") + i2 = instance_factory("i2") + i3 = instance_factory("i3") + i4 = instance_factory("i4") + ig0 = instance_group_factory("ig0") + ig1 = instance_group_factory("ig1", minimum=2) + ig2 = instance_group_factory("ig2", percentage=50) + ig3 = instance_group_factory("ig3", percentage=50) + ig0.policy_instance_list.append(i0.hostname) + ig0.save() + apply_cluster_membership_policies() + ig0 = InstanceGroup.objects.get(id=ig0.id) + ig1 = InstanceGroup.objects.get(id=ig1.id) + ig2 = InstanceGroup.objects.get(id=ig2.id) + ig3 = InstanceGroup.objects.get(id=ig3.id) + assert len(ig0.instances.all()) == 1 + assert i0 in ig0.instances.all() + assert len(InstanceGroup.objects.get(id=ig1.id).instances.all()) 
== 2 + assert i1 in ig1.instances.all() + assert i2 in ig1.instances.all() + assert len(InstanceGroup.objects.get(id=ig2.id).instances.all()) == 2 + assert i3 in ig2.instances.all() + assert i4 in ig2.instances.all() + assert len(InstanceGroup.objects.get(id=ig3.id).instances.all()) == 2 + assert i1 in ig3.instances.all() + assert i2 in ig3.instances.all() diff --git a/awx/main/tests/functional/test_copy.py b/awx/main/tests/functional/test_copy.py new file mode 100644 index 0000000000..99e123a8fa --- /dev/null +++ b/awx/main/tests/functional/test_copy.py @@ -0,0 +1,214 @@ +import pytest +import mock + +from awx.api.versioning import reverse +from awx.main.utils import decrypt_field +from awx.main.models.workflow import WorkflowJobTemplateNode +from awx.main.models.jobs import JobTemplate +from awx.main.tasks import deep_copy_model_obj + + +@pytest.mark.django_db +def test_job_template_copy(post, get, project, inventory, machine_credential, vault_credential, + credential, alice, job_template_with_survey_passwords, admin): + job_template_with_survey_passwords.project = project + job_template_with_survey_passwords.inventory = inventory + job_template_with_survey_passwords.save() + job_template_with_survey_passwords.credentials.add(credential) + job_template_with_survey_passwords.credentials.add(machine_credential) + job_template_with_survey_passwords.credentials.add(vault_credential) + job_template_with_survey_passwords.admin_role.members.add(alice) + assert get( + reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), + alice, expect=200 + ).data['can_copy'] is False + assert get( + reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), + admin, expect=200 + ).data['can_copy'] is True + jt_copy_pk = post( + reverse('api:job_template_copy', kwargs={'pk': job_template_with_survey_passwords.pk}), + {'name': 'new jt name'}, admin, expect=201 + ).data['id'] + jt_copy = 
type(job_template_with_survey_passwords).objects.get(pk=jt_copy_pk) + assert jt_copy.created_by == admin + assert jt_copy.name == 'new jt name' + assert jt_copy.project == project + assert jt_copy.inventory == inventory + assert jt_copy.playbook == job_template_with_survey_passwords.playbook + assert jt_copy.credentials.count() == 3 + assert credential in jt_copy.credentials.all() + assert vault_credential in jt_copy.credentials.all() + assert machine_credential in jt_copy.credentials.all() + assert job_template_with_survey_passwords.survey_spec == jt_copy.survey_spec + + +@pytest.mark.django_db +def test_project_copy(post, get, project, organization, scm_credential, alice): + project.credential = scm_credential + project.save() + project.admin_role.members.add(alice) + assert get( + reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200 + ).data['can_copy'] is False + project.organization.admin_role.members.add(alice) + assert get( + reverse('api:project_copy', kwargs={'pk': project.pk}), alice, expect=200 + ).data['can_copy'] is True + project_copy_pk = post( + reverse('api:project_copy', kwargs={'pk': project.pk}), + {'name': 'copied project'}, alice, expect=201 + ).data['id'] + project_copy = type(project).objects.get(pk=project_copy_pk) + assert project_copy.created_by == alice + assert project_copy.name == 'copied project' + assert project_copy.organization == organization + assert project_copy.credential == scm_credential + + +@pytest.mark.django_db +def test_inventory_copy(inventory, group_factory, post, get, alice, organization): + group_1_1 = group_factory('g_1_1') + group_2_1 = group_factory('g_2_1') + group_2_2 = group_factory('g_2_2') + group_2_1.parents.add(group_1_1) + group_2_2.parents.add(group_1_1) + group_2_2.parents.add(group_2_1) + host = group_1_1.hosts.create(name='host', inventory=inventory) + group_2_1.hosts.add(host) + inventory.admin_role.members.add(alice) + assert get( + reverse('api:inventory_copy', kwargs={'pk': 
inventory.pk}), alice, expect=200 + ).data['can_copy'] is False + inventory.organization.admin_role.members.add(alice) + assert get( + reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), alice, expect=200 + ).data['can_copy'] is True + with mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock: + inv_copy_pk = post( + reverse('api:inventory_copy', kwargs={'pk': inventory.pk}), + {'name': 'new inv name'}, alice, expect=201 + ).data['id'] + inventory_copy = type(inventory).objects.get(pk=inv_copy_pk) + args, kwargs = deep_copy_mock.call_args + deep_copy_model_obj(*args, **kwargs) + group_1_1_copy = inventory_copy.groups.get(name='g_1_1') + group_2_1_copy = inventory_copy.groups.get(name='g_2_1') + group_2_2_copy = inventory_copy.groups.get(name='g_2_2') + host_copy = inventory_copy.hosts.get(name='host') + assert inventory_copy.organization == organization + assert inventory_copy.created_by == alice + assert inventory_copy.name == 'new inv name' + assert set(group_1_1_copy.parents.all()) == set() + assert set(group_2_1_copy.parents.all()) == set([group_1_1_copy]) + assert set(group_2_2_copy.parents.all()) == set([group_1_1_copy, group_2_1_copy]) + assert set(group_1_1_copy.hosts.all()) == set([host_copy]) + assert set(group_2_1_copy.hosts.all()) == set([host_copy]) + assert set(group_2_2_copy.hosts.all()) == set() + + +@pytest.mark.django_db +def test_workflow_job_template_copy(workflow_job_template, post, get, admin, organization): + workflow_job_template.organization = organization + workflow_job_template.save() + jts = [JobTemplate.objects.create(name='test-jt-{}'.format(i)) for i in range(0, 5)] + nodes = [ + WorkflowJobTemplateNode.objects.create( + workflow_job_template=workflow_job_template, unified_job_template=jts[i] + ) for i in range(0, 5) + ] + nodes[0].success_nodes.add(nodes[1]) + nodes[1].success_nodes.add(nodes[2]) + nodes[0].failure_nodes.add(nodes[3]) + nodes[3].failure_nodes.add(nodes[4]) + with 
mock.patch('awx.api.generics.trigger_delayed_deep_copy') as deep_copy_mock: + wfjt_copy_id = post( + reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk}), + {'name': 'new wfjt name'}, admin, expect=201 + ).data['id'] + wfjt_copy = type(workflow_job_template).objects.get(pk=wfjt_copy_id) + args, kwargs = deep_copy_mock.call_args + deep_copy_model_obj(*args, **kwargs) + assert wfjt_copy.organization == organization + assert wfjt_copy.created_by == admin + assert wfjt_copy.name == 'new wfjt name' + copied_node_list = [x for x in wfjt_copy.workflow_job_template_nodes.all()] + copied_node_list.sort(key=lambda x: int(x.unified_job_template.name[-1])) + for node, success_count, failure_count, always_count in zip( + copied_node_list, + [1, 1, 0, 0, 0], + [1, 0, 0, 1, 0], + [0, 0, 0, 0, 0] + ): + assert node.success_nodes.count() == success_count + assert node.failure_nodes.count() == failure_count + assert node.always_nodes.count() == always_count + assert copied_node_list[1] in copied_node_list[0].success_nodes.all() + assert copied_node_list[2] in copied_node_list[1].success_nodes.all() + assert copied_node_list[3] in copied_node_list[0].failure_nodes.all() + assert copied_node_list[4] in copied_node_list[3].failure_nodes.all() + + +@pytest.mark.django_db +def test_credential_copy(post, get, machine_credential, credentialtype_ssh, admin): + assert get( + reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), admin, expect=200 + ).data['can_copy'] is True + credential_copy_pk = post( + reverse('api:credential_copy', kwargs={'pk': machine_credential.pk}), + {'name': 'copied credential'}, admin, expect=201 + ).data['id'] + credential_copy = type(machine_credential).objects.get(pk=credential_copy_pk) + assert credential_copy.created_by == admin + assert credential_copy.name == 'copied credential' + assert credential_copy.credential_type == credentialtype_ssh + assert credential_copy.inputs['username'] == 
machine_credential.inputs['username'] + assert (decrypt_field(credential_copy, 'password') == + decrypt_field(machine_credential, 'password')) + + +@pytest.mark.django_db +def test_notification_template_copy(post, get, notification_template_with_encrypt, + organization, alice): + #notification_template_with_encrypt.admin_role.members.add(alice) + assert get( + reverse( + 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk} + ), alice, expect=200 + ).data['can_copy'] is False + notification_template_with_encrypt.organization.admin_role.members.add(alice) + assert get( + reverse( + 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk} + ), alice, expect=200 + ).data['can_copy'] is True + nt_copy_pk = post( + reverse( + 'api:notification_template_copy', kwargs={'pk': notification_template_with_encrypt.pk} + ), {'name': 'copied nt'}, alice, expect=201 + ).data['id'] + notification_template_copy = type(notification_template_with_encrypt).objects.get(pk=nt_copy_pk) + assert notification_template_copy.created_by == alice + assert notification_template_copy.name == 'copied nt' + assert notification_template_copy.organization == organization + assert (decrypt_field(notification_template_with_encrypt, 'notification_configuration', 'token') == + decrypt_field(notification_template_copy, 'notification_configuration', 'token')) + + +@pytest.mark.django_db +def test_inventory_script_copy(post, get, inventory_script, organization, alice): + assert get( + reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200 + ).data['can_copy'] is False + inventory_script.organization.admin_role.members.add(alice) + assert get( + reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), alice, expect=200 + ).data['can_copy'] is True + is_copy_pk = post( + reverse('api:inventory_script_copy', kwargs={'pk': inventory_script.pk}), + {'name': 'copied inv script'}, alice, expect=201 
+ ).data['id'] + inventory_script_copy = type(inventory_script).objects.get(pk=is_copy_pk) + assert inventory_script_copy.created_by == alice + assert inventory_script_copy.name == 'copied inv script' + assert inventory_script_copy.organization == organization diff --git a/awx/main/tests/functional/test_credential.py b/awx/main/tests/functional/test_credential.py index 1fe909c092..37609cd222 100644 --- a/awx/main/tests/functional/test_credential.py +++ b/awx/main/tests/functional/test_credential.py @@ -107,8 +107,11 @@ def test_cred_type_input_schema_validity(input_, valid): ({}, True), ({'invalid-injector': {}}, False), ({'file': 123}, False), - ({'file': {}}, False), + ({'file': {}}, True), ({'file': {'template': '{{username}}'}}, True), + ({'file': {'template.username': '{{username}}'}}, True), + ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True), + ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False), ({'file': {'foo': 'bar'}}, False), ({'env': 123}, False), ({'env': {}}, True), diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py index 3454371037..11484dfc6e 100644 --- a/awx/main/tests/functional/test_instances.py +++ b/awx/main/tests/functional/test_instances.py @@ -1,7 +1,8 @@ import pytest +import mock -from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate -from awx.main.models import Instance +from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate, Instance +from awx.main.tasks import apply_cluster_membership_policies from awx.api.versioning import reverse @@ -30,6 +31,130 @@ def test_instance_dup(org_admin, organization, project, instance_factory, instan assert api_num_instances_oa == (actual_num_instances - 1) +@pytest.mark.django_db +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None) +def test_policy_instance_few_instances(mock, 
instance_factory, instance_group_factory): + i1 = instance_factory("i1") + ig_1 = instance_group_factory("ig1", percentage=25) + ig_2 = instance_group_factory("ig2", percentage=25) + ig_3 = instance_group_factory("ig3", percentage=25) + ig_4 = instance_group_factory("ig4", percentage=25) + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 1 + assert i1 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i1 in ig_2.instances.all() + assert len(ig_3.instances.all()) == 1 + assert i1 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i1 in ig_4.instances.all() + i2 = instance_factory("i2") + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 1 + assert i1 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i2 in ig_2.instances.all() + assert len(ig_3.instances.all()) == 1 + assert i1 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i2 in ig_4.instances.all() + + +@pytest.mark.django_db +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None) +def test_policy_instance_distribution_uneven(mock, instance_factory, instance_group_factory): + i1 = instance_factory("i1") + i2 = instance_factory("i2") + i3 = instance_factory("i3") + ig_1 = instance_group_factory("ig1", percentage=25) + ig_2 = instance_group_factory("ig2", percentage=25) + ig_3 = instance_group_factory("ig3", percentage=25) + ig_4 = instance_group_factory("ig4", percentage=25) + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 1 + assert i1 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i2 in ig_2.instances.all() + assert len(ig_3.instances.all()) == 1 + assert i3 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i1 in ig_4.instances.all() + + +@pytest.mark.django_db +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None) +def 
test_policy_instance_distribution_even(mock, instance_factory, instance_group_factory): + i1 = instance_factory("i1") + i2 = instance_factory("i2") + i3 = instance_factory("i3") + i4 = instance_factory("i4") + ig_1 = instance_group_factory("ig1", percentage=25) + ig_2 = instance_group_factory("ig2", percentage=25) + ig_3 = instance_group_factory("ig3", percentage=25) + ig_4 = instance_group_factory("ig4", percentage=25) + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 1 + assert i1 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i2 in ig_2.instances.all() + assert len(ig_3.instances.all()) == 1 + assert i3 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i4 in ig_4.instances.all() + ig_1.policy_instance_minimum = 2 + ig_1.save() + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 2 + assert i1 in ig_1.instances.all() + assert i2 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i3 in ig_2.instances.all() + assert len(ig_3.instances.all()) == 1 + assert i4 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i1 in ig_4.instances.all() + + +@pytest.mark.django_db +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None) +def test_policy_instance_distribution_simultaneous(mock, instance_factory, instance_group_factory): + i1 = instance_factory("i1") + i2 = instance_factory("i2") + i3 = instance_factory("i3") + i4 = instance_factory("i4") + ig_1 = instance_group_factory("ig1", percentage=25, minimum=2) + ig_2 = instance_group_factory("ig2", percentage=25) + ig_3 = instance_group_factory("ig3", percentage=25) + ig_4 = instance_group_factory("ig4", percentage=25) + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 2 + assert i1 in ig_1.instances.all() + assert i2 in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i3 in ig_2.instances.all() + assert 
len(ig_3.instances.all()) == 1 + assert i4 in ig_3.instances.all() + assert len(ig_4.instances.all()) == 1 + assert i1 in ig_4.instances.all() + + +@pytest.mark.django_db +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None) +def test_policy_instance_list_manually_managed(mock, instance_factory, instance_group_factory): + i1 = instance_factory("i1") + i2 = instance_factory("i2") + ig_1 = instance_group_factory("ig1", percentage=100, minimum=2) + ig_2 = instance_group_factory("ig2") + ig_2.policy_instance_list = [i2.hostname] + ig_2.save() + apply_cluster_membership_policies() + assert len(ig_1.instances.all()) == 1 + assert i1 in ig_1.instances.all() + assert i2 not in ig_1.instances.all() + assert len(ig_2.instances.all()) == 1 + assert i2 in ig_2.instances.all() + + @pytest.mark.django_db def test_basic_instance_group_membership(instance_group_factory, default_instance_group, job_factory): j = job_factory() diff --git a/awx/main/tests/functional/test_jobs.py b/awx/main/tests/functional/test_jobs.py index 6500963fc1..aa95574b36 100644 --- a/awx/main/tests/functional/test_jobs.py +++ b/awx/main/tests/functional/test_jobs.py @@ -1,12 +1,11 @@ -from awx.main.models import ( - Job, - Instance -) -from django.test.utils import override_settings import pytest - +import mock import json +from awx.main.models import Job, Instance +from awx.main.tasks import cluster_node_heartbeat +from django.test.utils import override_settings + @pytest.mark.django_db def test_orphan_unified_job_creation(instance, inventory): @@ -20,13 +19,19 @@ def test_orphan_unified_job_creation(instance, inventory): @pytest.mark.django_db +@mock.patch('awx.main.utils.common.get_cpu_capacity', lambda: (2,8)) +@mock.patch('awx.main.utils.common.get_mem_capacity', lambda: (8000,62)) +@mock.patch('awx.main.tasks.handle_ha_toplogy_changes.apply_async', lambda: True) def test_job_capacity_and_with_inactive_node(): - Instance.objects.create(hostname='test-1', capacity=50) - assert 
Instance.objects.total_capacity() == 50 - Instance.objects.create(hostname='test-2', capacity=50) - assert Instance.objects.total_capacity() == 100 - with override_settings(AWX_ACTIVE_NODE_TIME=0): - assert Instance.objects.total_capacity() < 100 + i = Instance.objects.create(hostname='test-1') + i.refresh_capacity() + assert i.capacity == 62 + i.enabled = False + i.save() + with override_settings(CLUSTER_HOST_ID=i.hostname): + cluster_node_heartbeat() + i = Instance.objects.get(id=i.id) + assert i.capacity == 0 @pytest.mark.django_db diff --git a/awx/main/tests/functional/test_notifications.py b/awx/main/tests/functional/test_notifications.py index fe350fd988..9e659b0adc 100644 --- a/awx/main/tests/functional/test_notifications.py +++ b/awx/main/tests/functional/test_notifications.py @@ -5,6 +5,8 @@ from requests.adapters import HTTPAdapter from requests.utils import select_proxy from requests.exceptions import ConnectionError +from six.moves import xrange + from awx.api.versioning import reverse from awx.main.models.notifications import NotificationTemplate, Notification from awx.main.models.inventory import Inventory, InventorySource diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py index e3062ef7e2..3c1529cba1 100644 --- a/awx/main/tests/unit/api/serializers/test_job_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -3,6 +3,8 @@ import pytest import mock import json +from six.moves import xrange + # AWX from awx.api.serializers import ( JobSerializer, diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py index ce2cba53e3..29c0512256 100644 --- a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -2,6 +2,8 @@ import pytest import mock +from six.moves 
import xrange + # AWX from awx.api.serializers import ( JobTemplateSerializer, diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py index add4ebac77..e2e6cde794 100644 --- a/awx/main/tests/unit/api/test_views.py +++ b/awx/main/tests/unit/api/test_views.py @@ -130,7 +130,7 @@ class TestHostInsights(): @pytest.mark.parametrize("status_code, exception, error, message", [ (502, requests.exceptions.SSLError, 'SSLError while trying to connect to https://myexample.com/whocares/me/', None,), (504, requests.exceptions.Timeout, 'Request to https://myexample.com/whocares/me/ timed out.', None,), - (502, requests.exceptions.RequestException, 'booo!', 'Unkown exception booo! while trying to GET https://myexample.com/whocares/me/'), + (502, requests.exceptions.RequestException, 'booo!', 'Unknown exception booo! while trying to GET https://myexample.com/whocares/me/'), ]) def test_get_insights_request_exception(self, patch_parent, mocker, status_code, exception, error, message): view = HostInsights() diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 2b24c18325..d83dea71dd 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -29,6 +29,7 @@ from awx.main.models import ( ProjectUpdate, UnifiedJob, User, + Organization, build_safe_env ) @@ -204,7 +205,6 @@ class TestJobExecution: mock.patch.object(Project, 'get_project_path', lambda *a, **kw: self.project_path), # don't emit websocket statuses; they use the DB and complicate testing mock.patch.object(UnifiedJob, 'websocket_emit_status', mock.Mock()), - mock.patch.object(Job, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH), mock.patch('awx.main.expect.run.run_pexpect', self.run_pexpect), ] for cls in (Job, AdHocCommand): @@ -267,6 +267,8 @@ class TestJobExecution: self.patches.append(patch) patch.start() + job.project = Project(organization=Organization()) + return job @property @@ -353,11 +355,9 @@ class 
TestGenericRun(TestJobExecution): def test_valid_custom_virtualenv(self): with TemporaryDirectory(dir=settings.BASE_VENV_PATH) as tempdir: + self.instance.project.custom_virtualenv = tempdir os.makedirs(os.path.join(tempdir, 'lib')) os.makedirs(os.path.join(tempdir, 'bin', 'activate')) - venv_patch = mock.patch.object(Job, 'ansible_virtualenv_path', tempdir) - self.patches.append(venv_patch) - venv_patch.start() self.task.run(self.pk) @@ -371,14 +371,11 @@ class TestGenericRun(TestJobExecution): assert '--ro-bind {} {}'.format(path, path) in ' '.join(args) def test_invalid_custom_virtualenv(self): - venv_patch = mock.patch.object(Job, 'ansible_virtualenv_path', '/venv/missing') - self.patches.append(venv_patch) - venv_patch.start() - with pytest.raises(Exception): + self.instance.project.custom_virtualenv = '/venv/missing' self.task.run(self.pk) - tb = self.task.update_model.call_args[-1]['result_traceback'] - assert 'a valid Python virtualenv does not exist at /venv/missing' in tb + tb = self.task.update_model.call_args[-1]['result_traceback'] + assert 'a valid Python virtualenv does not exist at /venv/missing' in tb class TestAdhocRun(TestJobExecution): @@ -1230,6 +1227,50 @@ class TestJobCredentials(TestJobExecution): self.run_pexpect.side_effect = run_pexpect_side_effect self.task.run(self.pk) + def test_custom_environment_injectors_with_files(self): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed_by_tower=False, + inputs={ + 'fields': [{ + 'id': 'cert', + 'label': 'Certificate', + 'type': 'string' + }, { + 'id': 'key', + 'label': 'Key', + 'type': 'string' + }] + }, + injectors={ + 'file': { + 'template.cert': '[mycert]\n{{cert}}', + 'template.key': '[mykey]\n{{key}}' + }, + 'env': { + 'MY_CERT_INI_FILE': '{{tower.filename.cert}}', + 'MY_KEY_INI_FILE': '{{tower.filename.key}}' + } + } + ) + credential = Credential( + pk=1, + credential_type=some_cloud, + inputs = {'cert': 'CERT123', 'key': 'KEY123'} + ) + 
self.instance.credentials.add(credential) + self.task.run(self.pk) + + def run_pexpect_side_effect(*args, **kwargs): + args, cwd, env, stdout = args + assert open(env['MY_CERT_INI_FILE'], 'rb').read() == '[mycert]\nCERT123' + assert open(env['MY_KEY_INI_FILE'], 'rb').read() == '[mykey]\nKEY123' + return ['successful', 0] + + self.run_pexpect.side_effect = run_pexpect_side_effect + self.task.run(self.pk) + def test_multi_cloud(self): gce = CredentialType.defaults['gce']() gce_credential = Credential( @@ -1716,6 +1757,8 @@ class TestInventoryUpdateCredentials(TestJobExecution): self.instance.credential, 'password' ) + self.instance.source_vars = '{"prefer_ipv4": True}' + def run_pexpect_side_effect(*args, **kwargs): args, cwd, env, stdout = args config = ConfigParser.ConfigParser() @@ -1724,6 +1767,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert config.get('cloudforms', 'username') == 'bob' assert config.get('cloudforms', 'password') == 'secret' assert config.get('cloudforms', 'ssl_verify') == 'false' + assert config.get('cloudforms', 'prefer_ipv4') == 'True' cache_path = config.get('cache', 'path') assert cache_path.startswith(env['AWX_PRIVATE_DATA_DIR']) diff --git a/awx/main/tests/unit/utils/test_event_filter.py b/awx/main/tests/unit/utils/test_event_filter.py index 4db92c91ef..85ecc609d0 100644 --- a/awx/main/tests/unit/utils/test_event_filter.py +++ b/awx/main/tests/unit/utils/test_event_filter.py @@ -3,6 +3,8 @@ import base64 import json from StringIO import StringIO +from six.moves import xrange + from awx.main.utils import OutputEventFilter MAX_WIDTH = 78 diff --git a/awx/main/tests/unit/utils/test_ha.py b/awx/main/tests/unit/utils/test_ha.py new file mode 100644 index 0000000000..3dd9adfc35 --- /dev/null +++ b/awx/main/tests/unit/utils/test_ha.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2017 Ansible Tower by Red Hat +# All Rights Reserved. 
+ +# python +import pytest +import mock + +# AWX +from awx.main.utils.ha import ( + _add_remove_celery_worker_queues, + update_celery_worker_routes, +) + + +@pytest.fixture +def conf(): + class Conf(): + CELERY_TASK_ROUTES = dict() + CELERYBEAT_SCHEDULE = dict() + return Conf() + + +class TestAddRemoveCeleryWorkerQueues(): + @pytest.fixture + def instance_generator(self, mocker): + def fn(groups=['east', 'west', 'north', 'south'], hostname='east-1'): + instance = mocker.MagicMock() + instance.hostname = hostname + instance.rampart_groups = mocker.MagicMock() + instance.rampart_groups.values_list = mocker.MagicMock(return_value=groups) + + return instance + return fn + + @pytest.fixture + def worker_queues_generator(self, mocker): + def fn(queues=['east', 'west']): + return [dict(name=n, alias='') for n in queues] + return fn + + @pytest.fixture + def mock_app(self, mocker): + app = mocker.MagicMock() + app.control = mocker.MagicMock() + app.control.cancel_consumer = mocker.MagicMock() + return app + + @pytest.mark.parametrize("static_queues,_worker_queues,groups,hostname,added_expected,removed_expected", [ + (['east', 'west'], ['east', 'west', 'east-1'], [], 'east-1', [], []), + ([], ['east', 'west', 'east-1'], ['east', 'west'], 'east-1', [], []), + ([], ['east', 'west'], ['east', 'west'], 'east-1', ['east-1'], []), + ([], [], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], []), + ([], ['china', 'russia'], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], ['china', 'russia']), + ]) + def test__add_remove_celery_worker_queues_noop(self, mock_app, + instance_generator, + worker_queues_generator, + static_queues, _worker_queues, + groups, hostname, + added_expected, removed_expected): + added_expected.append('tower_instance_router') + instance = instance_generator(groups=groups, hostname=hostname) + worker_queues = worker_queues_generator(_worker_queues) + with mock.patch('awx.main.utils.ha.settings.AWX_CELERY_QUEUES_STATIC', static_queues): + 
(added_queues, removed_queues) = _add_remove_celery_worker_queues(mock_app, instance, worker_queues, hostname) + assert set(added_queues) == set(added_expected) + assert set(removed_queues) == set(removed_expected) + + +class TestUpdateCeleryWorkerRoutes(): + + @pytest.mark.parametrize("is_controller,expected_routes", [ + (False, { + 'awx.main.tasks.cluster_node_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, + 'awx.main.tasks.purge_old_stdout_files': {'queue': 'east-1', 'routing_key': 'east-1'} + }), + (True, { + 'awx.main.tasks.cluster_node_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, + 'awx.main.tasks.purge_old_stdout_files': {'queue': 'east-1', 'routing_key': 'east-1'}, + 'awx.main.tasks.awx_isolated_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, + }), + ]) + def test_update_celery_worker_routes(self, mocker, conf, is_controller, expected_routes): + instance = mocker.MagicMock() + instance.hostname = 'east-1' + instance.is_controller = mocker.MagicMock(return_value=is_controller) + + assert update_celery_worker_routes(instance, conf) == expected_routes + assert conf.CELERY_TASK_ROUTES == expected_routes + + def test_update_celery_worker_routes_deleted(self, mocker, conf): + instance = mocker.MagicMock() + instance.hostname = 'east-1' + instance.is_controller = mocker.MagicMock(return_value=False) + conf.CELERY_TASK_ROUTES = {'awx.main.tasks.awx_isolated_heartbeat': 'foobar'} + + update_celery_worker_routes(instance, conf) + assert 'awx.main.tasks.awx_isolated_heartbeat' not in conf.CELERY_TASK_ROUTES + diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 443da1f1f3..c32821169a 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -20,6 +20,8 @@ import six import psutil from StringIO import StringIO +from decimal import Decimal + # Decorator from decorator import decorator @@ -45,7 +47,7 @@ __all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 
'ignore_inventory_computed_fields', 'ignore_inventory_group_removal', '_inventory_updates', 'get_pk_from_dict', 'getattrd', 'NoDefaultProvided', 'get_current_apps', 'set_current_apps', 'OutputEventFilter', - 'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', + 'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity', 'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict', 'model_instance_diff', 'timestamp_apiformat', 'parse_yaml_or_json', 'RequireDebugTrueOrTest', 'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices'] @@ -632,19 +634,52 @@ def parse_yaml_or_json(vars_str, silent_failure=True): return vars_dict -@memoize() -def get_system_task_capacity(): +def get_cpu_capacity(): + from django.conf import settings + settings_forkcpu = getattr(settings, 'SYSTEM_TASK_FORKS_CPU', None) + env_forkcpu = os.getenv('SYSTEM_TASK_FORKS_CPU', None) + cpu = psutil.cpu_count() + + if env_forkcpu: + forkcpu = int(env_forkcpu) + elif settings_forkcpu: + forkcpu = int(settings_forkcpu) + else: + forkcpu = 4 + return (cpu, cpu * forkcpu) + + +def get_mem_capacity(): + from django.conf import settings + settings_forkmem = getattr(settings, 'SYSTEM_TASK_FORKS_MEM', None) + env_forkmem = os.getenv('SYSTEM_TASK_FORKS_MEM', None) + if env_forkmem: + forkmem = int(env_forkmem) + elif settings_forkmem: + forkmem = int(settings_forkmem) + else: + forkmem = 100 + + mem = psutil.virtual_memory().total + return (mem, max(1, ((mem / 1024 / 1024) - 2048) / forkmem)) + + +def get_system_task_capacity(scale=Decimal(1.0)): ''' Measure system memory and use it as a baseline for determining the system's capacity ''' from django.conf import settings - if hasattr(settings, 'SYSTEM_TASK_CAPACITY'): - return settings.SYSTEM_TASK_CAPACITY - mem = psutil.virtual_memory() - total_mem_value = mem.total / 1024 / 1024 - if total_mem_value <= 2048: - return 50 
- return 50 + ((total_mem_value / 1024) - 2) * 75 + settings_forks = getattr(settings, 'SYSTEM_TASK_FORKS_CAPACITY', None) + env_forks = os.getenv('SYSTEM_TASK_FORKS_CAPACITY', None) + + if env_forks: + return int(env_forks) + elif settings_forks: + return int(settings_forks) + + _, cpu_cap = get_cpu_capacity() + _, mem_cap = get_mem_capacity() + return min(mem_cap, cpu_cap) + ((max(mem_cap, cpu_cap) - min(mem_cap, cpu_cap)) * scale) _inventory_updates = threading.local() @@ -731,7 +766,7 @@ def wrap_args_with_proot(args, cwd, **kwargs): ''' from django.conf import settings cwd = os.path.realpath(cwd) - new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/'] + new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc'] hide_paths = [settings.AWX_PROOT_BASE_PATH] if not kwargs.get('isolated'): hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', diff --git a/awx/main/utils/ha.py b/awx/main/utils/ha.py new file mode 100644 index 0000000000..bb3a0a73cc --- /dev/null +++ b/awx/main/utils/ha.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2017 Ansible Tower by Red Hat +# All Rights Reserved. 
+ +# Django +from django.conf import settings + +# AWX +from awx.main.models import Instance + + +def _add_remove_celery_worker_queues(app, instance, worker_queues, worker_name): + removed_queues = [] + added_queues = [] + ig_names = set(instance.rampart_groups.values_list('name', flat=True)) + ig_names.add("tower_instance_router") + worker_queue_names = set([q['name'] for q in worker_queues]) + + + # Remove queues that aren't in the instance group + for queue in worker_queues: + if queue['name'] in settings.AWX_CELERY_QUEUES_STATIC or \ + queue['alias'] in settings.AWX_CELERY_QUEUES_STATIC: + continue + + if queue['name'] not in ig_names | set([instance.hostname]) or not instance.enabled: + app.control.cancel_consumer(queue['name'], reply=True, destination=[worker_name]) + removed_queues.append(queue['name']) + + # Add queues for instance and instance groups + for queue_name in ig_names | set([instance.hostname]): + if queue_name not in worker_queue_names: + app.control.add_consumer(queue_name, reply=True, destination=[worker_name]) + added_queues.append(queue_name) + + return (added_queues, removed_queues) + + +def update_celery_worker_routes(instance, conf): + tasks = [ + 'awx.main.tasks.cluster_node_heartbeat', + 'awx.main.tasks.purge_old_stdout_files', + ] + routes_updated = {} + # Instance is, effectively, a controller node + if instance.is_controller(): + tasks.append('awx.main.tasks.awx_isolated_heartbeat') + else: + if 'awx.main.tasks.awx_isolated_heartbeat' in conf.CELERY_TASK_ROUTES: + del conf.CELERY_TASK_ROUTES['awx.main.tasks.awx_isolated_heartbeat'] + + for t in tasks: + conf.CELERY_TASK_ROUTES[t] = {'queue': instance.hostname, 'routing_key': instance.hostname} + routes_updated[t] = conf.CELERY_TASK_ROUTES[t] + + return routes_updated + + +def register_celery_worker_queues(app, celery_worker_name): + instance = Instance.objects.me() + added_queues = [] + removed_queues = [] + + celery_host_queues = 
app.control.inspect([celery_worker_name]).active_queues() + + celery_worker_queues = celery_host_queues[celery_worker_name] if celery_host_queues else [] + (added_queues, removed_queues) = _add_remove_celery_worker_queues(app, instance, celery_worker_queues, celery_worker_name) + + return (instance, removed_queues, added_queues) + diff --git a/awx/plugins/inventory/cloudforms.ini.example b/awx/plugins/inventory/cloudforms.ini.example index dc055c1fb7..30b9aa609e 100644 --- a/awx/plugins/inventory/cloudforms.ini.example +++ b/awx/plugins/inventory/cloudforms.ini.example @@ -31,6 +31,9 @@ nest_tags = False # Note: This suffix *must* include the leading '.' as it is appended to the hostname as is # suffix = .example.org +# If true, will try and use an IPv4 address for the ansible_ssh_host rather than just the first IP address in the list +prefer_ipv4 = False + [cache] # Maximum time to trust the cache in seconds diff --git a/awx/plugins/inventory/cloudforms.py b/awx/plugins/inventory/cloudforms.py index 25b8d23159..0cdefc93c9 100755 --- a/awx/plugins/inventory/cloudforms.py +++ b/awx/plugins/inventory/cloudforms.py @@ -182,6 +182,11 @@ class CloudFormsInventory(object): else: self.cloudforms_suffix = None + if config.has_option('cloudforms', 'prefer_ipv4'): + self.cloudforms_prefer_ipv4 = config.getboolean('cloudforms', 'prefer_ipv4') + else: + self.cloudforms_prefer_ipv4 = False + # Ansible related try: group_patterns = config.get('ansible', 'group_patterns') @@ -362,7 +367,15 @@ class CloudFormsInventory(object): # Set ansible_ssh_host to the first available ip address if 'ipaddresses' in host and host['ipaddresses'] and isinstance(host['ipaddresses'], list): - host['ansible_ssh_host'] = host['ipaddresses'][0] + # If no preference for IPv4, just use the first entry + if not self.cloudforms_prefer_ipv4: + host['ansible_ssh_host'] = host['ipaddresses'][0] + else: + # Before we search for an IPv4 address, set using the first entry in case we don't find any + 
host['ansible_ssh_host'] = host['ipaddresses'][0] + for currenthost in host['ipaddresses']: + if '.' in currenthost: + host['ansible_ssh_host'] = currenthost # Create additional groups for key in ('location', 'type', 'vendor'): diff --git a/awx/plugins/inventory/tower.py b/awx/plugins/inventory/tower.py index 63ade6b24b..a8685ea124 100755 --- a/awx/plugins/inventory/tower.py +++ b/awx/plugins/inventory/tower.py @@ -69,7 +69,7 @@ def parse_configuration(): errors.append("Missing TOWER_INVENTORY in environment") if errors: raise RuntimeError("\n".join(errors)) - + return dict(tower_host=host_name, tower_user=username, tower_pass=password, @@ -103,9 +103,9 @@ def read_tower_inventory(tower_host, tower_user, tower_pass, inventory, license_ return response.json() json_reason = response.json() reason = json_reason.get('detail', 'Retrieving Tower Inventory Failed') - except requests.ConnectionError, e: + except requests.ConnectionError as e: reason = "Connection to remote host failed: {}".format(e) - except json.JSONDecodeError, e: + except json.JSONDecodeError as e: reason = "Failed to parse json from host: {}".format(e) raise RuntimeError(reason) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 2bcb8ee3d2..a0618c238d 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -218,6 +218,7 @@ TEMPLATES = [ ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader',), )], + 'builtins': ['awx.main.templatetags.swagger'], }, 'DIRS': [ os.path.join(BASE_DIR, 'templates'), @@ -392,6 +393,18 @@ EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' EMAIL_USE_TLS = False +# The number of seconds to sleep between status checks for jobs running on isolated nodes +AWX_ISOLATED_CHECK_INTERVAL = 30 + +# The timeout (in seconds) for launching jobs on isolated nodes +AWX_ISOLATED_LAUNCH_TIMEOUT = 600 + +# Ansible connection timeout (in seconds) for communicating with isolated instances +AWX_ISOLATED_CONNECTION_TIMEOUT = 10 
+ +# The time (in seconds) between the periodic isolated heartbeat status check +AWX_ISOLATED_PERIODIC_CHECK = 600 + # Memcached django cache configuration # CACHES = { # 'default': { @@ -420,6 +433,7 @@ DEVSERVER_DEFAULT_PORT = '8013' # Set default ports for live server tests. os.environ.setdefault('DJANGO_LIVE_TEST_SERVER_ADDRESS', 'localhost:9013-9199') +BROKER_POOL_LIMIT = None CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//' CELERY_EVENT_QUEUE_TTL = 5 CELERY_TASK_DEFAULT_QUEUE = 'tower' @@ -435,18 +449,10 @@ CELERY_BEAT_MAX_LOOP_INTERVAL = 60 CELERY_RESULT_BACKEND = 'django-db' CELERY_IMPORTS = ('awx.main.scheduler.tasks',) CELERY_TASK_QUEUES = ( - Queue('default', Exchange('default'), routing_key='default'), Queue('tower', Exchange('tower'), routing_key='tower'), - Queue('tower_scheduler', Exchange('scheduler', type='topic'), routing_key='tower_scheduler.job.#', durable=False), Broadcast('tower_broadcast_all') ) -CELERY_TASK_ROUTES = { - 'awx.main.scheduler.tasks.run_task_manager': {'queue': 'tower', 'routing_key': 'tower'}, - 'awx.main.scheduler.tasks.run_job_launch': {'queue': 'tower_scheduler', 'routing_key': 'tower_scheduler.job.launch'}, - 'awx.main.scheduler.tasks.run_job_complete': {'queue': 'tower_scheduler', 'routing_key': 'tower_scheduler.job.complete'}, - 'awx.main.tasks.cluster_node_heartbeat': {'queue': 'default', 'routing_key': 'cluster.heartbeat'}, - 'awx.main.tasks.purge_old_stdout_files': {'queue': 'default', 'routing_key': 'cluster.heartbeat'}, -} +CELERY_TASK_ROUTES = {} CELERY_BEAT_SCHEDULE = { 'tower_scheduler': { @@ -474,11 +480,21 @@ CELERY_BEAT_SCHEDULE = { 'task_manager': { 'task': 'awx.main.scheduler.tasks.run_task_manager', 'schedule': timedelta(seconds=20), - 'options': {'expires': 20,} + 'options': {'expires': 20} }, + 'isolated_heartbeat': { + 'task': 'awx.main.tasks.awx_isolated_heartbeat', + 'schedule': timedelta(seconds=AWX_ISOLATED_PERIODIC_CHECK), + 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, + } } 
AWX_INCONSISTENT_TASK_INTERVAL = 60 * 3 +# Celery queues that will always be listened to by celery workers +# Note: Broadcast queues have unique, auto-generated names, with the alias +# property value of the original queue name. +AWX_CELERY_QUEUES_STATIC = ['tower_broadcast_all',] + # Django Caching Configuration if is_testing(): CACHES = { @@ -624,20 +640,8 @@ AWX_PROOT_BASE_PATH = "/tmp" # Note: This setting may be overridden by database settings. AWX_ANSIBLE_CALLBACK_PLUGINS = "" -# Time at which an HA node is considered active -AWX_ACTIVE_NODE_TIME = 7200 - -# The number of seconds to sleep between status checks for jobs running on isolated nodes -AWX_ISOLATED_CHECK_INTERVAL = 30 - -# The timeout (in seconds) for launching jobs on isolated nodes -AWX_ISOLATED_LAUNCH_TIMEOUT = 600 - -# Ansible connection timeout (in seconds) for communicating with isolated instances -AWX_ISOLATED_CONNECTION_TIMEOUT = 10 - -# The time (in seconds) between the periodic isolated heartbeat status check -AWX_ISOLATED_PERIODIC_CHECK = 600 +# Automatically remove nodes that have missed their heartbeats after some time +AWX_AUTO_DEPROVISION_INSTANCES = False # Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed' # Note: This setting may be overridden by database settings. 
@@ -1118,9 +1122,11 @@ LOGGING = { }, 'awx.main.tasks': { 'handlers': ['task_system'], + 'propagate': False }, 'awx.main.scheduler': { 'handlers': ['task_system'], + 'propagate': False }, 'awx.main.consumers': { 'handlers': ['null'] diff --git a/awx/settings/development.py b/awx/settings/development.py index 682cf21dd8..36fc290d6d 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -101,6 +101,8 @@ if 'django_jenkins' in INSTALLED_APPS: PEP8_RCFILE = "setup.cfg" PYLINT_RCFILE = ".pylintrc" +INSTALLED_APPS += ('rest_framework_swagger',) + # Much faster than the default # https://docs.djangoproject.com/en/1.6/topics/auth/passwords/#how-django-stores-passwords PASSWORD_HASHERS = ( @@ -138,15 +140,6 @@ except ImportError: sys.exit(1) CLUSTER_HOST_ID = socket.gethostname() -CELERY_TASK_ROUTES['awx.main.tasks.cluster_node_heartbeat'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID} -# Production only runs this schedule on controlling nodes -# but development will just run it on all nodes -CELERY_TASK_ROUTES['awx.main.tasks.awx_isolated_heartbeat'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID} -CELERY_BEAT_SCHEDULE['isolated_heartbeat'] = { - 'task': 'awx.main.tasks.awx_isolated_heartbeat', - 'schedule': timedelta(seconds = AWX_ISOLATED_PERIODIC_CHECK), - 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2,} -} # Supervisor service name dictionary used for programatic restart SERVICE_NAME_DICT = { diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose index ff0048e102..84592c21bd 100644 --- a/awx/settings/local_settings.py.docker_compose +++ b/awx/settings/local_settings.py.docker_compose @@ -198,6 +198,27 @@ LOGGING['handlers']['syslog'] = { 'formatter': 'simple', } +LOGGING['loggers']['django.request']['handlers'] = ['console'] +LOGGING['loggers']['rest_framework.request']['handlers'] = ['console'] +LOGGING['loggers']['awx']['handlers'] = ['console'] 
+LOGGING['loggers']['awx.main.commands.run_callback_receiver']['handlers'] = ['console'] +LOGGING['loggers']['awx.main.commands.inventory_import']['handlers'] = ['console'] +LOGGING['loggers']['awx.main.tasks']['handlers'] = ['console'] +LOGGING['loggers']['awx.main.scheduler']['handlers'] = ['console'] +LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console'] +LOGGING['loggers']['social']['handlers'] = ['console'] +LOGGING['loggers']['system_tracking_migrations']['handlers'] = ['console'] +LOGGING['loggers']['rbac_migrations']['handlers'] = ['console'] +LOGGING['loggers']['awx.isolated.manager.playbooks']['handlers'] = ['console'] +LOGGING['handlers']['callback_receiver'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['fact_receiver'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['task_system'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['tower_warnings'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['rbac_migrations'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['system_tracking_migrations'] = {'class': 'logging.NullHandler'} +LOGGING['handlers']['management_playbooks'] = {'class': 'logging.NullHandler'} + + # Enable the following lines to also log to a file. #LOGGING['handlers']['file'] = { # 'class': 'logging.FileHandler', diff --git a/awx/sso/conf.py b/awx/sso/conf.py index 2fdf412d82..e2c15c96fa 100644 --- a/awx/sso/conf.py +++ b/awx/sso/conf.py @@ -255,7 +255,7 @@ def _register_ldap(append=None): help_text=_('Mapping of LDAP user schema to Tower API user attributes. The default' ' setting is valid for ActiveDirectory but users with other LDAP' ' configurations may need to change the values. 
Refer to the Ansible' - ' Tower documentation for additonal details.'), + ' Tower documentation for additional details.'), category=_('LDAP'), category_slug='ldap', placeholder=collections.OrderedDict([ @@ -1185,7 +1185,7 @@ register( field_class=fields.SAMLTeamAttrField, allow_null=True, default=None, - label=_('SAML Team Map'), + label=_('SAML Team Attribute Mapping'), help_text=_('Used to translate user team membership into Tower.'), category=_('SAML'), category_slug='saml', diff --git a/awx/ui/client/lib/components/components.strings.js b/awx/ui/client/lib/components/components.strings.js index 93f5ab1416..a827ee890c 100644 --- a/awx/ui/client/lib/components/components.strings.js +++ b/awx/ui/client/lib/components/components.strings.js @@ -71,16 +71,13 @@ function ComponentsStrings (BaseString) { INVENTORY_SCRIPTS: t.s('Inventory Scripts'), NOTIFICATIONS: t.s('Notifications'), MANAGEMENT_JOBS: t.s('Management Jobs'), + INSTANCES: t.s('Instances'), INSTANCE_GROUPS: t.s('Instance Groups'), SETTINGS: t.s('Settings'), FOOTER_ABOUT: t.s('About'), FOOTER_COPYRIGHT: t.s('Copyright © 2017 Red Hat, Inc.') }; - ns.capacityBar = { - IS_OFFLINE: t.s('Unavailable to run jobs.') - }; - ns.relaunch = { DEFAULT: t.s('Relaunch using the same parameters'), HOSTS: t.s('Relaunch using host parameters'), @@ -90,7 +87,7 @@ function ComponentsStrings (BaseString) { }; ns.list = { - DEFAULT_EMPTY_LIST: t.s('List is empty.') + DEFAULT_EMPTY_LIST: t.s('Please add items to this list.') }; } diff --git a/awx/ui/client/lib/components/index.js b/awx/ui/client/lib/components/index.js index 6f15cae762..d33e79329a 100644 --- a/awx/ui/client/lib/components/index.js +++ b/awx/ui/client/lib/components/index.js @@ -12,6 +12,7 @@ import inputLookup from '~components/input/lookup.directive'; import inputMessage from '~components/input/message.directive'; import inputSecret from '~components/input/secret.directive'; import inputSelect from '~components/input/select.directive'; +import inputSlider 
from '~components/input/slider.directive'; import inputText from '~components/input/text.directive'; import inputTextarea from '~components/input/textarea.directive'; import inputTextareaSecret from '~components/input/textarea-secret.directive'; @@ -54,6 +55,7 @@ angular .directive('atInputMessage', inputMessage) .directive('atInputSecret', inputSecret) .directive('atInputSelect', inputSelect) + .directive('atInputSlider', inputSlider) .directive('atInputText', inputText) .directive('atInputTextarea', inputTextarea) .directive('atInputTextareaSecret', inputTextareaSecret) diff --git a/awx/ui/client/lib/components/input/_index.less b/awx/ui/client/lib/components/input/_index.less index b92692c34e..dc08df77db 100644 --- a/awx/ui/client/lib/components/input/_index.less +++ b/awx/ui/client/lib/components/input/_index.less @@ -163,7 +163,7 @@ } .at-InputMessage--rejected { - font-size: @at-font-size-help-text; + font-size: @at-font-size-help-text; color: @at-color-error; margin: @at-margin-input-message 0 0 0; padding: 0; @@ -182,7 +182,7 @@ & > i { font-size: @at-font-size-button; - position: absolute; + position: absolute; z-index: 3; pointer-events: none; top: @at-height-input / 3; @@ -218,3 +218,47 @@ min-height: @at-height-textarea; padding: 6px @at-padding-input 0 @at-padding-input; } + +.at-InputSlider { + display: flex; + padding: 5px 0; + + p { + color: @at-color-form-label; + font-size: @at-font-size-help-text; + font-weight: @at-font-weight-body; + margin: 0 0 0 10px; + padding: 0; + width: 50px; + } + + input[type=range] { + -webkit-appearance: none; + width: 100%; + background: transparent; + height: 20px; + border-right: 1px solid @at-color-input-slider-track; + border-left: 1px solid @at-color-input-slider-track; + + &:focus { + outline: none; + } + + &::-webkit-slider-runnable-track { + background: @at-color-input-slider-track; + cursor: pointer; + height: 1px; + width: 100%; + } + &::-webkit-slider-thumb { + -webkit-appearance: none; + background: 
@at-color-input-slider-thumb; + border-radius: 50%; + border: none; + cursor: pointer; + height: 16px; + margin-top: -7px; + width: 16px; + } + } +} \ No newline at end of file diff --git a/awx/ui/client/lib/components/input/lookup.directive.js b/awx/ui/client/lib/components/input/lookup.directive.js index 1e6cc13588..0447d6b448 100644 --- a/awx/ui/client/lib/components/input/lookup.directive.js +++ b/awx/ui/client/lib/components/input/lookup.directive.js @@ -118,6 +118,10 @@ function AtInputLookupController (baseInputController, $q, $state) { vm.searchAfterDebounce(); }; + + vm.removeTag = (tagToRemove) => { + _.remove(scope.state._value, (tag) => tag === tagToRemove); + }; } AtInputLookupController.$inject = [ diff --git a/awx/ui/client/lib/components/input/lookup.partial.html b/awx/ui/client/lib/components/input/lookup.partial.html index 21ebf03b5d..e3633aa743 100644 --- a/awx/ui/client/lib/components/input/lookup.partial.html +++ b/awx/ui/client/lib/components/input/lookup.partial.html @@ -11,17 +11,30 @@ + class="form-control at-Input" + ng-class="{ 'at-Input--rejected': state._rejected }" + ng-model="state._displayValue" + ng-attr-tabindex="{{ tab || undefined }}" + ng-attr-placeholder="{{::state._placeholder || undefined }}" + ng-change="vm.searchOnInput()" + ng-hide="state._lookupTags" + ng-disabled="state._disabled || form.disabled"> + + +
+
+ +
+
+ {{ tag.hostname }} + {{ tag }} +
+
- + \ No newline at end of file diff --git a/awx/ui/client/lib/components/input/slider.directive.js b/awx/ui/client/lib/components/input/slider.directive.js new file mode 100644 index 0000000000..a2e1b8c28e --- /dev/null +++ b/awx/ui/client/lib/components/input/slider.directive.js @@ -0,0 +1,38 @@ +const templateUrl = require('~components/input/slider.partial.html'); + +function atInputSliderLink (scope, element, attrs, controllers) { + const [formController, inputController] = controllers; + + inputController.init(scope, element, formController); +} + +function atInputSliderController (baseInputController) { + const vm = this || {}; + + vm.init = (_scope_, _element_, form) => { + baseInputController.call(vm, 'input', _scope_, _element_, form); + + vm.check(); + }; +} + +atInputSliderController.$inject = ['BaseInputController']; + +function atInputSlider () { + return { + restrict: 'E', + require: ['^^atForm', 'atInputSlider'], + replace: true, + templateUrl, + controller: atInputSliderController, + controllerAs: 'vm', + link: atInputSliderLink, + scope: { + state: '=?', + col: '@', + tab: '@' + } + }; +} + +export default atInputSlider; diff --git a/awx/ui/client/lib/components/input/slider.partial.html b/awx/ui/client/lib/components/input/slider.partial.html new file mode 100644 index 0000000000..e4649149f8 --- /dev/null +++ b/awx/ui/client/lib/components/input/slider.partial.html @@ -0,0 +1,13 @@ +
+
+ +
+ +

{{ state._value }}%

+
+
+
diff --git a/awx/ui/client/lib/components/list/_index.less b/awx/ui/client/lib/components/list/_index.less index cd1fa9a023..a4daa3248b 100644 --- a/awx/ui/client/lib/components/list/_index.less +++ b/awx/ui/client/lib/components/list/_index.less @@ -86,12 +86,35 @@ border-top: @at-border-default-width solid @at-color-list-border; } +.at-Row--rowLayout { + display: flex; + flex-direction: row; + + .at-RowItem { + margin-right: @at-space-4x; + + &-label { + width: auto; + } + } +} + +.at-RowStatus { + align-self: flex-start; + margin: 0 10px 0 0; +} + +.at-Row-firstColumn { + margin-right: @at-space-4x; +} + .at-Row-actions { display: flex; } .at-Row-items { align-self: flex-start; + flex: 1; } .at-RowItem { @@ -101,10 +124,19 @@ } .at-RowItem--isHeader { + color: @at-color-body-text; margin-bottom: @at-margin-bottom-list-header; line-height: @at-line-height-list-row-item-header; } +.at-RowItem--isHeaderLink { + color: @at-blue; + cursor: pointer; +} +.at-RowItem--isHeaderLink:hover { + color: @at-blue-hover; +} + .at-RowItem--labels { line-height: @at-line-height-list-row-item-labels; } @@ -146,8 +178,26 @@ .at-RowItem-label { text-transform: uppercase; + width: auto; width: @at-width-list-row-item-label; color: @at-color-list-row-item-label; + font-size: @at-font-size; +} + +.at-RowItem-value { + font-size: @at-font-size-3x; +} + +.at-RowItem-badge { + background-color: @at-gray-848992; + border-radius: @at-border-radius; + color: @at-white; + font-size: 11px; + font-weight: normal; + height: 14px; + line-height: 10px; + margin: 0 10px; + padding: 2px 10px; } .at-RowAction { @@ -180,6 +230,11 @@ background-color: @at-color-list-row-action-hover-danger; } +.at-Row .at-Row-checkbox { + align-self: start; + margin: 2px 20px 0 0; +} + @media screen and (max-width: @at-breakpoint-compact-list) { .at-Row-actions { flex-direction: column; diff --git a/awx/ui/client/lib/components/list/row-item.directive.js b/awx/ui/client/lib/components/list/row-item.directive.js index 
972008f7a2..e07820468e 100644 --- a/awx/ui/client/lib/components/list/row-item.directive.js +++ b/awx/ui/client/lib/components/list/row-item.directive.js @@ -7,10 +7,13 @@ function atRowItem () { transclude: true, templateUrl, scope: { + badge: '@', headerValue: '@', headerLink: '@', headerTag: '@', labelValue: '@', + labelLink: '@', + labelState: '@', value: '@', valueLink: '@', smartStatus: '=?', diff --git a/awx/ui/client/lib/components/list/row-item.partial.html b/awx/ui/client/lib/components/list/row-item.partial.html index a9b81ae20c..ca58947b79 100644 --- a/awx/ui/client/lib/components/list/row-item.partial.html +++ b/awx/ui/client/lib/components/list/row-item.partial.html @@ -9,13 +9,19 @@
{{ headerTag }}
-
+ +
{{ labelValue }}
+ -
- + \ No newline at end of file diff --git a/awx/ui/client/lib/components/tabs/_index.less b/awx/ui/client/lib/components/tabs/_index.less index 4956444576..84b33c2134 100644 --- a/awx/ui/client/lib/components/tabs/_index.less +++ b/awx/ui/client/lib/components/tabs/_index.less @@ -26,3 +26,7 @@ cursor: not-allowed; } } + +.at-TabGroup + .at-Panel-body { + margin-top: 20px; +} \ No newline at end of file diff --git a/awx/ui/client/lib/components/tabs/tab.partial.html b/awx/ui/client/lib/components/tabs/tab.partial.html index 263a5d1d96..747e470571 100644 --- a/awx/ui/client/lib/components/tabs/tab.partial.html +++ b/awx/ui/client/lib/components/tabs/tab.partial.html @@ -1,6 +1,7 @@ diff --git a/awx/ui/client/lib/models/Base.js b/awx/ui/client/lib/models/Base.js index 8845c24f82..7fafb05c75 100644 --- a/awx/ui/client/lib/models/Base.js +++ b/awx/ui/client/lib/models/Base.js @@ -129,6 +129,10 @@ function httpPost (config = {}) { data: config.data }; + if (config.url) { + req.url = `${this.path}${config.url}`; + } + return $http(req) .then(res => { this.model.GET = res.data; @@ -323,7 +327,7 @@ function has (method, keys) { return value !== undefined && value !== null; } -function extend (method, related) { +function extend (method, related, config = {}) { if (!related) { related = method; method = 'GET'; @@ -337,6 +341,8 @@ function extend (method, related) { url: this.get(`related.${related}`) }; + Object.assign(req, config); + return $http(req) .then(({ data }) => { this.set(method, `related.${related}`, data); diff --git a/awx/ui/client/lib/models/Instance.js b/awx/ui/client/lib/models/Instance.js new file mode 100644 index 0000000000..09b7df0547 --- /dev/null +++ b/awx/ui/client/lib/models/Instance.js @@ -0,0 +1,47 @@ +let Base; + +function createFormSchema (method, config) { + if (!config) { + config = method; + method = 'GET'; + } + + const schema = Object.assign({}, this.options(`actions.${method.toUpperCase()}`)); + + if (config && config.omit) { + 
config.omit.forEach(key => delete schema[key]); + } + + Object.keys(schema).forEach(key => { + schema[key].id = key; + + if (this.has(key)) { + schema[key]._value = this.get(key); + } + }); + + return schema; +} + +function InstanceModel (method, resource, config) { + // Base takes two args: resource and settings + // resource is the string endpoint + Base.call(this, 'instances'); + + this.Constructor = InstanceModel; + this.createFormSchema = createFormSchema.bind(this); + + return this.create(method, resource, config); +} + +function InstanceModelLoader (BaseModel) { + Base = BaseModel; + + return InstanceModel; +} + +InstanceModelLoader.$inject = [ + 'BaseModel' +]; + +export default InstanceModelLoader; diff --git a/awx/ui/client/lib/models/InstanceGroup.js b/awx/ui/client/lib/models/InstanceGroup.js new file mode 100644 index 0000000000..cc82432c42 --- /dev/null +++ b/awx/ui/client/lib/models/InstanceGroup.js @@ -0,0 +1,47 @@ +let Base; + +function createFormSchema (method, config) { + if (!config) { + config = method; + method = 'GET'; + } + + const schema = Object.assign({}, this.options(`actions.${method.toUpperCase()}`)); + + if (config && config.omit) { + config.omit.forEach(key => delete schema[key]); + } + + Object.keys(schema).forEach(key => { + schema[key].id = key; + + if (this.has(key)) { + schema[key]._value = this.get(key); + } + }); + + return schema; +} + +function InstanceGroupModel (method, resource, config) { + // Base takes two args: resource and settings + // resource is the string endpoint + Base.call(this, 'instance_groups'); + + this.Constructor = InstanceGroupModel; + this.createFormSchema = createFormSchema.bind(this); + + return this.create(method, resource, config); +} + +function InstanceGroupModelLoader (BaseModel) { + Base = BaseModel; + + return InstanceGroupModel; +} + +InstanceGroupModelLoader.$inject = [ + 'BaseModel' +]; + +export default InstanceGroupModelLoader; diff --git a/awx/ui/client/lib/models/index.js 
b/awx/ui/client/lib/models/index.js index 937d97ac11..6b45bcad52 100644 --- a/awx/ui/client/lib/models/index.js +++ b/awx/ui/client/lib/models/index.js @@ -9,6 +9,8 @@ import Organization from '~models/Organization'; import Project from '~models/Project'; import JobTemplate from '~models/JobTemplate'; import WorkflowJobTemplateNode from '~models/WorkflowJobTemplateNode'; +import Instance from '~models/Instance'; +import InstanceGroup from '~models/InstanceGroup'; import InventorySource from '~models/InventorySource'; import Inventory from '~models/Inventory'; import InventoryScript from '~models/InventoryScript'; @@ -34,6 +36,8 @@ angular .service('ProjectModel', Project) .service('JobTemplateModel', JobTemplate) .service('WorkflowJobTemplateNodeModel', WorkflowJobTemplateNode) + .service('InstanceModel', Instance) + .service('InstanceGroupModel', InstanceGroup) .service('InventorySourceModel', InventorySource) .service('InventoryModel', Inventory) .service('InventoryScriptModel', InventoryScript) diff --git a/awx/ui/client/lib/services/base-string.service.js b/awx/ui/client/lib/services/base-string.service.js index 30762fb8db..d17533f5b6 100644 --- a/awx/ui/client/lib/services/base-string.service.js +++ b/awx/ui/client/lib/services/base-string.service.js @@ -67,6 +67,7 @@ function BaseStringService (namespace) { this.OFF = t.s('OFF'); this.YAML = t.s('YAML'); this.JSON = t.s('JSON'); + this.deleteResource = { HEADER: t.s('Delete'), USED_BY: resourceType => t.s('The {{ resourceType }} is currently being used by other resources.', { resourceType }), diff --git a/awx/ui/client/lib/theme/_global.less b/awx/ui/client/lib/theme/_global.less index b1e46927ef..6995b224a5 100644 --- a/awx/ui/client/lib/theme/_global.less +++ b/awx/ui/client/lib/theme/_global.less @@ -15,7 +15,17 @@ background: @at-color-disabled; } } - + +.at-Button--add { + &:extend(.at-Button--success all); + &:before { + content: "+"; + font-size: 20px; + } + border-color: transparent; + margin-left: 
@at-space-2x; +} + .at-Button--info { .at-mixin-Button(); .at-mixin-ButtonColor('at-color-info', 'at-color-default'); @@ -26,7 +36,7 @@ .at-mixin-ButtonColor('at-color-error', 'at-color-default'); } -.at-ButtonHollow--default { +.at-ButtonHollow--default { .at-mixin-Button(); .at-mixin-ButtonHollow( 'at-color-default', @@ -41,5 +51,5 @@ } .at-Button--expand { - width: 100%; + width: 100%; } diff --git a/awx/ui/client/lib/theme/_mixins.less b/awx/ui/client/lib/theme/_mixins.less index 6dc36a7b24..701613a2ec 100644 --- a/awx/ui/client/lib/theme/_mixins.less +++ b/awx/ui/client/lib/theme/_mixins.less @@ -21,6 +21,7 @@ } .at-mixin-Button () { + border-radius: @at-border-radius; height: @at-height-input; padding: @at-padding-button-vertical @at-padding-button-horizontal; font-size: @at-font-size-body; @@ -101,4 +102,4 @@ .at-mixin-FontFixedWidth () { font-family: Menlo, Monaco, Consolas, "Courier New", monospace; -} +} \ No newline at end of file diff --git a/awx/ui/client/lib/theme/_variables.less b/awx/ui/client/lib/theme/_variables.less index be5cde41ee..cd3a8a6675 100644 --- a/awx/ui/client/lib/theme/_variables.less +++ b/awx/ui/client/lib/theme/_variables.less @@ -147,6 +147,8 @@ @at-color-input-icon: @at-gray-b7; @at-color-input-placeholder: @at-gray-848992; @at-color-input-text: @at-gray-161b1f; +@at-color-input-slider-thumb: @at-blue; +@at-color-input-slider-track: @at-gray-b7; @at-color-icon-dismiss: @at-gray-d7; @at-color-icon-popover: @at-gray-848992; diff --git a/awx/ui/client/lib/theme/index.less b/awx/ui/client/lib/theme/index.less index 416c4a3f8d..c236928da6 100644 --- a/awx/ui/client/lib/theme/index.less +++ b/awx/ui/client/lib/theme/index.less @@ -72,7 +72,9 @@ @import '../../src/home/dashboard/lists/dashboard-list.block.less'; @import '../../src/home/dashboard/dashboard.block.less'; @import '../../src/instance-groups/capacity-bar/capacity-bar.block.less'; +@import '../../src/instance-groups/capacity-adjuster/capacity-adjuster.block.less'; @import 
'../../src/instance-groups/instance-group.block.less'; +@import '../../src/instance-groups/instances/instance-modal.block.less'; @import '../../src/inventories-hosts/inventories/insights/insights.block.less'; @import '../../src/inventories-hosts/inventories/list/host-summary-popover/host-summary-popover.block.less'; @import '../../src/inventories-hosts/inventories/related/hosts/related-groups-labels/relatedGroupsLabelsList.block.less'; diff --git a/awx/ui/client/src/activity-stream/factories/build-anchor.factory.js b/awx/ui/client/src/activity-stream/factories/build-anchor.factory.js index 914b22c53a..8fe870ce81 100644 --- a/awx/ui/client/src/activity-stream/factories/build-anchor.factory.js +++ b/awx/ui/client/src/activity-stream/factories/build-anchor.factory.js @@ -20,10 +20,10 @@ export default function BuildAnchor($log, $filter) { if (activity.operation === 'create' || activity.operation === 'delete'){ // the API formats the changes.inventory field as str 'myInventoryName-PrimaryKey' var inventory_id = _.last(activity.changes.inventory.split('-')); - url += 'inventories/' + inventory_id + '/groups/edit/' + activity.changes.id; + url += 'inventories/inventory/' + inventory_id + '/groups/edit/' + activity.changes.id; } else { - url += 'inventories/' + activity.summary_fields.inventory[0].id + '/groups/edit/' + (activity.changes.id || activity.changes.object1_pk); + url += 'inventories/inventory/' + activity.summary_fields.inventory[0].id + '/groups/edit/' + (activity.changes.id || activity.changes.object1_pk); } break; case 'host': @@ -33,7 +33,7 @@ export default function BuildAnchor($log, $filter) { url += 'jobs/' + obj.id; break; case 'inventory': - url += 'inventories/' + obj.id + '/'; + url += obj.kind && obj.kind === "smart" ? 
'inventories/smart/' + obj.id + '/' : 'inventories/inventory' + obj.id + '/'; break; case 'schedule': // schedule urls depend on the resource they're associated with diff --git a/awx/ui/client/src/app.js b/awx/ui/client/src/app.js index e1667e6fb8..e95779eb38 100644 --- a/awx/ui/client/src/app.js +++ b/awx/ui/client/src/app.js @@ -97,7 +97,6 @@ angular users.name, projects.name, scheduler.name, - instanceGroups.name, 'Utilities', 'templates', @@ -105,6 +104,7 @@ angular 'AWDirectives', 'features', + instanceGroups, atFeatures, atLibComponents, atLibModels, diff --git a/awx/ui/client/src/configuration/auth-form/configuration-auth.controller.js b/awx/ui/client/src/configuration/auth-form/configuration-auth.controller.js index 5cd9e72a11..5b80e40ddb 100644 --- a/awx/ui/client/src/configuration/auth-form/configuration-auth.controller.js +++ b/awx/ui/client/src/configuration/auth-form/configuration-auth.controller.js @@ -17,6 +17,11 @@ export default [ 'configurationGithubTeamForm', 'configurationGoogleForm', 'configurationLdapForm', + 'configurationLdap1Form', + 'configurationLdap2Form', + 'configurationLdap3Form', + 'configurationLdap4Form', + 'configurationLdap5Form', 'configurationRadiusForm', 'configurationTacacsForm', 'configurationSamlForm', @@ -39,6 +44,11 @@ export default [ configurationGithubTeamForm, configurationGoogleForm, configurationLdapForm, + configurationLdap1Form, + configurationLdap2Form, + configurationLdap3Form, + configurationLdap4Form, + configurationLdap5Form, configurationRadiusForm, configurationTacacsForm, configurationSamlForm, @@ -55,6 +65,8 @@ export default [ var formTracker = $scope.$parent.vm.formTracker; var dropdownValue = 'azure'; var activeAuthForm = 'azure'; + var ldapDropdownValue = ''; + let codeInputInitialized = false; // Default active form @@ -62,10 +74,16 @@ export default [ formTracker.setCurrentAuth(activeAuthForm); } - var activeForm = function() { + const getActiveAuthForm = () => { + if (authVm.dropdownValue === 
'ldap') { + return `ldap${authVm.ldapDropdownValue}`; + } + return authVm.dropdownValue; + }; + var activeForm = function() { if(!$scope.$parent[formTracker.currentFormName()].$dirty) { - authVm.activeAuthForm = authVm.dropdownValue; + authVm.activeAuthForm = getActiveAuthForm(); formTracker.setCurrentAuth(authVm.activeAuthForm); startCodeMirrors(); } else { @@ -78,7 +96,7 @@ export default [ onClick: function() { $scope.$parent.vm.populateFromApi(); $scope.$parent[formTracker.currentFormName()].$setPristine(); - authVm.activeAuthForm = authVm.dropdownValue; + authVm.activeAuthForm = getActiveAuthForm(); formTracker.setCurrentAuth(authVm.activeAuthForm); $('#FormModal-dialog').dialog('close'); } @@ -89,7 +107,7 @@ export default [ .then(function() { $scope.$parent[formTracker.currentFormName()].$setPristine(); $scope.$parent.vm.populateFromApi(); - authVm.activeAuthForm = authVm.dropdownValue; + authVm.activeAuthForm = getActiveAuthForm(); formTracker.setCurrentAuth(authVm.activeAuthForm); $('#FormModal-dialog').dialog('close'); }); @@ -100,11 +118,12 @@ export default [ $scope.$parent.vm.triggerModal(msg, title, buttons); } formTracker.setCurrentAuth(authVm.activeAuthForm); + authVm.ldapSelected = (authVm.activeAuthForm.indexOf('ldap') !== -1); }; var dropdownOptions = [ {label: i18n._('Azure AD'), value: 'azure'}, - {label: i18n._('GitHub'), value: 'github'}, + {label: i18n._('GitHub'), value: 'github'}, {label: i18n._('GitHub Org'), value: 'github_org'}, {label: i18n._('GitHub Team'), value: 'github_team'}, {label: i18n._('Google OAuth2'), value: 'google_oauth'}, @@ -114,48 +133,97 @@ export default [ {label: i18n._('TACACS+'), value: 'tacacs'} ]; + var ldapDropdownOptions = [ + {label: i18n._('Default'), value: ''}, + {label: i18n._('LDAP 1 (Optional)'), value: '1'}, + {label: i18n._('LDAP 2 (Optional)'), value: '2'}, + {label: i18n._('LDAP 3 (Optional)'), value: '3'}, + {label: i18n._('LDAP 4 (Optional)'), value: '4'}, + {label: i18n._('LDAP 5 (Optional)'), 
value: '5'}, + ]; + CreateSelect2({ element: '#configure-dropdown-nav', multiple: false, }); - var authForms = [{ + CreateSelect2({ + element: '#configure-ldap-dropdown', + multiple: false, + }); + + var authForms = [ + { formDef: configurationAzureForm, id: 'auth-azure-form', name: 'azure' - }, { + }, + { formDef: configurationGithubForm, id: 'auth-github-form', name: 'github' - }, { + }, + { formDef: configurationGithubOrgForm, id: 'auth-github-org-form', name: 'github_org' - }, { + }, + { formDef: configurationGithubTeamForm, id: 'auth-github-team-form', name: 'github_team' - }, { + }, + { formDef: configurationGoogleForm, id: 'auth-google-form', name: 'google_oauth' - }, { - formDef: configurationLdapForm, - id: 'auth-ldap-form', - name: 'ldap' - }, { + }, + { formDef: configurationRadiusForm, id: 'auth-radius-form', name: 'radius' - }, { + }, + { formDef: configurationTacacsForm, id: 'auth-tacacs-form', name: 'tacacs' - }, { + }, + { formDef: configurationSamlForm, id: 'auth-saml-form', name: 'saml' - }, ]; + }, + { + formDef: configurationLdapForm, + id: 'auth-ldap-form', + name: 'ldap' + }, + { + formDef: configurationLdap1Form, + id: 'auth-ldap1-form', + name: 'ldap1' + }, + { + formDef: configurationLdap2Form, + id: 'auth-ldap2-form', + name: 'ldap2' + }, + { + formDef: configurationLdap3Form, + id: 'auth-ldap3-form', + name: 'ldap3' + }, + { + formDef: configurationLdap4Form, + id: 'auth-ldap4-form', + name: 'ldap4' + }, + { + formDef: configurationLdap5Form, + id: 'auth-ldap5-form', + name: 'ldap5' + }, + ]; var forms = _.pluck(authForms, 'formDef'); _.each(forms, function(form) { @@ -179,10 +247,8 @@ export default [ form.buttons.save.disabled = $rootScope.user_is_system_auditor; }); - function startCodeMirrors(key){ - var form = _.find(authForms, function(f){ - return f.name === $scope.authVm.activeAuthForm; - }); + function startCodeMirrors(key) { + var form = _.find(authForms, f => f.name === $scope.authVm.activeAuthForm); if(!key){ // Attach 
codemirror to fields that need it @@ -246,9 +312,23 @@ export default [ // Flag to avoid re-rendering and breaking Select2 dropdowns on tab switching var dropdownRendered = false; - function populateLDAPGroupType(flag){ - if($scope.$parent.AUTH_LDAP_GROUP_TYPE !== null) { - $scope.$parent.AUTH_LDAP_GROUP_TYPE = _.find($scope.$parent.AUTH_LDAP_GROUP_TYPE_options, { value: $scope.$parent.AUTH_LDAP_GROUP_TYPE }); + function populateLDAPGroupType(flag, index = null){ + let groupPropName; + let groupOptionsPropName; + let selectElementId; + + if (index) { + groupPropName = `AUTH_LDAP_${index}_GROUP_TYPE`; + groupOptionsPropName = `${groupPropName}_options`; + selectElementId = `#configuration_ldap${index}_template_${groupPropName}`; + } else { + groupPropName = 'AUTH_LDAP_GROUP_TYPE'; + groupOptionsPropName = `${groupPropName}_options`; + selectElementId = `#configuration_ldap_template_${groupPropName}`; + } + + if($scope.$parent[groupPropName] !== null) { + $scope.$parent[groupPropName] = _.find($scope[groupOptionsPropName], { value: $scope.$parent[groupPropName] }); } if(flag !== undefined){ @@ -258,7 +338,7 @@ export default [ if(!dropdownRendered) { dropdownRendered = true; CreateSelect2({ - element: '#configuration_ldap_template_AUTH_LDAP_GROUP_TYPE', + element: selectElementId, multiple: false, placeholder: i18n._('Select group types'), }); @@ -284,13 +364,12 @@ export default [ } } - $scope.$on('AUTH_LDAP_GROUP_TYPE_populated', function(e, data, flag) { - populateLDAPGroupType(flag); - }); - - $scope.$on('TACACSPLUS_AUTH_PROTOCOL_populated', function(e, data, flag) { - populateTacacsProtocol(flag); - }); + $scope.$on('AUTH_LDAP_GROUP_TYPE_populated', (e, data, flag) => populateLDAPGroupType(flag)); + $scope.$on('AUTH_LDAP_1_GROUP_TYPE_populated', (e, data, flag) => populateLDAPGroupType(flag, 1)); + $scope.$on('AUTH_LDAP_2_GROUP_TYPE_populated', (e, data, flag) => populateLDAPGroupType(flag, 2)); + $scope.$on('AUTH_LDAP_3_GROUP_TYPE_populated', (e, data, flag) => 
populateLDAPGroupType(flag, 3)); + $scope.$on('AUTH_LDAP_4_GROUP_TYPE_populated', (e, data, flag) => populateLDAPGroupType(flag, 4)); + $scope.$on('AUTH_LDAP_5_GROUP_TYPE_populated', (e, data, flag) => populateLDAPGroupType(flag, 5)); $scope.$on('$locationChangeStart', (event, url) => { let parts = url.split('/'); @@ -311,6 +390,12 @@ export default [ } populateLDAPGroupType(false); + populateLDAPGroupType(false, 1); + populateLDAPGroupType(false, 2); + populateLDAPGroupType(false, 3); + populateLDAPGroupType(false, 4); + populateLDAPGroupType(false, 5); + populateTacacsProtocol(false); }); @@ -328,7 +413,9 @@ export default [ activeAuthForm: activeAuthForm, authForms: authForms, dropdownOptions: dropdownOptions, - dropdownValue: dropdownValue + dropdownValue: dropdownValue, + ldapDropdownValue: ldapDropdownValue, + ldapDropdownOptions: ldapDropdownOptions, }); } ]; diff --git a/awx/ui/client/src/configuration/auth-form/configuration-auth.partial.html b/awx/ui/client/src/configuration/auth-form/configuration-auth.partial.html index e1ffb3d9f4..2bfa19b401 100644 --- a/awx/ui/client/src/configuration/auth-form/configuration-auth.partial.html +++ b/awx/ui/client/src/configuration/auth-form/configuration-auth.partial.html @@ -1,63 +1,89 @@
+
Sub Category
- + +
+ +
+
LDAP Server
+
+ +
-
-
-
-
-
-
-
-
+
-
-
-
-
+
+
+
+
+
-
-
-
-
+
+
+
-
-
-
-
+
+
+
-
-
-
-
+
+
+
-
-
-
-
+
+
+
-
-
-
-
+
+
+
-
-
-
-
+
+
+
-
-
+
+
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
+
+ +
+
diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap1.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap1.form.js new file mode 100644 index 0000000000..8430fecee3 --- /dev/null +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap1.form.js @@ -0,0 +1,108 @@ +/************************************************* + * Copyright (c) 2016 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + +export default ['i18n', function(i18n) { + return { + // editTitle: 'Authorization Configuration', + name: 'configuration_ldap1_template', + showActions: true, + showHeader: false, + + fields: { + AUTH_LDAP_1_SERVER_URI: { + type: 'text', + reset: 'AUTH_LDAP_1_SERVER_URI' + }, + AUTH_LDAP_1_BIND_DN: { + type: 'text', + reset: 'AUTH_LDAP_1_BIND_DN' + }, + AUTH_LDAP_1_BIND_PASSWORD: { + type: 'sensitive', + hasShowInputButton: true, + }, + AUTH_LDAP_1_USER_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_1_USER_SEARCH' + }, + AUTH_LDAP_1_GROUP_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_1_GROUP_SEARCH' + }, + AUTH_LDAP_1_USER_DN_TEMPLATE: { + type: 'text', + reset: 'AUTH_LDAP_1_USER_DN_TEMPLATE' + }, + AUTH_LDAP_1_USER_ATTR_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_1_USER_ATTR_MAP', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_1_GROUP_TYPE: { + type: 'select', + reset: 'AUTH_LDAP_1_GROUP_TYPE', + ngOptions: 'group.label for group in AUTH_LDAP_1_GROUP_TYPE_options track by group.value', + }, + AUTH_LDAP_1_REQUIRE_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_1_REQUIRE_GROUP' + }, + AUTH_LDAP_1_DENY_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_1_DENY_GROUP' + }, + AUTH_LDAP_1_START_TLS: { + type: 'toggleSwitch' + }, + 
AUTH_LDAP_1_USER_FLAGS_BY_GROUP: { + type: 'textarea', + reset: 'AUTH_LDAP_1_USER_FLAGS_BY_GROUP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_1_ORGANIZATION_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_1_ORGANIZATION_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_1_TEAM_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_1_TEAM_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + } + }, + + buttons: { + reset: { + ngShow: '!user_is_system_auditor', + ngClick: 'vm.resetAllConfirm()', + label: i18n._('Revert all to default'), + class: 'Form-resetAll' + }, + cancel: { + ngClick: 'vm.formCancel()', + }, + save: { + ngClick: 'vm.formSave()', + ngDisabled: "!ldap_auth || configuration_ldap1_template_form.$invalid || configuration_ldap1_template_form.$pending" + } + } + }; +} +]; diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap2.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap2.form.js new file mode 100644 index 0000000000..09230cb802 --- /dev/null +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap2.form.js @@ -0,0 +1,108 @@ +/************************************************* + * Copyright (c) 2018 Ansible, Inc. 
+ * + * All Rights Reserved + *************************************************/ + +export default ['i18n', function(i18n) { + return { + // editTitle: 'Authorization Configuration', + name: 'configuration_ldap2_template', + showActions: true, + showHeader: false, + + fields: { + AUTH_LDAP_2_SERVER_URI: { + type: 'text', + reset: 'AUTH_LDAP_2_SERVER_URI' + }, + AUTH_LDAP_2_BIND_DN: { + type: 'text', + reset: 'AUTH_LDAP_2_BIND_DN' + }, + AUTH_LDAP_2_BIND_PASSWORD: { + type: 'sensitive', + hasShowInputButton: true, + }, + AUTH_LDAP_2_USER_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_2_USER_SEARCH' + }, + AUTH_LDAP_2_GROUP_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_2_GROUP_SEARCH' + }, + AUTH_LDAP_2_USER_DN_TEMPLATE: { + type: 'text', + reset: 'AUTH_LDAP_2_USER_DN_TEMPLATE' + }, + AUTH_LDAP_2_USER_ATTR_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_2_USER_ATTR_MAP', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_2_GROUP_TYPE: { + type: 'select', + reset: 'AUTH_LDAP_2_GROUP_TYPE', + ngOptions: 'group.label for group in AUTH_LDAP_2_GROUP_TYPE_options track by group.value', + }, + AUTH_LDAP_2_REQUIRE_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_2_REQUIRE_GROUP' + }, + AUTH_LDAP_2_DENY_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_2_DENY_GROUP' + }, + AUTH_LDAP_2_START_TLS: { + type: 'toggleSwitch' + }, + AUTH_LDAP_2_USER_FLAGS_BY_GROUP: { + type: 'textarea', + reset: 'AUTH_LDAP_2_USER_FLAGS_BY_GROUP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_2_ORGANIZATION_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_2_ORGANIZATION_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_2_TEAM_MAP: { + type: 'textarea', + reset: 
'AUTH_LDAP_2_TEAM_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + } + }, + + buttons: { + reset: { + ngShow: '!user_is_system_auditor', + ngClick: 'vm.resetAllConfirm()', + label: i18n._('Revert all to default'), + class: 'Form-resetAll' + }, + cancel: { + ngClick: 'vm.formCancel()', + }, + save: { + ngClick: 'vm.formSave()', + ngDisabled: "!ldap_auth || configuration_ldap2_template_form.$invalid || configuration_ldap2_template_form.$pending" + } + } + }; +} +]; diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap3.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap3.form.js new file mode 100644 index 0000000000..9c26c22829 --- /dev/null +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap3.form.js @@ -0,0 +1,108 @@ +/************************************************* + * Copyright (c) 2018 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + +export default ['i18n', function(i18n) { + return { + // editTitle: 'Authorization Configuration', + name: 'configuration_ldap3_template', + showActions: true, + showHeader: false, + + fields: { + AUTH_LDAP_3_SERVER_URI: { + type: 'text', + reset: 'AUTH_LDAP_3_SERVER_URI' + }, + AUTH_LDAP_3_BIND_DN: { + type: 'text', + reset: 'AUTH_LDAP_3_BIND_DN' + }, + AUTH_LDAP_3_BIND_PASSWORD: { + type: 'sensitive', + hasShowInputButton: true, + }, + AUTH_LDAP_3_USER_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_3_USER_SEARCH' + }, + AUTH_LDAP_3_GROUP_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_3_GROUP_SEARCH' + }, + AUTH_LDAP_3_USER_DN_TEMPLATE: { + type: 'text', + reset: 'AUTH_LDAP_3_USER_DN_TEMPLATE' + }, + AUTH_LDAP_3_USER_ATTR_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_3_USER_ATTR_MAP', + rows: 6, + 
codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_3_GROUP_TYPE: { + type: 'select', + reset: 'AUTH_LDAP_3_GROUP_TYPE', + ngOptions: 'group.label for group in AUTH_LDAP_3_GROUP_TYPE_options track by group.value', + }, + AUTH_LDAP_3_REQUIRE_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_3_REQUIRE_GROUP' + }, + AUTH_LDAP_3_DENY_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_3_DENY_GROUP' + }, + AUTH_LDAP_3_START_TLS: { + type: 'toggleSwitch' + }, + AUTH_LDAP_3_USER_FLAGS_BY_GROUP: { + type: 'textarea', + reset: 'AUTH_LDAP_3_USER_FLAGS_BY_GROUP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_3_ORGANIZATION_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_3_ORGANIZATION_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_3_TEAM_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_3_TEAM_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + } + }, + + buttons: { + reset: { + ngShow: '!user_is_system_auditor', + ngClick: 'vm.resetAllConfirm()', + label: i18n._('Revert all to default'), + class: 'Form-resetAll' + }, + cancel: { + ngClick: 'vm.formCancel()', + }, + save: { + ngClick: 'vm.formSave()', + ngDisabled: "!ldap_auth || configuration_ldap3_template_form.$invalid || configuration_ldap3_template_form.$pending" + } + } + }; +} +]; diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap4.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap4.form.js new file mode 100644 index 0000000000..95c9f6d175 --- /dev/null +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap4.form.js @@ -0,0 +1,108 @@ +/************************************************* + * Copyright (c) 2018 Ansible, Inc. 
+ * + * All Rights Reserved + *************************************************/ + +export default ['i18n', function(i18n) { + return { + // editTitle: 'Authorization Configuration', + name: 'configuration_ldap4_template', + showActions: true, + showHeader: false, + + fields: { + AUTH_LDAP_4_SERVER_URI: { + type: 'text', + reset: 'AUTH_LDAP_4_SERVER_URI' + }, + AUTH_LDAP_4_BIND_DN: { + type: 'text', + reset: 'AUTH_LDAP_4_BIND_DN' + }, + AUTH_LDAP_4_BIND_PASSWORD: { + type: 'sensitive', + hasShowInputButton: true, + }, + AUTH_LDAP_4_USER_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_4_USER_SEARCH' + }, + AUTH_LDAP_4_GROUP_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_4_GROUP_SEARCH' + }, + AUTH_LDAP_4_USER_DN_TEMPLATE: { + type: 'text', + reset: 'AUTH_LDAP_4_USER_DN_TEMPLATE' + }, + AUTH_LDAP_4_USER_ATTR_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_4_USER_ATTR_MAP', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_4_GROUP_TYPE: { + type: 'select', + reset: 'AUTH_LDAP_4_GROUP_TYPE', + ngOptions: 'group.label for group in AUTH_LDAP_4_GROUP_TYPE_options track by group.value', + }, + AUTH_LDAP_4_REQUIRE_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_4_REQUIRE_GROUP' + }, + AUTH_LDAP_4_DENY_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_4_DENY_GROUP' + }, + AUTH_LDAP_4_START_TLS: { + type: 'toggleSwitch' + }, + AUTH_LDAP_4_USER_FLAGS_BY_GROUP: { + type: 'textarea', + reset: 'AUTH_LDAP_4_USER_FLAGS_BY_GROUP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_4_ORGANIZATION_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_4_ORGANIZATION_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_4_TEAM_MAP: { + type: 'textarea', + reset: 
'AUTH_LDAP_4_TEAM_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + } + }, + + buttons: { + reset: { + ngShow: '!user_is_system_auditor', + ngClick: 'vm.resetAllConfirm()', + label: i18n._('Revert all to default'), + class: 'Form-resetAll' + }, + cancel: { + ngClick: 'vm.formCancel()', + }, + save: { + ngClick: 'vm.formSave()', + ngDisabled: "!ldap_auth || configuration_ldap4_template_form.$invalid || configuration_ldap4_template_form.$pending" + } + } + }; +} +]; diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap5.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap5.form.js new file mode 100644 index 0000000000..fa93437367 --- /dev/null +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-ldap5.form.js @@ -0,0 +1,108 @@ +/************************************************* + * Copyright (c) 2018 Ansible, Inc. + * + * All Rights Reserved + *************************************************/ + +export default ['i18n', function(i18n) { + return { + // editTitle: 'Authorization Configuration', + name: 'configuration_ldap5_template', + showActions: true, + showHeader: false, + + fields: { + AUTH_LDAP_5_SERVER_URI: { + type: 'text', + reset: 'AUTH_LDAP_5_SERVER_URI' + }, + AUTH_LDAP_5_BIND_DN: { + type: 'text', + reset: 'AUTH_LDAP_5_BIND_DN' + }, + AUTH_LDAP_5_BIND_PASSWORD: { + type: 'sensitive', + hasShowInputButton: true, + }, + AUTH_LDAP_5_USER_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_5_USER_SEARCH' + }, + AUTH_LDAP_5_GROUP_SEARCH: { + type: 'textarea', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + reset: 'AUTH_LDAP_5_GROUP_SEARCH' + }, + AUTH_LDAP_5_USER_DN_TEMPLATE: { + type: 'text', + reset: 'AUTH_LDAP_5_USER_DN_TEMPLATE' + }, + AUTH_LDAP_5_USER_ATTR_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_5_USER_ATTR_MAP', + rows: 6, + 
codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_5_GROUP_TYPE: { + type: 'select', + reset: 'AUTH_LDAP_5_GROUP_TYPE', + ngOptions: 'group.label for group in AUTH_LDAP_5_GROUP_TYPE_options track by group.value', + }, + AUTH_LDAP_5_REQUIRE_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_5_REQUIRE_GROUP' + }, + AUTH_LDAP_5_DENY_GROUP: { + type: 'text', + reset: 'AUTH_LDAP_5_DENY_GROUP' + }, + AUTH_LDAP_5_START_TLS: { + type: 'toggleSwitch' + }, + AUTH_LDAP_5_USER_FLAGS_BY_GROUP: { + type: 'textarea', + reset: 'AUTH_LDAP_5_USER_FLAGS_BY_GROUP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_5_ORGANIZATION_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_5_ORGANIZATION_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + AUTH_LDAP_5_TEAM_MAP: { + type: 'textarea', + reset: 'AUTH_LDAP_5_TEAM_MAP', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + } + }, + + buttons: { + reset: { + ngShow: '!user_is_system_auditor', + ngClick: 'vm.resetAllConfirm()', + label: i18n._('Revert all to default'), + class: 'Form-resetAll' + }, + cancel: { + ngClick: 'vm.formCancel()', + }, + save: { + ngClick: 'vm.formSave()', + ngDisabled: "!ldap_auth || configuration_ldap5_template_form.$invalid || configuration_ldap5_template_form.$pending" + } + } + }; +} +]; diff --git a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-saml.form.js b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-saml.form.js index c7c8cc289d..ad103461f9 100644 --- a/awx/ui/client/src/configuration/auth-form/sub-forms/auth-saml.form.js +++ b/awx/ui/client/src/configuration/auth-form/sub-forms/auth-saml.form.js @@ -71,15 +71,49 @@ export default ['i18n', function(i18n) { codeMirror: true, class: 'Form-textAreaLabel Form-formGroup--fullWidth' }, + SOCIAL_AUTH_SAML_ORGANIZATION_ATTR: { + type: 'textarea', + reset: 
'SOCIAL_AUTH_SAML_ORGANIZATION_ATTR', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth' + }, SOCIAL_AUTH_SAML_TEAM_MAP: { type: 'textarea', reset: 'SOCIAL_AUTH_SAML_TEAM_MAP', rows: 6, codeMirror: true, class: 'Form-textAreaLabel Form-formGroup--fullWidth' - } + }, + SOCIAL_AUTH_SAML_TEAM_ATTR: { + type: 'textarea', + reset: 'SOCIAL_AUTH_SAML_TEAM_ATTR', + rows: 6, + codeMirror: true, + class: 'Form-textAreaLabel Form-formGroup--fullWidth' + }, + SOCIAL_AUTH_SAML_SECURITY_CONFIG: { + type: 'textarea', + reset: 'SOCIAL_AUTH_SAML_SECURITY_CONFIG', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + SOCIAL_AUTH_SAML_SP_EXTRA: { + type: 'textarea', + reset: 'SOCIAL_AUTH_SAML_SP_EXTRA', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, + SOCIAL_AUTH_SAML_EXTRA_DATA: { + type: 'textarea', + reset: 'SOCIAL_AUTH_SAML_EXTRA_DATA', + codeMirror: true, + rows: 6, + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + }, }, - buttons: { reset: { ngShow: '!user_is_system_auditor', diff --git a/awx/ui/client/src/configuration/configuration.block.less b/awx/ui/client/src/configuration/configuration.block.less index bc6f4faa38..7cba028ef9 100644 --- a/awx/ui/client/src/configuration/configuration.block.less +++ b/awx/ui/client/src/configuration/configuration.block.less @@ -38,6 +38,13 @@ justify-content: flex-end; } +.Form-nav--ldapDropdownContainer { + align-items: center; + width: 100%; + margin: 0 0 auto auto; + display: flex; +} + .Form-nav--dropdown { width: 285px; } @@ -166,3 +173,7 @@ input#filePickerText { .LogAggregator-failedNotification{ max-width: 300px; } + +hr { + height: 1px; +} diff --git a/awx/ui/client/src/configuration/configuration.controller.js b/awx/ui/client/src/configuration/configuration.controller.js index 8fbf100eaa..fec8c23227 100644 --- a/awx/ui/client/src/configuration/configuration.controller.js +++ 
b/awx/ui/client/src/configuration/configuration.controller.js @@ -15,6 +15,11 @@ export default [ 'configurationGithubTeamForm', 'configurationGoogleForm', 'configurationLdapForm', + 'configurationLdap1Form', + 'configurationLdap2Form', + 'configurationLdap3Form', + 'configurationLdap4Form', + 'configurationLdap5Form', 'configurationRadiusForm', 'configurationTacacsForm', 'configurationSamlForm', @@ -34,6 +39,11 @@ export default [ configurationGithubTeamForm, configurationGoogleForm, configurationLdapForm, + configurationLdap1Form, + configurationLdap2Form, + configurationLdap3Form, + configurationLdap4Form, + configurationLdap5Form, configurationRadiusForm, configurationTacacsForm, configurationSamlForm, @@ -52,6 +62,11 @@ export default [ 'github_team': configurationGithubTeamForm, 'google_oauth': configurationGoogleForm, 'ldap': configurationLdapForm, + 'ldap1': configurationLdap1Form, + 'ldap2': configurationLdap2Form, + 'ldap3': configurationLdap3Form, + 'ldap4': configurationLdap4Form, + 'ldap5': configurationLdap5Form, 'radius': configurationRadiusForm, 'tacacs': configurationTacacsForm, 'saml': configurationSamlForm, @@ -85,9 +100,14 @@ export default [ // the ConfigurationUtils.arrayToList() // does a string.split(', ') w/ an extra space // behind the comma. 
+ + const isLdap = (key.indexOf("AUTH_LDAP") !== -1); + const isLdapUserSearch = isLdap && (key.indexOf("USER_SEARCH") !== -1); + const isLdapGroupSearch = isLdap && (key.indexOf("GROUP_SEARCH") !== -1); + if(key === "AD_HOC_COMMANDS"){ $scope[key] = data[key]; - } else if (key === "AUTH_LDAP_USER_SEARCH" || key === "AUTH_LDAP_GROUP_SEARCH") { + } else if (isLdapUserSearch || isLdapGroupSearch) { $scope[key] = JSON.stringify(data[key]); } else { $scope[key] = ConfigurationUtils.arrayToList(data[key], key); @@ -339,7 +359,12 @@ export default [ $scope.$broadcast(key+'_reverted'); } else if($scope[key + '_field'].hasOwnProperty('codeMirror')){ - if (key === "AUTH_LDAP_USER_SEARCH" || key === "AUTH_LDAP_GROUP_SEARCH") { + const isLdap = (key.indexOf("AUTH_LDAP") !== -1); + + const isLdapUserSearch = isLdap && (key.indexOf("USER_SEARCH") !== -1); + const isLdapGroupSearch = isLdap && (key.indexOf("GROUP_SEARCH") !== -1); + + if (isLdapUserSearch || isLdapGroupSearch) { $scope[key] = '[]'; } else { $scope[key] = '{}'; diff --git a/awx/ui/client/src/configuration/main.js b/awx/ui/client/src/configuration/main.js index 40715249b1..3846354546 100644 --- a/awx/ui/client/src/configuration/main.js +++ b/awx/ui/client/src/configuration/main.js @@ -17,6 +17,11 @@ import configurationGithubOrgForm from './auth-form/sub-forms/auth-github-org.fo import configurationGithubTeamForm from './auth-form/sub-forms/auth-github-team.form'; import configurationGoogleForm from './auth-form/sub-forms/auth-google-oauth2.form'; import configurationLdapForm from './auth-form/sub-forms/auth-ldap.form.js'; +import configurationLdap1Form from './auth-form/sub-forms/auth-ldap1.form.js'; +import configurationLdap2Form from './auth-form/sub-forms/auth-ldap2.form.js'; +import configurationLdap3Form from './auth-form/sub-forms/auth-ldap3.form.js'; +import configurationLdap4Form from './auth-form/sub-forms/auth-ldap4.form.js'; +import configurationLdap5Form from 
'./auth-form/sub-forms/auth-ldap5.form.js'; import configurationRadiusForm from './auth-form/sub-forms/auth-radius.form.js'; import configurationTacacsForm from './auth-form/sub-forms/auth-tacacs.form.js'; import configurationSamlForm from './auth-form/sub-forms/auth-saml.form'; @@ -39,6 +44,11 @@ angular.module('configuration', []) .factory('configurationGithubTeamForm', configurationGithubTeamForm) .factory('configurationGoogleForm', configurationGoogleForm) .factory('configurationLdapForm', configurationLdapForm) + .factory('configurationLdap1Form', configurationLdap1Form) + .factory('configurationLdap2Form', configurationLdap2Form) + .factory('configurationLdap3Form', configurationLdap3Form) + .factory('configurationLdap4Form', configurationLdap4Form) + .factory('configurationLdap5Form', configurationLdap5Form) .factory('configurationRadiusForm', configurationRadiusForm) .factory('configurationTacacsForm', configurationTacacsForm) .factory('configurationSamlForm', configurationSamlForm) diff --git a/awx/ui/client/src/instance-groups/add-edit/add-edit-instance-groups.view.html b/awx/ui/client/src/instance-groups/add-edit/add-edit-instance-groups.view.html new file mode 100644 index 0000000000..8567f095ce --- /dev/null +++ b/awx/ui/client/src/instance-groups/add-edit/add-edit-instance-groups.view.html @@ -0,0 +1,30 @@ + + + {{ vm.panelTitle }} + + + + {{:: vm.strings.get('tab.DETAILS') }} + {{:: vm.strings.get('tab.INSTANCES') }} + {{:: vm.strings.get('tab.JOBS') }} + + + + + + + + + + + + +
+ + + + + +
+
+
\ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/add-edit/add-instance-group.controller.js b/awx/ui/client/src/instance-groups/add-edit/add-instance-group.controller.js new file mode 100644 index 0000000000..bb0b3ea413 --- /dev/null +++ b/awx/ui/client/src/instance-groups/add-edit/add-instance-group.controller.js @@ -0,0 +1,44 @@ +function AddController ($scope, $state, models, strings) { + const vm = this || {}; + const { instanceGroup, instance } = models; + + vm.mode = 'add'; + vm.strings = strings; + vm.panelTitle = strings.get('state.ADD_BREADCRUMB_LABEL'); + + vm.tab = { + details: { _active: true }, + instances: {_disabled: true }, + jobs: {_disabled: true } + }; + + vm.form = instanceGroup.createFormSchema('post'); + + // Default policy instance percentage value is 0 + vm.form.policy_instance_percentage._value = 0; + + vm.form.policy_instance_list._lookupTags = true; + vm.form.policy_instance_list._model = instance; + vm.form.policy_instance_list._placeholder = "Policy Instance List"; + vm.form.policy_instance_list._resource = 'instances'; + vm.form.policy_instance_list._route = 'instanceGroups.add.modal.instances'; + vm.form.policy_instance_list._value = []; + + vm.form.save = data => { + data.policy_instance_list = data.policy_instance_list.map(instance => instance.hostname); + return instanceGroup.request('post', { data }); + }; + + vm.form.onSaveSuccess = res => { + $state.go('instanceGroups.edit', { instance_group_id: res.data.id }, { reload: true }); + }; +} + +AddController.$inject = [ + '$scope', + '$state', + 'resolvedModels', + 'InstanceGroupsStrings' +]; + +export default AddController; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/add-edit/edit-instance-group.controller.js b/awx/ui/client/src/instance-groups/add-edit/edit-instance-group.controller.js new file mode 100644 index 0000000000..ee0802bbe2 --- /dev/null +++ 
b/awx/ui/client/src/instance-groups/add-edit/edit-instance-group.controller.js @@ -0,0 +1,54 @@ +function EditController ($rootScope, $state, models, strings) { + const vm = this || {}; + const { instanceGroup, instance } = models; + + $rootScope.breadcrumb.instance_group_name = instanceGroup.get('name'); + + vm.mode = 'edit'; + vm.strings = strings; + vm.panelTitle = instanceGroup.get('name'); + + vm.tab = { + details: { + _active: true, + _go: 'instanceGroups.edit', + _params: { instance_group_id: instanceGroup.get('id') } + }, + instances: { + _go: 'instanceGroups.instances', + _params: { instance_group_id: instanceGroup.get('id') } + }, + jobs: { + _go: 'instanceGroups.jobs', + _params: { instance_group_id: instanceGroup.get('id') } + } + }; + + vm.form = instanceGroup.createFormSchema('put'); + + vm.form.policy_instance_list._lookupTags = true; + vm.form.policy_instance_list._model = instance; + vm.form.policy_instance_list._placeholder = "Policy Instance List"; + vm.form.policy_instance_list._resource = 'instances'; + vm.form.policy_instance_list._route = 'instanceGroups.edit.modal.instances'; + vm.form.policy_instance_list._value = instanceGroup.get('policy_instance_list'); + + vm.form.save = data => { + instanceGroup.unset('policy_instance_list'); + data.policy_instance_list = data.policy_instance_list.map(instance => instance.hostname || instance); + return instanceGroup.request('put', { data }); + }; + + vm.form.onSaveSuccess = res => { + $state.go('instanceGroups.edit', { instance_group_id: res.data.id }, { reload: true }); + }; +} + +EditController.$inject = [ + '$rootScope', + '$state', + 'resolvedModels', + 'InstanceGroupsStrings' +]; + +export default EditController; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.directive.js b/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.directive.js new file mode 100644 index 0000000000..897261c9c5 --- /dev/null +++ 
b/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.directive.js @@ -0,0 +1,84 @@ +const templateUrl = require('./instance-list-policy.partial.html'); + +function InstanceListPolicyLink (scope, el, attrs, controllers) { + const instancePolicyController = controllers[0]; + const formController = controllers[1]; + const models = scope.$resolve.resolvedModels; + + instancePolicyController.init(formController, models); +} + + +function InstanceListPolicyController ($scope, $state, strings) { + const vm = this || {}; + let form; + let instance; + let instanceGroup; + + vm.init = (_form_, _models_) => { + form = _form_; + ({ instance, instanceGroup} = _models_); + + vm.strings = strings; + vm.instanceGroupId = instanceGroup.get('id'); + vm.defaultParams = { page_size: '10', order_by: 'hostname' }; + + if (vm.instanceGroupId === undefined) { + vm.setInstances(); + } else { + vm.setRelatedInstances(); + } + }; + + vm.setInstances = () => { + vm.instances = instance.get('results').map(instance => { + instance.isSelected = false; + return instance; + }); + }; + + vm.setRelatedInstances = () => { + vm.instanceGroupName = instanceGroup.get('name'); + vm.relatedInstances = instanceGroup.get('policy_instance_list'); + + vm.instances = instance.get('results').map(instance => { + instance.isSelected = vm.relatedInstances.includes(instance.hostname); + return instance; + }); + }; + + $scope.$watch('vm.instances', function() { + vm.selectedRows = _.filter(vm.instances, 'isSelected'); + vm.deselectedRows = _.filter(vm.instances, 'isSelected', false); + }, true); + + vm.submit = () => { + form.components + .filter(component => component.category === 'input') + .filter(component => component.state.id === 'policy_instance_list') + .forEach(component => { + component.state._value = vm.selectedRows; + }); + + $state.go("^.^"); + }; +} + +InstanceListPolicyController.$inject = [ + '$scope', + '$state', + 'InstanceGroupsStrings' +]; + +function instanceListPolicy () { + return 
{ + restrict: 'E', + link: InstanceListPolicyLink, + controller: InstanceListPolicyController, + controllerAs: 'vm', + require: ['instanceListPolicy', '^atForm'], + templateUrl + }; +} + +export default instanceListPolicy; diff --git a/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.partial.html b/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.partial.html new file mode 100644 index 0000000000..218e61421d --- /dev/null +++ b/awx/ui/client/src/instance-groups/add-edit/instance-list-policy.partial.html @@ -0,0 +1,53 @@ + \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.block.less b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.block.less new file mode 100644 index 0000000000..251124f6d2 --- /dev/null +++ b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.block.less @@ -0,0 +1,11 @@ +.CapacityAdjuster { + .at-InputSlider { + align-items: center; + margin-right: @at-space-4x; + } + + .at-InputSlider p { + white-space: nowrap; + margin: 0 10px; + } +} \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.directive.js b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.directive.js new file mode 100644 index 0000000000..5600502578 --- /dev/null +++ b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.directive.js @@ -0,0 +1,57 @@ +function CapacityAdjuster (templateUrl, ProcessErrors, Wait) { + return { + scope: { + state: '=' + }, + templateUrl: templateUrl('instance-groups/capacity-adjuster/capacity-adjuster'), + restrict: 'E', + replace: true, + link: function(scope) { + const adjustment_values = [{ + label: 'CPU', + value: scope.state.cpu_capacity, + },{ + label: 'RAM', + value: scope.state.mem_capacity + }]; + + scope.min_capacity = _.min(adjustment_values, 'value'); + scope.max_capacity = _.max(adjustment_values, 'value'); + + }, + 
controller: function($http) { + const vm = this || {}; + + vm.slide = (state) => { + Wait('start'); + const data = { + "capacity_adjustment": `${state.capacity_adjustment}` + }; + const req = { + method: 'PUT', + url: state.url, + data + }; + $http(req) + .catch(({data, status}) => { + ProcessErrors(data, status, null, { + hdr: 'Error!', + msg: 'Call failed. Return status: ' + status + }); + }) + .finally(() => { + Wait('stop'); + }); + }; + }, + controllerAs: 'vm' + }; +} + +CapacityAdjuster.$inject = [ + 'templateUrl', + 'ProcessErrors', + 'Wait' +]; + +export default CapacityAdjuster; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.partial.html b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.partial.html new file mode 100644 index 0000000000..ebadbe8461 --- /dev/null +++ b/awx/ui/client/src/instance-groups/capacity-adjuster/capacity-adjuster.partial.html @@ -0,0 +1,13 @@ +
+
+

{{min_capacity.label}} {{min_capacity.value}}

+ +

{{max_capacity.label}} {{max_capacity.value}}

+
+
diff --git a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.block.less b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.block.less index 1caba245f0..658f3eca02 100644 --- a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.block.less +++ b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.block.less @@ -1,21 +1,22 @@ capacity-bar { - - width: 50%; - margin-right: 25px; - min-width: 100px; - display: flex; align-items: center; + color: @at-color-body-background-dark; + display: flex; + font-size: @at-font-size; + min-width: 100px; + white-space: nowrap; .CapacityBar { background-color: @default-bg; - display: flex; - flex: 0 0 auto; - height: 10px; - border: 1px solid @default-link; - width: 100%; border-radius: 100vw; + border: 1px solid @default-link; + display: flex; + flex: 1; + height: 10px; + margin-right: @at-space-2x; + min-width: 100px; overflow: hidden; - margin-right: 10px; + width: 100%; } .CapacityBar-remaining { @@ -28,14 +29,21 @@ capacity-bar { } .CapacityBar--offline { - border-color: @d7grey; + color: @at-red; + border-color: @at-gray-a9; .CapacityBar-remaining { - background-color: @d7grey; + background-color: @at-gray-b7; } } - .Capacity-details--percentage { - color: @default-data-txt; + .Capacity-details--label { + margin-right: @at-space-2x; + text-align: right; + text-transform: uppercase; } -} + + .Capacity-details--percentage { + width: 40px; + } +} \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.directive.js b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.directive.js index 5ea07d2dd3..e1aea6a50f 100644 --- a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.directive.js +++ b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.directive.js @@ -1,44 +1,52 @@ -export default ['templateUrl', 'ComponentsStrings', - function (templateUrl, strings) { - return { - scope: { - capacity: '=', - totalCapacity: '=' - }, - 
templateUrl: templateUrl('instance-groups/capacity-bar/capacity-bar'), - restrict: 'E', - link: function(scope) { - scope.isOffline = false; +function CapacityBar (templateUrl, strings) { + return { + scope: { + capacity: '=', + totalCapacity: '=', + labelValue: '@', + badge: '=' + }, + templateUrl: templateUrl('instance-groups/capacity-bar/capacity-bar'), + restrict: 'E', + link: function(scope) { + scope.isOffline = false; - scope.$watch('totalCapacity', function(val) { - if (val === 0) { - scope.isOffline = true; - scope.offlineTip = strings.get(`capacityBar.IS_OFFLINE`); - } else { - scope.isOffline = false; - scope.offlineTip = null; - } - }, true); + scope.$watch('totalCapacity', function(val) { + if (val === 0) { + scope.isOffline = true; + scope.labelValue = strings.get(`capacityBar.IS_OFFLINE_LABEL`); + scope.offlineTip = strings.get(`capacityBar.IS_OFFLINE`); + } else { + scope.isOffline = false; + scope.offlineTip = null; + } + }, true); - scope.$watch('capacity', function() { - if (scope.totalCapacity !== 0) { - var percentageCapacity = Math - .round(scope.capacity / scope.totalCapacity * 1000) / 10; + scope.$watch('capacity', function() { + if (scope.totalCapacity !== 0) { + var percentageCapacity = Math + .round(scope.capacity / scope.totalCapacity * 1000) / 10; - scope.CapacityStyle = { - 'flex-grow': percentageCapacity * 0.01 - }; + scope.CapacityStyle = { + 'flex-grow': percentageCapacity * 0.01 + }; - scope.consumedCapacity = `${percentageCapacity}%`; - } else { - scope.CapacityStyle = { - 'flex-grow': 1 - }; + scope.consumedCapacity = `${percentageCapacity}%`; + } else { + scope.CapacityStyle = { + 'flex-grow': 1 + }; - scope.consumedCapacity = null; - } - }, true); - } - }; - } + scope.consumedCapacity = null; + } + }, true); + } + }; +} + +CapacityBar.$inject = [ + 'templateUrl', + 'InstanceGroupsStrings' ]; + +export default CapacityBar; \ No newline at end of file diff --git 
a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.partial.html b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.partial.html index d80ff84bc0..a708bb87d0 100644 --- a/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.partial.html +++ b/awx/ui/client/src/instance-groups/capacity-bar/capacity-bar.partial.html @@ -1,11 +1,20 @@ + + {{ labelValue }} + +
-
-
+
+
-{{ consumedCapacity }} + + + {{ consumedCapacity }} + \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/capacity-bar/main.js b/awx/ui/client/src/instance-groups/capacity-bar/main.js deleted file mode 100644 index e330c7080c..0000000000 --- a/awx/ui/client/src/instance-groups/capacity-bar/main.js +++ /dev/null @@ -1,5 +0,0 @@ -import capacityBar from './capacity-bar.directive'; - -export default - angular.module('capacityBarDirective', []) - .directive('capacityBar', capacityBar); \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/instance-group.partial.html b/awx/ui/client/src/instance-groups/instance-group.partial.html deleted file mode 100644 index df3be6f2ca..0000000000 --- a/awx/ui/client/src/instance-groups/instance-group.partial.html +++ /dev/null @@ -1,33 +0,0 @@ -
-
-
-
-
-
{{ instanceGroupName }}
-
-
-
-

Used Capacity

- -
-
-

Running Jobs

- - {{ instanceGroupJobsRunning }} - -
-
-
- -
-
-
-
INSTANCES
-
JOBS
-
-
-
-
-
diff --git a/awx/ui/client/src/instance-groups/instance-groups.partial.html b/awx/ui/client/src/instance-groups/instance-groups.partial.html index baeaf59f00..217efd98d0 100644 --- a/awx/ui/client/src/instance-groups/instance-groups.partial.html +++ b/awx/ui/client/src/instance-groups/instance-groups.partial.html @@ -1,11 +1,13 @@
+
+
-
-
-
+
+ +
diff --git a/awx/ui/client/src/instance-groups/instance-groups.route.js b/awx/ui/client/src/instance-groups/instance-groups.route.js deleted file mode 100644 index c265b35cd9..0000000000 --- a/awx/ui/client/src/instance-groups/instance-groups.route.js +++ /dev/null @@ -1,41 +0,0 @@ -import {templateUrl} from '../shared/template-url/template-url.factory'; -import { N_ } from '../i18n'; - -export default { - name: 'instanceGroups', - url: '/instance_groups', - searchPrefix: 'instance_group', - ncyBreadcrumb: { - label: N_('INSTANCE GROUPS') - }, - params: { - instance_group_search: { - value: { - page_size: '20', - order_by: 'name' - }, - dynamic: true - } - }, - data: { - alwaysShowRefreshButton: true, - }, - views: { - '@': { - templateUrl: templateUrl('./instance-groups/instance-groups'), - }, - 'list@instanceGroups': { - templateUrl: templateUrl('./instance-groups/list/instance-groups-list'), - controller: 'InstanceGroupsList' - - } - }, - resolve: { - Dataset: ['InstanceGroupList', 'QuerySet', '$stateParams', 'GetBasePath', - function(list, qs, $stateParams, GetBasePath) { - let path = GetBasePath(list.basePath) || GetBasePath(list.name); - return qs.search(path, $stateParams[`${list.iterator}_search`]); - } - ] - } -}; diff --git a/awx/ui/client/src/instance-groups/instance-groups.strings.js b/awx/ui/client/src/instance-groups/instance-groups.strings.js new file mode 100644 index 0000000000..79a21e075e --- /dev/null +++ b/awx/ui/client/src/instance-groups/instance-groups.strings.js @@ -0,0 +1,30 @@ +function InstanceGroupsStrings (BaseString) { + BaseString.call(this, 'instanceGroups'); + + const { t } = this; + const ns = this.instanceGroups; + + ns.state = { + ADD_BREADCRUMB_LABEL: t.s('CREATE INSTANCE GROUP'), + EDIT_BREADCRUMB_LABEL: t.s('EDIT INSTANCE GROUP') + }; + + ns.tab = { + DETAILS: t.s('DETAILS'), + INSTANCES: t.s('INSTANCES'), + JOBS: t.s('JOBS') + }; + + ns.instance = { + PANEL_TITLE: t.s('SELECT INSTANCE') + }; + + ns.capacityBar = { + 
IS_OFFLINE: t.s('Unavailable to run jobs.'), + IS_OFFLINE_LABEL: t.s('Unavailable') + }; +} + +InstanceGroupsStrings.$inject = ['BaseStringService']; + +export default InstanceGroupsStrings; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs-list.route.js b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs-list.route.js deleted file mode 100644 index 1d82ca854e..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs-list.route.js +++ /dev/null @@ -1,41 +0,0 @@ -import { N_ } from '../../../i18n'; - -export default { - name: 'instanceGroups.instances.list.job.list', - url: '/jobs', - searchPrefix: 'instance_job', - ncyBreadcrumb: { - parent: 'instanceGroups.instances.list', - label: N_('{{ breadcrumb.instance_name }}') - }, - params: { - instance_job_search: { - value: { - page_size: '20', - order_by: '-finished', - not__launch_type: 'sync' - }, - dynamic: true - } - }, - views: { - 'list@instanceGroups.instances.list.job': { - templateProvider: function(InstanceJobsList, generateList) { - let html = generateList.build({ - list: InstanceJobsList - }); - return html; - }, - controller: 'InstanceJobsController' - } - }, - - resolve: { - Dataset: ['InstanceJobsList', 'QuerySet', '$stateParams', 'GetBasePath', - function(list, qs, $stateParams, GetBasePath) { - let path = `${GetBasePath('instances')}${$stateParams.instance_id}/jobs`; - return qs.search(path, $stateParams[`${list.iterator}_search`]); - } - ], - } -}; diff --git a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.controller.js b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.controller.js index a7d50764f5..492c256e1d 100644 --- a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.controller.js +++ b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.controller.js @@ -1,82 +1,89 @@ -export default 
['$scope','InstanceJobsList', 'GetBasePath', 'Rest', 'Dataset','Find', '$state', '$q', - function($scope, InstanceJobsList, GetBasePath, Rest, Dataset, Find, $state, $q) { - let list = InstanceJobsList; +function InstanceJobsController ($scope, $filter, $state, model, strings, jobStrings, Instance) { + const vm = this || {}; + let { instance } = model; + const instance_id = instance.get('id'); - init(); + init(); - function init(){ - $scope.optionsDefer = $q.defer(); - $scope.list = list; - $scope[`${list.iterator}_dataset`] = Dataset.data; - $scope[list.name] = $scope[`${list.iterator}_dataset`].results; - } + function init(){ + vm.strings = strings; + vm.jobStrings = jobStrings; + vm.queryset = { page_size: '10', order_by: '-finished'}; + vm.jobs = instance.get('related.jobs.results'); + vm.dataset = instance.get('related.jobs'); + vm.count = instance.get('related.jobs.count'); + vm.panelTitle = `${jobStrings.get('list.PANEL_TITLE')} | ${instance.get('hostname')}`; - $scope.$on(`${list.iterator}_options`, function(event, data){ - $scope.options = data.data.actions.GET; - optionsRequestDataProcessing(); - }); - - // iterate over the list and add fields like type label, after the - // OPTIONS request returns, or the list is sorted/paginated/searched - function optionsRequestDataProcessing(){ - - if($scope[list.name] && $scope[list.name].length > 0) { - $scope[list.name].forEach(function(item, item_idx) { - var itm = $scope[list.name][item_idx]; - - if(item.summary_fields && item.summary_fields.source_workflow_job && - item.summary_fields.source_workflow_job.id){ - item.workflow_result_link = `/#/workflows/${item.summary_fields.source_workflow_job.id}`; - } - - // Set the item type label - if (list.fields.type && $scope.options && - $scope.options.hasOwnProperty('type')) { - $scope.options.type.choices.forEach(function(choice) { - if (choice[0] === item.type) { - itm.type_label = choice[1]; - } - }); - } - buildTooltips(itm); - }); - } - } - - function 
buildTooltips(job) { - job.status_tip = 'Job ' + job.status + ". Click for details."; - } - - $scope.viewjobResults = function(job) { - var goTojobResults = function(state) { - $state.go(state, { id: job.id }, { reload: true }); - }; - switch (job.type) { - case 'job': - goTojobResults('jobResult'); - break; - case 'ad_hoc_command': - goTojobResults('adHocJobStdout'); - break; - case 'system_job': - goTojobResults('managementJobStdout'); - break; - case 'project_update': - goTojobResults('scmUpdateStdout'); - break; - case 'inventory_update': - goTojobResults('inventorySyncStdout'); - break; - case 'workflow_job': - goTojobResults('workflowResults'); - break; - } + vm.tab = { + details: {_hide: true}, + instances: {_hide: true}, + jobs: {_hide: true} }; - - $scope.$watchCollection(`${$scope.list.name}`, function() { - optionsRequestDataProcessing(); - } - ); } -]; \ No newline at end of file + + vm.getTime = function(time) { + let val = ""; + if (time) { + val += $filter('longDate')(time); + } + if (val === "") { + val = undefined; + } + return val; + }; + + $scope.isSuccessful = function (status) { + return (status === "successful"); + }; + + $scope.viewjobResults = function(job) { + var goTojobResults = function(state) { + $state.go(state, { id: job.id }, { reload: true }); + }; + switch (job.type) { + case 'job': + goTojobResults('jobResult'); + break; + case 'ad_hoc_command': + goTojobResults('adHocJobStdout'); + break; + case 'system_job': + goTojobResults('managementJobStdout'); + break; + case 'project_update': + goTojobResults('scmUpdateStdout'); + break; + case 'inventory_update': + goTojobResults('inventorySyncStdout'); + break; + case 'workflow_job': + goTojobResults('workflowResults'); + break; + } + }; + + $scope.$on('ws-jobs', () => { + new Instance(['get', 'options'], [instance_id, instance_id]) + .then((data) => { + return data.extend('get', 'jobs', {params: {page_size: "10", order_by: "-finished"}}); + }) + .then((data) => { + instance = data; + 
init(); + }); + }); + +} + +InstanceJobsController.$inject = [ + '$scope', + '$filter', + '$state', + 'resolvedModels', + 'InstanceGroupsStrings', + 'JobStrings', + 'InstanceModel' +]; + +export default InstanceJobsController; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.list.js b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.list.js deleted file mode 100644 index 58476f0054..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.list.js +++ /dev/null @@ -1,78 +0,0 @@ -export default ['i18n', function(i18n) { - return { - - name: 'instance_jobs', - iterator: 'instance_job', - index: false, - hover: false, - well: false, - emptyListText: i18n._('No jobs have yet run.'), - title: false, - basePath: 'api/v2/instances/{{$stateParams.instance_id}}/jobs', - - fields: { - status: { - label: '', - columnClass: 'col-lg-1 col-md-1 col-sm-2 col-xs-2 List-staticColumn--smallStatus', - dataTipWatch: 'instance_job.status_tip', - awToolTip: "{{ instance_job.status_tip }}", - awTipPlacement: "right", - dataTitle: "{{ instance_job.status_popover_title }}", - icon: 'icon-job-{{ instance_job.status }}', - iconOnly: true, - ngClick:"viewjobResults(instance_job)", - nosort: true - }, - id: { - label: i18n._('ID'), - ngClick:"viewjobResults(instance_job)", - columnClass: 'col-lg-1 col-md-1 col-sm-2 col-xs-2 List-staticColumnAdjacent', - awToolTip: "{{ instance_job.status_tip }}", - dataPlacement: 'top', - noLink: true - }, - name: { - label: i18n._('Name'), - columnClass: 'col-lg-2 col-md-3 col-sm-4 col-xs-6', - ngClick: "viewjobResults(instance_job)", - nosort: true, - badgePlacement: 'right', - badgeCustom: true, - badgeIcon: ` - - W - - ` - }, - type: { - label: i18n._('Type'), - ngBind: 'instance_job.type_label', - link: false, - columnClass: "col-lg-2 hidden-md hidden-sm hidden-xs", - nosort: true - }, - finished: { - label: i18n._('Finished'), - noLink: 
true, - filter: "longDate", - columnClass: "col-lg-2 col-md-3 col-sm-3 hidden-xs", - key: true, - desc: true, - nosort: true - }, - labels: { - label: i18n._('Labels'), - type: 'labels', - nosort: true, - showDelete: false, - columnClass: 'List-tableCell col-lg-4 col-md-4 hidden-sm hidden-xs', - sourceModel: 'labels', - sourceField: 'name', - }, - } - }; -}]; diff --git a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.partial.html b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.partial.html deleted file mode 100644 index 9c40fe931f..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.partial.html +++ /dev/null @@ -1,32 +0,0 @@ -
-
-
-
-
-
{{ instanceName }}
-
-
-
-

Used Capacity

- -
-
-

Running Jobs

- - {{ instanceJobsRunning }} - -
-
-
- -
-
-
-
JOBS
-
-
-
-
-
diff --git a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.route.js b/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.route.js deleted file mode 100644 index 7e9be9a9de..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instance-jobs/instance-jobs.route.js +++ /dev/null @@ -1,38 +0,0 @@ -import { templateUrl } from '../../../shared/template-url/template-url.factory'; - -export default { - name: 'instanceGroups.instances.list.job', - url: '/:instance_id', - abstract: true, - ncyBreadcrumb: { - skip: true - }, - views: { - 'instanceJobs@instanceGroups': { - templateUrl: templateUrl('./instance-groups/instances/instance-jobs/instance-jobs'), - controller: function($scope, $rootScope, instance) { - $scope.instanceName = instance.hostname; - $scope.instanceCapacity = instance.consumed_capacity; - $scope.instanceTotalCapacity = instance.capacity; - $scope.instanceJobsRunning = instance.jobs_running; - $rootScope.breadcrumb.instance_name = instance.hostname; - } - } - }, - resolve: { - instance: ['GetBasePath', 'Rest', 'ProcessErrors', '$stateParams', function(GetBasePath, Rest, ProcessErrors, $stateParams) { - let url = GetBasePath('instances') + $stateParams.instance_id; - Rest.setUrl(url); - return Rest.get() - .then(({data}) => { - return data; - }) - .catch(({data, status}) => { - ProcessErrors(null, data, status, null, { - hdr: 'Error!', - msg: 'Failed to get instance groups info. 
GET returned status: ' + status - }); - }); - }] - } -}; diff --git a/awx/ui/client/src/instance-groups/instances/instance-modal.block.less b/awx/ui/client/src/instance-groups/instances/instance-modal.block.less new file mode 100644 index 0000000000..49ea9e10e7 --- /dev/null +++ b/awx/ui/client/src/instance-groups/instances/instance-modal.block.less @@ -0,0 +1,24 @@ +.Modal-backdrop { + position: fixed; + top: 0px; + left: 0px; + height:100%; + width:100%; + background: #000; + z-index: 2; + opacity: 0.5; +} + +.Modal-holder { + position: fixed; + top: 1; + left: 0px; + right: 0px; + top: 0px; + bottom: 0px; + z-index: 3; + + .modal-dialog { + padding-top: 100px; + } +} diff --git a/awx/ui/client/src/instance-groups/instances/instance-modal.controller.js b/awx/ui/client/src/instance-groups/instances/instance-modal.controller.js new file mode 100644 index 0000000000..4d492c65b8 --- /dev/null +++ b/awx/ui/client/src/instance-groups/instances/instance-modal.controller.js @@ -0,0 +1,84 @@ +function InstanceModalController ($scope, $state, models, strings, ProcessErrors, Wait) { + const { instance, instanceGroup } = models; + const vm = this || {}; + + vm.setInstances = () => { + vm.instances = instance.get('results').map(instance => { + instance.isSelected = false; + return instance; + }); + }; + + vm.setRelatedInstances = () => { + vm.instanceGroupName = instanceGroup.get('name'); + vm.relatedInstances = instanceGroup.get('related.instances.results'); + vm.relatedInstanceIds = vm.relatedInstances.map(instance => instance.id); + vm.instances = instance.get('results').map(instance => { + instance.isSelected = vm.relatedInstanceIds.includes(instance.id); + return instance; + }); + }; + + init(); + + function init() { + vm.strings = strings; + vm.panelTitle = strings.get('instance.PANEL_TITLE'); + vm.instanceGroupId = instanceGroup.get('id'); + + if (vm.instanceGroupId === undefined) { + vm.setInstances(); + } else { + vm.setRelatedInstances(); + } + } + + 
$scope.$watch('vm.instances', function() { + vm.selectedRows = _.filter(vm.instances, 'isSelected'); + vm.deselectedRows = _.filter(vm.instances, 'isSelected', false); + }, true); + + vm.submit = () => { + Wait('start'); + const associate = vm.selectedRows + .map(instance => ({id: instance.id})); + const disassociate = vm.deselectedRows + .map(instance => ({id: instance.id, disassociate: true})); + + const all = associate.concat(disassociate); + const defers = all.map((data) => { + const config = { + url: `${vm.instanceGroupId}/instances/`, + data: data + }; + return instanceGroup.http.post(config); + }); + + Promise.all(defers) + .then(vm.onSaveSuccess) + .catch(({data, status}) => { + ProcessErrors($scope, data, status, null, { + hdr: 'Error!', + msg: 'Call failed. Return status: ' + status + }); + }) + .finally(() => { + Wait('stop'); + }); + }; + + vm.onSaveSuccess = () => { + $state.go('instanceGroups.instances', {}, {reload: 'instanceGroups.instances'}); + }; +} + +InstanceModalController.$inject = [ + '$scope', + '$state', + 'resolvedModels', + 'InstanceGroupsStrings', + 'ProcessErrors', + 'Wait' +]; + +export default InstanceModalController; diff --git a/awx/ui/client/src/instance-groups/instances/instance-modal.partial.html b/awx/ui/client/src/instance-groups/instances/instance-modal.partial.html new file mode 100644 index 0000000000..15c614a1df --- /dev/null +++ b/awx/ui/client/src/instance-groups/instances/instance-modal.partial.html @@ -0,0 +1,55 @@ + \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/instances/instances-list.partial.html b/awx/ui/client/src/instance-groups/instances/instances-list.partial.html index da8f052423..a75448dc22 100644 --- a/awx/ui/client/src/instance-groups/instances/instances-list.partial.html +++ b/awx/ui/client/src/instance-groups/instances/instances-list.partial.html @@ -1,44 +1,73 @@ -
- - + + + {{ vm.panelTitle }} + -
PLEASE ADD ITEMS TO THIS LIST
-
- - - - - - - - - - - - - - - - -
- "{{'Name' | translate}}" - - - Running Jobs - - Used Capacity -
- {{ instance.hostname }} - - - {{ instance.jobs_running }} - - - -
-
-
+ + {{:: vm.strings.get('tab.DETAILS') }} + {{:: vm.strings.get('tab.INSTANCES') }} + {{:: vm.strings.get('tab.JOBS') }} + + + +
+ + + +
+ +
+
+
+ + +
+
+ + +
+
+ +
+ +
+ + +
+
+ +
+ + +
+
+
+
+ diff --git a/awx/ui/client/src/instance-groups/instances/instances-list.route.js b/awx/ui/client/src/instance-groups/instances/instances-list.route.js deleted file mode 100644 index 16549d9d1e..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instances-list.route.js +++ /dev/null @@ -1,35 +0,0 @@ -import {templateUrl} from '../../shared/template-url/template-url.factory'; -import { N_ } from '../../i18n'; - -export default { - name: 'instanceGroups.instances.list', - url: '/instances', - searchPrefix: 'instance', - ncyBreadcrumb: { - parent: 'instanceGroups', - label: N_('{{breadcrumb.instance_group_name}}') - }, - params: { - instance_search: { - value: { - page_size: '20', - order_by: 'hostname' - }, - dynamic: true - } - }, - views: { - 'list@instanceGroups.instances': { - templateUrl: templateUrl('./instance-groups/instances/instances-list'), - controller: 'InstanceListController' - } - }, - resolve: { - Dataset: ['InstanceList', 'QuerySet', '$stateParams', 'GetBasePath', - function(list, qs, $stateParams, GetBasePath) { - let path = `${GetBasePath('instance_groups')}${$stateParams.instance_group_id}/instances`; - return qs.search(path, $stateParams[`${list.iterator}_search`]); - } - ] - } -}; diff --git a/awx/ui/client/src/instance-groups/instances/instances.controller.js b/awx/ui/client/src/instance-groups/instances/instances.controller.js index 0481d84263..26d6ef3e71 100644 --- a/awx/ui/client/src/instance-groups/instances/instances.controller.js +++ b/awx/ui/client/src/instance-groups/instances/instances.controller.js @@ -1,20 +1,97 @@ -export default ['$scope', 'InstanceList', 'GetBasePath', 'Rest', 'Dataset','Find', '$state', '$q', - function($scope, InstanceList, GetBasePath, Rest, Dataset, Find, $state, $q) { - let list = InstanceList; +function InstancesController ($scope, $state, $http, models, Instance, strings, Dataset, ProcessErrors) { + const { instanceGroup } = models; + const vm = this || {}; + vm.strings = strings; + vm.panelTitle 
= instanceGroup.get('name'); + vm.instances = instanceGroup.get('related.instances.results'); + vm.instance_group_id = instanceGroup.get('id'); - init(); + init(); - function init(){ - $scope.optionsDefer = $q.defer(); - $scope.list = list; - $scope[`${list.iterator}_dataset`] = Dataset.data; - $scope[list.name] = $scope[`${list.iterator}_dataset`].results; - } - - $scope.isActive = function(id) { - let selected = parseInt($state.params.instance_id); - return id === selected; + function init() { + $scope.list = { + name: 'instances', + iterator: 'instance', + basePath: `/api/v2/instance_groups/${vm.instance_group_id}/instances/` }; + $scope.collection = { + iterator: 'instance', + basePath: `/api/v2/instance_groups/${vm.instance_group_id}/instances/` + }; + + $scope[`${$scope.list.iterator}_dataset`] = Dataset.data; + $scope[$scope.list.name] = $scope[`${$scope.list.iterator}_dataset`].results; + $scope.instances = vm.instances; + + $scope.$on('updateDataset', function(e, dataset) { + $scope[`${$scope.list.iterator}_dataset`] = dataset; + $scope[$scope.list.name] = dataset.results; + vm.instances = dataset.results; + }); } -]; \ No newline at end of file + + vm.tab = { + details: { + _go: 'instanceGroups.edit', + _params: { instance_group_id: vm.instance_group_id } + }, + instances: { + _active: true, + _go: 'instanceGroups.instances', + _params: { instance_group_id: vm.instance_group_id } + }, + jobs: { + _go: 'instanceGroups.jobs', + _params: { instance_group_id: vm.instance_group_id } + } + }; + + vm.toggle = (toggled) => { + const instance = _.find(vm.instances, 'id', toggled.id); + instance.enabled = !instance.enabled; + + const data = { + "capacity_adjustment": instance.capacity_adjustment, + "enabled": instance.enabled + }; + + const req = { + method: 'PUT', + url: instance.url, + data + }; + + $http(req).then(vm.onSaveSuccess) + .catch(({data, status}) => { + ProcessErrors($scope, data, status, null, { + hdr: 'Error!', + msg: 'Call failed. 
Return status: ' + status + }); + }); + }; + + vm.onSaveSuccess = () => { + $state.transitionTo($state.current, $state.params, { + reload: true, location: true, inherit: false, notify: true + }); + }; + + $scope.isActive = function(id) { + let selected = parseInt($state.params.instance_id); + return id === selected; + }; +} + +InstancesController.$inject = [ + '$scope', + '$state', + '$http', + 'resolvedModels', + 'InstanceModel', + 'InstanceGroupsStrings', + 'Dataset', + 'ProcessErrors' +]; + +export default InstancesController; diff --git a/awx/ui/client/src/instance-groups/instances/instances.list.js b/awx/ui/client/src/instance-groups/instances/instances.list.js deleted file mode 100644 index 048279d6c8..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instances.list.js +++ /dev/null @@ -1,29 +0,0 @@ -export default ['i18n', function(i18n) { - return { - name: 'instances' , - iterator: 'instance', - listTitle: false, - index: false, - hover: false, - tabs: true, - well: true, - - fields: { - hostname: { - key: true, - label: i18n._('Name'), - columnClass: 'col-md-3 col-sm-9 col-xs-9', - modalColumnClass: 'col-md-8', - uiSref: 'instanceGroups.instances.list.job({instance_id: instance.id})' - }, - consumed_capacity: { - label: i18n._('Capacity'), - nosort: true, - }, - jobs_running: { - label: i18n._('Running Jobs'), - nosort: true, - }, - } - }; -}]; diff --git a/awx/ui/client/src/instance-groups/instances/instances.route.js b/awx/ui/client/src/instance-groups/instances/instances.route.js deleted file mode 100644 index 8890171b58..0000000000 --- a/awx/ui/client/src/instance-groups/instances/instances.route.js +++ /dev/null @@ -1,35 +0,0 @@ -import {templateUrl} from '../../shared/template-url/template-url.factory'; - -export default { - name: 'instanceGroups.instances', - url: '/:instance_group_id', - abstract: true, - views: { - 'instances@instanceGroups': { - templateUrl: templateUrl('./instance-groups/instance-group'), - controller: 
function($scope, $rootScope, instanceGroup) { - $scope.instanceGroupName = instanceGroup.name; - $scope.instanceGroupCapacity = instanceGroup.consumed_capacity; - $scope.instanceGroupTotalCapacity = instanceGroup.capacity; - $scope.instanceGroupJobsRunning = instanceGroup.jobs_running; - $rootScope.breadcrumb.instance_group_name = instanceGroup.name; - } - } - }, - resolve: { - instanceGroup: ['GetBasePath', 'Rest', 'ProcessErrors', '$stateParams', function(GetBasePath, Rest, ProcessErrors, $stateParams) { - let url = GetBasePath('instance_groups') + $stateParams.instance_group_id; - Rest.setUrl(url); - return Rest.get() - .then(({data}) => { - return data; - }) - .catch(({data, status}) => { - ProcessErrors(null, data, status, null, { - hdr: 'Error!', - msg: 'Failed to get instance groups info. GET returned status: ' + status - }); - }); - }] - } -}; diff --git a/awx/ui/client/src/instance-groups/jobs/jobs-list.partial.html b/awx/ui/client/src/instance-groups/jobs/jobs-list.partial.html new file mode 100644 index 0000000000..8fdea7b2da --- /dev/null +++ b/awx/ui/client/src/instance-groups/jobs/jobs-list.partial.html @@ -0,0 +1,86 @@ + + + {{ vm.panelTitle }} + + + {{:: vm.strings.get('tab.DETAILS') }} + {{:: vm.strings.get('tab.INSTANCES') }} + {{:: vm.strings.get('tab.JOBS') }} + + + +
+ + +
+ + +
+ + + +
+
+ + + + + + + + + + + + + + + + +
+
+
+ + +
+
diff --git a/awx/ui/client/src/instance-groups/jobs/jobs-list.route.js b/awx/ui/client/src/instance-groups/jobs/jobs-list.route.js deleted file mode 100644 index 03854eca20..0000000000 --- a/awx/ui/client/src/instance-groups/jobs/jobs-list.route.js +++ /dev/null @@ -1,41 +0,0 @@ -import { N_ } from '../../i18n'; - -export default { - name: 'instanceGroups.instances.jobs', - url: '/jobs', - searchPrefix: 'job', - ncyBreadcrumb: { - parent: 'instanceGroups.instances.list', - label: N_('JOBS') - }, - params: { - job_search: { - value: { - page_size: '20', - order_by: '-finished', - not__launch_type: 'sync' - }, - dynamic: true - }, - instance_group_id: null - }, - views: { - 'list@instanceGroups.instances': { - templateProvider: function(JobsList, generateList) { - let html = generateList.build({ - list: JobsList - }); - return html; - }, - controller: 'JobsListController' - } - }, - resolve: { - Dataset: ['JobsList', 'QuerySet', '$stateParams', 'GetBasePath', - function(list, qs, $stateParams, GetBasePath) { - let path = `${GetBasePath('instance_groups')}${$stateParams.instance_group_id}/jobs`; - return qs.search(path, $stateParams[`${list.iterator}_search`]); - } - ] - } -}; diff --git a/awx/ui/client/src/instance-groups/jobs/jobs.controller.js b/awx/ui/client/src/instance-groups/jobs/jobs.controller.js index cfe2f73327..0b54a962b8 100644 --- a/awx/ui/client/src/instance-groups/jobs/jobs.controller.js +++ b/awx/ui/client/src/instance-groups/jobs/jobs.controller.js @@ -1,82 +1,100 @@ -export default ['$scope','JobsList', 'GetBasePath', 'Rest', 'Dataset','Find', '$state', '$q', - function($scope, JobsList, GetBasePath, Rest, Dataset, Find, $state, $q) { - let list = JobsList; +function InstanceGroupJobsController ($scope, $filter, $state, model, strings, jobStrings, InstanceGroup) { + const vm = this || {}; + let { instanceGroup } = model; + const instance_group_id = instanceGroup.get('id'); - init(); + init(); - function init(){ - $scope.optionsDefer = $q.defer(); - 
$scope.list = list; - $scope[`${list.iterator}_dataset`] = Dataset.data; - $scope[list.name] = $scope[`${list.iterator}_dataset`].results; - } + function init(){ + vm.strings = strings; + vm.jobStrings = jobStrings; + vm.queryset = { page_size: '10', order_by: '-finished', instance_group_id: instance_group_id }; + vm.jobs = instanceGroup.get('related.jobs.results'); + vm.dataset = instanceGroup.get('related.jobs'); + vm.count = instanceGroup.get('related.jobs.count'); + vm.panelTitle = instanceGroup.get('name'); - $scope.$on(`${list.iterator}_options`, function(event, data){ - $scope.options = data.data.actions.GET; - optionsRequestDataProcessing(); - }); - - // iterate over the list and add fields like type label, after the - // OPTIONS request returns, or the list is sorted/paginated/searched - function optionsRequestDataProcessing(){ - - if($scope[list.name] && $scope[list.name].length > 0) { - $scope[list.name].forEach(function(item, item_idx) { - var itm = $scope[list.name][item_idx]; - if(item.summary_fields && item.summary_fields.source_workflow_job && - item.summary_fields.source_workflow_job.id){ - item.workflow_result_link = `/#/workflows/${item.summary_fields.source_workflow_job.id}`; - } - - // Set the item type label - if (list.fields.type && $scope.options && - $scope.options.hasOwnProperty('type')) { - $scope.options.type.choices.forEach(function(choice) { - if (choice[0] === item.type) { - itm.type_label = choice[1]; - } - }); - } - buildTooltips(itm); - }); + vm.tab = { + details: { + _go: 'instanceGroups.edit', + _params: { instance_group_id }, + _label: strings.get('tab.DETAILS') + }, + instances: { + _go: 'instanceGroups.instances', + _params: { instance_group_id }, + _label: strings.get('tab.INSTANCES') + }, + jobs: { + _active: true, + _label: strings.get('tab.JOBS') } - } - - function buildTooltips(job) { - job.status_tip = 'Job ' + job.status + ". 
Click for details."; - } - - $scope.viewjobResults = function(job) { - var goTojobResults = function(state) { - $state.go(state, { id: job.id }, { reload: true }); - }; - switch (job.type) { - case 'job': - goTojobResults('jobResult'); - break; - case 'ad_hoc_command': - goTojobResults('adHocJobStdout'); - break; - case 'system_job': - goTojobResults('managementJobStdout'); - break; - case 'project_update': - goTojobResults('scmUpdateStdout'); - break; - case 'inventory_update': - goTojobResults('inventorySyncStdout'); - break; - case 'workflow_job': - goTojobResults('workflowResults'); - break; - } - }; - - $scope.$watchCollection(`${$scope.list.name}`, function() { - optionsRequestDataProcessing(); - } - ); } -]; \ No newline at end of file + + vm.getTime = function(time) { + let val = ""; + if (time) { + val += $filter('longDate')(time); + } + if (val === "") { + val = undefined; + } + return val; + }; + + $scope.isSuccessful = function (status) { + return (status === "successful"); + }; + + vm.viewjobResults = function(job) { + var goTojobResults = function(state) { + $state.go(state, { id: job.id }, { reload: true }); + }; + switch (job.type) { + case 'job': + goTojobResults('jobResult'); + break; + case 'ad_hoc_command': + goTojobResults('adHocJobStdout'); + break; + case 'system_job': + goTojobResults('managementJobStdout'); + break; + case 'project_update': + goTojobResults('scmUpdateStdout'); + break; + case 'inventory_update': + goTojobResults('inventorySyncStdout'); + break; + case 'workflow_job': + goTojobResults('workflowResults'); + break; + } + }; + + $scope.$on('ws-jobs', () => { + new InstanceGroup(['get', 'options'], [instance_group_id, instance_group_id]) + .then((instance_group) => { + return instance_group.extend('get', 'jobs', {params: {page_size: "10", order_by: "-finished"}}); + }) + .then((instance_group) => { + instanceGroup = instance_group; + init(); + }); + }); + +} + +InstanceGroupJobsController.$inject = [ + '$scope', + '$filter', + 
'$state', + 'resolvedModels', + 'InstanceGroupsStrings', + 'JobStrings', + 'InstanceGroupModel' +]; + +export default InstanceGroupJobsController; \ No newline at end of file diff --git a/awx/ui/client/src/instance-groups/jobs/jobs.strings.js b/awx/ui/client/src/instance-groups/jobs/jobs.strings.js new file mode 100644 index 0000000000..8ecffd5a0a --- /dev/null +++ b/awx/ui/client/src/instance-groups/jobs/jobs.strings.js @@ -0,0 +1,30 @@ +function JobStrings (BaseString) { + BaseString.call(this, 'jobs'); + + const { t } = this; + const ns = this.jobs; + + ns.state = { + LIST_BREADCRUMB_LABEL: t.s('JOBS') + }; + + ns.list = { + PANEL_TITLE: t.s('JOBS'), + ADD_BUTTON_LABEL: t.s('ADD'), + ADD_DD_JT_LABEL: t.s('Job Template'), + ADD_DD_WF_LABEL: t.s('Workflow Template'), + ROW_ITEM_LABEL_ACTIVITY: t.s('Activity'), + ROW_ITEM_LABEL_INVENTORY: t.s('Inventory'), + ROW_ITEM_LABEL_PROJECT: t.s('Project'), + ROW_ITEM_LABEL_TEMPLATE: t.s('Template'), + ROW_ITEM_LABEL_CREDENTIALS: t.s('Credentials'), + ROW_ITEM_LABEL_MODIFIED: t.s('Last Modified'), + ROW_ITEM_LABEL_RAN: t.s('Last Ran'), + ROW_ITEM_LABEL_STARTED: t.s('Started'), + ROW_ITEM_LABEL_FINISHED: t.s('Finished') + }; +} + +JobStrings.$inject = ['BaseStringService']; + +export default JobStrings; diff --git a/awx/ui/client/src/instance-groups/list/instance-groups-list.controller.js b/awx/ui/client/src/instance-groups/list/instance-groups-list.controller.js index 381e2419bf..1660b0b0da 100644 --- a/awx/ui/client/src/instance-groups/list/instance-groups-list.controller.js +++ b/awx/ui/client/src/instance-groups/list/instance-groups-list.controller.js @@ -1,19 +1,66 @@ -export default ['$scope', 'InstanceGroupList', 'GetBasePath', 'Rest', 'Dataset','Find', '$state', - function($scope, InstanceGroupList, GetBasePath, Rest, Dataset, Find, $state) { - let list = InstanceGroupList; +export default ['$scope', 'resolvedModels', 'Dataset', '$state', 'ComponentsStrings', 'ProcessErrors', 'Wait', + function($scope, resolvedModels, 
Dataset, $state, strings, ProcessErrors, Wait) { + const vm = this; + const { instanceGroup } = resolvedModels; + + vm.strings = strings; + $scope.selection = {}; init(); function init(){ - $scope.list = list; - $scope[`${list.iterator}_dataset`] = Dataset.data; - $scope[list.name] = $scope[`${list.iterator}_dataset`].results; + $scope.list = { + iterator: 'instance_group', + name: 'instance_groups' + }; + + $scope.collection = { + basePath: 'instance_groups', + iterator: 'instance_group' + }; + + $scope[`${$scope.list.iterator}_dataset`] = Dataset.data; + $scope[$scope.list.name] = $scope[`${$scope.list.iterator}_dataset`].results; $scope.instanceGroupCount = Dataset.data.count; + + $scope.$on('updateDataset', function(e, dataset) { + $scope[`${$scope.list.iterator}_dataset`] = dataset; + $scope[$scope.list.name] = dataset.results; + }); } - $scope.isActive = function(id) { - let selected = parseInt($state.params.instance_group_id); - return id === selected; + $scope.$watch('$state.params.instance_group_id', () => { + vm.activeId = parseInt($state.params.instance_group_id); + }); + + vm.delete = () => { + Wait('start'); + let deletables = $scope.selection; + deletables = Object.keys(deletables).filter((n) => deletables[n]); + + deletables.forEach((data) => { + let promise = instanceGroup.http.delete({resource: data}); + Promise.resolve(promise).then(vm.onSaveSuccess) + .catch(({data, status}) => { + ProcessErrors($scope, data, status, null, { + hdr: 'Error!', + msg: 'Call failed. 
Return status: ' + status + }); + }) + .finally(() => { + Wait('stop'); + }); + }); + }; + + vm.onSaveSuccess = () => { + $state.transitionTo($state.current, $state.params, { + reload: true, location: true, inherit: false, notify: true + }); + }; + + $scope.createInstanceGroup = () => { + $state.go('instanceGroups.add'); }; } -]; \ No newline at end of file +]; diff --git a/awx/ui/client/src/instance-groups/list/instance-groups-list.partial.html b/awx/ui/client/src/instance-groups/list/instance-groups-list.partial.html index f3d470afd9..e60b5feeea 100644 --- a/awx/ui/client/src/instance-groups/list/instance-groups-list.partial.html +++ b/awx/ui/client/src/instance-groups/list/instance-groups-list.partial.html @@ -1,63 +1,82 @@ -
-
-
- INSTANCE GROUPS -
+ + + {{ vm.strings.get('layout.INSTANCE_GROUPS') }} {{ instanceGroupCount }} -
-
+ - - + +
+ + +
+
+ +
+ +
+
-
PLEASE ADD ITEMS TO THIS LIST
+ + -
- - - - - - - - - - - - - - - - -
- "{{'Name' | translate}}" - - - Running Jobs - - Used Capacity -
- {{ instance_group.name }} - {{ instance_group.instances }} - - - {{ instance_group.jobs_running }} - - - -
-
+ + +
+ + + +
+ + + + + +
+ +
+ +
+ +
+
+
+
+ + + - - diff --git a/awx/ui/client/src/instance-groups/main.js b/awx/ui/client/src/instance-groups/main.js index 4f9410f0e0..f241df97cb 100644 --- a/awx/ui/client/src/instance-groups/main.js +++ b/awx/ui/client/src/instance-groups/main.js @@ -1,58 +1,349 @@ -import InstanceGroupsList from './list/instance-groups-list.controller'; +import { templateUrl } from '../shared/template-url/template-url.factory'; +import CapacityAdjuster from './capacity-adjuster/capacity-adjuster.directive'; +import CapacityBar from './capacity-bar/capacity-bar.directive'; import instanceGroupsMultiselect from '../shared/instance-groups-multiselect/instance-groups.directive'; import instanceGroupsModal from '../shared/instance-groups-multiselect/instance-groups-modal/instance-groups-modal.directive'; -import instanceGroupsRoute from './instance-groups.route'; -import instancesListRoute from './instances/instances-list.route'; -import JobsList from './jobs/jobs.list'; -import jobsListRoute from './jobs/jobs-list.route'; -import JobsListController from './jobs/jobs.controller'; -import InstanceList from './instances/instances.list'; -import instancesRoute from './instances/instances.route'; + +import AddEditTemplate from './add-edit/add-edit-instance-groups.view.html'; +import AddInstanceGroupController from './add-edit/add-instance-group.controller'; +import EditInstanceGroupController from './add-edit/edit-instance-group.controller'; +import InstanceListPolicy from './add-edit/instance-list-policy.directive.js'; + +import InstanceGroupsTemplate from './list/instance-groups-list.partial.html'; +import InstanceGroupsListController from './list/instance-groups-list.controller'; + +import InstancesTemplate from './instances/instances-list.partial.html'; import InstanceListController from './instances/instances.controller'; -import InstanceJobsList from './instances/instance-jobs/instance-jobs.list'; -import instanceJobsRoute from './instances/instance-jobs/instance-jobs.route'; -import 
instanceJobsListRoute from './instances/instance-jobs/instance-jobs-list.route'; -import InstanceJobsController from './instances/instance-jobs/instance-jobs.controller'; -import CapacityBar from './capacity-bar/main'; + +import JobsTemplate from './jobs/jobs-list.partial.html'; +import InstanceGroupJobsListController from './jobs/jobs.controller'; +import InstanceJobsListController from './instances/instance-jobs/instance-jobs.controller'; + +import InstanceModalTemplate from './instances/instance-modal.partial.html'; +import InstanceModalController from './instances/instance-modal.controller.js'; + import list from './instance-groups.list'; import service from './instance-groups.service'; -export default -angular.module('instanceGroups', [CapacityBar.name]) +import InstanceGroupsStrings from './instance-groups.strings'; +import JobStrings from './jobs/jobs.strings'; + +const MODULE_NAME = 'instanceGroups'; + +function InstanceGroupsResolve ($q, $stateParams, InstanceGroup, Instance) { + const instanceGroupId = $stateParams.instance_group_id; + const instanceId = $stateParams.instance_id; + let promises = {}; + + if (!instanceGroupId && !instanceId) { + promises.instanceGroup = new InstanceGroup(['get', 'options']); + promises.instance = new Instance(['get', 'options']); + + return $q.all(promises); + } + + if (instanceGroupId && instanceId) { + promises.instance = new Instance(['get', 'options'], [instanceId, instanceId]) + .then((instance) => instance.extend('get', 'jobs', {params: {page_size: "10", order_by: "-finished"}})); + + return $q.all(promises); + } + + promises.instanceGroup = new InstanceGroup(['get', 'options'], [instanceGroupId, instanceGroupId]) + .then((instanceGroup) => instanceGroup.extend('get', 'jobs', {params: {page_size: "10", order_by: "-finished"}})) + .then((instanceGroup) => instanceGroup.extend('get', 'instances')); + promises.instance = new Instance('get'); + + + return $q.all(promises) + .then(models => models); +} + 
+InstanceGroupsResolve.$inject = [ + '$q', + '$stateParams', + 'InstanceGroupModel', + 'InstanceModel' +]; + +function InstanceGroupsRun ($stateExtender, strings, ComponentsStrings) { + $stateExtender.addState({ + name: 'instanceGroups', + url: '/instance_groups', + searchPrefix: 'instance_group', + ncyBreadcrumb: { + label: ComponentsStrings.get('layout.INSTANCE_GROUPS') + }, + params: { + instance_group_search: { + value: { + page_size: '10', + order_by: 'name' + }, + dynamic: true + } + }, + data: { + alwaysShowRefreshButton: true, + }, + views: { + '@': { + templateUrl: templateUrl('./instance-groups/instance-groups'), + }, + 'list@instanceGroups': { + templateUrl: InstanceGroupsTemplate, + controller: 'InstanceGroupsListController', + controllerAs: 'vm' + } + }, + resolve: { + resolvedModels: InstanceGroupsResolve, + Dataset: ['InstanceGroupList', 'QuerySet', '$stateParams', 'GetBasePath', + function(list, qs, $stateParams, GetBasePath) { + let path = GetBasePath(list.basePath) || GetBasePath(list.name); + return qs.search(path, $stateParams[`${list.iterator}_search`]); + } + ] + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.add', + url: '/add', + ncyBreadcrumb: { + label: strings.get('state.ADD_BREADCRUMB_LABEL') + }, + views: { + 'add@instanceGroups': { + templateUrl: AddEditTemplate, + controller: AddInstanceGroupController, + controllerAs: 'vm' + } + }, + resolve: { + resolvedModels: InstanceGroupsResolve + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.add.modal', + abstract: true, + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + template: ` + `, + } + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.add.modal.instances', + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + template: '', + } + }, + resolvedModels: InstanceGroupsResolve + }); + + $stateExtender.addState({ + name: 'instanceGroups.edit', + route: '/:instance_group_id', + ncyBreadcrumb: { + label: 
strings.get('state.EDIT_BREADCRUMB_LABEL') + }, + views: { + 'edit@instanceGroups': { + templateUrl: AddEditTemplate, + controller: EditInstanceGroupController, + controllerAs: 'vm' + } + }, + resolve: { + resolvedModels: InstanceGroupsResolve + } + }); + + + $stateExtender.addState({ + name: 'instanceGroups.edit.modal', + abstract: true, + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + template: ` + `, + } + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.edit.modal.instances', + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + template: '', + } + }, + resolvedModels: InstanceGroupsResolve + }); + + $stateExtender.addState({ + name: 'instanceGroups.instances', + url: '/:instance_group_id/instances', + ncyBreadcrumb: { + parent: 'instanceGroups.edit', + label: ComponentsStrings.get('layout.INSTANCES') + }, + params: { + instance_search: { + value: { + page_size: '10', + order_by: 'hostname' + }, + dynamic: true + } + }, + views: { + 'instances@instanceGroups': { + templateUrl: InstancesTemplate, + controller: 'InstanceListController', + controllerAs: 'vm' + } + }, + resolve: { + resolvedModels: InstanceGroupsResolve + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.instances.modal', + abstract: true, + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + template: ` + `, + } + } + }); + + $stateExtender.addState({ + name: 'instanceGroups.instances.modal.add', + ncyBreadcrumb: { + skip: true, + }, + views: { + "modal": { + templateUrl: InstanceModalTemplate, + controller: InstanceModalController, + controllerAs: 'vm' + } + }, + resolvedModels: InstanceGroupsResolve + }); + + $stateExtender.addState({ + name: 'instanceGroups.instanceJobs', + url: '/:instance_group_id/instances/:instance_id/jobs', + ncyBreadcrumb: { + parent: 'instanceGroups.instances', + label: ComponentsStrings.get('layout.JOBS') + }, + views: { + 'instanceJobs@instanceGroups': { + templateUrl: JobsTemplate, + controller: 
'InstanceJobsListController', + controllerAs: 'vm' + }, + }, + params: { + job_search: { + value: { + page_size: '10', + order_by: '-finished' + }, + dynamic: true + }, + }, + data: { + socket: { + "groups": { + "jobs": ["status_changed"], + } + } + }, + resolvedModels: InstanceGroupsResolve + }); + + $stateExtender.addState({ + name: 'instanceGroups.jobs', + url: '/:instance_group_id/jobs', + ncyBreadcrumb: { + parent: 'instanceGroups.edit', + label: ComponentsStrings.get('layout.JOBS') + }, + params: { + job_search: { + value: { + page_size: '10', + order_by: '-finished' + }, + dynamic: true + } + }, + data: { + socket: { + "groups": { + "jobs": ["status_changed"], + } + } + }, + views: { + 'jobs@instanceGroups': { + templateUrl: JobsTemplate, + controller: 'InstanceGroupJobsListController', + controllerAs: 'vm' + }, + }, + resolve: { + resolvedModels: InstanceGroupsResolve + } + }); +} + +InstanceGroupsRun.$inject = [ + '$stateExtender', + 'InstanceGroupsStrings', + 'ComponentsStrings' +]; + +angular.module(MODULE_NAME, []) .service('InstanceGroupsService', service) .factory('InstanceGroupList', list) - .factory('JobsList', JobsList) - .factory('InstanceList', InstanceList) - .factory('InstanceJobsList', InstanceJobsList) - .controller('InstanceGroupsList', InstanceGroupsList) - .controller('JobsListController', JobsListController) + .controller('InstanceGroupsListController', InstanceGroupsListController) + .controller('InstanceGroupJobsListController', InstanceGroupJobsListController) .controller('InstanceListController', InstanceListController) - .controller('InstanceJobsController', InstanceJobsController) + .controller('InstanceJobsListController', InstanceJobsListController) + .directive('instanceListPolicy', InstanceListPolicy) .directive('instanceGroupsMultiselect', instanceGroupsMultiselect) .directive('instanceGroupsModal', instanceGroupsModal) - .config(['$stateProvider', 'stateDefinitionsProvider', '$stateExtenderProvider', - function($stateProvider, 
stateDefinitionsProvider, $stateExtenderProvider) { - let stateExtender = $stateExtenderProvider.$get(); + .directive('capacityAdjuster', CapacityAdjuster) + .directive('capacityBar', CapacityBar) + .service('InstanceGroupsStrings', InstanceGroupsStrings) + .service('JobStrings', JobStrings) + .run(InstanceGroupsRun); - - function generateInstanceGroupsStates() { - return new Promise((resolve) => { - resolve({ - states: [ - stateExtender.buildDefinition(instanceGroupsRoute), - stateExtender.buildDefinition(instancesRoute), - stateExtender.buildDefinition(instancesListRoute), - stateExtender.buildDefinition(jobsListRoute), - stateExtender.buildDefinition(instanceJobsRoute), - stateExtender.buildDefinition(instanceJobsListRoute) - ] - }); - }); - } - - $stateProvider.state({ - name: 'instanceGroups.**', - url: '/instance_groups', - lazyLoad: () => generateInstanceGroupsStates() - }); - }]); +export default MODULE_NAME; diff --git a/awx/ui/client/src/shared/directives.js b/awx/ui/client/src/shared/directives.js index 2013c4392c..5df24c2450 100644 --- a/awx/ui/client/src/shared/directives.js +++ b/awx/ui/client/src/shared/directives.js @@ -38,7 +38,7 @@ angular.module('AWDirectives', ['RestServices', 'Utilities']) }; }) -// caplitalize Add to any input field where the first letter of each +// capitalize Add to any input field where the first letter of each // word should be capitalized. Use in place of css test-transform. // For some reason "text-transform: capitalize" in breadcrumbs // causes a break at each blank space. And of course, @@ -65,6 +65,26 @@ angular.module('AWDirectives', ['RestServices', 'Utilities']) }; }) +// stringToNumber +// +// If your model does not contain actual numbers then this directive +// will do the conversion in the ngModel $formatters and $parsers pipeline. 
+// +.directive('stringToNumber', function() { + return { + require: 'ngModel', + restrict: 'A', + link: function(scope, element, attrs, ngModel) { + ngModel.$parsers.push(function(value) { + return '' + value; + }); + ngModel.$formatters.push(function(value) { + return parseFloat(value); + }); + } + }; +}) + // imageUpload // // Accepts image and returns base64 information with basic validation diff --git a/awx/ui/client/src/shared/multi-select-preview/multi-select-preview.partial.html b/awx/ui/client/src/shared/multi-select-preview/multi-select-preview.partial.html index ba96157381..48eb41b1b5 100644 --- a/awx/ui/client/src/shared/multi-select-preview/multi-select-preview.partial.html +++ b/awx/ui/client/src/shared/multi-select-preview/multi-select-preview.partial.html @@ -11,6 +11,7 @@
{{selectedRow.name}} + {{selectedRow.hostname}}
diff --git a/awx/ui/client/src/smart-status/smart-status.block.less b/awx/ui/client/src/smart-status/smart-status.block.less index 96ce93228f..01f9cfee65 100644 --- a/awx/ui/client/src/smart-status/smart-status.block.less +++ b/awx/ui/client/src/smart-status/smart-status.block.less @@ -11,14 +11,14 @@ } .SmartStatus-icon { - width: 16px; - height: 16px; + width: 14px; + height: 14px; } .SmartStatus-iconDirectionPlaceholder { - width: 16px; - height: 8px; + width: 14px; + height: 7px; border: 1px solid #d7d7d7; background: #f2f2f2; } @@ -32,8 +32,8 @@ } .SmartStatus-iconIndicator { - width: 16px; - height: 8px; + width: 14px; + height: 7px; } .SmartStatus-iconIndicator--success { @@ -45,8 +45,8 @@ } .SmartStatus-iconPlaceholder { - height: 15px; - width: 15px; + height: 14px; + width: 14px; border: 1px solid #d7d7d7; background: #f2f2f2; } diff --git a/awx/ui/client/src/users/edit/users-edit.controller.js b/awx/ui/client/src/users/edit/users-edit.controller.js index a8639bc1f5..289f2d5556 100644 --- a/awx/ui/client/src/users/edit/users-edit.controller.js +++ b/awx/ui/client/src/users/edit/users-edit.controller.js @@ -132,6 +132,9 @@ export default ['$scope', '$rootScope', '$stateParams', 'UserForm', 'Rest', case 'organization': $state.go('organizations.edit', { "organization_id": id }, { reload: true }); break; + case 'team': + $state.go('teams.edit', { "team_id": id }, { reload: true }); + break; case 'credential': $state.go('credentials.edit', { "credential_id": id }, { reload: true }); break; diff --git a/awx/ui/conf.py b/awx/ui/conf.py index 0d626c28f0..f83458d1cc 100644 --- a/awx/ui/conf.py +++ b/awx/ui/conf.py @@ -57,8 +57,8 @@ register( 'MAX_UI_JOB_EVENTS', field_class=fields.IntegerField, min_value=100, - label=_('Max Job Events Retreived by UI'), - help_text=_('Maximum number of job events for the UI to retreive within a ' + label=_('Max Job Events Retrieved by UI'), + help_text=_('Maximum number of job events for the UI to retrieve within a ' 'single 
request.'), category=_('UI'), category_slug='ui', diff --git a/awx/ui/test/e2e/fixtures.js b/awx/ui/test/e2e/fixtures.js index a0f886e074..caaabb5ff3 100644 --- a/awx/ui/test/e2e/fixtures.js +++ b/awx/ui/test/e2e/fixtures.js @@ -61,8 +61,21 @@ const getInventory = (namespace = session) => getOrganization(namespace) .then(organization => getOrCreate('/inventories/', { name: `${namespace}-inventory`, description: namespace, - organization: organization.id - })); + organization: organization.id, + }).then(inventory => getOrCreate('/hosts/', { + name: `${namespace}-host`, + description: namespace, + inventory: inventory.id, + variables: JSON.stringify({ ansible_connection: 'local' }), + }).then(() => inventory))); + +const getHost = (namespace = session) => getInventory(namespace) + .then(inventory => getOrCreate('/hosts/', { + name: `${namespace}-host`, + description: namespace, + inventory: inventory.id, + variables: JSON.stringify({ ansible_connection: 'local' }), + }).then((host) => host)); const getInventoryScript = (namespace = session) => getOrganization(namespace) .then(organization => getOrCreate('/inventory_scripts/', { @@ -182,7 +195,7 @@ const waitForJob = endpoint => { const completed = statuses.indexOf(update.data.status) > -1; if (completed) { - return resolve(); + return resolve(update.data); } if (--attempts <= 0) { @@ -206,6 +219,15 @@ const getUpdatedProject = (namespace = session) => getProject(namespace) return project; }); +const getJob = (namespace = session) => getJobTemplate(namespace) + .then(template => { + const launchURL = template.related.launch; + return post(launchURL, {}).then(response => { + const jobURL = response.data.url; + return waitForJob(jobURL).then(() => response.data); + }); + }); + const getJobTemplate = (namespace = session) => { const promises = [ getInventory(namespace), @@ -271,6 +293,29 @@ const getJobTemplateAdmin = (namespace = session) => { .then(spread(user => user)); }; +const getProjectAdmin = (namespace = 
session) => { + const rolePromise = getUpdatedProject(namespace) + .then(obj => obj.summary_fields.object_roles.admin_role); + + const userPromise = getOrganization(namespace) + .then(obj => getOrCreate('/users/', { + username: `project-admin-${uuid().substr(0, 8)}`, + organization: obj.id, + first_name: 'firstname', + last_name: 'lastname', + email: 'null@ansible.com', + is_superuser: false, + is_system_auditor: false, + password: AWX_E2E_PASSWORD + })); + + const assignRolePromise = Promise.all([userPromise, rolePromise]) + .then(spread((user, role) => post(`/api/v2/roles/${role.id}/users/`, { id: user.id }))); + + return Promise.all([userPromise, assignRolePromise]) + .then(spread(user => user)); +}; + const getInventorySourceSchedule = (namespace = session) => getInventorySource(namespace) .then(source => getOrCreate(source.related.schedules, { name: `${source.name}-schedule`, @@ -282,7 +327,7 @@ const getJobTemplateSchedule = (namespace = session) => getJobTemplate(namespace .then(template => getOrCreate(template.related.schedules, { name: `${template.name}-schedule`, description: namespace, - rrule: 'DTSTART:20171104T040000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1' + rrule: 'DTSTART:20351104T040000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1' })); module.exports = { @@ -299,8 +344,11 @@ module.exports = { getNotificationTemplate, getOrCreate, getOrganization, + getProjectAdmin, getSmartInventory, getTeam, getUpdatedProject, - getUser + getUser, + getJob, + getHost, }; diff --git a/awx/ui/test/e2e/objects/jobs.js b/awx/ui/test/e2e/objects/jobs.js new file mode 100644 index 0000000000..93c43caccd --- /dev/null +++ b/awx/ui/test/e2e/objects/jobs.js @@ -0,0 +1,10 @@ +import _ from 'lodash'; + +module.exports = { + url () { + return `${this.api.globals.launch_url}/#/jobs`; + }, + sections: {}, // TODO: Fill this out + elements: {}, // TODO: Fill this out + commands: [], // TODO: Fill this out as needed +}; diff --git a/awx/ui/test/e2e/tests/test-xss.js 
b/awx/ui/test/e2e/tests/test-xss.js index 7888824691..16d84cd8ba 100644 --- a/awx/ui/test/e2e/tests/test-xss.js +++ b/awx/ui/test/e2e/tests/test-xss.js @@ -1,17 +1,19 @@ import { getAdminMachineCredential, + getHost, getInventory, getInventoryScript, getInventorySource, getInventorySourceSchedule, getJobTemplate, - getJobTemplateAdmin, getJobTemplateSchedule, getNotificationTemplate, getOrganization, + getProjectAdmin, getSmartInventory, getTeam, getUpdatedProject, + getJob, } from '../fixtures'; const data = {}; @@ -21,9 +23,11 @@ const pages = {}; module.exports = { before: (client, done) => { const namespace = '
test
'; + const namespaceShort = '
t
'; const resources = [ getOrganization(namespace).then(obj => { data.organization = obj; }), + getHost(namespaceShort).then(obj => { data.host = obj; }), getInventory(namespace).then(obj => { data.inventory = obj; }), getInventoryScript(namespace).then(obj => { data.inventoryScript = obj; }), getSmartInventory(namespace).then(obj => { data.smartInventory = obj; }), @@ -34,8 +38,9 @@ module.exports = { getJobTemplate(namespace).then(obj => { data.jobTemplate = obj; }), getJobTemplateSchedule(namespace).then(obj => { data.jobTemplateSchedule = obj; }), getTeam(namespace).then(obj => { data.team = obj; }), - getJobTemplateAdmin(namespace).then(obj => { data.user = obj; }), + getProjectAdmin(namespace).then(obj => { data.user = obj; }), getNotificationTemplate(namespace).then(obj => { data.notification = obj; }), + getJob(namespaceShort).then(obj => { data.job = obj; }), ]; Promise.all(resources) @@ -49,9 +54,11 @@ module.exports = { pages.teams = client.page.teams(); pages.users = client.page.users(); pages.notificationTemplates = client.page.notificationTemplates(); + pages.jobs = client.page.jobs(); urls.organization = `${pages.organizations.url()}/${data.organization.id}`; urls.inventory = `${pages.inventories.url()}/inventory/${data.inventory.id}`; + urls.inventoryHosts = `${urls.inventory}/hosts`; urls.inventoryScript = `${pages.inventoryScripts.url()}/${data.inventoryScript.id}`; urls.inventorySource = `${urls.inventory}/inventory_sources/edit/${data.inventorySource.id}`; urls.sourceSchedule = `${urls.inventorySource}/schedules/${data.sourceSchedule.id}`; @@ -63,6 +70,8 @@ module.exports = { urls.team = `${pages.teams.url()}/${data.team.id}`; urls.user = `${pages.users.url()}/${data.user.id}`; urls.notification = `${pages.notificationTemplates.url()}/${data.notification.id}`; + urls.jobs = `${pages.jobs.url()}`; + urls.jobsSchedules = `${pages.jobs.url()}/schedules`; client.useCss(); client.login(); @@ -97,75 +106,6 @@ module.exports = { 
client.pause(500).expect.element('div.spinny').not.visible; client.expect.element('#multi-credential-modal').not.present; }, - 'check template roles list for unsanitized content': client => { - const itemDelete = `#permissions_table tr[id="${data.user.id}"] div[class*="RoleList-deleteContainer"]`; - - client.expect.element('#permissions_tab').visible; - client.expect.element('#permissions_tab').enabled; - - client.click('#permissions_tab'); - - client.expect.element('div.spinny').visible; - client.expect.element('div.spinny').not.visible; - - client.expect.element('#xss').not.present; - client.expect.element('[class=xss]').not.present; - - client.expect.element('div[ui-view="related"]').visible; - client.expect.element('div[ui-view="related"] smart-search input').enabled; - - client.sendKeys('div[ui-view="related"] smart-search input', `id:${data.user.id}`); - client.sendKeys('div[ui-view="related"] smart-search input', client.Keys.ENTER); - - client.expect.element('div.spinny').not.visible; - - client.expect.element(itemDelete).visible; - client.expect.element(itemDelete).enabled; - - client.click(itemDelete); - - client.expect.element('#prompt-header').visible; - client.expect.element('#prompt-header').text.equal('USER ACCESS REMOVAL'); - client.expect.element('#prompt_cancel_btn').enabled; - - client.expect.element('#xss').not.present; - client.expect.element('[class=xss]').not.present; - - client.click('#prompt_cancel_btn'); - - client.expect.element('#prompt-header').not.visible; - }, - 'check template permissions view for unsanitized content': client => { - client.expect.element('button[aw-tool-tip="Add a permission"]').visible; - client.expect.element('button[aw-tool-tip="Add a permission"]').enabled; - - client.click('button[aw-tool-tip="Add a permission"]'); - client.expect.element('div.spinny').not.visible; - - client.expect.element('div[class="AddPermissions-header"]').visible; - 
client.expect.element('div[class="AddPermissions-header"]').attribute('innerHTML') - .contains('<div id="xss" class="xss">test</div>'); - - client.expect.element('#xss').not.present; - client.expect.element('[class=xss]').not.present; - - client.expect.element('div[class="AddPermissions-dialog"] button[class*="exit"]').enabled; - - client.click('div[class="AddPermissions-dialog"] button[class*="exit"]'); - - client.expect.element('div.spinny').visible; - client.expect.element('div.spinny').not.visible; - - // client.expect.element('div.spinny').visible; - client.expect.element('div.spinny').not.visible; - client.waitForAngular(); - - client.expect.element('#job_template_tab').enabled; - - client.click('#job_template_tab'); - - client.expect.element('#job_template_form').visible; - }, 'check template list for unsanitized content': client => { const itemRow = `#row-${data.jobTemplate.id}`; const itemName = `${itemRow} .at-RowItem-header`; @@ -219,7 +159,7 @@ module.exports = { client.expect.element('[class=xss]').not.present; }, 'check user roles list for unsanitized content': client => { - const adminRole = data.jobTemplate.summary_fields.object_roles.admin_role; + const adminRole = data.project.summary_fields.object_roles.admin_role; const itemDelete = `#permissions_table tr[id="${adminRole.id}"] #delete-action`; client.expect.element('#permissions_tab').visible; @@ -498,6 +438,75 @@ module.exports = { client.expect.element('#xss').not.present; client.expect.element('[class=xss]').not.present; }, + 'check project roles list for unsanitized content': client => { + const itemDelete = `#permissions_table tr[id="${data.user.id}"] div[class*="RoleList-deleteContainer"]`; + + client.expect.element('#permissions_tab').visible; + client.expect.element('#permissions_tab').enabled; + + client.click('#permissions_tab'); + + client.expect.element('div.spinny').visible; + client.expect.element('div.spinny').not.visible; + + client.expect.element('#xss').not.present; + 
client.expect.element('[class=xss]').not.present; + + client.expect.element('div[ui-view="related"]').visible; + client.expect.element('div[ui-view="related"] smart-search input').enabled; + + client.sendKeys('div[ui-view="related"] smart-search input', `id:${data.user.id}`); + client.sendKeys('div[ui-view="related"] smart-search input', client.Keys.ENTER); + + client.expect.element('div.spinny').not.visible; + + client.expect.element(itemDelete).visible; + client.expect.element(itemDelete).enabled; + + client.click(itemDelete); + + client.expect.element('#prompt-header').visible; + client.expect.element('#prompt-header').text.equal('USER ACCESS REMOVAL'); + client.expect.element('#prompt_cancel_btn').enabled; + + client.expect.element('#xss').not.present; + client.expect.element('[class=xss]').not.present; + + client.click('#prompt_cancel_btn'); + + client.expect.element('#prompt-header').not.visible; + }, + 'check project permissions view for unsanitized content': client => { + client.expect.element('button[aw-tool-tip="Add a permission"]').visible; + client.expect.element('button[aw-tool-tip="Add a permission"]').enabled; + + client.click('button[aw-tool-tip="Add a permission"]'); + client.expect.element('div.spinny').not.visible; + + client.expect.element('div[class="AddPermissions-header"]').visible; + client.expect.element('div[class="AddPermissions-header"]').attribute('innerHTML') + .contains('<div id="xss" class="xss">test</div>'); + + client.expect.element('#xss').not.present; + client.expect.element('[class=xss]').not.present; + + client.expect.element('div[class="AddPermissions-dialog"] button[class*="exit"]').enabled; + + client.click('div[class="AddPermissions-dialog"] button[class*="exit"]'); + + client.expect.element('div.spinny').visible; + client.expect.element('div.spinny').not.visible; + + // client.expect.element('div.spinny').visible; + client.expect.element('div.spinny').not.visible; + client.waitForAngular(); + + 
client.expect.element('#project_tab').enabled; + + client.click('#project_tab'); + + client.expect.element('#project_form').visible; + }, 'check project list for unsanitized content': client => { const itemRow = `#projects_table tr[id="${data.project.id}"]`; const itemName = `${itemRow} td[class*="name-"] a`; @@ -655,6 +664,40 @@ module.exports = { client.navigateTo(urls.jobTemplateSchedule); client.expect.element('#xss').not.present; client.expect.element('[class=xss]').not.present; + }, + 'check job schedules view for unsanitized content': client => { + const itemRow = `#schedules_table tr[id="${data.jobTemplateSchedule.id}"]`; + const itemName = `${itemRow} td[class*="name-"] a`; + + client.navigateTo(urls.jobsSchedules); + + client.moveToElement(itemName, 0, 0, () => { + client.expect.element(itemName).attribute('aria-describedby'); + client.getAttribute(itemName, 'aria-describedby', ({ value }) => { + const tooltip = `#${value}`; + client.expect.element(tooltip).present; + client.expect.element(tooltip).visible; + + client.expect.element('#xss').not.present; + client.expect.element('[class=xss]').not.present; + client.expect.element(tooltip).attribute('innerHTML') + .contains('<div id="xss" class="xss">test</div>'); + }); + }); + client.end(); + }, + 'check host recent jobs popup for unsanitized content': client => { + const itemRow = `#hosts_table tr[id="${data.host.id}"]`; + const itemName = `${itemRow} td[class*="active_failures-"] a`; + const popOver = `${itemRow} td[class*="active_failures-"] div[class*="popover"]`; + + client.navigateTo(urls.inventoryHosts); + + client.click(itemName); + client.expect.element(popOver).present; + + client.expect.element('[class=xss]').not.present; + client.end(); }, }; diff --git a/docs/auth/ldap.md b/docs/auth/ldap.md index b9a172c3d0..107ee8c9ef 100644 --- a/docs/auth/ldap.md +++ b/docs/auth/ldap.md @@ -1,6 +1,12 @@ # LDAP The Lightweight Directory Access Protocol (LDAP) is an open, vendor-neutral, industry standard 
application protocol for accessing and maintaining distributed directory information services over an Internet Protocol (IP) network. Directory services play an important role in developing intranet and Internet applications by allowing the sharing of information about users, systems, networks, services, and applications throughout the network. +# Configure LDAP Authentication +Please see the Tower documentation as well as Ansible blog posts for basic LDAP configuration. + +LDAP Authentication provides duplicate sets of configuration fields for authentication with up to six different LDAP servers. +The default set of configuration fields take the form `AUTH_LDAP_`. Configuration fields for additional ldap servers are numbered `AUTH_LDAP__`. + ## Test environment setup Please see README.md of this repository: https://github.com/jangsutsr/deploy_ldap.git. diff --git a/docs/auth/saml.md b/docs/auth/saml.md index 58e765ce20..7af1730336 100644 --- a/docs/auth/saml.md +++ b/docs/auth/saml.md @@ -2,7 +2,11 @@ Security Assertion Markup Language, or SAML, is an open standard for exchanging authentication and/or authorization data between an identity provider (i.e. LDAP) and a service provider (i.e. AWX). More concretely, AWX can be configured to talk with SAML in order to authenticate (create/login/logout) users of AWX. User Team and Organization membership can be embedded in the SAML response to AWX. # Configure SAML Authentication -Please see the Tower documentation as well as Ansible blog posts for basic SAML configuration. +Please see the Tower documentation as well as Ansible blog posts for basic SAML configuration. Note that AWX's SAML implementation relies on python-social-auth which uses python-saml. AWX exposes 3 fields that are directly passed to the lower libraries: +* `SOCIAL_AUTH_SAML_SP_EXTRA` is passed to the `python-saml` library configuration's `sp` setting. 
+* `SOCIAL_AUTH_SAML_SECURITY_CONFIG` is passed to the `python-saml` library configuration's `security` setting. +* `SOCIAL_AUTH_SAML_EXTRA_DATA` +See http://python-social-auth-docs.readthedocs.io/en/latest/backends/saml.html#advanced-settings for more information. # Configure SAML for Team and Organization Membership AWX can be configured to look for particular attributes that contain AWX Team and Organization membership to associate with users when they login to AWX. The attribute names are defined in AWX settings. Specifically, the authentication settings tab and SAML sub category fields *SAML Team Map* and *SAML Organization Attribute Mapping*. The meaning and usefulness of these settings is best motivated through example. @@ -80,6 +84,7 @@ Below is another example of a SAML attribute that contains a Team membership in } ``` **saml_attr:** The saml attribute name where the team array can be found. -**remove:** True to remove user from all Teams before adding the user to the list of Teams. False to keep the user in whatever Team(s) they are in while adding the user to the Team(s) in the SAML attribute. -**team_org_map:** An array of dictionaries of the form `{ "team": "", "organization": "" }` that defines mapping from AWX Team -> AWX Organization. This is needed because the same named Team can exist in multiple Organizations in Tower. The organization to which a team listed in a SAML attribute belongs to would be ambiguous without this mapping. +**remove:** True to remove user from all Teams before adding the user to the list of Teams. False to keep the user in whatever Team(s) they are in while adding the user to the Team(s) in the SAML attribute. + +**team_org_map:** An array of dictionaries of the form `{ "team": "", "organization": "" }` that defines mapping from AWX Team -> AWX Organization. This is needed because the same named Team can exist in multiple Organizations in Tower. 
The organization to which a team listed in a SAML attribute belongs to would be ambiguous without this mapping. diff --git a/docs/capacity.md b/docs/capacity.md new file mode 100644 index 0000000000..ceee3c42c3 --- /dev/null +++ b/docs/capacity.md @@ -0,0 +1,99 @@ +## Ansible Tower Capacity Determination and Job Impact + +The Ansible Tower capacity system determines how many jobs can run on an Instance given the amount of resources +available to the Instance and the size of the jobs that are running (referred hereafter as `Impact`). +The algorithm used to determine this is based entirely on two things: + +* How much memory is available to the system (`mem_capacity`) +* How much CPU is available to the system (`cpu_capacity`) + +Capacity also impacts Instance Groups. Since Groups are composed of Instances, likewise Instances can be +assigned to multiple Groups. This means that impact to one Instance can potentially affect the overall capacity of +other Groups. + +Instance Groups (not Instances themselves) can be assigned to be used by Jobs at various levels (see clustering.md). +When the Task Manager is preparing its graph to determine which Group a Job will run on it will commit the capacity of +an Instance Group to a job that hasn't or isn't ready to start yet. + +Finally, if only one Instance is available, in smaller configurations, for a Job to run the Task Manager will allow that +Job to run on the Instance even if it would push the Instance over capacity. We do this as a way to guarantee that Jobs +themselves won't get clogged as a result of an under provisioned system. + +These concepts mean that, in general, Capacity and Impact is not a zero-sum system relative to Jobs and Instances/Instance Groups + +### Resource Determination For Capacity Algorithm + + +The capacity algorithms are defined in order to determine how many `forks` a system is capable of running simultaneously. This controls how +many systems Ansible itself will communicate with simultaneously. 
Increasing the number of forks a Tower system is running will, in general, +allow jobs to run faster by performing more work in parallel. The tradeoff is that will increase the load on the system which could cause work +to slow down overall. + +Tower can operate in two modes when determining capacity. `mem_capacity` (the default) will allow you to overcommit CPU resources while protecting the system +from running out of memory. If most of your work is not cpu-bound then selecting this mode will maximize the number of forks. + +#### Memory Relative Capacity +`mem_capacity` is calculated relative to the amount of memory needed per-fork. Taking into account the overhead for Tower's internal components this comes out +to be about `100MB` per-fork. When considering the amount of memory available to Ansible jobs the capacity algorithm will reserve 2GB of memory to account +for the presence of other Tower services. The algorithm itself looks like this: + + (mem - 2048) / mem_per_fork + +As an example: + + (4096 - 2048) / 100 == ~20 + +So a system with 4GB of memory would be capable of running 20 forks. The value `mem_per_fork` can be controlled by setting the Tower settings value +(or environment variable) `SYSTEM_TASK_FORKS_MEM` which defaults to `100`. + +#### CPU Relative Capacity + +Often times Ansible workloads can be fairly cpu-bound. In these cases sometimes reducing the simultaneous workload allows more tasks to run faster and reduces +the average time-to-completion of those jobs. + +Just as the Tower `mem_capacity` algorithm uses the amount of memory need per-fork, the `cpu_capacity` algorithm looks at the amount of cpu resources is needed +per fork. The baseline value for this is `4` forks per-core. The algorithm itself looks like this: + + cpus * fork_per_cpu + +For example a 4-core system: + + 4 * 4 == 16 + +The value `fork_per_cpu` can be controlled by setting the Tower settings value (or environment variable) `SYSTEM_TASK_FORKS_CPU` which defaults to `4`. 
+ +### Job Impacts Relative To Capacity + +When selecting the capacity it's important to understand how each job type affects capacity. + +It's helpful to understand what `forks` mean to Ansible: http://docs.ansible.com/ansible/latest/intro_configuration.html#forks + +The default forks value for ansible is `5`. However, if Tower knows that you're running against fewer systems than that then the actual concurrency value +will be lower. + +When a job is run, Tower will add `1` to the number of forks selected to compensate for the Ansible parent process. So if you are running a playbook against `5` +systems with a `forks` value of `5` then the actual `forks` value from the perspective of Job Impact will be 6. + +#### Impact of Job types in Tower + +Jobs and Ad-hoc jobs follow the above model `forks + 1`. + +Other job types have a fixed impact: + +* Inventory Updates: 1 +* Project Updates: 1 +* System Jobs: 5 + +### Selecting the right capacity + +Selecting between a `memory` focused capacity algorithm and a `cpu` focused capacity for your Tower use means you'll be selecting between a minimum +and maximum value. In the above examples the CPU capacity would allow a maximum of 16 forks while the Memory capacity would allow 20. For some systems +the disparity between these can be large and often times you may want to have a balance between these two. + +An `Instance` field `capacity_adjustment` allows you to select how much of one or the other you want to consider. It is represented as a value between 0.0 +and 1.0. If set to a value of `1.0` then the largest value will be used. In the above example, that would be Memory capacity so a value of `20` forks would +be selected. If set to a value of `0.0` then the smallest value will be used. 
A value of `0.5` would be a 50/50 balance between the two algorithms which would +be `18`: + + 16 + (20 - 16) * 0.5 == 18 + diff --git a/docs/clustering.md b/docs/clustering.md index df52cbadd8..fa53ee52c6 100644 --- a/docs/clustering.md +++ b/docs/clustering.md @@ -28,6 +28,8 @@ It's important to point out a few existing things: * Existing old-style HA deployments will be transitioned automatically to the new HA system during the upgrade process to 3.1. * Manual projects will need to be synced to all instances by the customer +Ansible Tower 3.3 adds support for container-based clusters using Openshift or Kubernetes + ## Important Changes * There is no concept of primary/secondary in the new Tower system. *All* systems are primary. @@ -226,6 +228,47 @@ show up in api endpoints and stats monitoring. These groups can be removed with $ awx-manage unregister_queue --queuename= ``` +### Configuring Instances and Instance Groups from the API + +Instance Groups can be created by posting to `/api/v2/instance_groups` as a System Admin. + +Once created, `Instances` can be associated with an Instance Group with: + +``` +HTTP POST /api/v2/instance_groups/x/instances/ {'id': y}` +``` + +An `Instance` that is added to an `InstanceGroup` will automatically reconfigure itself to listen on the group's work queue. See the following +section `Instance Group Policies` for more details. + +### Instance Group Policies + +Tower `Instances` can be configured to automatically join `Instance Groups` when they come online by defining a policy. These policies are evaluated for +every new Instance that comes online. + +Instance Group Policies are controlled by 3 optional fields on an `Instance Group`: + +* `policy_instance_percentage`: This is a number between 0 - 100. It guarantees that this percentage of active Tower instances will be added + to this `Instance Group`. 
As new instances come online, if the number of Instances in this group relative to the total number of instances + is less than the given percentage then new ones will be added until the percentage condition is satisfied. +* `policy_instance_minimum`: This policy attempts to keep at least this many `Instances` in the `Instance Group`. If the number of + available instances is lower than this minimum then all `Instances` will be placed in this `Instance Group`. +* `policy_instance_list`: This is a fixed list of `Instance` names. These `Instances` will *always* be added to this `Instance Group`. + Further, by adding Instances to this list you are declaring that you will manually manage those Instances and they will not be eligible under any other + policy. This means they will not be automatically added to any other `Instance Group` even if the policy would cause them to be matched. + +> NOTES + +* `Instances` that are assigned directly to `Instance Groups` by posting to `/api/v2/instance_groups/x/instances` or + `/api/v2/instances/x/instance_groups` are automatically added to the `policy_instance_list`. This means they are subject to the + normal caveats for `policy_instance_list` and must be manually managed. +* `policy_instance_percentage` and `policy_instance_minimum` work together. For example, if you have a `policy_instance_percentage` of + 50% and a `policy_instance_minimum` of 2 and you start 6 `Instances`. 3 of them would be assigned to the `Instance Group`. If you reduce the number + of `Instances` to 2 then both of them would be assigned to the `Instance Group` to satisfy `policy_instance_minimum`. In this way, you can set a lower + bound on the amount of available resources. +* Policies don't actively prevent `Instances` from being associated with multiple `Instance Groups` but this can effectively be achieved by making the percentages + sum to 100. 
If you have 4 `Instance Groups` assign each a percentage value of 25 and the `Instances` will be distributed among them with no overlap. + ### Status and Monitoring Tower itself reports as much status as it can via the api at `/api/v2/ping` in order to provide validation of the health @@ -310,6 +353,13 @@ used to specify a preferred instance group on the job template or inventory, but still allow the job to be submitted to any instance if those are out of capacity. +#### Instance Enable / Disable + +In order to support temporarily taking an `Instance` offline there is a boolean property `enabled` defined on each instance. + +When this property is disabled no jobs will be assigned to that `Instance`. Existing jobs will finish but no new work will be +assigned. + ## Acceptance Criteria When verifying acceptance we should ensure the following statements are true diff --git a/docs/custom_credential_types.md b/docs/custom_credential_types.md index c1b5387565..33426c2011 100644 --- a/docs/custom_credential_types.md +++ b/docs/custom_credential_types.md @@ -194,7 +194,8 @@ certificate/key data: } } - +Note that the single and multi-file syntax cannot be mixed within the same +``Credential Type``. Job and Job Template Credential Assignment ------------------------------------------ @@ -326,6 +327,8 @@ When verifying acceptance we should ensure the following statements are true: * Custom `Credential Types` should support injecting both single and multiple files. (Furthermore, the new syntax for injecting multiple files should work properly even if only a single file is injected). +* Users should not be able to use the syntax for injecting single and + multiple files in the same custom credential. * The default `Credential Types` included with Tower in 3.2 should be non-editable/readonly and cannot be deleted by any user. 
* Stored `Credential` values for _all_ types should be consistent before and diff --git a/docs/overview.md b/docs/overview.md new file mode 100644 index 0000000000..fe417c4a14 --- /dev/null +++ b/docs/overview.md @@ -0,0 +1,118 @@ +# awx +awx provides a web interface and distributed task engine for scheduling and +running Ansible playbooks. As such, it relies heavily on the interfaces +provided by Ansible. This document provides a birds-eye view of the notable +touchpoints between awx and Ansible. + +## Terminology +awx has a variety of concepts which map to components of Ansible, or +which further abstract them to provide functionality on top of Ansible. A few +of the most notable ones are: + +### Projects +Projects represent a collection of Ansible playbooks. Most awx users create +Projects that import periodically from source control systems (such as git, +mercurial, or subversion repositories). This import is accomplished via an +ansible playbook included with awx (which makes use of the various source +control management modules in Ansible). + +### Inventories +awx manages Inventories, Groups, and Hosts, and provides a RESTful interface +that maps to static and dynamic Ansible inventories. Inventory data can +be entered into awx manually, but many users perform Inventory Syncs to import +inventory data from a variety of external sources. + +### Job Templates +A Job Template is a definition and set of parameters for running +`ansible-playbook`. It defines metadata about a given playbook run, such as: + +* a named identifier +* an associated inventory to run against +* the project and `.yml` playbook to run +* a variety of other options which map directly to ansible-playbook + arguments (extra_vars, verbosity, forks, limit, etc...) + +### Credentials +awx stores sensitive credential data which can be attached to `ansible-playbook` +processes that it runs. 
This data can be oriented towards SSH connection +authentication (usernames, passwords, SSH keys and passphrases), +ansible-specific prompts (such as Vault passwords), or environmental +authentication values which various Ansible modules depend on (such as setting +`AWS_ACCESS_KEY_ID` in an environment variable, or specifying +`ansible_ssh_user` as an extra variable). + +## Canonical Example +Bringing all of this terminology together, a "Getting Started using AWX" might +involve: + +* Creating a new Project that imports playbooks from e.g., a remote git repository +* Manually creating or importing an Inventory which defines where the playbook(s) will run +* Optionally, saving a Credential which contains SSH authentication details for + the host(s) where the playbook will run +* Creating a Job Template that specifies which Project and playbook to run and + where to run it (Inventory), and any necessary Credentials for e.g., SSH + authentication +* Launching the Job Template and viewing the results + +## awx's Interaction with Ansible +The touchpoints between awx and Ansible are mostly encompassed by +everything that happens *after* a job is started in awx. Specifically, this +includes: + +* Any time a Job Template is launched +* Any time a Project Update is performed +* Any time an Inventory Sync is performed +* Any time an Adhoc Command is run + +### Spawning Ansible Processes +awx relies on a handful of stable interfaces in its interaction with Ansible. +The first of these are the actual CLI for `ansible-playbook` and +`ansible-inventory`. + +When a Job Template or Project Update is run in awx, an actual +`ansible-playbook` command is composed and spawned in a pseudoterminal on one +of the servers/containers that make up the awx installation. This process runs +until completion (or until a configurable timeout), and the return code, +stdout, and stderr of the process are recorded in the awx database. 
Adhoc +commands work the same way, though they spawn `ansible` processes instead of +`ansible-playbook`. + +Similarly, when an Inventory Sync runs, an actual `ansible-inventory` process +runs, and its output is parsed and persisted into the awx database as Hosts and +Groups. + +awx relies on stability in CLI behavior to function properly across Ansible +releases; this includes the actual CLI arguments _and_ the behavior of task +execution and prompts (such as password, become, and Vault prompts). + +### Capturing Event Data +awx applies an Ansible callback plugin to all `ansible-playbook` and `ansible` +processes it spawns. This allows Ansible events to be captured and persisted +into the awx database; this process is what drives the "streaming" web UI +you'll see if you launch a job from the awx web interface and watch its results +appears on the screen. awx relies on stability in this plugin interface, the +heirarchy of emitted events based on strategy, and _especially_ the structure +of event data to work across Ansible releases: + +![Event Data Diagram](https://user-images.githubusercontent.com/722880/35641610-ae7f1dea-068e-11e8-84fb-0f96043d53e4.png) + +### Fact Caching +awx provides a custom fact caching implementation that allows users to store +facts for playbook runs across subsequent Job Template runs. Specifically, awx +makes use of the `jsonfile` fact cache plugin; after `ansible-playbook` runs +have exited, awx consumes the entire `jsonfile` cache and persists it in the +awx database. On subsequent Job Template runs, prior `jsonfile` caches are +restored to the local file system so the new `ansible-playbook` process makes +use of them. + +### Environment-Based Configuration +awx injects credentials and module configuration for a number of Ansible +modules via environment variables. 
Examples include: + +* `ANSIBLE_NET_*` and other well-known environment variables for network device authentication +* API keys and other credential values which are utilized + (`AWS_ACCESS_KEY_ID`, `GCE_EMAIL`, etc...) +* SSH-oriented configuration flags, such as `ANSIBLE_SSH_CONTROL_PATH` + +awx relies on stability in these configuration options to reliably support +credential injection for supported Ansible modules. diff --git a/docs/resource_copy.md b/docs/resource_copy.md new file mode 100644 index 0000000000..da85d55225 --- /dev/null +++ b/docs/resource_copy.md @@ -0,0 +1,166 @@ +Starting from Tower 3.3 and API v2, user are able to copy some existing resource objects to quickly +create new resource objects via POSTing to corresponding `/copy/` endpoint. A new `CopyAPIView` class +is introduced as the base view class for `/copy/` endpoints. It mimics the process of manually fetching +fields from the existing object to create a new object, plus the ability to automatically detect sub +structures of existing objects and make a background task-based deep copy when necessary. + +## Usage +If an AWX resource is copiable, all of its object detail API views will have a related URL field +`"copy"`, which has form `/api////copy/`. GET to this endpoint +will return `can_copy`, which is a boolean indicating whether the current user can execute a copy +operation; POST to this endpoint actually copies the resource object. One field `name` is required +which will later be used as the name of the created copy. Upon success, 201 will be returned, along +with the created copy. + +For some resources like credential, the copy process is not time-consuming, thus the entire copy +process will take place in the request-response cycle, and the created object copy is returned as +POST response. + +For some other resources like inventory, the copy process can take longer, depending on the number +of sub-objects to copy (will explain later). 
Thus, although the created copy will be returned, the +copy process is not finished yet. All sub-objects (like all hosts and groups of an inventory) will +not be created until after the background copy task is finished in success. + +Currently the available list of copiable resources are: + +- job templates +- projects +- inventories +- workflows +- credentials +- notifications +- inventory scripts + +For most of the resources above, only the object to be copied itself will be copied; For some resources +like inventories, however, sub resources belonging to the resource will also be copied to maintain the +full functionality of the copied new resource. In specific: + +- When an inventory is copied, all its hosts, groups and inventory sources are copied. +- When a workflow job template is copied, all its workflow job template nodes are copied. + +## How to add a copy end-point for a resource +The copy behavior of different resources largely follow the same pattern, therefore a unified way of +enabling copy capability for resources is available for developers: + +Firstly, create a `/copy/` url endpoint for the target resource. + +Secondly, create a view class as handler to `/copy/` endpoint. This view class should be subclassed +from `awx.api.generics.CopyAPIView`. Here is an example: +```python +class JobTemplateCopy(CopyAPIView): + + model = JobTemplate + copy_return_serializer_class = JobTemplateSerializer +``` +Note the above example declares a custom class attribute `copy_return_serializer_class`. This attribute +is used by `CopyAPIView` to render the created copy in POST response, so in most cases the value should +be the same as `serializer_class` of corresponding resource detail view, like here the value is the +`serializer_class` of `JobTemplateDetail`. + +Thirdly, for the underlying model of the resource, Add 2 macros, `FIELDS_TO_PRESERVE_AT_COPY` and +`FIELDS_TO_DISCARD_AT_COPY`, as needed. 
Here is an example: +```python +class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin): + ''' + A job template is a reusable job definition for applying a project (with + playbook) to an inventory source with a given credential. + ''' + FIELDS_TO_PRESERVE_AT_COPY = [ + 'labels', 'instance_groups', 'credentials', 'survey_spec' + ] + FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential'] +``` +When copying a resource object, basically all fields necessary for creating a new resource (fields +composing a valid POST body for creating new resources) are extracted from the original object and +used to create the copy. + +However, sometimes we need more fields to be copied, like `credentials` of a job template, which +cannot be provided during creation. In this case we list such fields in `FIELDS_TO_PRESERVE_AT_COPY` +so that these fields won't be missed. + +On the other hand, sometimes we do not want to include some fields provided in create POST body, +like `vault_credential` and `credential` fields used for creating a job template, which do not have +tangible field correspondence in `JobTemplate` model. In this case we list such fields in +`FIELDS_TO_DISCARD_AT_COPY` so that those fields won't be included. + +For models that will be part of a deep copy, like hosts and workflow job template nodes, the related +POST body for creating a new object is not available. Therefore all necessary fields for creating +a new resource should also be included in `FIELDS_TO_PRESERVE_AT_COPY`. + +Lastly, unit test copy behavior of the new endpoint in `/awx/main/tests/functional/test_copy.py` and +update docs (like this doc). + +Fields in `FIELDS_TO_PRESERVE_AT_COPY` must be solid model fields, while fields in +`FIELDS_TO_DISCARD_AT_COPY` do not need to be. Note there are hidden fields not visible from model +definition, namely reverse relationships and fields inherited from super classes or mix-ins. 
A help +script `tools/scripts/list_fields.py` is available to inspect a model and list details of all its +available fields. +``` +# In shell_plus +>>> from list_fields import pretty_print_model_fields +>>> pretty_print_model_fields(JobTemplate) +``` + +`CopyAPIView` will automatically detect sub objects of an object, and do a deep copy of all sub objects +as a background celery task. There are sometimes permission issues with sub object copy. For example, +when copying nodes of a workflow job template, there are cases where the user performing copy has no use +permission of related credential and inventory of some nodes, and it is desired those fields will be +`None`. In order to do that, developer should provide a static method `deep_copy_permission_check_func` +under corresponding specific copy view. Like +```python +class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView): + + model = WorkflowJobTemplate + copy_return_serializer_class = WorkflowJobTemplateSerializer + + # Other code + + @staticmethod + def deep_copy_permission_check_func(user, new_objs): + # method body + + # Other code +``` +Static method `deep_copy_permission_check_func` must have and only have two arguments: `user`, the +user performing the copy; `new_objs`, a list of all sub objects of the created copy. Sub objects in +`new_objs` are initially populated disregarding any permission constraints, developer shall check +`user`'s permission against these new sub objects and react like unlink related objects or sending +warning logs. `deep_copy_permission_check_func` should not return anything. + +Lastly, macro `REENCRYPTION_BLACKLIST_AT_COPY` is available as part of a model definition. It is a +list of field names which will escape re-encryption during copy. For example, `extra_data` field +of workflow job template nodes. + +## Acceptance Criteria +* Credentials should be able to copy themselves. 
The behavior of copying credential A shall be exactly + the same as creating a credential B with all needed fields for creation coming from credential A. +* Inventories should be able to copy themselves. The behavior of copying inventory A shall be exactly + the same as creating an inventory B with all needed fields for creation coming from inventory A. Other + than that, inventory B should inherit A's `instance_groups`, and have exactly the same host and group + structures as A. +* Inventory scripts should be able to copy themselves. The behavior of copying inventory script A + shall be exactly the same as creating an inventory script B with all needed fields for creation + coming from inventory script A. +* Job templates should be able to copy themselves. The behavior of copying job template A + shall be exactly the same as creating a job template B with all needed fields for creation + coming from job template A. Other than that, job template B should inherit A's `labels`, + `instance_groups`, `credentials` and `survey_spec`. +* Notification templates should be able to copy themselves. The behavior of copying notification + template A shall be exactly the same as creating a notification template B with all needed fields + for creation coming from notification template A. +* Projects should be able to copy themselves. The behavior of copying project A shall be the + same as creating a project B with all needed fields for creation coming from project A, except for + `local_path`, which will be populated by triggered project update. Other than that, project B + should inherit A's `labels`, `instance_groups` and `credentials`. +* Workflow Job templates should be able to copy themselves. The behavior of copying workflow job + template A shall be exactly the same as creating a workflow job template B with all needed fields + for creation coming from workflow job template A. 
Other than that, workflow job template B should + inherit A's `labels`, `instance_groups`, `credentials` and `survey_spec`, and have exactly the + same workflow job template node structure as A. +* In all copy processes, `name` field of the created copy of the original object should be able to + customize in the POST body. +* The permission for a user to make a copy for an existing resource object should be the same as the + permission for a user to create a brand new resource object using fields from the existing object. +* The RBAC behavior of original workflow job template `/copy/` should be pertained. That is, if the + user has no necessary permission to the related project and credential of a workflow job template + node, the copied workflow job template node should have those fields empty. diff --git a/docs/schedules.md b/docs/schedules.md index f733b37aa7..2cfd11676d 100644 --- a/docs/schedules.md +++ b/docs/schedules.md @@ -41,9 +41,9 @@ A list of _valid_ zone identifiers (which can vary by system) can be found at: HTTP GET /api/v2/schedules/zoneinfo/ [ - "Africa/Abidjan", - "Africa/Accra", - "Africa/Addis_Ababa", + {"name": "Africa/Abidjan"}, + {"name": "Africa/Accra"}, + {"name": "Africa/Addis_Ababa"}, ... 
] diff --git a/installer/image_build/files/launch_awx_task.sh b/installer/image_build/files/launch_awx_task.sh index b2e489d069..395622ac53 100755 --- a/installer/image_build/files/launch_awx_task.sh +++ b/installer/image_build/files/launch_awx_task.sh @@ -4,7 +4,12 @@ if [ `id -u` -ge 500 ]; then cat /tmp/passwd > /etc/passwd rm /tmp/passwd fi + +ANSIBLE_REMOTE_TEMP=/tmp ANSIBLE_LOCAL_TEMP=/tmp ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=$DATABASE_HOST port=$DATABASE_PORT" all +ANSIBLE_REMOTE_TEMP=/tmp ANSIBLE_LOCAL_TEMP=/tmp ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=$MEMCACHED_HOST port=11211" all +ANSIBLE_REMOTE_TEMP=/tmp ANSIBLE_LOCAL_TEMP=/tmp ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=$RABBITMQ_HOST port=5672" all ANSIBLE_REMOTE_TEMP=/tmp ANSIBLE_LOCAL_TEMP=/tmp ansible -i "127.0.0.1," -c local -v -m postgresql_db -U $DATABASE_USER -a "name=$DATABASE_NAME owner=$DATABASE_USER login_user=$DATABASE_USER login_host=$DATABASE_HOST login_password=$DATABASE_PASSWORD port=$DATABASE_PORT" all + awx-manage migrate --noinput --fake-initial if [ ! -z "$AWX_ADMIN_USER" ]&&[ ! 
-z "$AWX_ADMIN_PASSWORD" ]; then echo "from django.contrib.auth.models import User; User.objects.create_superuser('$AWX_ADMIN_USER', 'root@localhost', '$AWX_ADMIN_PASSWORD')" | awx-manage shell @@ -14,5 +19,5 @@ else awx-manage create_preload_data fi awx-manage provision_instance --hostname=$(hostname) -awx-manage register_queue --queuename=tower --hostnames=$(hostname) +awx-manage register_queue --queuename=tower --instance_percent=100 supervisord -c /supervisor_task.conf diff --git a/installer/image_build/files/settings.py b/installer/image_build/files/settings.py index d9a56df2f1..aac778aaba 100644 --- a/installer/image_build/files/settings.py +++ b/installer/image_build/files/settings.py @@ -31,9 +31,6 @@ AWX_PROOT_ENABLED = False CLUSTER_HOST_ID = "awx" SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' -CELERY_TASK_QUEUES += (Queue(CLUSTER_HOST_ID, Exchange(CLUSTER_HOST_ID), routing_key=CLUSTER_HOST_ID),) -CELERY_TASK_ROUTES['awx.main.tasks.cluster_node_heartbeat'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID} -CELERY_TASK_ROUTES['awx.main.tasks.purge_old_stdout_files'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID} ############################################################################### diff --git a/installer/image_build/files/supervisor.conf b/installer/image_build/files/supervisor.conf index ec0acac101..cfcaf5ebe9 100644 --- a/installer/image_build/files/supervisor.conf +++ b/installer/image_build/files/supervisor.conf @@ -41,6 +41,15 @@ priority=5 # TODO: Exit Handler +[eventlistener:awx-config-watcher] +command=/usr/bin/config-watcher +stderr_logfile=/dev/stdout +stderr_logfile_maxbytes=0 +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +events=TICK_60 +priority=0 + [unix_http_server] file=/tmp/supervisor.sock diff --git a/installer/image_build/files/supervisor_task.conf b/installer/image_build/files/supervisor_task.conf index 857f941c96..83107bf6e7 100644 --- a/installer/image_build/files/supervisor_task.conf 
+++ b/installer/image_build/files/supervisor_task.conf @@ -3,8 +3,7 @@ nodaemon = True umask = 022 [program:celery] -# TODO: Needs to be reworked to dynamically use instance group queues -command = /var/lib/awx/venv/awx/bin/celery worker -A awx -l debug --autoscale=4 -Ofair -Q tower_scheduler,tower_broadcast_all,tower,%(host_node_name)s -n celery@localhost +command = /var/lib/awx/venv/awx/bin/celery worker -A awx -B -l debug --autoscale=4 -Ofair -s /var/lib/awx/beat.db -Q tower_broadcast_all -n celery@%(ENV_HOSTNAME)s directory = /var/lib/awx environment = LANGUAGE="en_US.UTF-8",LANG="en_US.UTF-8",LC_ALL="en_US.UTF-8",LC_CTYPE="en_US.UTF-8" #user = {{ aw_user }} @@ -16,18 +15,6 @@ stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 -[program:awx-celeryd-beat] -command = /var/lib/awx/venv/awx/bin/celery beat -A awx -l debug --pidfile= -s /var/lib/awx/beat.db -directory = /var/lib/awx -autostart = true -autorestart = true -stopwaitsecs = 5 -redirect_stderr=true -stdout_logfile = /dev/stdout -stdout_logfile_maxbytes = 0 -stderr_logfile = /dev/stderr -stderr_logfile_maxbytes = 0 - [program:callback-receiver] command = awx-manage run_callback_receiver directory = /var/lib/awx @@ -56,6 +43,15 @@ priority=5 # TODO: Exit Handler +[eventlistener:awx-config-watcher] +command=/usr/bin/config-watcher +stderr_logfile=/dev/stdout +stderr_logfile_maxbytes=0 +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +events=TICK_60 +priority=0 + [unix_http_server] file=/tmp/supervisor.sock diff --git a/installer/image_build/tasks/main.yml b/installer/image_build/tasks/main.yml index d3dd66207f..43474db68d 100644 --- a/installer/image_build/tasks/main.yml +++ b/installer/image_build/tasks/main.yml @@ -163,19 +163,25 @@ dest: "{{ docker_base_path }}/requirements" delegate_to: localhost +- name: Stage config watcher + copy: + src: ../tools/scripts/config-watcher + dest: "{{ docker_base_path }}/config-watcher" + delegate_to: localhost + - name: Stage Makefile 
copy: src: ../Makefile dest: "{{ docker_base_path }}/Makefile" delegate_to: localhost -- name: State ansible repo +- name: Stage ansible repo copy: src: 'ansible.repo' dest: '{{ docker_base_path }}/ansible.repo' delegate_to: localhost -- name: State ansible repo key +- name: Stage ansible repo key copy: src: 'RPM-GPG-KEY-ansible-release' dest: '{{ docker_base_path }}/RPM-GPG-KEY-ansible-release' diff --git a/installer/image_build/templates/Dockerfile.j2 b/installer/image_build/templates/Dockerfile.j2 index 16c118b1fe..ab3db53490 100644 --- a/installer/image_build/templates/Dockerfile.j2 +++ b/installer/image_build/templates/Dockerfile.j2 @@ -22,6 +22,7 @@ ADD requirements/requirements_ansible.txt \ requirements/requirements_git.txt \ /tmp/requirements/ ADD ansible.repo /etc/yum.repos.d/ansible.repo +ADD config-watcher /usr/bin/config-watcher ADD RPM-GPG-KEY-ansible-release /etc/pki/rpm-gpg/RPM-GPG-KEY-ansible-release # OS Dependencies WORKDIR /tmp @@ -50,7 +51,7 @@ ADD supervisor.conf /supervisor.conf ADD supervisor_task.conf /supervisor_task.conf ADD launch_awx.sh /usr/bin/launch_awx.sh ADD launch_awx_task.sh /usr/bin/launch_awx_task.sh -RUN chmod +rx /usr/bin/launch_awx.sh && chmod +rx /usr/bin/launch_awx_task.sh +RUN chmod +rx /usr/bin/launch_awx.sh && chmod +rx /usr/bin/launch_awx_task.sh && chmod +rx /usr/bin/config-watcher ADD settings.py /etc/tower/settings.py RUN chmod g+w /etc/passwd RUN chmod -R 777 /var/log/nginx && chmod -R 777 /var/lib/nginx diff --git a/installer/inventory b/installer/inventory index d1a742d314..fb966933eb 100644 --- a/installer/inventory +++ b/installer/inventory @@ -90,3 +90,7 @@ pg_port=5432 #awx_container_search_domains=example.com,ansible.com # Alternate DNS servers #awx_alternate_dns_servers="10.1.2.3,10.2.3.4" + +# AWX project data folder. If you need access to the location where AWX stores the projects +# it manages from the docker host, you can set this to turn it into a volume for the container. 
+#project_data_dir=/var/lib/awx/projects \ No newline at end of file diff --git a/installer/kubernetes/tasks/main.yml b/installer/kubernetes/tasks/main.yml index bd9d63677d..53916f56ba 100644 --- a/installer/kubernetes/tasks/main.yml +++ b/installer/kubernetes/tasks/main.yml @@ -96,6 +96,12 @@ path: "{{ kubernetes_base_path }}" state: directory +- name: Template Kubernetes AWX etcd2 + template: + src: etcd.yml.j2 + dest: "{{ kubernetes_base_path }}/etcd.yml" + mode: '0600' + - name: Template Kubernetes AWX Config template: src: configmap.yml.j2 @@ -108,6 +114,9 @@ dest: "{{ kubernetes_base_path }}/deployment.yml" mode: '0600' +- name: Apply etcd deployment + shell: "kubectl apply -f {{ kubernetes_base_path }}/etcd.yml" + - name: Apply Configmap shell: "kubectl apply -f {{ kubernetes_base_path }}/configmap.yml" diff --git a/installer/kubernetes/templates/configmap.yml.j2 b/installer/kubernetes/templates/configmap.yml.j2 index 9aafb888fd..fa61fcda83 100644 --- a/installer/kubernetes/templates/configmap.yml.j2 +++ b/installer/kubernetes/templates/configmap.yml.j2 @@ -13,6 +13,8 @@ data: # Container environments don't like chroots AWX_PROOT_ENABLED = False + AWX_AUTO_DEPROVISION_INSTANCES = True + #Autoprovisioning should replace this CLUSTER_HOST_ID = socket.gethostname() SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' diff --git a/installer/kubernetes/templates/deployment.yml.j2 b/installer/kubernetes/templates/deployment.yml.j2 index 8b7b0580f8..16c86e6227 100644 --- a/installer/kubernetes/templates/deployment.yml.j2 +++ b/installer/kubernetes/templates/deployment.yml.j2 @@ -36,23 +36,51 @@ spec: value: ({{ pg_port|default('5432') }}) - name: DATABASE_PASSWORD value: {{ pg_password }} + - name: MEMCACHED_HOST + value: {{ memcached_hostname|default('localhost') }} + - name: RABBITMQ_HOST + value: {{ rabbitmq_hostname|default('localhost') }} - name: AWX_ADMIN_USER value: {{ default_admin_user|default('admin') }} - name: AWX_ADMIN_PASSWORD value: {{ 
default_admin_password|default('password') }} - name: awx-rabbit - image: rabbitmq:3 + image: ansible/awx_rabbitmq:latest + imagePullPolicy: Always env: + # For consupmption by rabbitmq-env.conf + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RABBITMQ_USE_LONGNAME + value: "true" + - name: ERLANG_COOKIE + value: "test" - name: RABBITMQ_ERLANG_COOKIE - value: secretb + value: "secretb" - name: RABBITMQ_NODENAME - value: rabbitmq + value: "rabbit@$(MY_POD_IP)" + - name: AUTOCLUSTER_TYPE + value: "etcd" + - name: AUTOCLUSTER_DELAY + value: "60" + - name: ETCD_HOST + value: "etcd" + - name: AUTOCLUSTER_CLEANUP + value: "true" + - name: CLEANUP_WARN_ONLY + value: "false" + - name: CLEANUP_INTERVAL + value: "30" - name: RABBITMQ_DEFAULT_USER value: awx - name: RABBITMQ_DEFAULT_PASS value: abcdefg - name: RABBITMQ_DEFAULT_VHOST value: awx + - name: RABBITMQ_CONFIG_FILE + value: /etc/rabbitmq/rabbitmq - name: awx-memcached image: memcached volumes: diff --git a/installer/kubernetes/templates/etcd.yml.j2 b/installer/kubernetes/templates/etcd.yml.j2 new file mode 100644 index 0000000000..e3c93905aa --- /dev/null +++ b/installer/kubernetes/templates/etcd.yml.j2 @@ -0,0 +1,44 @@ +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: etcd + namespace: {{ awx_kubernetes_namespace }} +spec: + replicas: 1 + template: + metadata: + labels: + name: awx-etcd2 + service: etcd + spec: + containers: + - name: etcd + image: elcolio/etcd:latest + ports: + - containerPort: 4001 + volumeMounts: + - mountPath: /data + name: datadir + volumes: + - name: datadir + emptyDir: {} +--- +apiVersion: v1 +kind: Service +metadata: + annotations: + labels: + name: awx-etcd + name: etcd + namespace: {{ awx_kubernetes_namespace }} +spec: + ports: + - name: etcd + port: 4001 + protocol: TCP + targetPort: 4001 + selector: + name: awx-etcd2 + sessionAffinity: None + type: ClusterIP diff --git a/installer/local_docker/tasks/standalone.yml 
b/installer/local_docker/tasks/standalone.yml index f6a51f71d1..ac23f74e66 100644 --- a/installer/local_docker/tasks/standalone.yml +++ b/installer/local_docker/tasks/standalone.yml @@ -79,6 +79,7 @@ state: started restart_policy: unless-stopped image: "{{ awx_web_docker_actual_image }}" + volumes: "{{ project_data_dir + ':/var/lib/awx/projects:rw' if project_data_dir is defined else omit }}" user: root ports: - "{{ host_port }}:8052" @@ -112,6 +113,7 @@ state: started restart_policy: unless-stopped image: "{{ awx_task_docker_actual_image }}" + volumes: "{{ project_data_dir + ':/var/lib/awx/projects:rw' if project_data_dir is defined else omit }}" links: "{{ awx_task_container_links|list }}" user: root hostname: awx diff --git a/installer/openshift/tasks/main.yml b/installer/openshift/tasks/main.yml index 0de60caa98..fd6c967d24 100644 --- a/installer/openshift/tasks/main.yml +++ b/installer/openshift/tasks/main.yml @@ -121,6 +121,15 @@ dest: "{{ openshift_base_path }}/deployment.yml" mode: '0600' +- name: Template Openshift AWX etcd2 + template: + src: etcd.yml.j2 + dest: "{{ openshift_base_path }}/etcd.yml" + mode: '0600' + +- name: Apply etcd deployment + shell: "oc apply -f {{ openshift_base_path }}/etcd.yml" + - name: Apply Configmap shell: "oc apply -f {{ openshift_base_path }}/configmap.yml" diff --git a/installer/openshift/templates/configmap.yml.j2 b/installer/openshift/templates/configmap.yml.j2 index 79c14fefee..8fb1e2b4bf 100644 --- a/installer/openshift/templates/configmap.yml.j2 +++ b/installer/openshift/templates/configmap.yml.j2 @@ -12,7 +12,10 @@ data: # Container environments don't like chroots AWX_PROOT_ENABLED = False - + + # Automatically deprovision pods that go offline + AWX_AUTO_DEPROVISION_INSTANCES = True + #Autoprovisioning should replace this CLUSTER_HOST_ID = socket.gethostname() SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' diff --git a/installer/openshift/templates/deployment.yml.j2 
b/installer/openshift/templates/deployment.yml.j2 index 775ad8a49c..2f247d8f56 100644 --- a/installer/openshift/templates/deployment.yml.j2 +++ b/installer/openshift/templates/deployment.yml.j2 @@ -36,23 +36,51 @@ spec: value: ({{ pg_port|default('5432') }}) - name: DATABASE_PASSWORD value: {{ pg_password }} + - name: MEMCACHED_HOST + value: {{ memcached_hostname|default('localhost') }} + - name: RABBITMQ_HOST + value: {{ rabbitmq_hostname|default('localhost') }} - name: AWX_ADMIN_USER value: {{ default_admin_user|default('admin') }} - name: AWX_ADMIN_PASSWORD value: {{ default_admin_password|default('password') }} - name: awx-rabbit - image: rabbitmq:3 + image: ansible/awx_rabbitmq:latest + imagePullPolicy: Always env: + # For consupmption by rabbitmq-env.conf + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RABBITMQ_USE_LONGNAME + value: "true" + - name: ERLANG_COOKIE + value: "test" - name: RABBITMQ_ERLANG_COOKIE - value: secretb + value: "secretb" - name: RABBITMQ_NODENAME - value: rabbitmq + value: "rabbit@$(MY_POD_IP)" + - name: AUTOCLUSTER_TYPE + value: "etcd" + - name: AUTOCLUSTER_DELAY + value: "60" + - name: ETCD_HOST + value: "etcd" + - name: AUTOCLUSTER_CLEANUP + value: "true" + - name: CLEANUP_WARN_ONLY + value: "false" + - name: CLEANUP_INTERVAL + value: "30" - name: RABBITMQ_DEFAULT_USER value: awx - name: RABBITMQ_DEFAULT_PASS value: abcdefg - name: RABBITMQ_DEFAULT_VHOST value: awx + - name: RABBITMQ_CONFIG_FILE + value: /etc/rabbitmq/rabbitmq - name: awx-memcached image: memcached volumes: @@ -80,6 +108,23 @@ spec: selector: name: awx-web-deploy --- +--- +apiVersion: v1 +kind: Service +metadata: + name: awx-rmq-mgmt + namespace: {{ awx_openshift_project }} + labels: + name: awx-rmq-mgmt +spec: + type: ClusterIP + ports: + - name: rmqmgmt + port: 15672 + targetPort: 15672 + selector: + name: awx-web-deploy +--- apiVersion: v1 kind: Route metadata: diff --git a/installer/openshift/templates/etcd.yml.j2 
b/installer/openshift/templates/etcd.yml.j2 new file mode 100644 index 0000000000..abbfe13185 --- /dev/null +++ b/installer/openshift/templates/etcd.yml.j2 @@ -0,0 +1,44 @@ +--- +apiVersion: extensions/v1beta1 +kind: Deployment +metadata: + name: etcd + namespace: {{ awx_openshift_project }} +spec: + replicas: 1 + template: + metadata: + labels: + name: awx-etcd2 + service: etcd + spec: + containers: + - name: etcd + image: elcolio/etcd:latest + ports: + - containerPort: 4001 + volumeMounts: + - mountPath: /data + name: datadir + volumes: + - name: datadir + emptyDir: {} +--- +apiVersion: v1 +kind: Service +metadata: + annotations: + labels: + name: awx-etcd + name: etcd + namespace: {{ awx_openshift_project }} +spec: + ports: + - name: etcd + port: 4001 + protocol: TCP + targetPort: 4001 + selector: + name: awx-etcd2 + sessionAffinity: None + type: ClusterIP diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index 131b8ebef5..367bf85567 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -1,4 +1,5 @@ django-debug-toolbar==1.5 +django-rest-swagger pprofile ipython==5.2.1 unittest2 diff --git a/shippable.yml b/shippable.yml index 8bbadc8bad..871a497d91 100644 --- a/shippable.yml +++ b/shippable.yml @@ -7,6 +7,7 @@ env: - AWX_BUILD_TARGET=test - AWX_BUILD_TARGET=ui-test-ci - AWX_BUILD_TARGET="flake8 jshint" + - AWX_BUILD_TARGET="swagger" branches: only: diff --git a/tools/data_generators/rbac_dummy_data_generator.py b/tools/data_generators/rbac_dummy_data_generator.py index 5467b1cbb9..566ce540fa 100755 --- a/tools/data_generators/rbac_dummy_data_generator.py +++ b/tools/data_generators/rbac_dummy_data_generator.py @@ -89,7 +89,7 @@ options = vars(options) if options['preset']: - print ' Using preset data numbers set ' + str(options['preset']) + print(' Using preset data numbers set ' + str(options['preset'])) # Read the numbers of resources from presets file, if provided presets_filename = 
os.path.abspath(os.path.join( os.path.dirname(os.path.abspath(__file__)), 'presets.tsv')) diff --git a/tools/docker-compose/supervisor.conf b/tools/docker-compose/supervisor.conf index 7f72e269c4..cedb784324 100644 --- a/tools/docker-compose/supervisor.conf +++ b/tools/docker-compose/supervisor.conf @@ -4,7 +4,7 @@ minfds = 4096 nodaemon=true [program:celeryd] -command = celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=/celerybeat-schedule -Q tower_scheduler,tower_broadcast_all,%(ENV_AWX_GROUP_QUEUES)s,%(ENV_HOSTNAME)s -n celery@%(ENV_HOSTNAME)s +command = celery worker -A awx -l DEBUG -B --autoscale=20,3 -Ofair -s /var/lib/awx/beat.db -Q tower_broadcast_all -n celery@%(ENV_HOSTNAME)s autostart = true autorestart = true redirect_stderr=true diff --git a/tools/rdb.py b/tools/rdb.py index 5b56703a01..d184922d2d 100644 --- a/tools/rdb.py +++ b/tools/rdb.py @@ -183,7 +183,7 @@ def listen(): def _consume(queue): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind(('0.0.0.0', 6899)) - print 'listening for rdb notifications on :6899...' + print('listening for rdb notifications on :6899...') while True: r, w, x = select.select([sock], [], []) for i in r: @@ -201,13 +201,13 @@ def listen(): if port == 'q': break port = int(port) - print 'opening telnet session at localhost:%d...' % port + print('opening telnet session at localhost:%d...' % port) telnet(port) - print 'listening for rdb notifications on :6899...' 
+ print('listening for rdb notifications on :6899...') except Empty: pass except KeyboardInterrupt: - print 'got Ctrl-C' + print('got Ctrl-C') queue.put('q') @@ -218,18 +218,18 @@ def telnet(port): try: s.connect(('0.0.0.0', port)) except Exception: - print 'unable to connect' + print('unable to connect') return - print 'connected to 0.0.0.0:%d' % port + print('connected to 0.0.0.0:%d' % port) while True: socket_list = [sys.stdin, s] - r, w, e = select.select(socket_list , [], []) + r, w, e = select.select(socket_list, [], []) for sock in r: if sock == s: data = sock.recv(4096) if not data: - print 'connection closed' + print('connection closed') return else: sys.stdout.write(data) diff --git a/tools/scripts/config-watcher b/tools/scripts/config-watcher new file mode 100755 index 0000000000..ffa2e56a1f --- /dev/null +++ b/tools/scripts/config-watcher @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +import os +import sys +import hashlib +from supervisor import childutils + + +def hash(f): + s = hashlib.sha1() + with open(f, "rb") as fd: + for chunk in iter(lambda: fd.read(4096), b""): + s.update(chunk) + return s.hexdigest() + + +def last_hash(f): + with open(f, "r") as fd: + return fd.read().strip() + + +def write_hash(f, h): + with open(f, "w") as fd: + fd.write(h) + + +def main(): + while 1: + rpc = childutils.getRPCInterface(os.environ) + headers, payload = childutils.listener.wait(sys.stdin, sys.stdout) + if not headers['eventname'].startswith('TICK'): + childutils.listener.ok(sys.stdout) + continue + try: + current_hash = hash("/etc/tower/settings.py") + except: + sys.stderr.write("Could not open settings.py, skipping config watcher") + childutils.listener.ok(sys.stdout) + continue + try: + if current_hash == last_hash("/var/lib/awx/.configsha"): + childutils.listener.ok(sys.stdout) + continue + else: + sys.stderr.write("Config changed, reloading services") + for proc in rpc.supervisor.getAllProcessInfo(): + group = proc['group'] + name = proc['name'] + program = 
"{}:{}".format(group, name) + if group == "tower-processes": + sys.stderr.write('Restarting %s\n' % program) + rpc.supervisor.stopProcess(program) + rpc.supervisor.startProcess(program) + + except: + sys.stderr.write("No previous hash found") + write_hash("/var/lib/awx/.configsha") + childutils.listener.ok(sys.stdout) diff --git a/tools/scripts/list_fields.py b/tools/scripts/list_fields.py new file mode 100755 index 0000000000..1e637e9c1b --- /dev/null +++ b/tools/scripts/list_fields.py @@ -0,0 +1,46 @@ +__all__ = ['pretty_print_model_fields'] + + +def _get_class_full_name(cls_): + return cls_.__module__ + '.' + cls_.__name__ + + +class _ModelFieldRow(object): + + def __init__(self, field): + self.field = field + self.name = field.name + self.type_ = _get_class_full_name(type(field)) + if self.field.many_to_many\ + or self.field.many_to_one\ + or self.field.one_to_many\ + or self.field.one_to_one: + self.related_model = _get_class_full_name(self.field.remote_field.model) + else: + self.related_model = 'N/A' + + def pretty_print(self, max_name_len, max_type_len, max_rel_model_len): + row = [] + row.append(self.name) + row.append(' ' * (max_name_len - len(self.name))) + row.append('|') + row.append(self.type_) + row.append(' ' * (max_type_len - len(self.type_))) + row.append('|') + row.append(self.related_model) + row.append(' ' * (max_rel_model_len - len(self.related_model))) + print(''.join(row)) + + +def pretty_print_model_fields(model): + field_info_rows = [] + max_lens = [0, 0, 0] + for field in model._meta.get_fields(): + field_info_rows.append(_ModelFieldRow(field)) + max_lens[0] = max(max_lens[0], len(field_info_rows[-1].name)) + max_lens[1] = max(max_lens[1], len(field_info_rows[-1].type_)) + max_lens[2] = max(max_lens[2], len(field_info_rows[-1].related_model)) + print('=' * (sum(max_lens) + len(max_lens) - 1)) + for row in field_info_rows: + row.pretty_print(*max_lens) + print('=' * (sum(max_lens) + len(max_lens) - 1))