diff --git a/Makefile b/Makefile
index e7df5b3d48..0b9cc2a61c 100644
--- a/Makefile
+++ b/Makefile
@@ -296,7 +296,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"
+	uwsgi -b 32768 --socket 127.0.0.1:8050 --module=awx.wsgi:application --home=/venv/awx --chdir=/awx_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --lazy-apps --logformat "%(addr) %(method) %(uri) - %(proto) %(status)" --hook-accepting1-once="exec:/bin/sh -c '[ -f /tmp/celery_pid ] && kill -1 `cat /tmp/celery_pid` || true'"
 
 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -372,13 +372,14 @@ awx-link:
 	sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
 	cp /tmp/awx.egg-link /venv/awx/lib/python2.7/site-packages/awx.egg-link
 
-TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
+TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests awx/network_ui/tests/unit
+
 # Run all API unit tests.
 test:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	py.test $(TEST_DIRS)
+	py.test -n auto $(TEST_DIRS)
 
 test_combined: test_ansible test
 
@@ -386,7 +387,7 @@ test_unit:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
+	py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit awx/network_ui/tests/unit
 
 test_ansible:
 	@if [ "$(VENV_BASE)" ]; then \
diff --git a/awx/api/conf.py b/awx/api/conf.py
index 34bf305f20..58aa9b4cc8 100644
--- a/awx/api/conf.py
+++ b/awx/api/conf.py
@@ -4,6 +4,7 @@ from django.utils.translation import ugettext_lazy as _
 # AWX
 from awx.conf import fields, register
 from awx.api.fields import OAuth2ProviderField
+from oauth2_provider.settings import oauth2_settings
 
 register(
@@ -36,7 +37,7 @@ register(
 register(
     'OAUTH2_PROVIDER',
     field_class=OAuth2ProviderField,
-    default={'ACCESS_TOKEN_EXPIRE_SECONDS': 315360000000,
+    default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
              'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600},
     label=_('OAuth 2 Timeout Settings'),
     help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
diff --git a/awx/api/filters.py b/awx/api/filters.py
index 1c5a47f847..81290c377b 100644
--- a/awx/api/filters.py
+++ b/awx/api/filters.py
@@ -77,6 +77,63 @@ class TypeFilterBackend(BaseFilterBackend):
             raise ParseError(*e.args)
 
 
+def get_field_from_path(model, path):
+    '''
+    Given a Django ORM lookup path (possibly over multiple models)
+    Returns the last field in the line, and also the revised lookup path
+    ex., given
+        model=Organization
+        path='project__timeout'
+    returns tuple of field at the end of the line as well as a corrected
+    path, for special cases we do substitutions
+        (<IntegerField: timeout>, 'project__timeout')
+    '''
+    # Store of all the fields used to detect repeats
+    field_set = set([])
+    new_parts = []
+    for name in path.split('__'):
+        if model is None:
+            raise ParseError(_('No related model for field {}.').format(name))
+        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
+        if model._meta.object_name in ('Project', 'InventorySource'):
+            name = {
+                'current_update': 'current_job',
+                'last_update': 'last_job',
+                'last_update_failed': 'last_job_failed',
+                'last_updated': 'last_job_run',
+            }.get(name, name)
+
+        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
+            name = 'polymorphic_ctype'
+            new_parts.append('polymorphic_ctype__model')
+        else:
+            new_parts.append(name)
+
+        if name in getattr(model, 'PASSWORD_FIELDS', ()):
+            raise PermissionDenied(_('Filtering on password fields is not allowed.'))
+        elif name == 'pk':
+            field = model._meta.pk
+        else:
+            name_alt = name.replace("_", "")
+            if name_alt in model._meta.fields_map.keys():
+                field = model._meta.fields_map[name_alt]
+                new_parts.pop()
+                new_parts.append(name_alt)
+            else:
+                field = model._meta.get_field(name)
+            if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
+                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
+            elif getattr(field, '__prevent_search__', False):
+                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
+        if field in field_set:
+            # Field traversed twice, could create infinite JOINs, DoSing Tower
+            raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
+        field_set.add(field)
+        model = getattr(field, 'related_model', None)
+
+    return field, '__'.join(new_parts)
+
+
 class FieldLookupBackend(BaseFilterBackend):
     '''
     Filter using field lookups provided via query string parameters.
@@ -91,61 +148,23 @@ class FieldLookupBackend(BaseFilterBackend):
                          'isnull', 'search')
 
     def get_field_from_lookup(self, model, lookup):
-        field = None
-        parts = lookup.split('__')
-        if parts and parts[-1] not in self.SUPPORTED_LOOKUPS:
-            parts.append('exact')
+
+        if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
+            path, suffix = lookup.rsplit('__', 1)
+        else:
+            path = lookup
+            suffix = 'exact'
+
+        if not path:
+            raise ParseError(_('Query string field name not provided.'))
+
         # FIXME: Could build up a list of models used across relationships, use
         # those lookups combined with request.user.get_queryset(Model) to make
         # sure user cannot query using objects he could not view.
-        new_parts = []
+        field, new_path = get_field_from_path(model, path)
 
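# [Illustrative aside, not part of the patch] A minimal sketch of the
# suffix-splitting rule in the new get_field_from_lookup() above, assuming the
# same SUPPORTED_LOOKUPS tuple (abridged here): the final '__' segment is only
# treated as a lookup operator when it names a known one; otherwise the whole
# string is a field path and 'exact' is implied.

SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains', 'isnull', 'search')

def split_lookup(lookup):
    # rsplit keeps multi-hop paths like 'project__name' intact
    if '__' in lookup and lookup.rsplit('__', 1)[-1] in SUPPORTED_LOOKUPS:
        return tuple(lookup.rsplit('__', 1))
    return (lookup, 'exact')

assert split_lookup('name__icontains') == ('name', 'icontains')
assert split_lookup('project__name') == ('project__name', 'exact')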
-        # Store of all the fields used to detect repeats
-        field_set = set([])
-
-        for name in parts[:-1]:
-            # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
-            if model._meta.object_name in ('Project', 'InventorySource'):
-                name = {
-                    'current_update': 'current_job',
-                    'last_update': 'last_job',
-                    'last_update_failed': 'last_job_failed',
-                    'last_updated': 'last_job_run',
-                }.get(name, name)
-
-            if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
-                name = 'polymorphic_ctype'
-                new_parts.append('polymorphic_ctype__model')
-            else:
-                new_parts.append(name)
-
-            if name in getattr(model, 'PASSWORD_FIELDS', ()):
-                raise PermissionDenied(_('Filtering on password fields is not allowed.'))
-            elif name == 'pk':
-                field = model._meta.pk
-            else:
-                name_alt = name.replace("_", "")
-                if name_alt in model._meta.fields_map.keys():
-                    field = model._meta.fields_map[name_alt]
-                    new_parts.pop()
-                    new_parts.append(name_alt)
-                else:
-                    field = model._meta.get_field(name)
-                if 'auth' in name or 'token' in name:
-                    raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
-                if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
-                    raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
-                elif getattr(field, '__prevent_search__', False):
-                    raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
-            if field in field_set:
-                # Field traversed twice, could create infinite JOINs, DoSing Tower
-                raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
-            field_set.add(field)
-            model = getattr(field, 'related_model', None) or field.model
-
-        if parts:
-            new_parts.append(parts[-1])
-            new_lookup = '__'.join(new_parts)
+        new_lookup = new_path
+        new_lookup = '__'.join([new_path, suffix])
         return field, new_lookup
 
     def to_python_related(self, value):
@@ -371,7 +390,7 @@ class OrderByBackend(BaseFilterBackend):
             else:
                 order_by = (value,)
             if order_by:
-                order_by = self._strip_sensitive_model_fields(queryset.model, order_by)
+                order_by = self._validate_ordering_fields(queryset.model, order_by)
 
             # Special handling of the type field for ordering. In this
             # case, we're not sorting exactly on the type field, but
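# [Illustrative aside, not part of the patch] The hunk below reworks ordering
# validation on top of get_field_from_path(): the optional '-' (descending)
# prefix is peeled off, the remaining path is validated like any filter path,
# and the prefix is re-attached. The prefix handling in isolation, with a
# hypothetical validate_path callable standing in for get_field_from_path:

def normalize_order_by(field_name, validate_path):
    prefix, path = '', field_name
    if field_name.startswith('-'):
        prefix, path = '-', field_name[1:]
    # validate_path raises for unknown/forbidden paths, else returns the
    # (possibly substituted) lookup path
    return prefix + validate_path(path)

assert normalize_order_by('-created', lambda p: p) == '-created'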
@@ -396,15 +415,17 @@ class OrderByBackend(BaseFilterBackend):
             # Return a 400 for invalid field names.
             raise ParseError(*e.args)
 
-    def _strip_sensitive_model_fields(self, model, order_by):
+    def _validate_ordering_fields(self, model, order_by):
         for field_name in order_by:
             # strip off the negation prefix `-` if it exists
-            _field_name = field_name.split('-')[-1]
+            prefix = ''
+            path = field_name
+            if field_name[0] == '-':
+                prefix = field_name[0]
+                path = field_name[1:]
             try:
-                # if the field name is encrypted/sensitive, don't sort on it
-                if _field_name in getattr(model, 'PASSWORD_FIELDS', ()) or \
-                        getattr(model._meta.get_field(_field_name), '__prevent_search__', False):
-                    raise ParseError(_('cannot order by field %s') % _field_name)
-            except FieldDoesNotExist:
-                pass
-            yield field_name
+                field, new_path = get_field_from_path(model, path)
+                new_path = '{}{}'.format(prefix, new_path)
+            except (FieldError, FieldDoesNotExist) as e:
+                raise ParseError(e.args[0])
+            yield new_path
diff --git a/awx/api/generics.py b/awx/api/generics.py
index e12949515b..c62f3cc6dd 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -6,6 +6,7 @@ import inspect
 import logging
 import time
 import six
+import urllib
 
 # Django
 from django.conf import settings
@@ -29,6 +30,7 @@ from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
 from rest_framework.permissions import AllowAny
+from rest_framework.renderers import JSONRenderer
 
 # cryptography
 from cryptography.fernet import InvalidToken
@@ -39,7 +41,7 @@ from awx.main.models import * # noqa
 from awx.main.access import access_registry
 from awx.main.utils import * # noqa
 from awx.main.utils.db import get_all_field_names
-from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
 from awx.api.versioning import URLPathVersioning, get_request_version
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 
@@ -70,6 +72,13 @@ class LoggedLoginView(auth_views.LoginView):
         if current_user and getattr(current_user, 'pk', None) and current_user != original_user:
             logger.info("User {} logged in.".format(current_user.username))
         if request.user.is_authenticated:
+            logger.info(smart_text(u"User {} logged in".format(self.request.user.username)))
+            ret.set_cookie('userLoggedIn', 'true')
+            current_user = UserSerializer(self.request.user)
+            current_user = JSONRenderer().render(current_user.data)
+            current_user = urllib.quote('%s' % current_user, '')
+            ret.set_cookie('current_user', current_user)
+
             return ret
         else:
             ret.status_code = 401
@@ -82,6 +91,7 @@ class LoggedLogoutView(auth_views.LogoutView):
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
         current_user = getattr(request, 'user', None)
+        ret.set_cookie('userLoggedIn', 'false')
         if (not current_user or not getattr(current_user, 'pk', True)) \
                 and current_user != original_user:
             logger.info("User {} logged out.".format(original_user.username))
@@ -868,6 +878,9 @@ class CopyAPIView(GenericAPIView):
                     obj, field.name, field_val
                 )
         new_obj = model.objects.create(**create_kwargs)
+        logger.debug(six.text_type('Deep copy: Created new object {}({})').format(
+            new_obj, model
+        ))
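# [Illustrative aside, not part of the patch] The LoggedLoginView hunk above
# hands the UI a snapshot of the logged-in user by rendering the serializer
# data to JSON and URL-quoting it into a cookie, since raw JSON ('"', ';',
# whitespace) is not cookie-safe. The encoding step in isolation; the view
# uses DRF's JSONRenderer and UserSerializer, plain json is used here only to
# keep the sketch self-contained (Python 2 urllib, as in the patch):

import json
import urllib

def user_cookie_value(user_data):
    # user_data plays the role of UserSerializer(user).data in the view
    return urllib.quote(json.dumps(user_data, sort_keys=True), '')

assert user_cookie_value({'id': 1}) == '%7B%22id%22%3A%201%7D'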
         # Need to save separatedly because Djang-crum get_current_user would
         # not work properly in non-request-response-cycle context.
         new_obj.created_by = creater
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
index ebeb5f3286..e11474f27b 100644
--- a/awx/api/metadata.py
+++ b/awx/api/metadata.py
@@ -62,15 +62,11 @@ class Metadata(metadata.SimpleMetadata):
                 opts = serializer.Meta.model._meta.concrete_model._meta
                 verbose_name = smart_text(opts.verbose_name)
                 field_info['help_text'] = field_help_text[field.field_name].format(verbose_name)
-        # If field is not part of the model, then show it as non-filterable
-        else:
-            is_model_field = False
-            for model_field in serializer.Meta.model._meta.fields:
-                if field.field_name == model_field.name:
-                    is_model_field = True
-                    break
-            if not is_model_field:
-                field_info['filterable'] = False
+
+        for model_field in serializer.Meta.model._meta.fields:
+            if field.field_name == model_field.name:
+                field_info['filterable'] = True
+                break
 
         # Indicate if a field has a default value.
         # FIXME: Still isn't showing all default values?
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index c774898d0d..8e8b9930c5 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -14,7 +14,6 @@ from datetime import timedelta
 
 # OAuth2
 from oauthlib.common import generate_token
-from oauth2_provider.settings import oauth2_settings
 
 # Django
 from django.conf import settings
@@ -1024,7 +1023,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
         validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
-            seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
+            seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
         )
         obj = super(OAuth2TokenSerializer, self).create(validated_data)
         obj.save()
@@ -1176,7 +1175,7 @@ class OAuth2TokenSerializer(BaseSerializer):
         validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
-            seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
+            seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
         )
         obj = super(OAuth2TokenSerializer, self).create(validated_data)
         if obj.application and obj.application.user:
@@ -1239,7 +1238,7 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
         validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
-            seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
+            seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
         )
         obj = super(OAuth2AuthorizedTokenSerializer, self).create(validated_data)
         if obj.application and obj.application.user:
@@ -1306,7 +1305,7 @@ class OAuth2PersonalTokenSerializer(BaseSerializer):
         validated_data['user'] = self.context['request'].user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
-            seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
+            seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS']
         )
         validated_data['application'] = None
         obj = super(OAuth2PersonalTokenSerializer, self).create(validated_data)
@@ -4512,9 +4511,19 @@ class SchedulePreviewSerializer(BaseSerializer):
 
 class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
     show_capabilities = ['edit', 'delete']
+    timezone = serializers.SerializerMethodField()
+    until = serializers.SerializerMethodField()
 
     class Meta:
         model = Schedule
-        fields = ('*', 'unified_job_template', 'enabled', 'dtstart', 'dtend', 'rrule', 'next_run',)
+        fields = ('*', 'unified_job_template', 'enabled', 'dtstart', 'dtend', 'rrule', 'next_run', 'timezone',
+                  'until')
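# [Illustrative aside, not part of the patch] The serializer hunks above drop
# the import-time oauth2_settings constant in favor of the live Django
# setting, so token lifetime follows whatever OAUTH2_PROVIDER value is
# configured at runtime (see the matching awx/api/conf.py change). The expiry
# computation on its own, assuming a configured Django settings module:

from datetime import timedelta
from django.conf import settings
from django.utils.timezone import now

def access_token_expiry():
    # OAUTH2_PROVIDER is the dict registered in awx/api/conf.py
    return now() + timedelta(seconds=settings.OAUTH2_PROVIDER['ACCESS_TOKEN_EXPIRE_SECONDS'])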
+ + def get_timezone(self, obj): + return obj.timezone + + def get_until(self, obj): + return obj.until def get_related(self, obj): res = super(ScheduleSerializer, self).get_related(obj) @@ -4600,7 +4609,7 @@ class InstanceGroupSerializer(BaseSerializer): "this group when new instances come online.") ) policy_instance_list = serializers.ListField( - child=serializers.CharField(), + child=serializers.CharField(), required=False, help_text=_("List of exact-match Instances that will be assigned to this group") ) @@ -4627,6 +4636,11 @@ class InstanceGroupSerializer(BaseSerializer): raise serializers.ValidationError(_('{} is not a valid hostname of an existing instance.').format(instance_name)) return value + def validate_name(self, value): + if self.instance and self.instance.name == 'tower' and value != 'tower': + raise serializers.ValidationError(_('tower instance group name may not be changed.')) + return value + def get_jobs_qs(self): # Store running jobs queryset in context, so it will be shared in ListView if 'running_jobs' not in self.context: diff --git a/awx/api/urls/user.py b/awx/api/urls/user.py index 3e37de1dda..9ecebbb044 100644 --- a/awx/api/urls/user.py +++ b/awx/api/urls/user.py @@ -15,7 +15,7 @@ from awx.api.views import ( UserActivityStreamList, UserAccessList, OAuth2ApplicationList, - OAuth2TokenList, + OAuth2UserTokenList, OAuth2PersonalTokenList, UserAuthorizedTokenList, ) @@ -32,7 +32,7 @@ urls = [ url(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), url(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), url(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^(?P[0-9]+)/tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + url(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), url(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), url(r'^(?P[0-9]+)/personal_tokens/$', OAuth2PersonalTokenList.as_view(), name='o_auth2_personal_token_list'), diff --git a/awx/api/views.py b/awx/api/views.py index 5e080ccea9..5f1d8b22af 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -404,9 +404,11 @@ class ApiV1ConfigView(APIView): data.update(dict( project_base_dir = settings.PROJECTS_ROOT, project_local_paths = Project.get_local_path_choices(), - custom_virtualenvs = get_custom_venv_choices(), )) + if JobTemplate.accessible_objects(request.user, 'admin_role').exists(): + data['custom_virtualenvs'] = get_custom_venv_choices() + return Response(data) def post(self, request): @@ -610,6 +612,7 @@ class InstanceList(ListAPIView): view_name = _("Instances") model = Instance serializer_class = InstanceSerializer + search_fields = ('hostname',) class InstanceDetail(RetrieveUpdateAPIView): @@ -696,6 +699,7 @@ class InstanceGroupInstanceList(InstanceGroupMembershipMixin, SubListAttachDetac serializer_class = InstanceSerializer parent_model = InstanceGroup relationship = "instances" + search_fields = ('hostname',) class ScheduleList(ListAPIView): @@ -745,11 +749,11 @@ class ScheduleZoneInfo(APIView): swagger_topic = 'System Configuration' def get(self, request): - from dateutil.zoneinfo import get_zonefile_instance - return Response([ + zones = [ {'name': zone} - for zone in sorted(get_zonefile_instance().zones) - ]) + for zone in Schedule.get_zoneinfo() + ] + return Response(zones) class LaunchConfigCredentialsBase(SubListAttachDetachAPIView): @@ -1072,6 
+1076,7 @@ class OrganizationActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIV serializer_class = ActivityStreamSerializer parent_model = Organization relationship = 'activitystream_set' + search_fields = ('changes',) class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView): @@ -1126,6 +1131,7 @@ class OrganizationObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Organization + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -1165,6 +1171,7 @@ class TeamRolesList(SubListAttachDetachAPIView): metadata_class = RoleMetadata parent_model = Team relationship='member_role.children' + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): team = get_object_or_404(Team, pk=self.kwargs['pk']) @@ -1202,6 +1209,7 @@ class TeamObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Team + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -1233,6 +1241,7 @@ class TeamActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Team relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -1328,6 +1337,7 @@ class ProjectActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Project relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -1415,6 +1425,7 @@ class ProjectUpdateEventsList(SubListAPIView): parent_model = ProjectUpdate relationship = 'project_update_events' view_name = _('Project Update Events List') + search_fields = ('stdout',) def finalize_response(self, request, response, *args, **kwargs): response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS @@ -1428,6 +1439,7 @@ class SystemJobEventsList(SubListAPIView): parent_model = SystemJob relationship = 'system_job_events' view_name = _('System Job Events List') + search_fields = ('stdout',) def finalize_response(self, request, response, *args, **kwargs): response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS @@ -1441,6 +1453,7 @@ class InventoryUpdateEventsList(SubListAPIView): parent_model = InventoryUpdate relationship = 'inventory_update_events' view_name = _('Inventory Update Events List') + search_fields = ('stdout',) def finalize_response(self, request, response, *args, **kwargs): response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS @@ -1468,6 +1481,7 @@ class ProjectUpdateNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = ProjectUpdate relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class ProjectUpdateScmInventoryUpdates(SubListCreateAPIView): @@ -1491,6 +1505,7 @@ class ProjectObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Project + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -1573,6 +1588,7 @@ class OAuth2ApplicationActivityStreamList(ActivityStreamEnforcementMixin, SubLis parent_model = OAuth2Application relationship = 'activitystream_set' swagger_topic = 'Authentication' + search_fields = ('changes',) class OAuth2TokenList(ListCreateAPIView): @@ -1582,6 +1598,18 @@ class 
OAuth2TokenList(ListCreateAPIView): model = OAuth2AccessToken serializer_class = OAuth2TokenSerializer swagger_topic = 'Authentication' + + +class OAuth2UserTokenList(SubListCreateAPIView): + + view_name = _("OAuth2 User Tokens") + + model = OAuth2AccessToken + serializer_class = OAuth2TokenSerializer + parent_model = User + relationship = 'main_oauth2accesstoken' + parent_key = 'user' + swagger_topic = 'Authentication' class OAuth2AuthorizedTokenList(SubListCreateAPIView): @@ -1657,6 +1685,7 @@ class OAuth2TokenActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIVi parent_model = OAuth2AccessToken relationship = 'activitystream_set' swagger_topic = 'Authentication' + search_fields = ('changes',) class UserTeamsList(ListAPIView): @@ -1680,6 +1709,7 @@ class UserRolesList(SubListAttachDetachAPIView): parent_model = User relationship='roles' permission_classes = (IsAuthenticated,) + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): u = get_object_or_404(User, pk=self.kwargs['pk']) @@ -1766,6 +1796,7 @@ class UserActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = User relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -1852,6 +1883,7 @@ class CredentialTypeActivityStreamList(ActivityStreamEnforcementMixin, SubListAP serializer_class = ActivityStreamSerializer parent_model = CredentialType relationship = 'activitystream_set' + search_fields = ('changes',) # remove in 3.3 @@ -1965,6 +1997,7 @@ class CredentialActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIVie serializer_class = ActivityStreamSerializer parent_model = Credential relationship = 'activitystream_set' + search_fields = ('changes',) class CredentialAccessList(ResourceAccessList): @@ -1978,6 +2011,7 @@ class CredentialObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Credential + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -2018,6 +2052,7 @@ class InventoryScriptObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = CustomInventoryScript + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -2105,6 +2140,7 @@ class InventoryActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView serializer_class = ActivityStreamSerializer parent_model = Inventory relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -2132,6 +2168,7 @@ class InventoryObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = Inventory + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -2275,6 +2312,7 @@ class HostActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Host relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -2288,6 +2326,7 @@ class HostFactVersionsList(SystemTrackingEnforcementMixin, ParentMixin, ListAPIV model = Fact serializer_class = FactVersionSerializer parent_model = Host + search_fields = ('facts',) def get_queryset(self): from_spec = self.request.query_params.get('from', None) @@ -2521,6 +2560,7 @@ class 
GroupActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Group relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -2752,6 +2792,7 @@ class InventorySourceActivityStreamList(ActivityStreamEnforcementMixin, SubListA serializer_class = ActivityStreamSerializer parent_model = InventorySource relationship = 'activitystream_set' + search_fields = ('changes',) class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): @@ -2891,6 +2932,7 @@ class InventoryUpdateNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = InventoryUpdate relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class JobTemplateList(ListCreateAPIView): @@ -3229,6 +3271,7 @@ class JobTemplateActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIVi serializer_class = ActivityStreamSerializer parent_model = JobTemplate relationship = 'activitystream_set' + search_fields = ('changes',) class JobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView): @@ -3512,6 +3555,7 @@ class JobTemplateObjectRolesList(SubListAPIView): model = Role serializer_class = RoleSerializer parent_model = JobTemplate + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -3529,6 +3573,7 @@ class WorkflowJobNodeList(WorkflowsEnforcementMixin, ListAPIView): model = WorkflowJobNode serializer_class = WorkflowJobNodeListSerializer + search_fields = ('unified_job_template__name', 'unified_job_template__description',) class WorkflowJobNodeDetail(WorkflowsEnforcementMixin, RetrieveAPIView): @@ -3549,6 +3594,7 @@ class WorkflowJobTemplateNodeList(WorkflowsEnforcementMixin, ListCreateAPIView): model = WorkflowJobTemplateNode serializer_class = WorkflowJobTemplateNodeSerializer + search_fields = ('unified_job_template__name', 'unified_job_template__description',) class WorkflowJobTemplateNodeDetail(WorkflowsEnforcementMixin, RetrieveUpdateDestroyAPIView): @@ -3570,6 +3616,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(WorkflowsEnforcementMixin, Enforce parent_model = WorkflowJobTemplateNode relationship = '' enforce_parent_relationship = 'workflow_job_template' + search_fields = ('unified_job_template__name', 'unified_job_template__description',) ''' Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by @@ -3639,6 +3686,7 @@ class WorkflowJobNodeChildrenBaseList(WorkflowsEnforcementMixin, SubListAPIView) serializer_class = WorkflowJobNodeListSerializer parent_model = WorkflowJobNode relationship = '' + search_fields = ('unified_job_template__name', 'unified_job_template__description',) # #Limit the set of WorkflowJobeNodes to the related nodes of specified by @@ -3702,12 +3750,18 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView): item = getattr(obj, field_name, None) if item is None: continue - if field_name in ['inventory']: + elif field_name in ['inventory']: if not user.can_access(item.__class__, 'use', item): setattr(obj, field_name, None) - if field_name in ['unified_job_template']: + elif field_name in ['unified_job_template']: if not user.can_access(item.__class__, 'start', item, validate_license=False): setattr(obj, field_name, None) + elif field_name in ['credentials']: + for cred in item.all(): + if not user.can_access(cred.__class__, 'use', cred): + 
logger.debug(six.text_type( + 'Deep copy: removing {} from relationship due to permissions').format(cred)) + item.remove(cred.pk) obj.save() @@ -3788,6 +3842,7 @@ class WorkflowJobTemplateWorkflowNodesList(WorkflowsEnforcementMixin, SubListCre parent_model = WorkflowJobTemplate relationship = 'workflow_job_template_nodes' parent_key = 'workflow_job_template' + search_fields = ('unified_job_template__name', 'unified_job_template__description',) def get_queryset(self): return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id') @@ -3848,6 +3903,7 @@ class WorkflowJobTemplateObjectRolesList(WorkflowsEnforcementMixin, SubListAPIVi model = Role serializer_class = RoleSerializer parent_model = WorkflowJobTemplate + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): po = self.get_parent_object() @@ -3861,6 +3917,7 @@ class WorkflowJobTemplateActivityStreamList(WorkflowsEnforcementMixin, ActivityS serializer_class = ActivityStreamSerializer parent_model = WorkflowJobTemplate relationship = 'activitystream_set' + search_fields = ('changes',) def get_queryset(self): parent = self.get_parent_object() @@ -3890,6 +3947,7 @@ class WorkflowJobWorkflowNodesList(WorkflowsEnforcementMixin, SubListAPIView): parent_model = WorkflowJob relationship = 'workflow_job_nodes' parent_key = 'workflow_job' + search_fields = ('unified_job_template__name', 'unified_job_template__description',) def get_queryset(self): return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id') @@ -3918,6 +3976,7 @@ class WorkflowJobNotificationsList(WorkflowsEnforcementMixin, SubListAPIView): serializer_class = NotificationSerializer parent_model = WorkflowJob relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class WorkflowJobActivityStreamList(WorkflowsEnforcementMixin, ActivityStreamEnforcementMixin, SubListAPIView): @@ -3926,6 +3985,7 @@ class WorkflowJobActivityStreamList(WorkflowsEnforcementMixin, ActivityStreamEnf serializer_class = ActivityStreamSerializer parent_model = WorkflowJob relationship = 'activitystream_set' + search_fields = ('changes',) class SystemJobTemplateList(ListAPIView): @@ -4081,6 +4141,7 @@ class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView): serializer_class = ActivityStreamSerializer parent_model = Job relationship = 'activitystream_set' + search_fields = ('changes',) # TODO: remove endpoint in 3.3 @@ -4284,6 +4345,7 @@ class JobNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = Job relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class BaseJobHostSummariesList(SubListAPIView): @@ -4293,6 +4355,7 @@ class BaseJobHostSummariesList(SubListAPIView): parent_model = None # Subclasses must define this attribute. 
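# [Illustrative aside, not part of the patch] The many search_fields tuples
# added throughout this file (e.g. ('host_name',) just below) opt each list
# endpoint into ?search= term matching; AWX's filter backend fans the term out
# across the declared fields. A simplified stand-in for that fan-out using
# Django Q objects (not the actual AWX backend):

from django.db.models import Q

def apply_search(queryset, search_fields, term):
    condition = Q()
    for field in search_fields:
        # OR together icontains matches on every declared field
        condition |= Q(**{'{}__icontains'.format(field): term})
    return queryset.filter(condition)

# e.g. GET /api/v2/notifications/?search=failed behaves roughly like
# apply_search(Notification.objects.all(), ('subject', 'notification_type', 'body'), 'failed')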
relationship = 'job_host_summaries' view_name = _('Job Host Summaries List') + search_fields = ('host_name',) def get_queryset(self): parent = self.get_parent_object() @@ -4325,6 +4388,7 @@ class JobEventList(ListAPIView): model = JobEvent serializer_class = JobEventSerializer + search_fields = ('stdout',) class JobEventDetail(RetrieveAPIView): @@ -4340,6 +4404,7 @@ class JobEventChildrenList(SubListAPIView): parent_model = JobEvent relationship = 'children' view_name = _('Job Event Children List') + search_fields = ('stdout',) class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView): @@ -4553,6 +4618,7 @@ class AdHocCommandEventList(ListAPIView): model = AdHocCommandEvent serializer_class = AdHocCommandEventSerializer + search_fields = ('stdout',) class AdHocCommandEventDetail(RetrieveAPIView): @@ -4568,6 +4634,7 @@ class BaseAdHocCommandEventsList(SubListAPIView): parent_model = None # Subclasses must define this attribute. relationship = 'ad_hoc_command_events' view_name = _('Ad Hoc Command Events List') + search_fields = ('stdout',) class HostAdHocCommandEventsList(BaseAdHocCommandEventsList): @@ -4590,6 +4657,7 @@ class AdHocCommandActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIV serializer_class = ActivityStreamSerializer parent_model = AdHocCommand relationship = 'activitystream_set' + search_fields = ('changes',) class AdHocCommandNotificationsList(SubListAPIView): @@ -4598,6 +4666,7 @@ class AdHocCommandNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = AdHocCommand relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class SystemJobList(ListCreateAPIView): @@ -4638,6 +4707,7 @@ class SystemJobNotificationsList(SubListAPIView): serializer_class = NotificationSerializer parent_model = SystemJob relationship = 'notifications' + search_fields = ('subject', 'notification_type', 'body',) class UnifiedJobTemplateList(ListAPIView): @@ -4706,7 +4776,6 @@ class UnifiedJobStdout(RetrieveAPIView): try: target_format = request.accepted_renderer.format if target_format in ('html', 'api', 'json'): - content_format = request.query_params.get('content_format', 'html') content_encoding = request.query_params.get('content_encoding', None) start_line = request.query_params.get('start_line', 0) end_line = request.query_params.get('end_line', None) @@ -4732,10 +4801,10 @@ class UnifiedJobStdout(RetrieveAPIView): if target_format == 'api': return Response(mark_safe(data)) if target_format == 'json': - if content_encoding == 'base64' and content_format == 'ansi': - return Response({'range': {'start': start, 'end': end, 'absolute_end': absolute_end}, 'content': b64encode(content.encode('utf-8'))}) - elif content_format == 'html': - return Response({'range': {'start': start, 'end': end, 'absolute_end': absolute_end}, 'content': body}) + content = content.encode('utf-8') + if content_encoding == 'base64': + content = b64encode(content) + return Response({'range': {'start': start, 'end': end, 'absolute_end': absolute_end}, 'content': content}) return Response(data) elif target_format == 'txt': return Response(unified_job.result_stdout) @@ -4843,6 +4912,7 @@ class NotificationTemplateNotificationList(SubListAPIView): parent_model = NotificationTemplate relationship = 'notifications' parent_key = 'notification_template' + search_fields = ('subject', 'notification_type', 'body',) class NotificationTemplateCopy(CopyAPIView): @@ -4855,6 +4925,7 @@ class NotificationList(ListAPIView): model = Notification 
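# [Illustrative aside, not part of the patch] The UnifiedJobStdout hunk above
# drops the content_format branches: JSON responses now always return the raw
# stdout text, base64-encoded when ?content_encoding=base64 is given, instead
# of sometimes returning pre-rendered HTML. The encoding step in isolation
# (Python 2 semantics, matching the codebase):

from base64 import b64encode

def encode_stdout(content, content_encoding=None):
    content = content.encode('utf-8')
    if content_encoding == 'base64':
        content = b64encode(content)
    return content

assert encode_stdout(u'ok\n', 'base64') == 'b2sK'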
serializer_class = NotificationSerializer + search_fields = ('subject', 'notification_type', 'body',) class NotificationDetail(RetrieveAPIView): @@ -4879,6 +4950,7 @@ class ActivityStreamList(ActivityStreamEnforcementMixin, SimpleListAPIView): model = ActivityStream serializer_class = ActivityStreamSerializer + search_fields = ('changes',) class ActivityStreamDetail(ActivityStreamEnforcementMixin, RetrieveAPIView): @@ -4892,6 +4964,7 @@ class RoleList(ListAPIView): model = Role serializer_class = RoleSerializer permission_classes = (IsAuthenticated,) + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): result = Role.visible_roles(self.request.user) @@ -5004,6 +5077,7 @@ class RoleParentsList(SubListAPIView): parent_model = Role relationship = 'parents' permission_classes = (IsAuthenticated,) + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): role = Role.objects.get(pk=self.kwargs['pk']) @@ -5017,6 +5091,7 @@ class RoleChildrenList(SubListAPIView): parent_model = Role relationship = 'children' permission_classes = (IsAuthenticated,) + search_fields = ('role_field', 'content_type__model',) def get_queryset(self): role = Role.objects.get(pk=self.kwargs['pk']) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index 06c2facb7a..4f9a36395c 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -2,8 +2,6 @@ from django.apps import AppConfig # from django.core import checks from django.utils.translation import ugettext_lazy as _ -from awx.main.utils.handlers import configure_external_logger -from django.conf import settings class ConfConfig(AppConfig): @@ -11,16 +9,7 @@ class ConfConfig(AppConfig): name = 'awx.conf' verbose_name = _('Configuration') - def configure_oauth2_provider(self, settings): - from oauth2_provider import settings as o_settings - o_settings.oauth2_settings = o_settings.OAuth2ProviderSettings( - settings.OAUTH2_PROVIDER, o_settings.DEFAULTS, - o_settings.IMPORT_STRINGS, o_settings.MANDATORY - ) - def ready(self): self.module.autodiscover() from .settings import SettingsWrapper SettingsWrapper.initialize() - configure_external_logger(settings) - self.configure_oauth2_provider(settings) diff --git a/awx/conf/settings.py b/awx/conf/settings.py index 4263deaa1d..2f7970ec2b 100644 --- a/awx/conf/settings.py +++ b/awx/conf/settings.py @@ -5,6 +5,8 @@ import logging import sys import threading import time +import StringIO +import traceback import six @@ -62,11 +64,19 @@ __all__ = ['SettingsWrapper', 'get_settings_to_cache', 'SETTING_CACHE_NOTSET'] def _log_database_error(): try: yield - except (ProgrammingError, OperationalError) as e: - if get_tower_migration_version() < '310': + except (ProgrammingError, OperationalError): + if 'migrate' in sys.argv and get_tower_migration_version() < '310': logger.info('Using default settings until version 3.1 migration.') else: - logger.warning('Database settings are not available, using defaults (%s)', e, exc_info=True) + # Somewhat ugly - craming the full stack trace into the log message + # the available exc_info does not give information about the real caller + # TODO: replace in favor of stack_info kwarg in python 3 + sio = StringIO.StringIO() + traceback.print_stack(file=sio) + sinfo = sio.getvalue() + sio.close() + sinfo = sinfo.strip('\n') + logger.warning('Database settings are not available, using defaults, logged from:\n{}'.format(sinfo)) finally: pass diff --git a/awx/conf/tests/__init__.py b/awx/conf/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff 
--git a/awx/conf/tests/functional/test_api.py b/awx/conf/tests/functional/test_api.py index 0e845238f6..1be22ee2dc 100644 --- a/awx/conf/tests/functional/test_api.py +++ b/awx/conf/tests/functional/test_api.py @@ -338,13 +338,14 @@ def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting @pytest.mark.django_db def test_setting_logging_test(api_request): - with mock.patch('awx.conf.views.BaseHTTPSHandler.perform_test') as mock_func: + with mock.patch('awx.conf.views.AWXProxyHandler.perform_test') as mock_func: api_request( 'post', reverse('api:setting_logging_test'), data={'LOG_AGGREGATOR_HOST': 'http://foobar', 'LOG_AGGREGATOR_TYPE': 'logstash'} ) - test_arguments = mock_func.call_args[0][0] - assert test_arguments.LOG_AGGREGATOR_HOST == 'http://foobar' - assert test_arguments.LOG_AGGREGATOR_TYPE == 'logstash' - assert test_arguments.LOG_AGGREGATOR_LEVEL == 'DEBUG' + call = mock_func.call_args_list[0] + args, kwargs = call + given_settings = kwargs['custom_settings'] + assert given_settings.LOG_AGGREGATOR_HOST == 'http://foobar' + assert given_settings.LOG_AGGREGATOR_TYPE == 'logstash' diff --git a/awx/conf/tests/test_env.py b/awx/conf/tests/test_env.py new file mode 100644 index 0000000000..135c90d99b --- /dev/null +++ b/awx/conf/tests/test_env.py @@ -0,0 +1,6 @@ + + +# Ensure that our autouse overwrites are working +def test_cache(settings): + assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache' + assert settings.CACHES['default']['LOCATION'].startswith('unique-') diff --git a/awx/conf/views.py b/awx/conf/views.py index 60ea39d911..e10fe7ad32 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -21,7 +21,7 @@ from awx.api.generics import * # noqa from awx.api.permissions import IsSuperUser from awx.api.versioning import reverse, get_request_version from awx.main.utils import * # noqa -from awx.main.utils.handlers import BaseHTTPSHandler, UDPHandler, LoggingConnectivityException +from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException from awx.main.tasks import handle_setting_changes from awx.conf.license import get_licensed_features from awx.conf.models import Setting @@ -198,12 +198,9 @@ class SettingLoggingTest(GenericAPIView): mock_settings = MockSettings() for k, v in serializer.validated_data.items(): setattr(mock_settings, k, v) - mock_settings.LOG_AGGREGATOR_LEVEL = 'DEBUG' + AWXProxyHandler().perform_test(custom_settings=mock_settings) if mock_settings.LOG_AGGREGATOR_PROTOCOL.upper() == 'UDP': - UDPHandler.perform_test(mock_settings) return Response(status=status.HTTP_201_CREATED) - else: - BaseHTTPSHandler.perform_test(mock_settings) except LoggingConnectivityException as e: return Response({'error': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(status=status.HTTP_200_OK) diff --git a/awx/locale/django.pot b/awx/locale/django.pot index fc8c1554da..af37525098 100644 --- a/awx/locale/django.pot +++ b/awx/locale/django.pot @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2017-11-30 20:23+0000\n" +"POT-Creation-Date: 2018-05-07 21:24+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -17,91 +17,105 @@ msgstr "" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" -#: awx/api/authentication.py:67 -msgid "Invalid token header. No credentials provided." 
-msgstr "" - -#: awx/api/authentication.py:70 -msgid "Invalid token header. Token string should not contain spaces." -msgstr "" - -#: awx/api/authentication.py:105 -msgid "User inactive or deleted" -msgstr "" - -#: awx/api/authentication.py:161 -msgid "Invalid task token" -msgstr "" - -#: awx/api/conf.py:12 +#: awx/api/conf.py:15 msgid "Idle Time Force Log Out" msgstr "" -#: awx/api/conf.py:13 +#: awx/api/conf.py:16 msgid "" "Number of seconds that a user is inactive before they will need to login " "again." msgstr "" -#: awx/api/conf.py:14 awx/api/conf.py:24 awx/api/conf.py:33 awx/sso/conf.py:85 -#: awx/sso/conf.py:96 awx/sso/conf.py:108 awx/sso/conf.py:123 +#: awx/api/conf.py:17 awx/api/conf.py:26 awx/api/conf.py:34 awx/api/conf.py:47 +#: awx/sso/conf.py:85 awx/sso/conf.py:96 awx/sso/conf.py:108 +#: awx/sso/conf.py:123 msgid "Authentication" msgstr "" -#: awx/api/conf.py:22 -msgid "Maximum number of simultaneous logins" +#: awx/api/conf.py:24 +msgid "Maximum number of simultaneous logged in sessions" msgstr "" -#: awx/api/conf.py:23 +#: awx/api/conf.py:25 msgid "" -"Maximum number of simultaneous logins a user may have. To disable enter -1." -msgstr "" - -#: awx/api/conf.py:31 -msgid "Enable HTTP Basic Auth" +"Maximum number of simultaneous logged in sessions a user may have. To " +"disable enter -1." msgstr "" #: awx/api/conf.py:32 +msgid "Enable HTTP Basic Auth" +msgstr "" + +#: awx/api/conf.py:33 msgid "Enable HTTP Basic Auth for the API Browser." msgstr "" -#: awx/api/filters.py:129 +#: awx/api/conf.py:42 +msgid "OAuth 2 Timeout Settings" +msgstr "" + +#: awx/api/conf.py:43 +msgid "" +"Dictionary for customizing OAuth 2 timeouts, available items are " +"`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number " +"of seconds, and `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of " +"authorization grants in the number of seconds." +msgstr "" + +#: awx/api/exceptions.py:16 +msgid "Resource is being used by running jobs." +msgstr "" + +#: awx/api/fields.py:81 +#, python-brace-format +msgid "Invalid key names: {invalid_key_names}" +msgstr "" + +#: awx/api/fields.py:107 +msgid "Credential {} does not exist" +msgstr "" + +#: awx/api/filters.py:96 +msgid "No related model for field {}." +msgstr "" + +#: awx/api/filters.py:113 msgid "Filtering on password fields is not allowed." msgstr "" -#: awx/api/filters.py:141 awx/api/filters.py:143 +#: awx/api/filters.py:125 awx/api/filters.py:127 #, python-format msgid "Filtering on %s is not allowed." msgstr "" -#: awx/api/filters.py:146 +#: awx/api/filters.py:130 msgid "Loops not allowed in filters, detected on field {}." msgstr "" -#: awx/api/filters.py:171 +#: awx/api/filters.py:159 +msgid "Query string field name not provided." +msgstr "" + +#: awx/api/filters.py:186 #, python-brace-format msgid "Invalid {field_name} id: {field_id}" msgstr "" -#: awx/api/filters.py:302 +#: awx/api/filters.py:319 #, python-format msgid "cannot filter on kind %s" msgstr "" -#: awx/api/filters.py:409 -#, python-format -msgid "cannot order by field %s" -msgstr "" - -#: awx/api/generics.py:550 awx/api/generics.py:612 +#: awx/api/generics.py:600 awx/api/generics.py:662 msgid "\"id\" field must be an integer." msgstr "" -#: awx/api/generics.py:609 +#: awx/api/generics.py:659 msgid "\"id\" is required to disassociate" msgstr "" -#: awx/api/generics.py:660 +#: awx/api/generics.py:710 msgid "{} 'id' field is missing." msgstr "" @@ -141,872 +155,1060 @@ msgstr "" msgid "Timestamp when this {} was last modified." 
msgstr "" -#: awx/api/parsers.py:64 +#: awx/api/parsers.py:33 msgid "JSON parse error - not a JSON object" msgstr "" -#: awx/api/parsers.py:67 +#: awx/api/parsers.py:36 #, python-format msgid "" "JSON parse error - %s\n" "Possible cause: trailing comma." msgstr "" -#: awx/api/serializers.py:268 +#: awx/api/serializers.py:153 +msgid "" +"The original object is already named {}, a copy from it cannot have the same " +"name." +msgstr "" + +#: awx/api/serializers.py:295 msgid "Playbook Run" msgstr "" -#: awx/api/serializers.py:269 +#: awx/api/serializers.py:296 msgid "Command" msgstr "" -#: awx/api/serializers.py:270 awx/main/models/unified_jobs.py:435 +#: awx/api/serializers.py:297 awx/main/models/unified_jobs.py:525 msgid "SCM Update" msgstr "" -#: awx/api/serializers.py:271 +#: awx/api/serializers.py:298 msgid "Inventory Sync" msgstr "" -#: awx/api/serializers.py:272 +#: awx/api/serializers.py:299 msgid "Management Job" msgstr "" -#: awx/api/serializers.py:273 +#: awx/api/serializers.py:300 msgid "Workflow Job" msgstr "" -#: awx/api/serializers.py:274 +#: awx/api/serializers.py:301 msgid "Workflow Template" msgstr "" -#: awx/api/serializers.py:701 awx/api/serializers.py:759 awx/api/views.py:4365 -#, python-format +#: awx/api/serializers.py:696 msgid "" -"Standard Output too large to display (%(text_size)d bytes), only download " -"supported for sizes over %(supported_size)d bytes" +"Indicates whether all of the events generated by this unified job have been " +"saved to the database." msgstr "" -#: awx/api/serializers.py:774 +#: awx/api/serializers.py:852 msgid "Write-only field used to change the password." msgstr "" -#: awx/api/serializers.py:776 +#: awx/api/serializers.py:854 msgid "Set if the account is managed by an external service" msgstr "" -#: awx/api/serializers.py:800 +#: awx/api/serializers.py:878 msgid "Password required for new User." msgstr "" -#: awx/api/serializers.py:886 +#: awx/api/serializers.py:969 #, python-format msgid "Unable to change %s on user managed by LDAP." msgstr "" -#: awx/api/serializers.py:1050 +#: awx/api/serializers.py:1169 +msgid "Must be a simple space-separated string with allowed scopes {}." +msgstr "" + +#: awx/api/serializers.py:1386 +msgid "This path is already being used by another manual project." +msgstr "" + +#: awx/api/serializers.py:1467 msgid "Organization is missing" msgstr "" -#: awx/api/serializers.py:1054 +#: awx/api/serializers.py:1471 msgid "Update options must be set to false for manual projects." msgstr "" -#: awx/api/serializers.py:1060 +#: awx/api/serializers.py:1477 msgid "Array of playbooks available within this project." msgstr "" -#: awx/api/serializers.py:1079 +#: awx/api/serializers.py:1496 msgid "" "Array of inventory files and directories available within this project, not " "comprehensive." msgstr "" -#: awx/api/serializers.py:1201 +#: awx/api/serializers.py:1629 msgid "Smart inventories must specify host_filter" msgstr "" -#: awx/api/serializers.py:1303 +#: awx/api/serializers.py:1733 #, python-format msgid "Invalid port specification: %s" msgstr "" -#: awx/api/serializers.py:1314 +#: awx/api/serializers.py:1744 msgid "Cannot create Host for Smart Inventory" msgstr "" -#: awx/api/serializers.py:1336 awx/api/serializers.py:3321 -#: awx/api/serializers.py:3406 awx/main/validators.py:198 -msgid "Must be valid JSON or YAML." -msgstr "" - -#: awx/api/serializers.py:1432 +#: awx/api/serializers.py:1856 msgid "Invalid group name." 
msgstr "" -#: awx/api/serializers.py:1437 +#: awx/api/serializers.py:1861 msgid "Cannot create Group for Smart Inventory" msgstr "" -#: awx/api/serializers.py:1509 +#: awx/api/serializers.py:1936 msgid "" "Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python" msgstr "" -#: awx/api/serializers.py:1555 +#: awx/api/serializers.py:1984 msgid "`{}` is a prohibited environment variable" msgstr "" -#: awx/api/serializers.py:1566 +#: awx/api/serializers.py:1995 msgid "If 'source' is 'custom', 'source_script' must be provided." msgstr "" -#: awx/api/serializers.py:1572 +#: awx/api/serializers.py:2001 msgid "Must provide an inventory." msgstr "" -#: awx/api/serializers.py:1576 +#: awx/api/serializers.py:2005 msgid "" "The 'source_script' does not belong to the same organization as the " "inventory." msgstr "" -#: awx/api/serializers.py:1578 +#: awx/api/serializers.py:2007 msgid "'source_script' doesn't exist." msgstr "" -#: awx/api/serializers.py:1602 +#: awx/api/serializers.py:2041 msgid "Automatic group relationship, will be removed in 3.3" msgstr "" -#: awx/api/serializers.py:1679 +#: awx/api/serializers.py:2127 msgid "Cannot use manual project for SCM-based inventory." msgstr "" -#: awx/api/serializers.py:1685 +#: awx/api/serializers.py:2133 msgid "" "Manual inventory sources are created automatically when a group is created " "in the v1 API." msgstr "" -#: awx/api/serializers.py:1690 +#: awx/api/serializers.py:2138 msgid "Setting not compatible with existing schedules." msgstr "" -#: awx/api/serializers.py:1695 +#: awx/api/serializers.py:2143 msgid "Cannot create Inventory Source for Smart Inventory" msgstr "" -#: awx/api/serializers.py:1709 +#: awx/api/serializers.py:2194 #, python-format msgid "Cannot set %s if not SCM type." msgstr "" -#: awx/api/serializers.py:1950 +#: awx/api/serializers.py:2461 msgid "Modifications not allowed for managed credential types" msgstr "" -#: awx/api/serializers.py:1955 +#: awx/api/serializers.py:2466 msgid "" "Modifications to inputs are not allowed for credential types that are in use" msgstr "" -#: awx/api/serializers.py:1961 +#: awx/api/serializers.py:2472 #, python-format msgid "Must be 'cloud' or 'net', not %s" msgstr "" -#: awx/api/serializers.py:1967 +#: awx/api/serializers.py:2478 msgid "'ask_at_runtime' is not supported for custom credentials." msgstr "" -#: awx/api/serializers.py:2140 +#: awx/api/serializers.py:2656 #, python-format msgid "\"%s\" is not a valid choice" msgstr "" -#: awx/api/serializers.py:2159 -#, python-format -msgid "'%s' is not a valid field for %s" +#: awx/api/serializers.py:2675 +#, python-brace-format +msgid "'{field_name}' is not a valid field for {credential_type_name}" msgstr "" -#: awx/api/serializers.py:2180 +#: awx/api/serializers.py:2696 msgid "" "You cannot change the credential type of the credential, as it may break the " "functionality of the resources using it." msgstr "" -#: awx/api/serializers.py:2191 +#: awx/api/serializers.py:2708 msgid "" "Write-only field used to add user to owner role. If provided, do not give " "either team or organization. Only valid for creation." msgstr "" -#: awx/api/serializers.py:2196 +#: awx/api/serializers.py:2713 msgid "" "Write-only field used to add team to owner role. If provided, do not give " "either user or organization. Only valid for creation." msgstr "" -#: awx/api/serializers.py:2201 +#: awx/api/serializers.py:2718 msgid "" "Inherit permissions from organization roles. If provided on creation, do not " "give either user or team." 
msgstr "" -#: awx/api/serializers.py:2217 +#: awx/api/serializers.py:2734 msgid "Missing 'user', 'team', or 'organization'." msgstr "" -#: awx/api/serializers.py:2257 +#: awx/api/serializers.py:2774 msgid "" "Credential organization must be set and match before assigning to a team" msgstr "" -#: awx/api/serializers.py:2424 +#: awx/api/serializers.py:2974 msgid "You must provide a cloud credential." msgstr "" -#: awx/api/serializers.py:2425 +#: awx/api/serializers.py:2975 msgid "You must provide a network credential." msgstr "" -#: awx/api/serializers.py:2441 +#: awx/api/serializers.py:2976 awx/main/models/jobs.py:155 +msgid "You must provide an SSH credential." +msgstr "" + +#: awx/api/serializers.py:2977 +msgid "You must provide a vault credential." +msgstr "" + +#: awx/api/serializers.py:2996 msgid "This field is required." msgstr "" -#: awx/api/serializers.py:2443 awx/api/serializers.py:2445 +#: awx/api/serializers.py:2998 awx/api/serializers.py:3000 msgid "Playbook not found for project." msgstr "" -#: awx/api/serializers.py:2447 +#: awx/api/serializers.py:3002 msgid "Must select playbook for project." msgstr "" -#: awx/api/serializers.py:2522 +#: awx/api/serializers.py:3082 +msgid "Cannot enable provisioning callback without an inventory set." +msgstr "" + +#: awx/api/serializers.py:3085 msgid "Must either set a default value or ask to prompt on launch." msgstr "" -#: awx/api/serializers.py:2524 awx/main/models/jobs.py:326 +#: awx/api/serializers.py:3087 awx/main/models/jobs.py:310 msgid "Job types 'run' and 'check' must have assigned a project." msgstr "" -#: awx/api/serializers.py:2611 +#: awx/api/serializers.py:3203 msgid "Invalid job template." msgstr "" -#: awx/api/serializers.py:2708 -msgid "Neither credential nor vault credential provided." +#: awx/api/serializers.py:3276 +msgid "No change to job limit" msgstr "" -#: awx/api/serializers.py:2711 +#: awx/api/serializers.py:3277 +msgid "All failed and unreachable hosts" +msgstr "" + +#: awx/api/serializers.py:3292 +msgid "Missing passwords needed to start: {}" +msgstr "" + +#: awx/api/serializers.py:3311 +msgid "Relaunch by host status not available until job finishes running." +msgstr "" + +#: awx/api/serializers.py:3325 msgid "Job Template Project is missing or undefined." msgstr "" -#: awx/api/serializers.py:2713 +#: awx/api/serializers.py:3327 msgid "Job Template Inventory is missing or undefined." msgstr "" -#: awx/api/serializers.py:2782 awx/main/tasks.py:2186 +#: awx/api/serializers.py:3365 +msgid "Unknown, job may have been ran before launch configurations were saved." +msgstr "" + +#: awx/api/serializers.py:3432 awx/main/tasks.py:2238 msgid "{} are prohibited from use in ad hoc commands." msgstr "" -#: awx/api/serializers.py:3008 -#, python-format -msgid "%(job_type)s is not a valid job type. The choices are %(choices)s." +#: awx/api/serializers.py:3501 awx/api/views.py:4763 +#, python-brace-format +msgid "" +"Standard Output too large to display ({text_size} bytes), only download " +"supported for sizes over {supported_size} bytes." msgstr "" -#: awx/api/serializers.py:3013 -msgid "Workflow job template is missing during creation." +#: awx/api/serializers.py:3697 +msgid "Provided variable {} has no database value to replace with." msgstr "" -#: awx/api/serializers.py:3018 +#: awx/api/serializers.py:3773 #, python-format msgid "Cannot nest a %s inside a WorkflowJobTemplate" msgstr "" -#: awx/api/serializers.py:3291 -#, python-format -msgid "Job Template '%s' is missing or undefined." 
+#: awx/api/serializers.py:3780 awx/api/views.py:776 +msgid "Related template is not configured to accept credentials on launch." msgstr "" -#: awx/api/serializers.py:3294 +#: awx/api/serializers.py:4234 msgid "The inventory associated with this Job Template is being deleted." msgstr "" -#: awx/api/serializers.py:3335 awx/api/views.py:3023 -#, python-format -msgid "Cannot assign multiple %s credentials." +#: awx/api/serializers.py:4236 +msgid "The provided inventory is being deleted." msgstr "" -#: awx/api/serializers.py:3337 awx/api/views.py:3026 -msgid "Extra credentials must be network or cloud." +#: awx/api/serializers.py:4244 +msgid "Cannot assign multiple {} credentials." msgstr "" -#: awx/api/serializers.py:3474 +#: awx/api/serializers.py:4257 +msgid "" +"Removing {} credential at launch time without replacement is not supported. " +"Provided list lacked credential(s): {}." +msgstr "" + +#: awx/api/serializers.py:4382 msgid "" "Missing required fields for Notification Configuration: notification_type" msgstr "" -#: awx/api/serializers.py:3497 +#: awx/api/serializers.py:4405 msgid "No values specified for field '{}'" msgstr "" -#: awx/api/serializers.py:3502 +#: awx/api/serializers.py:4410 msgid "Missing required fields for Notification Configuration: {}." msgstr "" -#: awx/api/serializers.py:3505 +#: awx/api/serializers.py:4413 msgid "Configuration field '{}' incorrect type, expected {}." msgstr "" -#: awx/api/serializers.py:3558 -msgid "Inventory Source must be a cloud resource." -msgstr "" - -#: awx/api/serializers.py:3560 -msgid "Manual Project cannot have a schedule set." -msgstr "" - -#: awx/api/serializers.py:3563 +#: awx/api/serializers.py:4475 msgid "" -"Inventory sources with `update_on_project_update` cannot be scheduled. " -"Schedule its source project `{}` instead." +"Valid DTSTART required in rrule. Value should start with: DTSTART:" +"YYYYMMDDTHHMMSSZ" msgstr "" -#: awx/api/serializers.py:3582 -msgid "Projects and inventory updates cannot accept extra variables." +#: awx/api/serializers.py:4477 +msgid "" +"DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ." msgstr "" -#: awx/api/serializers.py:3604 -msgid "DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ" -msgstr "" - -#: awx/api/serializers.py:3606 +#: awx/api/serializers.py:4479 msgid "Multiple DTSTART is not supported." msgstr "" -#: awx/api/serializers.py:3608 -msgid "RRULE require in rrule." +#: awx/api/serializers.py:4481 +msgid "RRULE required in rrule." msgstr "" -#: awx/api/serializers.py:3610 +#: awx/api/serializers.py:4483 msgid "Multiple RRULE is not supported." msgstr "" -#: awx/api/serializers.py:3612 +#: awx/api/serializers.py:4485 msgid "INTERVAL required in rrule." msgstr "" -#: awx/api/serializers.py:3614 -msgid "TZID is not supported." -msgstr "" - -#: awx/api/serializers.py:3616 +#: awx/api/serializers.py:4487 msgid "SECONDLY is not supported." msgstr "" -#: awx/api/serializers.py:3618 +#: awx/api/serializers.py:4489 msgid "Multiple BYMONTHDAYs not supported." msgstr "" -#: awx/api/serializers.py:3620 +#: awx/api/serializers.py:4491 msgid "Multiple BYMONTHs not supported." msgstr "" -#: awx/api/serializers.py:3622 +#: awx/api/serializers.py:4493 msgid "BYDAY with numeric prefix not supported." msgstr "" -#: awx/api/serializers.py:3624 +#: awx/api/serializers.py:4495 msgid "BYYEARDAY not supported." msgstr "" -#: awx/api/serializers.py:3626 +#: awx/api/serializers.py:4497 msgid "BYWEEKNO not supported." 
msgstr "" -#: awx/api/serializers.py:3630 +#: awx/api/serializers.py:4499 +msgid "RRULE may not contain both COUNT and UNTIL" +msgstr "" + +#: awx/api/serializers.py:4503 msgid "COUNT > 999 is unsupported." msgstr "" -#: awx/api/serializers.py:3634 -msgid "rrule parsing failed validation." +#: awx/api/serializers.py:4507 +msgid "rrule parsing failed validation: {}" msgstr "" -#: awx/api/serializers.py:3760 +#: awx/api/serializers.py:4538 +msgid "Inventory Source must be a cloud resource." +msgstr "" + +#: awx/api/serializers.py:4540 +msgid "Manual Project cannot have a schedule set." +msgstr "" + +#: awx/api/serializers.py:4553 +msgid "" +"Count of jobs in the running or waiting state that are targeted for this " +"instance" +msgstr "" + +#: awx/api/serializers.py:4593 +msgid "" +"Minimum percentage of all instances that will be automatically assigned to " +"this group when new instances come online." +msgstr "" + +#: awx/api/serializers.py:4598 +msgid "" +"Static minimum number of Instances that will be automatically assign to this " +"group when new instances come online." +msgstr "" + +#: awx/api/serializers.py:4603 +msgid "List of exact-match Instances that will be assigned to this group" +msgstr "" + +#: awx/api/serializers.py:4624 +msgid "Duplicate entry {}." +msgstr "" + +#: awx/api/serializers.py:4626 +msgid "{} is not a valid hostname of an existing instance." +msgstr "" + +#: awx/api/serializers.py:4631 +msgid "tower instance group name may not be changed." +msgstr "" + +#: awx/api/serializers.py:4711 msgid "" "A summary of the new and changed values when an object is created, updated, " "or deleted" msgstr "" -#: awx/api/serializers.py:3762 +#: awx/api/serializers.py:4713 msgid "" "For create, update, and delete events this is the object type that was " "affected. For associate and disassociate events this is the object type " "associated or disassociated with object2." msgstr "" -#: awx/api/serializers.py:3765 +#: awx/api/serializers.py:4716 msgid "" "Unpopulated for create, update, and delete events. For associate and " "disassociate events this is the object type that object1 is being associated " "with." msgstr "" -#: awx/api/serializers.py:3768 +#: awx/api/serializers.py:4719 msgid "The action taken with respect to the given object(s)." msgstr "" -#: awx/api/serializers.py:3885 -msgid "Unable to login with provided credentials." -msgstr "" - -#: awx/api/serializers.py:3887 -msgid "Must include \"username\" and \"password\"." -msgstr "" - -#: awx/api/views.py:108 +#: awx/api/views.py:116 msgid "Your license does not allow use of the activity stream." msgstr "" -#: awx/api/views.py:118 +#: awx/api/views.py:126 msgid "Your license does not permit use of system tracking." msgstr "" -#: awx/api/views.py:128 +#: awx/api/views.py:136 msgid "Your license does not allow use of workflows." msgstr "" -#: awx/api/views.py:142 +#: awx/api/views.py:150 msgid "Cannot delete job resource when associated workflow job is running." msgstr "" -#: awx/api/views.py:146 +#: awx/api/views.py:155 msgid "Cannot delete running job resource." msgstr "" -#: awx/api/views.py:155 awx/templates/rest_framework/api.html:28 +#: awx/api/views.py:160 +msgid "Job has not finished processing events." +msgstr "" + +#: awx/api/views.py:219 +msgid "Related job {} is still processing events." 
+msgstr "" + +#: awx/api/views.py:226 awx/templates/rest_framework/api.html:28 msgid "REST API" msgstr "" -#: awx/api/views.py:164 awx/templates/rest_framework/api.html:4 +#: awx/api/views.py:236 awx/templates/rest_framework/api.html:4 msgid "AWX REST API" msgstr "" -#: awx/api/views.py:226 +#: awx/api/views.py:250 +msgid "API OAuth 2 Authorization Root" +msgstr "" + +#: awx/api/views.py:315 msgid "Version 1" msgstr "" -#: awx/api/views.py:230 +#: awx/api/views.py:319 msgid "Version 2" msgstr "" -#: awx/api/views.py:241 +#: awx/api/views.py:328 msgid "Ping" msgstr "" -#: awx/api/views.py:272 awx/conf/apps.py:12 +#: awx/api/views.py:359 awx/conf/apps.py:10 msgid "Configuration" msgstr "" -#: awx/api/views.py:325 +#: awx/api/views.py:414 msgid "Invalid license data" msgstr "" -#: awx/api/views.py:327 +#: awx/api/views.py:416 msgid "Missing 'eula_accepted' property" msgstr "" -#: awx/api/views.py:331 +#: awx/api/views.py:420 msgid "'eula_accepted' value is invalid" msgstr "" -#: awx/api/views.py:334 +#: awx/api/views.py:423 msgid "'eula_accepted' must be True" msgstr "" -#: awx/api/views.py:341 +#: awx/api/views.py:430 msgid "Invalid JSON" msgstr "" -#: awx/api/views.py:349 +#: awx/api/views.py:438 msgid "Invalid License" msgstr "" -#: awx/api/views.py:359 +#: awx/api/views.py:448 msgid "Invalid license" msgstr "" -#: awx/api/views.py:367 +#: awx/api/views.py:456 #, python-format msgid "Failed to remove license (%s)" msgstr "" -#: awx/api/views.py:372 +#: awx/api/views.py:461 msgid "Dashboard" msgstr "" -#: awx/api/views.py:471 +#: awx/api/views.py:560 msgid "Dashboard Jobs Graphs" msgstr "" -#: awx/api/views.py:507 +#: awx/api/views.py:596 #, python-format msgid "Unknown period \"%s\"" msgstr "" -#: awx/api/views.py:521 +#: awx/api/views.py:610 msgid "Instances" msgstr "" -#: awx/api/views.py:529 +#: awx/api/views.py:617 msgid "Instance Detail" msgstr "" -#: awx/api/views.py:537 -msgid "Instance Running Jobs" +#: awx/api/views.py:638 +msgid "Instance Jobs" msgstr "" -#: awx/api/views.py:552 +#: awx/api/views.py:652 msgid "Instance's Instance Groups" msgstr "" -#: awx/api/views.py:562 +#: awx/api/views.py:661 msgid "Instance Groups" msgstr "" -#: awx/api/views.py:570 +#: awx/api/views.py:669 msgid "Instance Group Detail" msgstr "" -#: awx/api/views.py:578 +#: awx/api/views.py:677 +msgid "Isolated Groups can not be removed from the API" +msgstr "" + +#: awx/api/views.py:679 +msgid "" +"Instance Groups acting as a controller for an Isolated Group can not be " +"removed from the API" +msgstr "" + +#: awx/api/views.py:685 msgid "Instance Group Running Jobs" msgstr "" -#: awx/api/views.py:588 +#: awx/api/views.py:694 msgid "Instance Group's Instances" msgstr "" -#: awx/api/views.py:598 +#: awx/api/views.py:703 msgid "Schedules" msgstr "" -#: awx/api/views.py:617 +#: awx/api/views.py:717 +msgid "Schedule Recurrence Rule Preview" +msgstr "" + +#: awx/api/views.py:763 +msgid "Cannot assign credential when related template is null." +msgstr "" + +#: awx/api/views.py:768 +msgid "Related template cannot accept {} on launch." +msgstr "" + +#: awx/api/views.py:770 +msgid "" +"Credential that requires user input on launch cannot be used in saved launch " +"configuration." +msgstr "" + +#: awx/api/views.py:778 +#, python-brace-format +msgid "" +"This launch configuration already provides a {credential_type} credential." +msgstr "" + +#: awx/api/views.py:781 +#, python-brace-format +msgid "Related template already uses {credential_type} credential." 
+msgstr "" + +#: awx/api/views.py:799 msgid "Schedule Jobs List" msgstr "" -#: awx/api/views.py:843 +#: awx/api/views.py:954 msgid "Your license only permits a single organization to exist." msgstr "" -#: awx/api/views.py:1079 awx/api/views.py:4666 +#: awx/api/views.py:1183 awx/api/views.py:4972 msgid "You cannot assign an Organization role as a child role for a Team." msgstr "" -#: awx/api/views.py:1083 awx/api/views.py:4680 +#: awx/api/views.py:1187 awx/api/views.py:4986 msgid "You cannot grant system-level permissions to a team." msgstr "" -#: awx/api/views.py:1090 awx/api/views.py:4672 +#: awx/api/views.py:1194 awx/api/views.py:4978 msgid "" "You cannot grant credential access to a team when the Organization field " "isn't set, or belongs to a different organization" msgstr "" -#: awx/api/views.py:1180 -msgid "Cannot delete project." -msgstr "" - -#: awx/api/views.py:1215 +#: awx/api/views.py:1306 msgid "Project Schedules" msgstr "" -#: awx/api/views.py:1227 +#: awx/api/views.py:1317 msgid "Project SCM Inventory Sources" msgstr "" -#: awx/api/views.py:1354 +#: awx/api/views.py:1417 +msgid "Project Update Events List" +msgstr "" + +#: awx/api/views.py:1430 +msgid "System Job Events List" +msgstr "" + +#: awx/api/views.py:1443 +msgid "Inventory Update Events List" +msgstr "" + +#: awx/api/views.py:1475 msgid "Project Update SCM Inventory Updates" msgstr "" -#: awx/api/views.py:1409 +#: awx/api/views.py:1533 msgid "Me" msgstr "" -#: awx/api/views.py:1453 awx/api/views.py:4623 -msgid "You may not perform any action with your own admin_role." +#: awx/api/views.py:1541 +msgid "OAuth 2 Applications" msgstr "" -#: awx/api/views.py:1459 awx/api/views.py:4627 -msgid "You may not change the membership of a users admin_role" +#: awx/api/views.py:1550 +msgid "OAuth 2 Application Detail" msgstr "" -#: awx/api/views.py:1464 awx/api/views.py:4632 +#: awx/api/views.py:1559 +msgid "OAuth 2 Application Tokens" +msgstr "" + +#: awx/api/views.py:1580 +msgid "OAuth2 Tokens" +msgstr "" + +#: awx/api/views.py:1589 +msgid "OAuth2 Authorized Access Tokens" +msgstr "" + +#: awx/api/views.py:1604 +msgid "OAuth2 User Authorized Access Tokens" +msgstr "" + +#: awx/api/views.py:1619 +msgid "Organization OAuth2 Applications" +msgstr "" + +#: awx/api/views.py:1631 +msgid "OAuth2 Personal Access Tokens" +msgstr "" + +#: awx/api/views.py:1646 +msgid "OAuth Token Detail" +msgstr "" + +#: awx/api/views.py:1704 awx/api/views.py:4939 msgid "" "You cannot grant credential access to a user not in the credentials' " "organization" msgstr "" -#: awx/api/views.py:1468 awx/api/views.py:4636 +#: awx/api/views.py:1708 awx/api/views.py:4943 msgid "You cannot grant private credential access to another user" msgstr "" -#: awx/api/views.py:1566 +#: awx/api/views.py:1805 #, python-format msgid "Cannot change %s." msgstr "" -#: awx/api/views.py:1572 +#: awx/api/views.py:1811 msgid "Cannot delete user." msgstr "" -#: awx/api/views.py:1601 +#: awx/api/views.py:1835 msgid "Deletion not allowed for managed credential types" msgstr "" -#: awx/api/views.py:1603 +#: awx/api/views.py:1837 msgid "Credential types that are in use cannot be deleted" msgstr "" -#: awx/api/views.py:1781 +#: awx/api/views.py:2009 msgid "Cannot delete inventory script." msgstr "" -#: awx/api/views.py:1866 +#: awx/api/views.py:2099 #, python-brace-format msgid "{0}" msgstr "" -#: awx/api/views.py:2101 +#: awx/api/views.py:2326 msgid "Fact not found." 
msgstr "" -#: awx/api/views.py:2125 +#: awx/api/views.py:2348 msgid "SSLError while trying to connect to {}" msgstr "" -#: awx/api/views.py:2127 +#: awx/api/views.py:2350 msgid "Request to {} timed out." msgstr "" -#: awx/api/views.py:2129 -msgid "Unkown exception {} while trying to GET {}" +#: awx/api/views.py:2352 +msgid "Unknown exception {} while trying to GET {}" msgstr "" -#: awx/api/views.py:2132 +#: awx/api/views.py:2355 msgid "" "Unauthorized access. Please check your Insights Credential username and " "password." msgstr "" -#: awx/api/views.py:2134 +#: awx/api/views.py:2358 msgid "" "Failed to gather reports and maintenance plans from Insights API at URL {}. " "Server responded with {} status code and message {}" msgstr "" -#: awx/api/views.py:2140 +#: awx/api/views.py:2365 msgid "Expected JSON response from Insights but instead got {}" msgstr "" -#: awx/api/views.py:2147 +#: awx/api/views.py:2372 msgid "This host is not recognized as an Insights host." msgstr "" -#: awx/api/views.py:2152 +#: awx/api/views.py:2377 msgid "The Insights Credential for \"{}\" was not found." msgstr "" -#: awx/api/views.py:2221 +#: awx/api/views.py:2445 msgid "Cyclical Group association." msgstr "" -#: awx/api/views.py:2499 +#: awx/api/views.py:2658 msgid "Inventory Source List" msgstr "" -#: awx/api/views.py:2512 +#: awx/api/views.py:2670 msgid "Inventory Sources Update" msgstr "" -#: awx/api/views.py:2542 +#: awx/api/views.py:2703 msgid "Could not start because `can_update` returned False" msgstr "" -#: awx/api/views.py:2550 +#: awx/api/views.py:2711 msgid "No inventory sources to update." msgstr "" -#: awx/api/views.py:2582 -msgid "Cannot delete inventory source." -msgstr "" - -#: awx/api/views.py:2590 +#: awx/api/views.py:2740 msgid "Inventory Source Schedules" msgstr "" -#: awx/api/views.py:2620 +#: awx/api/views.py:2767 msgid "Notification Templates can only be assigned when source is one of {}." msgstr "" -#: awx/api/views.py:2851 +#: awx/api/views.py:2822 +msgid "Vault credentials are not yet supported for inventory sources." +msgstr "" + +#: awx/api/views.py:2827 +msgid "Source already has cloud credential assigned." +msgstr "" + +#: awx/api/views.py:2986 +msgid "" +"'credentials' cannot be used in combination with 'credential', " +"'vault_credential', or 'extra_credentials'." +msgstr "" + +#: awx/api/views.py:3098 msgid "Job Template Schedules" msgstr "" -#: awx/api/views.py:2871 awx/api/views.py:2882 +#: awx/api/views.py:3116 awx/api/views.py:3127 msgid "Your license does not allow adding surveys." msgstr "" -#: awx/api/views.py:2889 -msgid "'name' missing from survey spec." +#: awx/api/views.py:3146 +msgid "Field '{}' is missing from survey spec." msgstr "" -#: awx/api/views.py:2891 -msgid "'description' missing from survey spec." +#: awx/api/views.py:3148 +msgid "Expected {} for field '{}', received {} type." msgstr "" -#: awx/api/views.py:2893 -msgid "'spec' missing from survey spec." -msgstr "" - -#: awx/api/views.py:2895 -msgid "'spec' must be a list of items." -msgstr "" - -#: awx/api/views.py:2897 +#: awx/api/views.py:3152 msgid "'spec' doesn't contain any items." msgstr "" -#: awx/api/views.py:2903 +#: awx/api/views.py:3161 #, python-format msgid "Survey question %s is not a json object." msgstr "" -#: awx/api/views.py:2905 +#: awx/api/views.py:3163 #, python-format msgid "'type' missing from survey question %s." msgstr "" -#: awx/api/views.py:2907 +#: awx/api/views.py:3165 #, python-format msgid "'question_name' missing from survey question %s." 
msgstr "" -#: awx/api/views.py:2909 +#: awx/api/views.py:3167 #, python-format msgid "'variable' missing from survey question %s." msgstr "" -#: awx/api/views.py:2911 +#: awx/api/views.py:3169 #, python-format msgid "'variable' '%(item)s' duplicated in survey question %(survey)s." msgstr "" -#: awx/api/views.py:2916 +#: awx/api/views.py:3174 #, python-format msgid "'required' missing from survey question %s." msgstr "" -#: awx/api/views.py:2921 +#: awx/api/views.py:3179 #, python-brace-format msgid "Value {question_default} for '{variable_name}' expected to be a string." msgstr "" -#: awx/api/views.py:2928 +#: awx/api/views.py:3189 #, python-brace-format msgid "" -"$encrypted$ is reserved keyword for password questions and may not be used " -"as a default for '{variable_name}' in survey question {question_position}." +"$encrypted$ is a reserved keyword for password question defaults, survey " +"question {question_position} is type {question_type}." msgstr "" -#: awx/api/views.py:3049 +#: awx/api/views.py:3205 +#, python-brace-format +msgid "" +"$encrypted$ is a reserved keyword, may not be used for new default in " +"position {question_position}." +msgstr "" + +#: awx/api/views.py:3278 +#, python-brace-format +msgid "Cannot assign multiple {credential_type} credentials." +msgstr "" + +#: awx/api/views.py:3296 +msgid "Extra credentials must be network or cloud." +msgstr "" + +#: awx/api/views.py:3318 msgid "Maximum number of labels for {} reached." msgstr "" -#: awx/api/views.py:3170 +#: awx/api/views.py:3441 msgid "No matching host could be found!" msgstr "" -#: awx/api/views.py:3173 +#: awx/api/views.py:3444 msgid "Multiple hosts matched the request!" msgstr "" -#: awx/api/views.py:3178 +#: awx/api/views.py:3449 msgid "Cannot start automatically, user input required!" msgstr "" -#: awx/api/views.py:3185 +#: awx/api/views.py:3456 msgid "Host callback job already pending." msgstr "" -#: awx/api/views.py:3199 +#: awx/api/views.py:3471 awx/api/views.py:4212 msgid "Error starting job!" msgstr "" -#: awx/api/views.py:3306 +#: awx/api/views.py:3587 #, python-brace-format msgid "Cannot associate {0} when {1} have been associated." msgstr "" -#: awx/api/views.py:3331 +#: awx/api/views.py:3612 msgid "Multiple parent relationship not allowed." msgstr "" -#: awx/api/views.py:3336 +#: awx/api/views.py:3617 msgid "Cycle detected." msgstr "" -#: awx/api/views.py:3540 +#: awx/api/views.py:3807 msgid "Workflow Job Template Schedules" msgstr "" -#: awx/api/views.py:3685 awx/api/views.py:4268 +#: awx/api/views.py:3938 awx/api/views.py:4610 msgid "Superuser privileges needed." msgstr "" -#: awx/api/views.py:3717 +#: awx/api/views.py:3970 msgid "System Job Template Schedules" msgstr "" -#: awx/api/views.py:3780 +#: awx/api/views.py:4028 msgid "POST not allowed for Job launching in version 2 of the api" msgstr "" -#: awx/api/views.py:3942 +#: awx/api/views.py:4195 +#, python-brace-format +msgid "Wait until job finishes before retrying on {status_value} hosts." +msgstr "" + +#: awx/api/views.py:4200 +#, python-brace-format +msgid "Cannot retry on {status_value} hosts, playbook stats not available." +msgstr "" + +#: awx/api/views.py:4205 +#, python-brace-format +msgid "Cannot relaunch because previous job had 0 {status_value} hosts." +msgstr "" + +#: awx/api/views.py:4234 +msgid "Cannot create schedule because job requires credential passwords." +msgstr "" + +#: awx/api/views.py:4239 +msgid "Cannot create schedule because job was launched by legacy method." 
+msgstr "" + +#: awx/api/views.py:4241 +msgid "Cannot create schedule because a related resource is missing." +msgstr "" + +#: awx/api/views.py:4295 msgid "Job Host Summaries List" msgstr "" -#: awx/api/views.py:3989 +#: awx/api/views.py:4342 msgid "Job Event Children List" msgstr "" -#: awx/api/views.py:3998 +#: awx/api/views.py:4351 msgid "Job Event Hosts List" msgstr "" -#: awx/api/views.py:4008 +#: awx/api/views.py:4360 msgid "Job Events List" msgstr "" -#: awx/api/views.py:4222 +#: awx/api/views.py:4570 msgid "Ad Hoc Command Events List" msgstr "" -#: awx/api/views.py:4437 -msgid "Error generating stdout download file: {}" -msgstr "" - -#: awx/api/views.py:4450 -#, python-format -msgid "Error generating stdout download file: %s" -msgstr "" - -#: awx/api/views.py:4495 +#: awx/api/views.py:4809 msgid "Delete not allowed while there are pending notifications" msgstr "" -#: awx/api/views.py:4502 +#: awx/api/views.py:4817 msgid "Notification Template Test" msgstr "" @@ -1158,19 +1360,31 @@ msgstr "" msgid "Example setting which can be different for each user." msgstr "" -#: awx/conf/conf.py:95 awx/conf/registry.py:85 awx/conf/views.py:56 +#: awx/conf/conf.py:95 awx/conf/registry.py:85 awx/conf/views.py:55 msgid "User" msgstr "" -#: awx/conf/fields.py:63 +#: awx/conf/fields.py:60 awx/sso/fields.py:583 +#, python-brace-format +msgid "" +"Expected None, True, False, a string or list of strings but got {input_type} " +"instead." +msgstr "" + +#: awx/conf/fields.py:104 msgid "Enter a valid URL" msgstr "" -#: awx/conf/fields.py:95 +#: awx/conf/fields.py:136 #, python-brace-format msgid "\"{input}\" is not a valid string." msgstr "" +#: awx/conf/fields.py:151 +#, python-brace-format +msgid "Expected a list of tuples of max length 2 but got {input_type} instead." +msgstr "" + #: awx/conf/license.py:22 msgid "Your Tower license does not allow that." msgstr "" @@ -1261,9 +1475,9 @@ msgstr "" #: awx/conf/tests/unit/test_settings.py:411 #: awx/conf/tests/unit/test_settings.py:430 #: awx/conf/tests/unit/test_settings.py:466 awx/main/conf.py:22 -#: awx/main/conf.py:32 awx/main/conf.py:42 awx/main/conf.py:51 -#: awx/main/conf.py:63 awx/main/conf.py:81 awx/main/conf.py:96 -#: awx/main/conf.py:121 +#: awx/main/conf.py:32 awx/main/conf.py:42 awx/main/conf.py:52 +#: awx/main/conf.py:61 awx/main/conf.py:73 awx/main/conf.py:86 +#: awx/main/conf.py:99 awx/main/conf.py:124 msgid "System" msgstr "" @@ -1275,103 +1489,104 @@ msgstr "" msgid "OtherSystem" msgstr "" -#: awx/conf/views.py:48 +#: awx/conf/views.py:47 msgid "Setting Categories" msgstr "" -#: awx/conf/views.py:73 +#: awx/conf/views.py:71 msgid "Setting Detail" msgstr "" -#: awx/conf/views.py:168 +#: awx/conf/views.py:166 msgid "Logging Connectivity Test" msgstr "" -#: awx/main/access.py:44 -msgid "Resource is being used by running jobs." +#: awx/main/access.py:57 +#, python-format +msgid "Required related field %s for permission check." msgstr "" -#: awx/main/access.py:237 +#: awx/main/access.py:73 #, python-format msgid "Bad data found in related field %s." msgstr "" -#: awx/main/access.py:281 +#: awx/main/access.py:293 msgid "License is missing." msgstr "" -#: awx/main/access.py:283 +#: awx/main/access.py:295 msgid "License has expired." msgstr "" -#: awx/main/access.py:291 +#: awx/main/access.py:303 #, python-format msgid "License count of %s instances has been reached." msgstr "" -#: awx/main/access.py:293 +#: awx/main/access.py:305 #, python-format msgid "License count of %s instances has been exceeded." 
msgstr "" -#: awx/main/access.py:295 +#: awx/main/access.py:307 msgid "Host count exceeds available instances." msgstr "" -#: awx/main/access.py:299 +#: awx/main/access.py:311 #, python-format msgid "Feature %s is not enabled in the active license." msgstr "" -#: awx/main/access.py:301 +#: awx/main/access.py:313 msgid "Features not found in active license." msgstr "" -#: awx/main/access.py:707 +#: awx/main/access.py:823 msgid "Unable to change inventory on a host." msgstr "" -#: awx/main/access.py:724 awx/main/access.py:769 +#: awx/main/access.py:840 awx/main/access.py:885 msgid "Cannot associate two items from different inventories." msgstr "" -#: awx/main/access.py:757 +#: awx/main/access.py:873 msgid "Unable to change inventory on a group." msgstr "" -#: awx/main/access.py:1017 +#: awx/main/access.py:1131 msgid "Unable to change organization on a team." msgstr "" -#: awx/main/access.py:1030 +#: awx/main/access.py:1148 msgid "The {} role cannot be assigned to a team" msgstr "" -#: awx/main/access.py:1032 +#: awx/main/access.py:1150 msgid "The admin_role for a User cannot be assigned to a team" msgstr "" -#: awx/main/access.py:1479 +#: awx/main/access.py:1517 msgid "Job has been orphaned from its job template." msgstr "" -#: awx/main/access.py:1481 -msgid "You do not have execute permission to related job template." +#: awx/main/access.py:1519 +msgid "Job was launched with unknown prompted fields." msgstr "" -#: awx/main/access.py:1484 +#: awx/main/access.py:1521 msgid "Job was launched with prompted fields." msgstr "" -#: awx/main/access.py:1486 +#: awx/main/access.py:1523 msgid " Organization level permissions required." msgstr "" -#: awx/main/access.py:1488 +#: awx/main/access.py:1525 msgid " You do not have permission to related resources." msgstr "" -#: awx/main/access.py:1833 +#: awx/main/access.py:1935 msgid "" "You do not have permission to the workflow job resources required for " "relaunch." @@ -1410,328 +1625,355 @@ msgid "" msgstr "" #: awx/main/conf.py:49 -msgid "Enable Administrator Alerts" +msgid "Organization Admins Can Manage Users and Teams" msgstr "" #: awx/main/conf.py:50 -msgid "Email Admin users for system events that may require attention." +msgid "" +"Controls whether any Organization Admin has the privileges to create and " +"manage users and teams. You may want to disable this ability if you are " +"using an LDAP or SAML integration." +msgstr "" + +#: awx/main/conf.py:59 +msgid "Enable Administrator Alerts" msgstr "" #: awx/main/conf.py:60 +msgid "Email Admin users for system events that may require attention." +msgstr "" + +#: awx/main/conf.py:70 msgid "Base URL of the Tower host" msgstr "" -#: awx/main/conf.py:61 +#: awx/main/conf.py:71 msgid "" "This setting is used by services like notifications to render a valid url to " "the Tower host." msgstr "" -#: awx/main/conf.py:70 +#: awx/main/conf.py:80 msgid "Remote Host Headers" msgstr "" -#: awx/main/conf.py:71 +#: awx/main/conf.py:81 msgid "" "HTTP headers and meta keys to search to determine remote host name or IP. " "Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if " -"behind a reverse proxy.\n" -"\n" -"Note: The headers will be searched in order and the first found remote host " -"name or IP will be used.\n" -"\n" -"In the below example 8.8.8.7 would be the chosen IP address.\n" -"X-Forwarded-For: 8.8.8.7, 192.168.2.1, 127.0.0.1\n" -"Host: 127.0.0.1\n" -"REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR', 'REMOTE_HOST']" +"behind a reverse proxy. 
See the \"Proxy Support\" section of the " +"Adminstrator guide formore details." msgstr "" -#: awx/main/conf.py:88 +#: awx/main/conf.py:93 msgid "Proxy IP Whitelist" msgstr "" -#: awx/main/conf.py:89 +#: awx/main/conf.py:94 msgid "" "If Tower is behind a reverse proxy/load balancer, use this setting to " "whitelist the proxy IP addresses from which Tower should trust custom " -"REMOTE_HOST_HEADERS header values\n" -"REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', ''REMOTE_ADDR', " -"'REMOTE_HOST']\n" -"PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']\n" -"If this setting is an empty list (the default), the headers specified by " -"REMOTE_HOST_HEADERS will be trusted unconditionally')" +"REMOTE_HOST_HEADERS header values. If this setting is an empty list (the " +"default), the headers specified by REMOTE_HOST_HEADERS will be trusted " +"unconditionally')" msgstr "" -#: awx/main/conf.py:117 +#: awx/main/conf.py:120 msgid "License" msgstr "" -#: awx/main/conf.py:118 +#: awx/main/conf.py:121 msgid "" "The license controls which features and functionality are enabled. Use /api/" "v1/config/ to update or change the license." msgstr "" -#: awx/main/conf.py:128 +#: awx/main/conf.py:131 msgid "Ansible Modules Allowed for Ad Hoc Jobs" msgstr "" -#: awx/main/conf.py:129 +#: awx/main/conf.py:132 msgid "List of modules allowed to be used by ad-hoc jobs." msgstr "" -#: awx/main/conf.py:130 awx/main/conf.py:140 awx/main/conf.py:151 -#: awx/main/conf.py:161 awx/main/conf.py:171 awx/main/conf.py:181 -#: awx/main/conf.py:192 awx/main/conf.py:204 awx/main/conf.py:216 -#: awx/main/conf.py:229 awx/main/conf.py:241 awx/main/conf.py:251 -#: awx/main/conf.py:262 awx/main/conf.py:272 awx/main/conf.py:282 -#: awx/main/conf.py:292 awx/main/conf.py:304 awx/main/conf.py:316 -#: awx/main/conf.py:328 awx/main/conf.py:341 +#: awx/main/conf.py:133 awx/main/conf.py:155 awx/main/conf.py:164 +#: awx/main/conf.py:175 awx/main/conf.py:185 awx/main/conf.py:195 +#: awx/main/conf.py:205 awx/main/conf.py:216 awx/main/conf.py:228 +#: awx/main/conf.py:240 awx/main/conf.py:253 awx/main/conf.py:265 +#: awx/main/conf.py:275 awx/main/conf.py:286 awx/main/conf.py:297 +#: awx/main/conf.py:307 awx/main/conf.py:317 awx/main/conf.py:329 +#: awx/main/conf.py:341 awx/main/conf.py:353 awx/main/conf.py:367 msgid "Jobs" msgstr "" -#: awx/main/conf.py:138 +#: awx/main/conf.py:142 +msgid "Always" +msgstr "" + +#: awx/main/conf.py:143 +msgid "Never" +msgstr "" + +#: awx/main/conf.py:144 +msgid "Only On Job Template Definitions" +msgstr "" + +#: awx/main/conf.py:147 +msgid "When can extra variables contain Jinja templates?" +msgstr "" + +#: awx/main/conf.py:149 +msgid "" +"Ansible allows variable substitution via the Jinja2 templating language for " +"--extra-vars. This poses a potential security risk where Tower users with " +"the ability to specify extra vars at job launch time can use Jinja2 " +"templates to run arbitrary Python. It is recommended that this value be set " +"to \"template\" or \"never\"." +msgstr "" + +#: awx/main/conf.py:162 msgid "Enable job isolation" msgstr "" -#: awx/main/conf.py:139 +#: awx/main/conf.py:163 msgid "" "Isolates an Ansible job from protected parts of the system to prevent " "exposing sensitive information." 
msgstr "" -#: awx/main/conf.py:147 +#: awx/main/conf.py:171 msgid "Job execution path" msgstr "" -#: awx/main/conf.py:148 +#: awx/main/conf.py:172 msgid "" "The directory in which Tower will create new temporary directories for job " "execution and isolation (such as credential files and custom inventory " "scripts)." msgstr "" -#: awx/main/conf.py:159 +#: awx/main/conf.py:183 msgid "Paths to hide from isolated jobs" msgstr "" -#: awx/main/conf.py:160 +#: awx/main/conf.py:184 msgid "" "Additional paths to hide from isolated processes. Enter one path per line." msgstr "" -#: awx/main/conf.py:169 +#: awx/main/conf.py:193 msgid "Paths to expose to isolated jobs" msgstr "" -#: awx/main/conf.py:170 +#: awx/main/conf.py:194 msgid "" "Whitelist of paths that would otherwise be hidden to expose to isolated " "jobs. Enter one path per line." msgstr "" -#: awx/main/conf.py:179 +#: awx/main/conf.py:203 msgid "Isolated status check interval" msgstr "" -#: awx/main/conf.py:180 +#: awx/main/conf.py:204 msgid "" "The number of seconds to sleep between status checks for jobs running on " "isolated instances." msgstr "" -#: awx/main/conf.py:189 +#: awx/main/conf.py:213 msgid "Isolated launch timeout" msgstr "" -#: awx/main/conf.py:190 +#: awx/main/conf.py:214 msgid "" "The timeout (in seconds) for launching jobs on isolated instances. This " "includes the time needed to copy source control files (playbooks) to the " "isolated instance." msgstr "" -#: awx/main/conf.py:201 +#: awx/main/conf.py:225 msgid "Isolated connection timeout" msgstr "" -#: awx/main/conf.py:202 +#: awx/main/conf.py:226 msgid "" "Ansible SSH connection timeout (in seconds) to use when communicating with " "isolated instances. Value should be substantially greater than expected " "network latency." msgstr "" -#: awx/main/conf.py:212 +#: awx/main/conf.py:236 msgid "Generate RSA keys for isolated instances" msgstr "" -#: awx/main/conf.py:213 +#: awx/main/conf.py:237 msgid "" "If set, a random RSA key will be generated and distributed to isolated " "instances. To disable this behavior and manage authentication for isolated " "instances outside of Tower, disable this setting." msgstr "" -#: awx/main/conf.py:227 awx/main/conf.py:228 +#: awx/main/conf.py:251 awx/main/conf.py:252 msgid "The RSA private key for SSH traffic to isolated instances" msgstr "" -#: awx/main/conf.py:239 awx/main/conf.py:240 +#: awx/main/conf.py:263 awx/main/conf.py:264 msgid "The RSA public key for SSH traffic to isolated instances" msgstr "" -#: awx/main/conf.py:249 +#: awx/main/conf.py:273 msgid "Extra Environment Variables" msgstr "" -#: awx/main/conf.py:250 +#: awx/main/conf.py:274 msgid "" "Additional environment variables set for playbook runs, inventory updates, " "project updates, and notification sending." msgstr "" -#: awx/main/conf.py:260 +#: awx/main/conf.py:284 msgid "Standard Output Maximum Display Size" msgstr "" -#: awx/main/conf.py:261 +#: awx/main/conf.py:285 msgid "" "Maximum Size of Standard Output in bytes to display before requiring the " "output be downloaded." msgstr "" -#: awx/main/conf.py:270 +#: awx/main/conf.py:294 msgid "Job Event Standard Output Maximum Display Size" msgstr "" -#: awx/main/conf.py:271 +#: awx/main/conf.py:296 msgid "" "Maximum Size of Standard Output in bytes to display for a single job or ad " "hoc command event. `stdout` will end with `…` when truncated." 
msgstr "" -#: awx/main/conf.py:280 +#: awx/main/conf.py:305 msgid "Maximum Scheduled Jobs" msgstr "" -#: awx/main/conf.py:281 +#: awx/main/conf.py:306 msgid "" "Maximum number of the same job template that can be waiting to run when " "launching from a schedule before no more are created." msgstr "" -#: awx/main/conf.py:290 +#: awx/main/conf.py:315 msgid "Ansible Callback Plugins" msgstr "" -#: awx/main/conf.py:291 +#: awx/main/conf.py:316 msgid "" "List of paths to search for extra callback plugins to be used when running " "jobs. Enter one path per line." msgstr "" -#: awx/main/conf.py:301 +#: awx/main/conf.py:326 msgid "Default Job Timeout" msgstr "" -#: awx/main/conf.py:302 +#: awx/main/conf.py:327 msgid "" "Maximum time in seconds to allow jobs to run. Use value of 0 to indicate " "that no timeout should be imposed. A timeout set on an individual job " "template will override this." msgstr "" -#: awx/main/conf.py:313 +#: awx/main/conf.py:338 msgid "Default Inventory Update Timeout" msgstr "" -#: awx/main/conf.py:314 +#: awx/main/conf.py:339 msgid "" "Maximum time in seconds to allow inventory updates to run. Use value of 0 to " "indicate that no timeout should be imposed. A timeout set on an individual " "inventory source will override this." msgstr "" -#: awx/main/conf.py:325 +#: awx/main/conf.py:350 msgid "Default Project Update Timeout" msgstr "" -#: awx/main/conf.py:326 +#: awx/main/conf.py:351 msgid "" "Maximum time in seconds to allow project updates to run. Use value of 0 to " "indicate that no timeout should be imposed. A timeout set on an individual " "project will override this." msgstr "" -#: awx/main/conf.py:337 +#: awx/main/conf.py:362 msgid "Per-Host Ansible Fact Cache Timeout" msgstr "" -#: awx/main/conf.py:338 +#: awx/main/conf.py:363 msgid "" "Maximum time, in seconds, that stored Ansible facts are considered valid " "since the last time they were modified. Only valid, non-stale, facts will be " "accessible by a playbook. Note, this does not influence the deletion of " -"ansible_facts from the database." +"ansible_facts from the database. Use a value of 0 to indicate that no " +"timeout should be imposed." msgstr "" -#: awx/main/conf.py:350 +#: awx/main/conf.py:376 msgid "Logging Aggregator" msgstr "" -#: awx/main/conf.py:351 +#: awx/main/conf.py:377 msgid "Hostname/IP where external logs will be sent to." msgstr "" -#: awx/main/conf.py:352 awx/main/conf.py:363 awx/main/conf.py:375 -#: awx/main/conf.py:385 awx/main/conf.py:397 awx/main/conf.py:412 -#: awx/main/conf.py:424 awx/main/conf.py:433 awx/main/conf.py:443 -#: awx/main/conf.py:453 awx/main/conf.py:464 awx/main/conf.py:476 -#: awx/main/conf.py:489 +#: awx/main/conf.py:378 awx/main/conf.py:389 awx/main/conf.py:401 +#: awx/main/conf.py:411 awx/main/conf.py:423 awx/main/conf.py:438 +#: awx/main/conf.py:450 awx/main/conf.py:459 awx/main/conf.py:469 +#: awx/main/conf.py:479 awx/main/conf.py:490 awx/main/conf.py:502 +#: awx/main/conf.py:515 msgid "Logging" msgstr "" -#: awx/main/conf.py:360 +#: awx/main/conf.py:386 msgid "Logging Aggregator Port" msgstr "" -#: awx/main/conf.py:361 +#: awx/main/conf.py:387 msgid "" "Port on Logging Aggregator to send logs to (if required and not provided in " "Logging Aggregator)." msgstr "" -#: awx/main/conf.py:373 +#: awx/main/conf.py:399 msgid "Logging Aggregator Type" msgstr "" -#: awx/main/conf.py:374 +#: awx/main/conf.py:400 msgid "Format messages for the chosen log aggregator." 
msgstr "" -#: awx/main/conf.py:383 +#: awx/main/conf.py:409 msgid "Logging Aggregator Username" msgstr "" -#: awx/main/conf.py:384 +#: awx/main/conf.py:410 msgid "Username for external log aggregator (if required)." msgstr "" -#: awx/main/conf.py:395 +#: awx/main/conf.py:421 msgid "Logging Aggregator Password/Token" msgstr "" -#: awx/main/conf.py:396 +#: awx/main/conf.py:422 msgid "" "Password or authentication token for external log aggregator (if required)." msgstr "" -#: awx/main/conf.py:405 +#: awx/main/conf.py:431 msgid "Loggers Sending Data to Log Aggregator Form" msgstr "" -#: awx/main/conf.py:406 +#: awx/main/conf.py:432 msgid "" "List of loggers that will send HTTP logs to the collector, these can include " "any or all of: \n" @@ -1741,57 +1983,57 @@ msgid "" "system_tracking - facts gathered from scan jobs." msgstr "" -#: awx/main/conf.py:419 +#: awx/main/conf.py:445 msgid "Log System Tracking Facts Individually" msgstr "" -#: awx/main/conf.py:420 +#: awx/main/conf.py:446 msgid "" -"If set, system tracking facts will be sent for each package, service, " -"orother item found in a scan, allowing for greater search query granularity. " +"If set, system tracking facts will be sent for each package, service, or " +"other item found in a scan, allowing for greater search query granularity. " "If unset, facts will be sent as a single dictionary, allowing for greater " "efficiency in fact processing." msgstr "" -#: awx/main/conf.py:431 +#: awx/main/conf.py:457 msgid "Enable External Logging" msgstr "" -#: awx/main/conf.py:432 +#: awx/main/conf.py:458 msgid "Enable sending logs to external log aggregator." msgstr "" -#: awx/main/conf.py:441 +#: awx/main/conf.py:467 msgid "Cluster-wide Tower unique identifier." msgstr "" -#: awx/main/conf.py:442 +#: awx/main/conf.py:468 msgid "Useful to uniquely identify Tower instances." msgstr "" -#: awx/main/conf.py:451 +#: awx/main/conf.py:477 msgid "Logging Aggregator Protocol" msgstr "" -#: awx/main/conf.py:452 +#: awx/main/conf.py:478 msgid "Protocol used to communicate with log aggregator." msgstr "" -#: awx/main/conf.py:460 +#: awx/main/conf.py:486 msgid "TCP Connection Timeout" msgstr "" -#: awx/main/conf.py:461 +#: awx/main/conf.py:487 msgid "" "Number of seconds for a TCP connection to external log aggregator to " "timeout. Applies to HTTPS and TCP log aggregator protocols." msgstr "" -#: awx/main/conf.py:471 +#: awx/main/conf.py:497 msgid "Enable/disable HTTPS certificate verification" msgstr "" -#: awx/main/conf.py:472 +#: awx/main/conf.py:498 msgid "" "Flag to control enable/disable of certificate verification when " "LOG_AGGREGATOR_PROTOCOL is \"https\". If enabled, Tower's log handler will " @@ -1799,11 +2041,11 @@ msgid "" "connection." msgstr "" -#: awx/main/conf.py:484 +#: awx/main/conf.py:510 msgid "Logging Aggregator Level Threshold" msgstr "" -#: awx/main/conf.py:485 +#: awx/main/conf.py:511 msgid "" "Level threshold used by log handler. Severities from lowest to highest are " "DEBUG, INFO, WARNING, ERROR, CRITICAL. 
Messages less severe than the " @@ -1811,133 +2053,182 @@ msgid "" "anlytics ignore this setting)" msgstr "" -#: awx/main/conf.py:508 awx/sso/conf.py:1105 +#: awx/main/conf.py:534 awx/sso/conf.py:1262 msgid "\n" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:17 msgid "Sudo" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:17 msgid "Su" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:17 msgid "Pbrun" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:17 msgid "Pfexec" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:18 msgid "DZDO" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:18 msgid "Pmrun" msgstr "" -#: awx/main/constants.py:10 +#: awx/main/constants.py:18 msgid "Runas" msgstr "" -#: awx/main/fields.py:57 -#, python-format -msgid "'%s' is not one of ['%s']" +#: awx/main/constants.py:19 +msgid "Enable" msgstr "" -#: awx/main/fields.py:533 +#: awx/main/constants.py:19 +msgid "Doas" +msgstr "" + +#: awx/main/constants.py:21 +msgid "None" +msgstr "" + +#: awx/main/fields.py:62 +#, python-brace-format +msgid "'{value}' is not one of ['{allowed_values}']" +msgstr "" + +#: awx/main/fields.py:418 +#, python-brace-format +msgid "{type} provided in relative path {path}, expected {expected_type}" +msgstr "" + +#: awx/main/fields.py:423 +#, python-brace-format +msgid "{type} provided, expected {expected_type}" +msgstr "" + +#: awx/main/fields.py:428 +#, python-brace-format +msgid "Schema validation error in relative path {path} ({error})" +msgstr "" + +#: awx/main/fields.py:549 +msgid "secret values must be of type string, not {}" +msgstr "" + +#: awx/main/fields.py:584 #, python-format msgid "cannot be set unless \"%s\" is set" msgstr "" -#: awx/main/fields.py:549 +#: awx/main/fields.py:600 #, python-format msgid "required for %s" msgstr "" -#: awx/main/fields.py:573 +#: awx/main/fields.py:624 msgid "must be set when SSH key is encrypted." msgstr "" -#: awx/main/fields.py:579 +#: awx/main/fields.py:630 msgid "should not be set when SSH key is not encrypted." msgstr "" -#: awx/main/fields.py:637 +#: awx/main/fields.py:688 msgid "'dependencies' is not supported for custom credentials." msgstr "" -#: awx/main/fields.py:651 +#: awx/main/fields.py:702 msgid "\"tower\" is a reserved field name" msgstr "" -#: awx/main/fields.py:658 +#: awx/main/fields.py:709 #, python-format msgid "field IDs must be unique (%s)" msgstr "" -#: awx/main/fields.py:671 -#, python-format -msgid "%s not allowed for %s type (%s)" +#: awx/main/fields.py:722 +msgid "become_method is a reserved type name" msgstr "" -#: awx/main/fields.py:755 -#, python-format -msgid "%s uses an undefined field (%s)" +#: awx/main/fields.py:733 +#, python-brace-format +msgid "{sub_key} not allowed for {element_type} type ({element_id})" msgstr "" -#: awx/main/middleware.py:157 +#: awx/main/fields.py:813 +msgid "Must use multi-file syntax when injecting multiple files" +msgstr "" + +#: awx/main/fields.py:831 +#, python-brace-format +msgid "{sub_key} uses an undefined field ({error_msg})" +msgstr "" + +#: awx/main/fields.py:838 +#, python-brace-format +msgid "" +"Syntax error rendering template for {sub_key} inside of {type} ({error_msg})" +msgstr "" + +#: awx/main/middleware.py:146 msgid "Formats of all available named urls" msgstr "" -#: awx/main/middleware.py:158 +#: awx/main/middleware.py:147 msgid "" "Read-only list of key-value pairs that shows the standard format of all " "available named URLs." 
msgstr "" -#: awx/main/middleware.py:160 awx/main/middleware.py:170 +#: awx/main/middleware.py:149 awx/main/middleware.py:159 msgid "Named URL" msgstr "" -#: awx/main/middleware.py:167 +#: awx/main/middleware.py:156 msgid "List of all named url graph nodes." msgstr "" -#: awx/main/middleware.py:168 +#: awx/main/middleware.py:157 msgid "" "Read-only list of key-value pairs that exposes named URL graph topology. Use " "this list to programmatically generate named URLs for resources" msgstr "" -#: awx/main/migrations/_reencrypt.py:25 awx/main/models/notifications.py:33 +#: awx/main/migrations/_reencrypt.py:26 awx/main/models/notifications.py:35 msgid "Email" msgstr "" -#: awx/main/migrations/_reencrypt.py:26 awx/main/models/notifications.py:34 +#: awx/main/migrations/_reencrypt.py:27 awx/main/models/notifications.py:36 msgid "Slack" msgstr "" -#: awx/main/migrations/_reencrypt.py:27 awx/main/models/notifications.py:35 +#: awx/main/migrations/_reencrypt.py:28 awx/main/models/notifications.py:37 msgid "Twilio" msgstr "" -#: awx/main/migrations/_reencrypt.py:28 awx/main/models/notifications.py:36 +#: awx/main/migrations/_reencrypt.py:29 awx/main/models/notifications.py:38 msgid "Pagerduty" msgstr "" -#: awx/main/migrations/_reencrypt.py:29 awx/main/models/notifications.py:37 +#: awx/main/migrations/_reencrypt.py:30 awx/main/models/notifications.py:39 msgid "HipChat" msgstr "" -#: awx/main/migrations/_reencrypt.py:30 awx/main/models/notifications.py:38 +#: awx/main/migrations/_reencrypt.py:31 awx/main/models/notifications.py:41 +msgid "Mattermost" +msgstr "" + +#: awx/main/migrations/_reencrypt.py:32 awx/main/models/notifications.py:40 msgid "Webhook" msgstr "" -#: awx/main/migrations/_reencrypt.py:31 awx/main/models/notifications.py:39 +#: awx/main/migrations/_reencrypt.py:33 awx/main/models/notifications.py:43 msgid "IRC" msgstr "" @@ -1961,244 +2252,325 @@ msgstr "" msgid "Entity was Disassociated with another Entity" msgstr "" -#: awx/main/models/ad_hoc_commands.py:100 +#: awx/main/models/ad_hoc_commands.py:95 msgid "No valid inventory." msgstr "" -#: awx/main/models/ad_hoc_commands.py:107 +#: awx/main/models/ad_hoc_commands.py:102 msgid "You must provide a machine / SSH credential." msgstr "" -#: awx/main/models/ad_hoc_commands.py:118 -#: awx/main/models/ad_hoc_commands.py:126 +#: awx/main/models/ad_hoc_commands.py:113 +#: awx/main/models/ad_hoc_commands.py:121 msgid "Invalid type for ad hoc command" msgstr "" -#: awx/main/models/ad_hoc_commands.py:121 +#: awx/main/models/ad_hoc_commands.py:116 msgid "Unsupported module for ad hoc commands." msgstr "" -#: awx/main/models/ad_hoc_commands.py:129 +#: awx/main/models/ad_hoc_commands.py:124 #, python-format msgid "No argument passed to %s module." 
msgstr "" -#: awx/main/models/ad_hoc_commands.py:245 awx/main/models/jobs.py:911 -msgid "Host Failed" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:246 awx/main/models/jobs.py:912 -msgid "Host OK" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:247 awx/main/models/jobs.py:915 -msgid "Host Unreachable" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:252 awx/main/models/jobs.py:914 -msgid "Host Skipped" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:262 awx/main/models/jobs.py:942 -msgid "Debug" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:263 awx/main/models/jobs.py:943 -msgid "Verbose" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:264 awx/main/models/jobs.py:944 -msgid "Deprecated" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:265 awx/main/models/jobs.py:945 -msgid "Warning" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:266 awx/main/models/jobs.py:946 -msgid "System Warning" -msgstr "" - -#: awx/main/models/ad_hoc_commands.py:267 awx/main/models/jobs.py:947 -#: awx/main/models/unified_jobs.py:64 -msgid "Error" -msgstr "" - -#: awx/main/models/base.py:40 awx/main/models/base.py:46 -#: awx/main/models/base.py:51 +#: awx/main/models/base.py:33 awx/main/models/base.py:39 +#: awx/main/models/base.py:44 awx/main/models/base.py:49 msgid "Run" msgstr "" -#: awx/main/models/base.py:41 awx/main/models/base.py:47 -#: awx/main/models/base.py:52 +#: awx/main/models/base.py:34 awx/main/models/base.py:40 +#: awx/main/models/base.py:45 awx/main/models/base.py:50 msgid "Check" msgstr "" -#: awx/main/models/base.py:42 +#: awx/main/models/base.py:35 msgid "Scan" msgstr "" -#: awx/main/models/credential.py:86 +#: awx/main/models/credential/__init__.py:110 msgid "Host" msgstr "" -#: awx/main/models/credential.py:87 +#: awx/main/models/credential/__init__.py:111 msgid "The hostname or IP address to use." msgstr "" -#: awx/main/models/credential.py:93 +#: awx/main/models/credential/__init__.py:117 msgid "Username" msgstr "" -#: awx/main/models/credential.py:94 +#: awx/main/models/credential/__init__.py:118 msgid "Username for this credential." msgstr "" -#: awx/main/models/credential.py:100 +#: awx/main/models/credential/__init__.py:124 msgid "Password" msgstr "" -#: awx/main/models/credential.py:101 +#: awx/main/models/credential/__init__.py:125 msgid "" "Password for this credential (or \"ASK\" to prompt the user for machine " "credentials)." msgstr "" -#: awx/main/models/credential.py:108 +#: awx/main/models/credential/__init__.py:132 msgid "Security Token" msgstr "" -#: awx/main/models/credential.py:109 +#: awx/main/models/credential/__init__.py:133 msgid "Security Token for this credential" msgstr "" -#: awx/main/models/credential.py:115 +#: awx/main/models/credential/__init__.py:139 msgid "Project" msgstr "" -#: awx/main/models/credential.py:116 +#: awx/main/models/credential/__init__.py:140 msgid "The identifier for the project." msgstr "" -#: awx/main/models/credential.py:122 +#: awx/main/models/credential/__init__.py:146 msgid "Domain" msgstr "" -#: awx/main/models/credential.py:123 +#: awx/main/models/credential/__init__.py:147 msgid "The identifier for the domain." msgstr "" -#: awx/main/models/credential.py:128 +#: awx/main/models/credential/__init__.py:152 msgid "SSH private key" msgstr "" -#: awx/main/models/credential.py:129 +#: awx/main/models/credential/__init__.py:153 msgid "RSA or DSA private key to be used instead of password." 
msgstr "" -#: awx/main/models/credential.py:135 +#: awx/main/models/credential/__init__.py:159 msgid "SSH key unlock" msgstr "" -#: awx/main/models/credential.py:136 +#: awx/main/models/credential/__init__.py:160 msgid "" "Passphrase to unlock SSH private key if encrypted (or \"ASK\" to prompt the " "user for machine credentials)." msgstr "" -#: awx/main/models/credential.py:143 -msgid "None" -msgstr "" - -#: awx/main/models/credential.py:144 +#: awx/main/models/credential/__init__.py:168 msgid "Privilege escalation method." msgstr "" -#: awx/main/models/credential.py:150 +#: awx/main/models/credential/__init__.py:174 msgid "Privilege escalation username." msgstr "" -#: awx/main/models/credential.py:156 +#: awx/main/models/credential/__init__.py:180 msgid "Password for privilege escalation method." msgstr "" -#: awx/main/models/credential.py:162 +#: awx/main/models/credential/__init__.py:186 msgid "Vault password (or \"ASK\" to prompt the user)." msgstr "" -#: awx/main/models/credential.py:166 +#: awx/main/models/credential/__init__.py:190 msgid "Whether to use the authorize mechanism." msgstr "" -#: awx/main/models/credential.py:172 +#: awx/main/models/credential/__init__.py:196 msgid "Password used by the authorize mechanism." msgstr "" -#: awx/main/models/credential.py:178 +#: awx/main/models/credential/__init__.py:202 msgid "Client Id or Application Id for the credential" msgstr "" -#: awx/main/models/credential.py:184 +#: awx/main/models/credential/__init__.py:208 msgid "Secret Token for this credential" msgstr "" -#: awx/main/models/credential.py:190 +#: awx/main/models/credential/__init__.py:214 msgid "Subscription identifier for this credential" msgstr "" -#: awx/main/models/credential.py:196 +#: awx/main/models/credential/__init__.py:220 msgid "Tenant identifier for this credential" msgstr "" -#: awx/main/models/credential.py:220 +#: awx/main/models/credential/__init__.py:244 msgid "" "Specify the type of credential you want to create. Refer to the Ansible " "Tower documentation for details on each type." msgstr "" -#: awx/main/models/credential.py:234 awx/main/models/credential.py:420 +#: awx/main/models/credential/__init__.py:258 +#: awx/main/models/credential/__init__.py:476 msgid "" "Enter inputs using either JSON or YAML syntax. Use the radio button to " "toggle between the two. Refer to the Ansible Tower documentation for example " "syntax." msgstr "" -#: awx/main/models/credential.py:401 +#: awx/main/models/credential/__init__.py:457 msgid "Machine" msgstr "" -#: awx/main/models/credential.py:402 +#: awx/main/models/credential/__init__.py:458 msgid "Vault" msgstr "" -#: awx/main/models/credential.py:403 +#: awx/main/models/credential/__init__.py:459 msgid "Network" msgstr "" -#: awx/main/models/credential.py:404 +#: awx/main/models/credential/__init__.py:460 msgid "Source Control" msgstr "" -#: awx/main/models/credential.py:405 +#: awx/main/models/credential/__init__.py:461 msgid "Cloud" msgstr "" -#: awx/main/models/credential.py:406 +#: awx/main/models/credential/__init__.py:462 msgid "Insights" msgstr "" -#: awx/main/models/credential.py:427 +#: awx/main/models/credential/__init__.py:483 msgid "" "Enter injectors using either JSON or YAML syntax. Use the radio button to " "toggle between the two. Refer to the Ansible Tower documentation for example " "syntax." 
msgstr "" -#: awx/main/models/credential.py:478 +#: awx/main/models/credential/__init__.py:534 #, python-format msgid "adding %s credential type" msgstr "" +#: awx/main/models/events.py:71 awx/main/models/events.py:598 +msgid "Host Failed" +msgstr "" + +#: awx/main/models/events.py:72 awx/main/models/events.py:599 +msgid "Host OK" +msgstr "" + +#: awx/main/models/events.py:73 +msgid "Host Failure" +msgstr "" + +#: awx/main/models/events.py:74 awx/main/models/events.py:605 +msgid "Host Skipped" +msgstr "" + +#: awx/main/models/events.py:75 awx/main/models/events.py:600 +msgid "Host Unreachable" +msgstr "" + +#: awx/main/models/events.py:76 awx/main/models/events.py:90 +msgid "No Hosts Remaining" +msgstr "" + +#: awx/main/models/events.py:77 +msgid "Host Polling" +msgstr "" + +#: awx/main/models/events.py:78 +msgid "Host Async OK" +msgstr "" + +#: awx/main/models/events.py:79 +msgid "Host Async Failure" +msgstr "" + +#: awx/main/models/events.py:80 +msgid "Item OK" +msgstr "" + +#: awx/main/models/events.py:81 +msgid "Item Failed" +msgstr "" + +#: awx/main/models/events.py:82 +msgid "Item Skipped" +msgstr "" + +#: awx/main/models/events.py:83 +msgid "Host Retry" +msgstr "" + +#: awx/main/models/events.py:85 +msgid "File Difference" +msgstr "" + +#: awx/main/models/events.py:86 +msgid "Playbook Started" +msgstr "" + +#: awx/main/models/events.py:87 +msgid "Running Handlers" +msgstr "" + +#: awx/main/models/events.py:88 +msgid "Including File" +msgstr "" + +#: awx/main/models/events.py:89 +msgid "No Hosts Matched" +msgstr "" + +#: awx/main/models/events.py:91 +msgid "Task Started" +msgstr "" + +#: awx/main/models/events.py:93 +msgid "Variables Prompted" +msgstr "" + +#: awx/main/models/events.py:94 +msgid "Gathering Facts" +msgstr "" + +#: awx/main/models/events.py:95 +msgid "internal: on Import for Host" +msgstr "" + +#: awx/main/models/events.py:96 +msgid "internal: on Not Import for Host" +msgstr "" + +#: awx/main/models/events.py:97 +msgid "Play Started" +msgstr "" + +#: awx/main/models/events.py:98 +msgid "Playbook Complete" +msgstr "" + +#: awx/main/models/events.py:102 awx/main/models/events.py:615 +msgid "Debug" +msgstr "" + +#: awx/main/models/events.py:103 awx/main/models/events.py:616 +msgid "Verbose" +msgstr "" + +#: awx/main/models/events.py:104 awx/main/models/events.py:617 +msgid "Deprecated" +msgstr "" + +#: awx/main/models/events.py:105 awx/main/models/events.py:618 +msgid "Warning" +msgstr "" + +#: awx/main/models/events.py:106 awx/main/models/events.py:619 +msgid "System Warning" +msgstr "" + +#: awx/main/models/events.py:107 awx/main/models/events.py:620 +#: awx/main/models/unified_jobs.py:67 +msgid "Error" +msgstr "" + #: awx/main/models/fact.py:25 msgid "Host for the facts that the fact scan captured." msgstr "" @@ -2213,372 +2585,391 @@ msgid "" "host." msgstr "" -#: awx/main/models/ha.py:78 +#: awx/main/models/ha.py:129 msgid "Instances that are members of this InstanceGroup" msgstr "" -#: awx/main/models/ha.py:83 +#: awx/main/models/ha.py:134 msgid "Instance Group to remotely control this group." 
msgstr "" -#: awx/main/models/inventory.py:52 +#: awx/main/models/ha.py:141 +msgid "Percentage of Instances to automatically assign to this group" +msgstr "" + +#: awx/main/models/ha.py:145 +msgid "" +"Static minimum number of Instances to automatically assign to this group" +msgstr "" + +#: awx/main/models/ha.py:150 +msgid "" +"List of exact-match Instances that will always be automatically assigned to " +"this group" +msgstr "" + +#: awx/main/models/inventory.py:61 msgid "Hosts have a direct link to this inventory." msgstr "" -#: awx/main/models/inventory.py:53 +#: awx/main/models/inventory.py:62 msgid "Hosts for inventory generated using the host_filter property." msgstr "" -#: awx/main/models/inventory.py:58 +#: awx/main/models/inventory.py:67 msgid "inventories" msgstr "" -#: awx/main/models/inventory.py:65 +#: awx/main/models/inventory.py:74 msgid "Organization containing this inventory." msgstr "" -#: awx/main/models/inventory.py:72 +#: awx/main/models/inventory.py:81 msgid "Inventory variables in JSON or YAML format." msgstr "" -#: awx/main/models/inventory.py:77 +#: awx/main/models/inventory.py:86 msgid "Flag indicating whether any hosts in this inventory have failed." msgstr "" -#: awx/main/models/inventory.py:82 +#: awx/main/models/inventory.py:91 msgid "Total number of hosts in this inventory." msgstr "" -#: awx/main/models/inventory.py:87 +#: awx/main/models/inventory.py:96 msgid "Number of hosts in this inventory with active failures." msgstr "" -#: awx/main/models/inventory.py:92 +#: awx/main/models/inventory.py:101 msgid "Total number of groups in this inventory." msgstr "" -#: awx/main/models/inventory.py:97 +#: awx/main/models/inventory.py:106 msgid "Number of groups in this inventory with active failures." msgstr "" -#: awx/main/models/inventory.py:102 +#: awx/main/models/inventory.py:111 msgid "" "Flag indicating whether this inventory has any external inventory sources." msgstr "" -#: awx/main/models/inventory.py:107 +#: awx/main/models/inventory.py:116 msgid "" "Total number of external inventory sources configured within this inventory." msgstr "" -#: awx/main/models/inventory.py:112 +#: awx/main/models/inventory.py:121 msgid "Number of external inventory sources in this inventory with failures." msgstr "" -#: awx/main/models/inventory.py:119 +#: awx/main/models/inventory.py:128 msgid "Kind of inventory being represented." msgstr "" -#: awx/main/models/inventory.py:125 +#: awx/main/models/inventory.py:134 msgid "Filter that will be applied to the hosts of this inventory." msgstr "" -#: awx/main/models/inventory.py:152 +#: awx/main/models/inventory.py:161 msgid "" "Credentials to be used by hosts belonging to this inventory when accessing " "Red Hat Insights API." msgstr "" -#: awx/main/models/inventory.py:161 +#: awx/main/models/inventory.py:170 msgid "Flag indicating the inventory is being deleted." msgstr "" -#: awx/main/models/inventory.py:374 +#: awx/main/models/inventory.py:459 msgid "Assignment not allowed for Smart Inventory" msgstr "" -#: awx/main/models/inventory.py:376 awx/main/models/projects.py:148 +#: awx/main/models/inventory.py:461 awx/main/models/projects.py:159 msgid "Credential kind must be 'insights'." msgstr "" -#: awx/main/models/inventory.py:443 +#: awx/main/models/inventory.py:546 msgid "Is this host online and available for running jobs?" 
msgstr "" -#: awx/main/models/inventory.py:449 +#: awx/main/models/inventory.py:552 msgid "" "The value used by the remote inventory source to uniquely identify the host" msgstr "" -#: awx/main/models/inventory.py:454 +#: awx/main/models/inventory.py:557 msgid "Host variables in JSON or YAML format." msgstr "" -#: awx/main/models/inventory.py:476 +#: awx/main/models/inventory.py:579 msgid "Flag indicating whether the last job failed for this host." msgstr "" -#: awx/main/models/inventory.py:481 +#: awx/main/models/inventory.py:584 msgid "" "Flag indicating whether this host was created/updated from any external " "inventory sources." msgstr "" -#: awx/main/models/inventory.py:487 +#: awx/main/models/inventory.py:590 msgid "Inventory source(s) that created or modified this host." msgstr "" -#: awx/main/models/inventory.py:492 +#: awx/main/models/inventory.py:595 msgid "Arbitrary JSON structure of most recent ansible_facts, per-host." msgstr "" -#: awx/main/models/inventory.py:498 +#: awx/main/models/inventory.py:601 msgid "The date and time ansible_facts was last modified." msgstr "" -#: awx/main/models/inventory.py:505 +#: awx/main/models/inventory.py:608 msgid "Red Hat Insights host unique identifier." msgstr "" -#: awx/main/models/inventory.py:633 +#: awx/main/models/inventory.py:743 msgid "Group variables in JSON or YAML format." msgstr "" -#: awx/main/models/inventory.py:639 +#: awx/main/models/inventory.py:749 msgid "Hosts associated directly with this group." msgstr "" -#: awx/main/models/inventory.py:644 +#: awx/main/models/inventory.py:754 msgid "Total number of hosts directly or indirectly in this group." msgstr "" -#: awx/main/models/inventory.py:649 +#: awx/main/models/inventory.py:759 msgid "Flag indicating whether this group has any hosts with active failures." msgstr "" -#: awx/main/models/inventory.py:654 +#: awx/main/models/inventory.py:764 msgid "Number of hosts in this group with active failures." msgstr "" -#: awx/main/models/inventory.py:659 +#: awx/main/models/inventory.py:769 msgid "Total number of child groups contained within this group." msgstr "" -#: awx/main/models/inventory.py:664 +#: awx/main/models/inventory.py:774 msgid "Number of child groups within this group that have active failures." msgstr "" -#: awx/main/models/inventory.py:669 +#: awx/main/models/inventory.py:779 msgid "" "Flag indicating whether this group was created/updated from any external " "inventory sources." msgstr "" -#: awx/main/models/inventory.py:675 +#: awx/main/models/inventory.py:785 msgid "Inventory source(s) that created or modified this group." 
msgstr "" -#: awx/main/models/inventory.py:865 awx/main/models/projects.py:42 -#: awx/main/models/unified_jobs.py:428 +#: awx/main/models/inventory.py:981 awx/main/models/projects.py:53 +#: awx/main/models/unified_jobs.py:518 msgid "Manual" msgstr "" -#: awx/main/models/inventory.py:866 +#: awx/main/models/inventory.py:982 msgid "File, Directory or Script" msgstr "" -#: awx/main/models/inventory.py:867 +#: awx/main/models/inventory.py:983 msgid "Sourced from a Project" msgstr "" -#: awx/main/models/inventory.py:868 +#: awx/main/models/inventory.py:984 msgid "Amazon EC2" msgstr "" -#: awx/main/models/inventory.py:869 +#: awx/main/models/inventory.py:985 msgid "Google Compute Engine" msgstr "" -#: awx/main/models/inventory.py:870 +#: awx/main/models/inventory.py:986 msgid "Microsoft Azure Resource Manager" msgstr "" -#: awx/main/models/inventory.py:871 +#: awx/main/models/inventory.py:987 msgid "VMware vCenter" msgstr "" -#: awx/main/models/inventory.py:872 +#: awx/main/models/inventory.py:988 msgid "Red Hat Satellite 6" msgstr "" -#: awx/main/models/inventory.py:873 +#: awx/main/models/inventory.py:989 msgid "Red Hat CloudForms" msgstr "" -#: awx/main/models/inventory.py:874 +#: awx/main/models/inventory.py:990 msgid "OpenStack" msgstr "" -#: awx/main/models/inventory.py:875 -msgid "oVirt4" +#: awx/main/models/inventory.py:991 +msgid "Red Hat Virtualization" msgstr "" -#: awx/main/models/inventory.py:876 +#: awx/main/models/inventory.py:992 msgid "Ansible Tower" msgstr "" -#: awx/main/models/inventory.py:877 +#: awx/main/models/inventory.py:993 msgid "Custom Script" msgstr "" -#: awx/main/models/inventory.py:994 +#: awx/main/models/inventory.py:1110 msgid "Inventory source variables in YAML or JSON format." msgstr "" -#: awx/main/models/inventory.py:1013 +#: awx/main/models/inventory.py:1121 msgid "" "Comma-separated list of filter expressions (EC2 only). Hosts are imported " "when ANY of the filters match." msgstr "" -#: awx/main/models/inventory.py:1019 +#: awx/main/models/inventory.py:1127 msgid "Limit groups automatically created from inventory source (EC2 only)." msgstr "" -#: awx/main/models/inventory.py:1023 +#: awx/main/models/inventory.py:1131 msgid "Overwrite local groups and hosts from remote inventory source." msgstr "" -#: awx/main/models/inventory.py:1027 +#: awx/main/models/inventory.py:1135 msgid "Overwrite local variables from remote inventory source." msgstr "" -#: awx/main/models/inventory.py:1032 awx/main/models/jobs.py:160 -#: awx/main/models/projects.py:117 +#: awx/main/models/inventory.py:1140 awx/main/models/jobs.py:140 +#: awx/main/models/projects.py:128 msgid "The amount of time (in seconds) to run before the task is canceled." 
msgstr "" -#: awx/main/models/inventory.py:1065 +#: awx/main/models/inventory.py:1173 msgid "Image ID" msgstr "" -#: awx/main/models/inventory.py:1066 +#: awx/main/models/inventory.py:1174 msgid "Availability Zone" msgstr "" -#: awx/main/models/inventory.py:1067 +#: awx/main/models/inventory.py:1175 msgid "Account" msgstr "" -#: awx/main/models/inventory.py:1068 +#: awx/main/models/inventory.py:1176 msgid "Instance ID" msgstr "" -#: awx/main/models/inventory.py:1069 +#: awx/main/models/inventory.py:1177 msgid "Instance State" msgstr "" -#: awx/main/models/inventory.py:1070 +#: awx/main/models/inventory.py:1178 +msgid "Platform" +msgstr "" + +#: awx/main/models/inventory.py:1179 msgid "Instance Type" msgstr "" -#: awx/main/models/inventory.py:1071 +#: awx/main/models/inventory.py:1180 msgid "Key Name" msgstr "" -#: awx/main/models/inventory.py:1072 +#: awx/main/models/inventory.py:1181 msgid "Region" msgstr "" -#: awx/main/models/inventory.py:1073 +#: awx/main/models/inventory.py:1182 msgid "Security Group" msgstr "" -#: awx/main/models/inventory.py:1074 +#: awx/main/models/inventory.py:1183 msgid "Tags" msgstr "" -#: awx/main/models/inventory.py:1075 +#: awx/main/models/inventory.py:1184 msgid "Tag None" msgstr "" -#: awx/main/models/inventory.py:1076 +#: awx/main/models/inventory.py:1185 msgid "VPC ID" msgstr "" -#: awx/main/models/inventory.py:1145 +#: awx/main/models/inventory.py:1253 #, python-format msgid "" "Cloud-based inventory sources (such as %s) require credentials for the " "matching cloud service." msgstr "" -#: awx/main/models/inventory.py:1152 +#: awx/main/models/inventory.py:1259 msgid "Credential is required for a cloud source." msgstr "" -#: awx/main/models/inventory.py:1155 +#: awx/main/models/inventory.py:1262 msgid "" "Credentials of type machine, source control, insights and vault are " "disallowed for custom inventory sources." msgstr "" -#: awx/main/models/inventory.py:1179 +#: awx/main/models/inventory.py:1314 #, python-format msgid "Invalid %(source)s region: %(region)s" msgstr "" -#: awx/main/models/inventory.py:1203 +#: awx/main/models/inventory.py:1338 #, python-format msgid "Invalid filter expression: %(filter)s" msgstr "" -#: awx/main/models/inventory.py:1224 +#: awx/main/models/inventory.py:1359 #, python-format msgid "Invalid group by choice: %(choice)s" msgstr "" -#: awx/main/models/inventory.py:1259 +#: awx/main/models/inventory.py:1394 msgid "Project containing inventory file used as source." msgstr "" -#: awx/main/models/inventory.py:1407 +#: awx/main/models/inventory.py:1555 #, python-format msgid "" "Unable to configure this item for cloud sync. It is already managed by %s." msgstr "" -#: awx/main/models/inventory.py:1417 +#: awx/main/models/inventory.py:1565 msgid "" "More than one SCM-based inventory source with update on project update per-" "inventory not allowed." msgstr "" -#: awx/main/models/inventory.py:1424 +#: awx/main/models/inventory.py:1572 msgid "" "Cannot update SCM-based inventory source on launch if set to update on " "project update. Instead, configure the corresponding source project to " "update on launch." msgstr "" -#: awx/main/models/inventory.py:1430 -msgid "SCM type sources must set `overwrite_vars` to `true`." +#: awx/main/models/inventory.py:1579 +msgid "SCM type sources must set `overwrite_vars` to `true` until Ansible 2.5." msgstr "" -#: awx/main/models/inventory.py:1435 +#: awx/main/models/inventory.py:1584 msgid "Cannot set source_path if not SCM type." 
msgstr "" -#: awx/main/models/inventory.py:1460 +#: awx/main/models/inventory.py:1615 msgid "" "Inventory files from this Project Update were used for the inventory update." msgstr "" -#: awx/main/models/inventory.py:1573 +#: awx/main/models/inventory.py:1725 msgid "Inventory script contents" msgstr "" -#: awx/main/models/inventory.py:1578 +#: awx/main/models/inventory.py:1730 msgid "Organization owning this inventory script" msgstr "" @@ -2588,288 +2979,280 @@ msgid "" "shown in the standard output" msgstr "" -#: awx/main/models/jobs.py:164 +#: awx/main/models/jobs.py:145 msgid "" "If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts " "at the end of a playbook run to the database and caching facts for use by " "Ansible." msgstr "" -#: awx/main/models/jobs.py:173 -msgid "You must provide an SSH credential." -msgstr "" - -#: awx/main/models/jobs.py:181 +#: awx/main/models/jobs.py:163 msgid "You must provide a Vault credential." msgstr "" -#: awx/main/models/jobs.py:317 +#: awx/main/models/jobs.py:308 msgid "Job Template must provide 'inventory' or allow prompting for it." msgstr "" -#: awx/main/models/jobs.py:321 -msgid "Job Template must provide 'credential' or allow prompting for it." +#: awx/main/models/jobs.py:403 +msgid "Field is not configured to prompt on launch." msgstr "" -#: awx/main/models/jobs.py:427 -msgid "Cannot override job_type to or from a scan job." +#: awx/main/models/jobs.py:407 +msgid "Saved launch configurations cannot provide passwords needed to start." msgstr "" -#: awx/main/models/jobs.py:493 awx/main/models/projects.py:263 +#: awx/main/models/jobs.py:415 +msgid "Job Template {} is missing or undefined." +msgstr "" + +#: awx/main/models/jobs.py:496 awx/main/models/projects.py:276 msgid "SCM Revision" msgstr "" -#: awx/main/models/jobs.py:494 +#: awx/main/models/jobs.py:497 msgid "The SCM Revision from the Project used for this job, if available" msgstr "" -#: awx/main/models/jobs.py:502 +#: awx/main/models/jobs.py:505 msgid "" "The SCM Refresh task used to make sure the playbooks were available for the " "job run" msgstr "" -#: awx/main/models/jobs.py:809 +#: awx/main/models/jobs.py:632 +#, python-brace-format +msgid "{status_value} is not a valid status option." 
+msgstr "" + +#: awx/main/models/jobs.py:991 msgid "job host summaries" msgstr "" -#: awx/main/models/jobs.py:913 -msgid "Host Failure" -msgstr "" - -#: awx/main/models/jobs.py:916 awx/main/models/jobs.py:930 -msgid "No Hosts Remaining" -msgstr "" - -#: awx/main/models/jobs.py:917 -msgid "Host Polling" -msgstr "" - -#: awx/main/models/jobs.py:918 -msgid "Host Async OK" -msgstr "" - -#: awx/main/models/jobs.py:919 -msgid "Host Async Failure" -msgstr "" - -#: awx/main/models/jobs.py:920 -msgid "Item OK" -msgstr "" - -#: awx/main/models/jobs.py:921 -msgid "Item Failed" -msgstr "" - -#: awx/main/models/jobs.py:922 -msgid "Item Skipped" -msgstr "" - -#: awx/main/models/jobs.py:923 -msgid "Host Retry" -msgstr "" - -#: awx/main/models/jobs.py:925 -msgid "File Difference" -msgstr "" - -#: awx/main/models/jobs.py:926 -msgid "Playbook Started" -msgstr "" - -#: awx/main/models/jobs.py:927 -msgid "Running Handlers" -msgstr "" - -#: awx/main/models/jobs.py:928 -msgid "Including File" -msgstr "" - -#: awx/main/models/jobs.py:929 -msgid "No Hosts Matched" -msgstr "" - -#: awx/main/models/jobs.py:931 -msgid "Task Started" -msgstr "" - -#: awx/main/models/jobs.py:933 -msgid "Variables Prompted" -msgstr "" - -#: awx/main/models/jobs.py:934 -msgid "Gathering Facts" -msgstr "" - -#: awx/main/models/jobs.py:935 -msgid "internal: on Import for Host" -msgstr "" - -#: awx/main/models/jobs.py:936 -msgid "internal: on Not Import for Host" -msgstr "" - -#: awx/main/models/jobs.py:937 -msgid "Play Started" -msgstr "" - -#: awx/main/models/jobs.py:938 -msgid "Playbook Complete" -msgstr "" - -#: awx/main/models/jobs.py:1351 +#: awx/main/models/jobs.py:1062 msgid "Remove jobs older than a certain number of days" msgstr "" -#: awx/main/models/jobs.py:1352 +#: awx/main/models/jobs.py:1063 msgid "Remove activity stream entries older than a certain number of days" msgstr "" -#: awx/main/models/jobs.py:1353 +#: awx/main/models/jobs.py:1064 msgid "Purge and/or reduce the granularity of system tracking data" msgstr "" +#: awx/main/models/jobs.py:1134 +#, python-brace-format +msgid "Variables {list_of_keys} are not allowed for system jobs." +msgstr "" + +#: awx/main/models/jobs.py:1149 +msgid "days must be a positive integer." +msgstr "" + #: awx/main/models/label.py:29 msgid "Organization this label belongs to." msgstr "" -#: awx/main/models/notifications.py:138 awx/main/models/unified_jobs.py:59 +#: awx/main/models/mixins.py:309 +#, python-brace-format +msgid "" +"Variables {list_of_keys} are not allowed on launch. Check the Prompt on " +"Launch setting on the Job Template to include Extra Variables." 
+msgstr "" + +#: awx/main/models/mixins.py:446 +msgid "{} is not a valid virtualenv in {}" +msgstr "" + +#: awx/main/models/notifications.py:42 +msgid "Rocket.Chat" +msgstr "" + +#: awx/main/models/notifications.py:142 awx/main/models/unified_jobs.py:62 msgid "Pending" msgstr "" -#: awx/main/models/notifications.py:139 awx/main/models/unified_jobs.py:62 +#: awx/main/models/notifications.py:143 awx/main/models/unified_jobs.py:65 msgid "Successful" msgstr "" -#: awx/main/models/notifications.py:140 awx/main/models/unified_jobs.py:63 +#: awx/main/models/notifications.py:144 awx/main/models/unified_jobs.py:66 msgid "Failed" msgstr "" -#: awx/main/models/organization.py:132 -msgid "Token not invalidated" +#: awx/main/models/notifications.py:218 +msgid "status_str must be either succeeded or failed" msgstr "" -#: awx/main/models/organization.py:133 -msgid "Token is expired" +#: awx/main/models/oauth.py:27 +msgid "application" msgstr "" -#: awx/main/models/organization.py:134 -msgid "The maximum number of allowed sessions for this user has been exceeded." +#: awx/main/models/oauth.py:32 +msgid "Confidential" msgstr "" -#: awx/main/models/organization.py:137 -msgid "Invalid token" +#: awx/main/models/oauth.py:33 +msgid "Public" msgstr "" -#: awx/main/models/organization.py:155 -msgid "Reason the auth token was invalidated." +#: awx/main/models/oauth.py:41 +msgid "Authorization code" msgstr "" -#: awx/main/models/organization.py:194 -msgid "Invalid reason specified" +#: awx/main/models/oauth.py:42 +msgid "Implicit" msgstr "" -#: awx/main/models/projects.py:43 +#: awx/main/models/oauth.py:43 +msgid "Resource owner password-based" +msgstr "" + +#: awx/main/models/oauth.py:44 +msgid "Client credentials" +msgstr "" + +#: awx/main/models/oauth.py:59 +msgid "Organization containing this application." +msgstr "" + +#: awx/main/models/oauth.py:68 +msgid "" +"Used for more stringent verification of access to an application when " +"creating a token." +msgstr "" + +#: awx/main/models/oauth.py:73 +msgid "" +"Set to Public or Confidential depending on how secure the client device is." +msgstr "" + +#: awx/main/models/oauth.py:77 +msgid "" +"Set True to skip authorization step for completely trusted applications." +msgstr "" + +#: awx/main/models/oauth.py:82 +msgid "" +"The Grant type the user must use to acquire tokens for this application." +msgstr "" + +#: awx/main/models/oauth.py:90 +msgid "access token" +msgstr "" + +#: awx/main/models/oauth.py:98 +msgid "The user representing the token owner" +msgstr "" + +#: awx/main/models/oauth.py:112 +msgid "" +"Allowed scopes, further restricts user's permissions. Must be a simple space-" +"separated string with allowed scopes ['read', 'write']." +msgstr "" + +#: awx/main/models/projects.py:54 msgid "Git" msgstr "" -#: awx/main/models/projects.py:44 +#: awx/main/models/projects.py:55 msgid "Mercurial" msgstr "" -#: awx/main/models/projects.py:45 +#: awx/main/models/projects.py:56 msgid "Subversion" msgstr "" -#: awx/main/models/projects.py:46 +#: awx/main/models/projects.py:57 msgid "Red Hat Insights" msgstr "" -#: awx/main/models/projects.py:72 +#: awx/main/models/projects.py:83 msgid "" "Local path (relative to PROJECTS_ROOT) containing playbooks and related " "files for this project." msgstr "" -#: awx/main/models/projects.py:81 +#: awx/main/models/projects.py:92 msgid "SCM Type" msgstr "" -#: awx/main/models/projects.py:82 +#: awx/main/models/projects.py:93 msgid "Specifies the source control system used to store the project."
msgstr "" -#: awx/main/models/projects.py:88 +#: awx/main/models/projects.py:99 msgid "SCM URL" msgstr "" -#: awx/main/models/projects.py:89 +#: awx/main/models/projects.py:100 msgid "The location where the project is stored." msgstr "" -#: awx/main/models/projects.py:95 +#: awx/main/models/projects.py:106 msgid "SCM Branch" msgstr "" -#: awx/main/models/projects.py:96 +#: awx/main/models/projects.py:107 msgid "Specific branch, tag or commit to checkout." msgstr "" -#: awx/main/models/projects.py:100 +#: awx/main/models/projects.py:111 msgid "Discard any local changes before syncing the project." msgstr "" -#: awx/main/models/projects.py:104 +#: awx/main/models/projects.py:115 msgid "Delete the project before syncing." msgstr "" -#: awx/main/models/projects.py:133 +#: awx/main/models/projects.py:144 msgid "Invalid SCM URL." msgstr "" -#: awx/main/models/projects.py:136 +#: awx/main/models/projects.py:147 msgid "SCM URL is required." msgstr "" -#: awx/main/models/projects.py:144 +#: awx/main/models/projects.py:155 msgid "Insights Credential is required for an Insights Project." msgstr "" -#: awx/main/models/projects.py:150 +#: awx/main/models/projects.py:161 msgid "Credential kind must be 'scm'." msgstr "" -#: awx/main/models/projects.py:167 +#: awx/main/models/projects.py:178 msgid "Invalid credential." msgstr "" -#: awx/main/models/projects.py:249 +#: awx/main/models/projects.py:262 msgid "Update the project when a job is launched that uses the project." msgstr "" -#: awx/main/models/projects.py:254 +#: awx/main/models/projects.py:267 msgid "" "The number of seconds after the last project update ran that a new project " "update will be launched as a job dependency." msgstr "" -#: awx/main/models/projects.py:264 +#: awx/main/models/projects.py:277 msgid "The last revision fetched by a project update" msgstr "" -#: awx/main/models/projects.py:271 +#: awx/main/models/projects.py:284 msgid "Playbook Files" msgstr "" -#: awx/main/models/projects.py:272 +#: awx/main/models/projects.py:285 msgid "List of playbooks found in the project" msgstr "" -#: awx/main/models/projects.py:279 +#: awx/main/models/projects.py:292 msgid "Inventory Files" msgstr "" -#: awx/main/models/projects.py:280 +#: awx/main/models/projects.py:293 msgid "" "Suggested list of content that could be Ansible inventory in the project" msgstr "" @@ -2891,213 +3274,275 @@ msgid "Admin" msgstr "" #: awx/main/models/rbac.py:40 -msgid "Auditor" +msgid "Project Admin" msgstr "" #: awx/main/models/rbac.py:41 -msgid "Execute" +msgid "Inventory Admin" msgstr "" #: awx/main/models/rbac.py:42 -msgid "Member" +msgid "Credential Admin" msgstr "" #: awx/main/models/rbac.py:43 -msgid "Read" +msgid "Workflow Admin" msgstr "" #: awx/main/models/rbac.py:44 -msgid "Update" +msgid "Notification Admin" msgstr "" #: awx/main/models/rbac.py:45 -msgid "Use" +msgid "Auditor" msgstr "" + +#: awx/main/models/rbac.py:46 +msgid "Execute" +msgstr "" + +#: awx/main/models/rbac.py:47 +msgid "Member" +msgstr "" + +#: awx/main/models/rbac.py:48 +msgid "Read" msgstr "" #: awx/main/models/rbac.py:49 -msgid "Can manage all aspects of the system" +msgid "Update" msgstr "" #: awx/main/models/rbac.py:50 +msgid "Use" +msgstr "" + +#: awx/main/models/rbac.py:54 +msgid "Can manage all aspects of the system" +msgstr "" + +#: awx/main/models/rbac.py:55 msgid "Can view all settings on the system" msgstr "" -#: awx/main/models/rbac.py:51 +#: awx/main/models/rbac.py:56 msgid "May run ad hoc commands on an inventory" msgstr "" -#: awx/main/models/rbac.py:52 +#: 
awx/main/models/rbac.py:57 #, python-format msgid "Can manage all aspects of the %s" msgstr "" -#: awx/main/models/rbac.py:53 +#: awx/main/models/rbac.py:58 +#, python-format +msgid "Can manage all projects of the %s" +msgstr "" + +#: awx/main/models/rbac.py:59 +#, python-format +msgid "Can manage all inventories of the %s" +msgstr "" + +#: awx/main/models/rbac.py:60 +#, python-format +msgid "Can manage all credentials of the %s" +msgstr "" + +#: awx/main/models/rbac.py:61 +#, python-format +msgid "Can manage all workflows of the %s" +msgstr "" + +#: awx/main/models/rbac.py:62 +#, python-format +msgid "Can manage all notifications of the %s" +msgstr "" + +#: awx/main/models/rbac.py:63 #, python-format msgid "Can view all settings for the %s" msgstr "" -#: awx/main/models/rbac.py:54 +#: awx/main/models/rbac.py:65 +msgid "May run any executable resources in the organization" +msgstr "" + +#: awx/main/models/rbac.py:66 #, python-format msgid "May run the %s" msgstr "" -#: awx/main/models/rbac.py:55 +#: awx/main/models/rbac.py:68 #, python-format msgid "User is a member of the %s" msgstr "" -#: awx/main/models/rbac.py:56 +#: awx/main/models/rbac.py:69 #, python-format msgid "May view settings for the %s" msgstr "" -#: awx/main/models/rbac.py:57 +#: awx/main/models/rbac.py:70 msgid "" "May update project or inventory or group using the configured source update " "system" msgstr "" -#: awx/main/models/rbac.py:58 +#: awx/main/models/rbac.py:71 #, python-format msgid "Can use the %s in a job template" msgstr "" -#: awx/main/models/rbac.py:122 +#: awx/main/models/rbac.py:135 msgid "roles" msgstr "" -#: awx/main/models/rbac.py:434 +#: awx/main/models/rbac.py:441 msgid "role_ancestors" msgstr "" -#: awx/main/models/schedules.py:71 +#: awx/main/models/schedules.py:72 msgid "Enables processing of this schedule." msgstr "" -#: awx/main/models/schedules.py:77 +#: awx/main/models/schedules.py:78 msgid "The first occurrence of the schedule occurs on or after this time." msgstr "" -#: awx/main/models/schedules.py:83 +#: awx/main/models/schedules.py:84 msgid "" "The last occurrence of the schedule occurs before this time, afterwards the " "schedule expires." msgstr "" -#: awx/main/models/schedules.py:87 +#: awx/main/models/schedules.py:88 msgid "A value representing the schedule's iCal recurrence rule." msgstr "" -#: awx/main/models/schedules.py:93 +#: awx/main/models/schedules.py:94 msgid "The next time that the scheduled action will run." msgstr "" -#: awx/main/models/schedules.py:109 -msgid "Expected JSON" -msgstr "" - -#: awx/main/models/schedules.py:121 -msgid "days must be a positive integer."
-msgstr "" - -#: awx/main/models/unified_jobs.py:58 +#: awx/main/models/unified_jobs.py:61 msgid "New" msgstr "" -#: awx/main/models/unified_jobs.py:60 +#: awx/main/models/unified_jobs.py:63 msgid "Waiting" msgstr "" -#: awx/main/models/unified_jobs.py:61 +#: awx/main/models/unified_jobs.py:64 msgid "Running" msgstr "" -#: awx/main/models/unified_jobs.py:65 +#: awx/main/models/unified_jobs.py:68 msgid "Canceled" msgstr "" -#: awx/main/models/unified_jobs.py:69 +#: awx/main/models/unified_jobs.py:72 msgid "Never Updated" msgstr "" -#: awx/main/models/unified_jobs.py:73 awx/ui/templates/ui/index.html:67 -#: awx/ui/templates/ui/index.html.py:86 +#: awx/main/models/unified_jobs.py:76 msgid "OK" msgstr "" -#: awx/main/models/unified_jobs.py:74 +#: awx/main/models/unified_jobs.py:77 msgid "Missing" msgstr "" -#: awx/main/models/unified_jobs.py:78 +#: awx/main/models/unified_jobs.py:81 msgid "No External Source" msgstr "" -#: awx/main/models/unified_jobs.py:85 +#: awx/main/models/unified_jobs.py:88 msgid "Updating" msgstr "" -#: awx/main/models/unified_jobs.py:429 -msgid "Relaunch" +#: awx/main/models/unified_jobs.py:427 +msgid "Field is not allowed on launch." msgstr "" -#: awx/main/models/unified_jobs.py:430 -msgid "Callback" -msgstr "" - -#: awx/main/models/unified_jobs.py:431 -msgid "Scheduled" -msgstr "" - -#: awx/main/models/unified_jobs.py:432 -msgid "Dependency" -msgstr "" - -#: awx/main/models/unified_jobs.py:433 -msgid "Workflow" -msgstr "" - -#: awx/main/models/unified_jobs.py:434 -msgid "Sync" -msgstr "" - -#: awx/main/models/unified_jobs.py:481 -msgid "The node the job executed on." -msgstr "" - -#: awx/main/models/unified_jobs.py:507 -msgid "The date and time the job was queued for starting." -msgstr "" - -#: awx/main/models/unified_jobs.py:513 -msgid "The date and time the job finished execution." +#: awx/main/models/unified_jobs.py:455 +#, python-brace-format +msgid "" +"Variables {list_of_keys} provided, but this template cannot accept variables." msgstr "" #: awx/main/models/unified_jobs.py:519 +msgid "Relaunch" +msgstr "" + +#: awx/main/models/unified_jobs.py:520 +msgid "Callback" +msgstr "" + +#: awx/main/models/unified_jobs.py:521 +msgid "Scheduled" +msgstr "" + +#: awx/main/models/unified_jobs.py:522 +msgid "Dependency" +msgstr "" + +#: awx/main/models/unified_jobs.py:523 +msgid "Workflow" +msgstr "" + +#: awx/main/models/unified_jobs.py:524 +msgid "Sync" +msgstr "" + +#: awx/main/models/unified_jobs.py:572 +msgid "The node the job executed on." +msgstr "" + +#: awx/main/models/unified_jobs.py:598 +msgid "The date and time the job was queued for starting." +msgstr "" + +#: awx/main/models/unified_jobs.py:604 +msgid "The date and time the job finished execution." +msgstr "" + +#: awx/main/models/unified_jobs.py:610 msgid "Elapsed time in seconds that the job ran." msgstr "" -#: awx/main/models/unified_jobs.py:541 +#: awx/main/models/unified_jobs.py:632 msgid "" "A status field to indicate the state of the job if it wasn't able to run and " "capture stdout" msgstr "" -#: awx/main/models/unified_jobs.py:580 +#: awx/main/models/unified_jobs.py:661 msgid "The Rampart/Instance group the job was run under" msgstr "" +#: awx/main/models/workflow.py:203 +#, python-brace-format +msgid "" +"Bad launch configuration starting template {template_pk} as part of workflow " +"{workflow_pk}. Errors:\n" +"{error_text}" +msgstr "" + +#: awx/main/models/workflow.py:387 +msgid "Field is not allowed for use in workflows." 
+msgstr "" + #: awx/main/notifications/base.py:17 awx/main/notifications/email_backend.py:28 msgid "" "{} #{} had status {}, view details at {}\n" "\n" msgstr "" -#: awx/main/notifications/hipchat_backend.py:47 +#: awx/main/notifications/hipchat_backend.py:48 msgid "Error sending messages: {}" msgstr "" -#: awx/main/notifications/hipchat_backend.py:49 +#: awx/main/notifications/hipchat_backend.py:50 msgid "Error sending message to hipchat: {}" msgstr "" @@ -3105,16 +3550,27 @@ msgstr "" msgid "Exception connecting to irc server: {}" msgstr "" +#: awx/main/notifications/mattermost_backend.py:48 +#: awx/main/notifications/mattermost_backend.py:50 +msgid "Error sending notification mattermost: {}" +msgstr "" + #: awx/main/notifications/pagerduty_backend.py:39 msgid "Exception connecting to PagerDuty: {}" msgstr "" #: awx/main/notifications/pagerduty_backend.py:48 -#: awx/main/notifications/slack_backend.py:52 +#: awx/main/notifications/slack_backend.py:82 +#: awx/main/notifications/slack_backend.py:99 #: awx/main/notifications/twilio_backend.py:46 msgid "Exception sending messages: {}" msgstr "" +#: awx/main/notifications/rocketchat_backend.py:46 +#: awx/main/notifications/rocketchat_backend.py:49 +msgid "Error sending notification rocket.chat: {}" +msgstr "" + #: awx/main/notifications/twilio_backend.py:36 msgid "Exception connecting to Twilio: {}" msgstr "" @@ -3124,140 +3580,156 @@ msgstr "" msgid "Error sending notification webhook: {}" msgstr "" -#: awx/main/scheduler/task_manager.py:197 +#: awx/main/scheduler/task_manager.py:200 msgid "" "Job spawned from workflow could not start because it was not in the right " "state or required manual credentials" msgstr "" -#: awx/main/scheduler/task_manager.py:201 +#: awx/main/scheduler/task_manager.py:204 msgid "" "Job spawned from workflow could not start because it was missing a related " "resource such as project or inventory" msgstr "" -#: awx/main/tasks.py:184 +#: awx/main/signals.py:617 +msgid "limit_reached" +msgstr "" + +#: awx/main/tasks.py:273 msgid "Ansible Tower host usage over 90%" msgstr "" -#: awx/main/tasks.py:189 +#: awx/main/tasks.py:278 msgid "Ansible Tower license will expire soon" msgstr "" -#: awx/main/tasks.py:318 -msgid "status_str must be either succeeded or failed" +#: awx/main/tasks.py:1321 +msgid "Job could not start because it does not have a valid inventory." msgstr "" -#: awx/main/tasks.py:1549 -msgid "Dependent inventory update {} was canceled." 
-msgstr "" - -#: awx/main/utils/common.py:89 +#: awx/main/utils/common.py:97 #, python-format msgid "Unable to convert \"%s\" to boolean" msgstr "" -#: awx/main/utils/common.py:235 +#: awx/main/utils/common.py:251 #, python-format msgid "Unsupported SCM type \"%s\"" msgstr "" -#: awx/main/utils/common.py:242 awx/main/utils/common.py:254 -#: awx/main/utils/common.py:273 +#: awx/main/utils/common.py:258 awx/main/utils/common.py:270 +#: awx/main/utils/common.py:289 #, python-format msgid "Invalid %s URL" msgstr "" -#: awx/main/utils/common.py:244 awx/main/utils/common.py:283 +#: awx/main/utils/common.py:260 awx/main/utils/common.py:299 #, python-format msgid "Unsupported %s URL" msgstr "" -#: awx/main/utils/common.py:285 +#: awx/main/utils/common.py:301 #, python-format msgid "Unsupported host \"%s\" for file:// URL" msgstr "" -#: awx/main/utils/common.py:287 +#: awx/main/utils/common.py:303 #, python-format msgid "Host is required for %s URL" msgstr "" -#: awx/main/utils/common.py:305 +#: awx/main/utils/common.py:321 #, python-format msgid "Username must be \"git\" for SSH access to %s." msgstr "" -#: awx/main/utils/common.py:311 +#: awx/main/utils/common.py:327 #, python-format msgid "Username must be \"hg\" for SSH access to %s." msgstr "" -#: awx/main/validators.py:60 +#: awx/main/utils/common.py:608 +#, python-brace-format +msgid "Input type `{data_type}` is not a dictionary" +msgstr "" + +#: awx/main/utils/common.py:641 +#, python-brace-format +msgid "Variables not compatible with JSON standard (error: {json_error})" +msgstr "" + +#: awx/main/utils/common.py:647 +#, python-brace-format +msgid "" +"Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error})." +msgstr "" + +#: awx/main/validators.py:67 #, python-format msgid "Invalid certificate or key: %s..." msgstr "" -#: awx/main/validators.py:74 +#: awx/main/validators.py:83 #, python-format msgid "Invalid private key: unsupported type \"%s\"" msgstr "" -#: awx/main/validators.py:78 +#: awx/main/validators.py:87 #, python-format msgid "Unsupported PEM object type: \"%s\"" msgstr "" -#: awx/main/validators.py:103 +#: awx/main/validators.py:112 msgid "Invalid base64-encoded data" msgstr "" -#: awx/main/validators.py:122 +#: awx/main/validators.py:131 msgid "Exactly one private key is required." msgstr "" -#: awx/main/validators.py:124 +#: awx/main/validators.py:133 msgid "At least one private key is required." msgstr "" -#: awx/main/validators.py:126 +#: awx/main/validators.py:135 #, python-format msgid "" "At least %(min_keys)d private keys are required, only %(key_count)d provided." msgstr "" -#: awx/main/validators.py:129 +#: awx/main/validators.py:138 #, python-format msgid "Only one private key is allowed, %(key_count)d provided." msgstr "" -#: awx/main/validators.py:131 +#: awx/main/validators.py:140 #, python-format msgid "" "No more than %(max_keys)d private keys are allowed, %(key_count)d provided." msgstr "" -#: awx/main/validators.py:136 +#: awx/main/validators.py:145 msgid "Exactly one certificate is required." msgstr "" -#: awx/main/validators.py:138 +#: awx/main/validators.py:147 msgid "At least one certificate is required." msgstr "" -#: awx/main/validators.py:140 +#: awx/main/validators.py:149 #, python-format msgid "" "At least %(min_certs)d certificates are required, only %(cert_count)d " "provided." msgstr "" -#: awx/main/validators.py:143 +#: awx/main/validators.py:152 #, python-format msgid "Only one certificate is allowed, %(cert_count)d provided." 
msgstr "" -#: awx/main/validators.py:145 +#: awx/main/validators.py:154 #, python-format msgid "" "No more than %(max_certs)d certificates are allowed, %(cert_count)d provided." @@ -3299,287 +3771,287 @@ msgstr "" msgid "A server error has occurred." msgstr "" -#: awx/settings/defaults.py:665 +#: awx/settings/defaults.py:722 msgid "US East (Northern Virginia)" msgstr "" -#: awx/settings/defaults.py:666 +#: awx/settings/defaults.py:723 msgid "US East (Ohio)" msgstr "" -#: awx/settings/defaults.py:667 +#: awx/settings/defaults.py:724 msgid "US West (Oregon)" msgstr "" -#: awx/settings/defaults.py:668 +#: awx/settings/defaults.py:725 msgid "US West (Northern California)" msgstr "" -#: awx/settings/defaults.py:669 +#: awx/settings/defaults.py:726 msgid "Canada (Central)" msgstr "" -#: awx/settings/defaults.py:670 +#: awx/settings/defaults.py:727 msgid "EU (Frankfurt)" msgstr "" -#: awx/settings/defaults.py:671 +#: awx/settings/defaults.py:728 msgid "EU (Ireland)" msgstr "" -#: awx/settings/defaults.py:672 +#: awx/settings/defaults.py:729 msgid "EU (London)" msgstr "" -#: awx/settings/defaults.py:673 +#: awx/settings/defaults.py:730 msgid "Asia Pacific (Singapore)" msgstr "" -#: awx/settings/defaults.py:674 +#: awx/settings/defaults.py:731 msgid "Asia Pacific (Sydney)" msgstr "" -#: awx/settings/defaults.py:675 +#: awx/settings/defaults.py:732 msgid "Asia Pacific (Tokyo)" msgstr "" -#: awx/settings/defaults.py:676 +#: awx/settings/defaults.py:733 msgid "Asia Pacific (Seoul)" msgstr "" -#: awx/settings/defaults.py:677 +#: awx/settings/defaults.py:734 msgid "Asia Pacific (Mumbai)" msgstr "" -#: awx/settings/defaults.py:678 +#: awx/settings/defaults.py:735 msgid "South America (Sao Paulo)" msgstr "" -#: awx/settings/defaults.py:679 +#: awx/settings/defaults.py:736 msgid "US West (GovCloud)" msgstr "" -#: awx/settings/defaults.py:680 +#: awx/settings/defaults.py:737 msgid "China (Beijing)" msgstr "" -#: awx/settings/defaults.py:729 +#: awx/settings/defaults.py:786 msgid "US East 1 (B)" msgstr "" -#: awx/settings/defaults.py:730 +#: awx/settings/defaults.py:787 msgid "US East 1 (C)" msgstr "" -#: awx/settings/defaults.py:731 +#: awx/settings/defaults.py:788 msgid "US East 1 (D)" msgstr "" -#: awx/settings/defaults.py:732 +#: awx/settings/defaults.py:789 msgid "US East 4 (A)" msgstr "" -#: awx/settings/defaults.py:733 +#: awx/settings/defaults.py:790 msgid "US East 4 (B)" msgstr "" -#: awx/settings/defaults.py:734 +#: awx/settings/defaults.py:791 msgid "US East 4 (C)" msgstr "" -#: awx/settings/defaults.py:735 +#: awx/settings/defaults.py:792 msgid "US Central (A)" msgstr "" -#: awx/settings/defaults.py:736 +#: awx/settings/defaults.py:793 msgid "US Central (B)" msgstr "" -#: awx/settings/defaults.py:737 +#: awx/settings/defaults.py:794 msgid "US Central (C)" msgstr "" -#: awx/settings/defaults.py:738 +#: awx/settings/defaults.py:795 msgid "US Central (F)" msgstr "" -#: awx/settings/defaults.py:739 +#: awx/settings/defaults.py:796 msgid "US West (A)" msgstr "" -#: awx/settings/defaults.py:740 +#: awx/settings/defaults.py:797 msgid "US West (B)" msgstr "" -#: awx/settings/defaults.py:741 +#: awx/settings/defaults.py:798 msgid "US West (C)" msgstr "" -#: awx/settings/defaults.py:742 +#: awx/settings/defaults.py:799 msgid "Europe West 1 (B)" msgstr "" -#: awx/settings/defaults.py:743 +#: awx/settings/defaults.py:800 msgid "Europe West 1 (C)" msgstr "" -#: awx/settings/defaults.py:744 +#: awx/settings/defaults.py:801 msgid "Europe West 1 (D)" msgstr "" -#: awx/settings/defaults.py:745 +#: 
awx/settings/defaults.py:802 msgid "Europe West 2 (A)" msgstr "" -#: awx/settings/defaults.py:746 +#: awx/settings/defaults.py:803 msgid "Europe West 2 (B)" msgstr "" -#: awx/settings/defaults.py:747 +#: awx/settings/defaults.py:804 msgid "Europe West 2 (C)" msgstr "" -#: awx/settings/defaults.py:748 +#: awx/settings/defaults.py:805 msgid "Asia East (A)" msgstr "" -#: awx/settings/defaults.py:749 +#: awx/settings/defaults.py:806 msgid "Asia East (B)" msgstr "" -#: awx/settings/defaults.py:750 +#: awx/settings/defaults.py:807 msgid "Asia East (C)" msgstr "" -#: awx/settings/defaults.py:751 +#: awx/settings/defaults.py:808 msgid "Asia Southeast (A)" msgstr "" -#: awx/settings/defaults.py:752 +#: awx/settings/defaults.py:809 msgid "Asia Southeast (B)" msgstr "" -#: awx/settings/defaults.py:753 +#: awx/settings/defaults.py:810 msgid "Asia Northeast (A)" msgstr "" -#: awx/settings/defaults.py:754 +#: awx/settings/defaults.py:811 msgid "Asia Northeast (B)" msgstr "" -#: awx/settings/defaults.py:755 +#: awx/settings/defaults.py:812 msgid "Asia Northeast (C)" msgstr "" -#: awx/settings/defaults.py:756 +#: awx/settings/defaults.py:813 msgid "Australia Southeast (A)" msgstr "" -#: awx/settings/defaults.py:757 +#: awx/settings/defaults.py:814 msgid "Australia Southeast (B)" msgstr "" -#: awx/settings/defaults.py:758 +#: awx/settings/defaults.py:815 msgid "Australia Southeast (C)" msgstr "" -#: awx/settings/defaults.py:780 +#: awx/settings/defaults.py:837 msgid "US East" msgstr "" -#: awx/settings/defaults.py:781 +#: awx/settings/defaults.py:838 msgid "US East 2" msgstr "" -#: awx/settings/defaults.py:782 +#: awx/settings/defaults.py:839 msgid "US Central" msgstr "" -#: awx/settings/defaults.py:783 +#: awx/settings/defaults.py:840 msgid "US North Central" msgstr "" -#: awx/settings/defaults.py:784 +#: awx/settings/defaults.py:841 msgid "US South Central" msgstr "" -#: awx/settings/defaults.py:785 +#: awx/settings/defaults.py:842 msgid "US West Central" msgstr "" -#: awx/settings/defaults.py:786 +#: awx/settings/defaults.py:843 msgid "US West" msgstr "" -#: awx/settings/defaults.py:787 +#: awx/settings/defaults.py:844 msgid "US West 2" msgstr "" -#: awx/settings/defaults.py:788 +#: awx/settings/defaults.py:845 msgid "Canada East" msgstr "" -#: awx/settings/defaults.py:789 +#: awx/settings/defaults.py:846 msgid "Canada Central" msgstr "" -#: awx/settings/defaults.py:790 +#: awx/settings/defaults.py:847 msgid "Brazil South" msgstr "" -#: awx/settings/defaults.py:791 +#: awx/settings/defaults.py:848 msgid "Europe North" msgstr "" -#: awx/settings/defaults.py:792 +#: awx/settings/defaults.py:849 msgid "Europe West" msgstr "" -#: awx/settings/defaults.py:793 +#: awx/settings/defaults.py:850 msgid "UK West" msgstr "" -#: awx/settings/defaults.py:794 +#: awx/settings/defaults.py:851 msgid "UK South" msgstr "" -#: awx/settings/defaults.py:795 +#: awx/settings/defaults.py:852 msgid "Asia East" msgstr "" -#: awx/settings/defaults.py:796 +#: awx/settings/defaults.py:853 msgid "Asia Southeast" msgstr "" -#: awx/settings/defaults.py:797 +#: awx/settings/defaults.py:854 msgid "Australia East" msgstr "" -#: awx/settings/defaults.py:798 +#: awx/settings/defaults.py:855 msgid "Australia Southeast" msgstr "" -#: awx/settings/defaults.py:799 +#: awx/settings/defaults.py:856 msgid "India West" msgstr "" -#: awx/settings/defaults.py:800 +#: awx/settings/defaults.py:857 msgid "India South" msgstr "" -#: awx/settings/defaults.py:801 +#: awx/settings/defaults.py:858 msgid "Japan East" msgstr "" -#: 
awx/settings/defaults.py:802 +#: awx/settings/defaults.py:859 msgid "Japan West" msgstr "" -#: awx/settings/defaults.py:803 +#: awx/settings/defaults.py:860 msgid "Korea Central" msgstr "" -#: awx/settings/defaults.py:804 +#: awx/settings/defaults.py:861 msgid "Korea South" msgstr "" @@ -3595,7 +4067,7 @@ msgid "" "their\n" "username and email address. Configuration details are available in the " "Ansible\n" -"Tower documentation.'" +"Tower documentation." msgstr "" #: awx/sso/conf.py:55 @@ -3633,11 +4105,11 @@ msgid "" "have a user account with a matching email address will be able to login." msgstr "" -#: awx/sso/conf.py:137 +#: awx/sso/conf.py:141 msgid "LDAP Server URI" msgstr "" -#: awx/sso/conf.py:138 +#: awx/sso/conf.py:142 msgid "" "URI to connect to LDAP server, such as \"ldap://ldap.example.com:389\" (non-" "SSL) or \"ldaps://ldap.example.com:636\" (SSL). Multiple LDAP servers may be " @@ -3645,46 +4117,47 @@ msgid "" "disabled if this parameter is empty." msgstr "" -#: awx/sso/conf.py:142 awx/sso/conf.py:158 awx/sso/conf.py:170 -#: awx/sso/conf.py:182 awx/sso/conf.py:198 awx/sso/conf.py:218 -#: awx/sso/conf.py:240 awx/sso/conf.py:255 awx/sso/conf.py:273 -#: awx/sso/conf.py:290 awx/sso/conf.py:307 awx/sso/conf.py:323 -#: awx/sso/conf.py:337 awx/sso/conf.py:354 awx/sso/conf.py:380 +#: awx/sso/conf.py:146 awx/sso/conf.py:162 awx/sso/conf.py:174 +#: awx/sso/conf.py:186 awx/sso/conf.py:202 awx/sso/conf.py:222 +#: awx/sso/conf.py:244 awx/sso/conf.py:259 awx/sso/conf.py:277 +#: awx/sso/conf.py:294 awx/sso/conf.py:306 awx/sso/conf.py:332 +#: awx/sso/conf.py:348 awx/sso/conf.py:362 awx/sso/conf.py:380 +#: awx/sso/conf.py:406 msgid "LDAP" msgstr "" -#: awx/sso/conf.py:154 +#: awx/sso/conf.py:158 msgid "LDAP Bind DN" msgstr "" -#: awx/sso/conf.py:155 +#: awx/sso/conf.py:159 msgid "" "DN (Distinguished Name) of user to bind for all search queries. This is the " "system user account we will use to login to query LDAP for other user " "information. Refer to the Ansible Tower documentation for example syntax." msgstr "" -#: awx/sso/conf.py:168 +#: awx/sso/conf.py:172 msgid "LDAP Bind Password" msgstr "" -#: awx/sso/conf.py:169 +#: awx/sso/conf.py:173 msgid "Password used to bind LDAP user account." msgstr "" -#: awx/sso/conf.py:180 +#: awx/sso/conf.py:184 msgid "LDAP Start TLS" msgstr "" -#: awx/sso/conf.py:181 +#: awx/sso/conf.py:185 msgid "Whether to enable TLS when the LDAP connection is not using SSL." msgstr "" -#: awx/sso/conf.py:191 +#: awx/sso/conf.py:195 msgid "LDAP Connection Options" msgstr "" -#: awx/sso/conf.py:192 +#: awx/sso/conf.py:196 msgid "" "Additional options to set for the LDAP connection. LDAP referrals are " "disabled by default (to prevent certain LDAP queries from hanging with AD). " @@ -3693,11 +4166,11 @@ msgid "" "values that can be set." msgstr "" -#: awx/sso/conf.py:211 +#: awx/sso/conf.py:215 msgid "LDAP User Search" msgstr "" -#: awx/sso/conf.py:212 +#: awx/sso/conf.py:216 msgid "" "LDAP search query to find users. Any user that matches the given pattern " "will be able to login to Tower. The user should also be mapped into a Tower " @@ -3706,11 +4179,11 @@ msgid "" "possible. See Tower documentation for details." msgstr "" -#: awx/sso/conf.py:234 +#: awx/sso/conf.py:238 msgid "LDAP User DN Template" msgstr "" -#: awx/sso/conf.py:235 +#: awx/sso/conf.py:239 msgid "" "Alternative to user search, if user DNs are all of the same format. 
This " "approach is more efficient for user lookups than searching if it is usable " @@ -3718,77 +4191,85 @@ msgid "" "used instead of AUTH_LDAP_USER_SEARCH." msgstr "" -#: awx/sso/conf.py:250 +#: awx/sso/conf.py:254 msgid "LDAP User Attribute Map" msgstr "" -#: awx/sso/conf.py:251 +#: awx/sso/conf.py:255 msgid "" "Mapping of LDAP user schema to Tower API user attributes. The default " "setting is valid for ActiveDirectory but users with other LDAP " "configurations may need to change the values. Refer to the Ansible Tower " -"documentation for additonal details." +"documentation for additional details." msgstr "" -#: awx/sso/conf.py:269 +#: awx/sso/conf.py:273 msgid "LDAP Group Search" msgstr "" -#: awx/sso/conf.py:270 +#: awx/sso/conf.py:274 msgid "" "Users are mapped to organizations based on their membership in LDAP groups. " "This setting defines the LDAP search query to find groups. Unlike the user " "search, group search does not support LDAPSearchUnion." msgstr "" -#: awx/sso/conf.py:286 +#: awx/sso/conf.py:290 msgid "LDAP Group Type" msgstr "" -#: awx/sso/conf.py:287 +#: awx/sso/conf.py:291 msgid "" "The group type may need to be changed based on the type of the LDAP server. " -"Values are listed at: http://pythonhosted.org/django-auth-ldap/groups." -"html#types-of-groups" +"Values are listed at: https://django-auth-ldap.readthedocs.io/en/stable/" +"groups.html#types-of-groups" msgstr "" -#: awx/sso/conf.py:302 +#: awx/sso/conf.py:304 +msgid "LDAP Group Type Parameters" +msgstr "" + +#: awx/sso/conf.py:305 +msgid "Key value parameters to send to the chosen group type init method." +msgstr "" + +#: awx/sso/conf.py:327 msgid "LDAP Require Group" msgstr "" -#: awx/sso/conf.py:303 +#: awx/sso/conf.py:328 msgid "" "Group DN required to login. If specified, user must be a member of this " "group to login via LDAP. If not set, everyone in LDAP that matches the user " "search will be able to login via Tower. Only one require group is supported." msgstr "" -#: awx/sso/conf.py:319 +#: awx/sso/conf.py:344 msgid "LDAP Deny Group" msgstr "" -#: awx/sso/conf.py:320 +#: awx/sso/conf.py:345 msgid "" "Group DN denied from login. If specified, user will not be allowed to login " "if a member of this group. Only one deny group is supported." msgstr "" -#: awx/sso/conf.py:333 +#: awx/sso/conf.py:358 msgid "LDAP User Flags By Group" msgstr "" -#: awx/sso/conf.py:334 +#: awx/sso/conf.py:359 msgid "" "Retrieve users from a given group. At this time, superuser and system " "auditors are the only groups supported. Refer to the Ansible Tower " "documentation for more detail." msgstr "" -#: awx/sso/conf.py:349 +#: awx/sso/conf.py:375 msgid "LDAP Organization Map" msgstr "" -#: awx/sso/conf.py:350 +#: awx/sso/conf.py:376 msgid "" "Mapping between organization admins/users and LDAP groups. This controls " "which users are placed into which Tower organizations relative to their LDAP " @@ -3796,237 +4277,237 @@ msgid "" "documentation." msgstr "" -#: awx/sso/conf.py:377 +#: awx/sso/conf.py:403 msgid "LDAP Team Map" msgstr "" -#: awx/sso/conf.py:378 +#: awx/sso/conf.py:404 msgid "" "Mapping between team members (users) and LDAP groups. Configuration details " "are available in the Ansible Tower documentation." msgstr "" -#: awx/sso/conf.py:406 +#: awx/sso/conf.py:440 msgid "RADIUS Server" msgstr "" -#: awx/sso/conf.py:407 +#: awx/sso/conf.py:441 msgid "" "Hostname/IP of RADIUS server. RADIUS authentication is disabled if this " "setting is empty."
msgstr "" -#: awx/sso/conf.py:409 awx/sso/conf.py:423 awx/sso/conf.py:435 +#: awx/sso/conf.py:443 awx/sso/conf.py:457 awx/sso/conf.py:469 #: awx/sso/models.py:14 msgid "RADIUS" msgstr "" -#: awx/sso/conf.py:421 +#: awx/sso/conf.py:455 msgid "RADIUS Port" msgstr "" -#: awx/sso/conf.py:422 +#: awx/sso/conf.py:456 msgid "Port of RADIUS server." msgstr "" -#: awx/sso/conf.py:433 +#: awx/sso/conf.py:467 msgid "RADIUS Secret" msgstr "" -#: awx/sso/conf.py:434 +#: awx/sso/conf.py:468 msgid "Shared secret for authenticating to RADIUS server." msgstr "" -#: awx/sso/conf.py:450 +#: awx/sso/conf.py:484 msgid "TACACS+ Server" msgstr "" -#: awx/sso/conf.py:451 +#: awx/sso/conf.py:485 msgid "Hostname of TACACS+ server." msgstr "" -#: awx/sso/conf.py:452 awx/sso/conf.py:465 awx/sso/conf.py:478 -#: awx/sso/conf.py:491 awx/sso/conf.py:503 awx/sso/models.py:15 +#: awx/sso/conf.py:486 awx/sso/conf.py:499 awx/sso/conf.py:512 +#: awx/sso/conf.py:525 awx/sso/conf.py:537 awx/sso/models.py:15 msgid "TACACS+" msgstr "" -#: awx/sso/conf.py:463 +#: awx/sso/conf.py:497 msgid "TACACS+ Port" msgstr "" -#: awx/sso/conf.py:464 +#: awx/sso/conf.py:498 msgid "Port number of TACACS+ server." msgstr "" -#: awx/sso/conf.py:476 +#: awx/sso/conf.py:510 msgid "TACACS+ Secret" msgstr "" -#: awx/sso/conf.py:477 +#: awx/sso/conf.py:511 msgid "Shared secret for authenticating to TACACS+ server." msgstr "" -#: awx/sso/conf.py:489 +#: awx/sso/conf.py:523 msgid "TACACS+ Auth Session Timeout" msgstr "" -#: awx/sso/conf.py:490 +#: awx/sso/conf.py:524 msgid "TACACS+ session timeout value in seconds, 0 disables timeout." msgstr "" -#: awx/sso/conf.py:501 +#: awx/sso/conf.py:535 msgid "TACACS+ Authentication Protocol" msgstr "" -#: awx/sso/conf.py:502 +#: awx/sso/conf.py:536 msgid "Choose the authentication protocol used by TACACS+ client." msgstr "" -#: awx/sso/conf.py:517 +#: awx/sso/conf.py:551 msgid "Google OAuth2 Callback URL" msgstr "" -#: awx/sso/conf.py:518 awx/sso/conf.py:611 awx/sso/conf.py:676 +#: awx/sso/conf.py:552 awx/sso/conf.py:645 awx/sso/conf.py:710 msgid "" "Provide this URL as the callback URL for your application as part of your " "registration process. Refer to the Ansible Tower documentation for more " "detail." msgstr "" -#: awx/sso/conf.py:521 awx/sso/conf.py:533 awx/sso/conf.py:545 -#: awx/sso/conf.py:558 awx/sso/conf.py:572 awx/sso/conf.py:584 -#: awx/sso/conf.py:596 +#: awx/sso/conf.py:555 awx/sso/conf.py:567 awx/sso/conf.py:579 +#: awx/sso/conf.py:592 awx/sso/conf.py:606 awx/sso/conf.py:618 +#: awx/sso/conf.py:630 msgid "Google OAuth2" msgstr "" -#: awx/sso/conf.py:531 +#: awx/sso/conf.py:565 msgid "Google OAuth2 Key" msgstr "" -#: awx/sso/conf.py:532 +#: awx/sso/conf.py:566 msgid "The OAuth2 key from your web application." msgstr "" -#: awx/sso/conf.py:543 +#: awx/sso/conf.py:577 msgid "Google OAuth2 Secret" msgstr "" -#: awx/sso/conf.py:544 +#: awx/sso/conf.py:578 msgid "The OAuth2 secret from your web application." msgstr "" -#: awx/sso/conf.py:555 +#: awx/sso/conf.py:589 msgid "Google OAuth2 Whitelisted Domains" msgstr "" -#: awx/sso/conf.py:556 +#: awx/sso/conf.py:590 msgid "" "Update this setting to restrict the domains who are allowed to login using " "Google OAuth2." msgstr "" -#: awx/sso/conf.py:567 +#: awx/sso/conf.py:601 msgid "Google OAuth2 Extra Arguments" msgstr "" -#: awx/sso/conf.py:568 +#: awx/sso/conf.py:602 msgid "" "Extra arguments for Google OAuth2 login. You can restrict it to only allow a " "single domain to authenticate, even if the user is logged in with multiple " "Google accounts. 
Refer to the Ansible Tower documentation for more detail." msgstr "" -#: awx/sso/conf.py:582 +#: awx/sso/conf.py:616 msgid "Google OAuth2 Organization Map" msgstr "" -#: awx/sso/conf.py:594 +#: awx/sso/conf.py:628 msgid "Google OAuth2 Team Map" msgstr "" -#: awx/sso/conf.py:610 +#: awx/sso/conf.py:644 msgid "GitHub OAuth2 Callback URL" msgstr "" -#: awx/sso/conf.py:614 awx/sso/conf.py:626 awx/sso/conf.py:637 -#: awx/sso/conf.py:649 awx/sso/conf.py:661 +#: awx/sso/conf.py:648 awx/sso/conf.py:660 awx/sso/conf.py:671 +#: awx/sso/conf.py:683 awx/sso/conf.py:695 msgid "GitHub OAuth2" msgstr "" -#: awx/sso/conf.py:624 +#: awx/sso/conf.py:658 msgid "GitHub OAuth2 Key" msgstr "" -#: awx/sso/conf.py:625 +#: awx/sso/conf.py:659 msgid "The OAuth2 key (Client ID) from your GitHub developer application." msgstr "" -#: awx/sso/conf.py:635 +#: awx/sso/conf.py:669 msgid "GitHub OAuth2 Secret" msgstr "" -#: awx/sso/conf.py:636 +#: awx/sso/conf.py:670 msgid "" "The OAuth2 secret (Client Secret) from your GitHub developer application." msgstr "" -#: awx/sso/conf.py:647 +#: awx/sso/conf.py:681 msgid "GitHub OAuth2 Organization Map" msgstr "" -#: awx/sso/conf.py:659 +#: awx/sso/conf.py:693 msgid "GitHub OAuth2 Team Map" msgstr "" -#: awx/sso/conf.py:675 +#: awx/sso/conf.py:709 msgid "GitHub Organization OAuth2 Callback URL" msgstr "" -#: awx/sso/conf.py:679 awx/sso/conf.py:691 awx/sso/conf.py:702 -#: awx/sso/conf.py:715 awx/sso/conf.py:726 awx/sso/conf.py:738 +#: awx/sso/conf.py:713 awx/sso/conf.py:725 awx/sso/conf.py:736 +#: awx/sso/conf.py:749 awx/sso/conf.py:760 awx/sso/conf.py:772 msgid "GitHub Organization OAuth2" msgstr "" -#: awx/sso/conf.py:689 +#: awx/sso/conf.py:723 msgid "GitHub Organization OAuth2 Key" msgstr "" -#: awx/sso/conf.py:690 awx/sso/conf.py:768 +#: awx/sso/conf.py:724 awx/sso/conf.py:802 msgid "The OAuth2 key (Client ID) from your GitHub organization application." msgstr "" -#: awx/sso/conf.py:700 +#: awx/sso/conf.py:734 msgid "GitHub Organization OAuth2 Secret" msgstr "" -#: awx/sso/conf.py:701 awx/sso/conf.py:779 +#: awx/sso/conf.py:735 awx/sso/conf.py:813 msgid "" "The OAuth2 secret (Client Secret) from your GitHub organization application." msgstr "" -#: awx/sso/conf.py:712 +#: awx/sso/conf.py:746 msgid "GitHub Organization Name" msgstr "" -#: awx/sso/conf.py:713 +#: awx/sso/conf.py:747 msgid "" "The name of your GitHub organization, as used in your organization's URL: " "https://github.com//." msgstr "" -#: awx/sso/conf.py:724 +#: awx/sso/conf.py:758 msgid "GitHub Organization OAuth2 Organization Map" msgstr "" -#: awx/sso/conf.py:736 +#: awx/sso/conf.py:770 msgid "GitHub Organization OAuth2 Team Map" msgstr "" -#: awx/sso/conf.py:752 +#: awx/sso/conf.py:786 msgid "GitHub Team OAuth2 Callback URL" msgstr "" -#: awx/sso/conf.py:753 +#: awx/sso/conf.py:787 msgid "" "Create an organization-owned application at https://github.com/organizations/" "/settings/applications and obtain an OAuth2 key (Client ID) and " @@ -4034,170 +4515,172 @@ msgid "" "application." 
msgstr "" -#: awx/sso/conf.py:757 awx/sso/conf.py:769 awx/sso/conf.py:780 -#: awx/sso/conf.py:793 awx/sso/conf.py:804 awx/sso/conf.py:816 +#: awx/sso/conf.py:791 awx/sso/conf.py:803 awx/sso/conf.py:814 +#: awx/sso/conf.py:827 awx/sso/conf.py:838 awx/sso/conf.py:850 msgid "GitHub Team OAuth2" msgstr "" -#: awx/sso/conf.py:767 +#: awx/sso/conf.py:801 msgid "GitHub Team OAuth2 Key" msgstr "" -#: awx/sso/conf.py:778 +#: awx/sso/conf.py:812 msgid "GitHub Team OAuth2 Secret" msgstr "" -#: awx/sso/conf.py:790 +#: awx/sso/conf.py:824 msgid "GitHub Team ID" msgstr "" -#: awx/sso/conf.py:791 +#: awx/sso/conf.py:825 msgid "" "Find the numeric team ID using the Github API: http://fabian-kostadinov." "github.io/2015/01/16/how-to-find-a-github-team-id/." msgstr "" -#: awx/sso/conf.py:802 +#: awx/sso/conf.py:836 msgid "GitHub Team OAuth2 Organization Map" msgstr "" -#: awx/sso/conf.py:814 +#: awx/sso/conf.py:848 msgid "GitHub Team OAuth2 Team Map" msgstr "" -#: awx/sso/conf.py:830 +#: awx/sso/conf.py:864 msgid "Azure AD OAuth2 Callback URL" msgstr "" -#: awx/sso/conf.py:831 +#: awx/sso/conf.py:865 msgid "" "Provide this URL as the callback URL for your application as part of your " "registration process. Refer to the Ansible Tower documentation for more " "detail. " msgstr "" -#: awx/sso/conf.py:834 awx/sso/conf.py:846 awx/sso/conf.py:857 -#: awx/sso/conf.py:869 awx/sso/conf.py:881 +#: awx/sso/conf.py:868 awx/sso/conf.py:880 awx/sso/conf.py:891 +#: awx/sso/conf.py:903 awx/sso/conf.py:915 msgid "Azure AD OAuth2" msgstr "" -#: awx/sso/conf.py:844 +#: awx/sso/conf.py:878 msgid "Azure AD OAuth2 Key" msgstr "" -#: awx/sso/conf.py:845 +#: awx/sso/conf.py:879 msgid "The OAuth2 key (Client ID) from your Azure AD application." msgstr "" -#: awx/sso/conf.py:855 +#: awx/sso/conf.py:889 msgid "Azure AD OAuth2 Secret" msgstr "" -#: awx/sso/conf.py:856 +#: awx/sso/conf.py:890 msgid "The OAuth2 secret (Client Secret) from your Azure AD application." msgstr "" -#: awx/sso/conf.py:867 +#: awx/sso/conf.py:901 msgid "Azure AD OAuth2 Organization Map" msgstr "" -#: awx/sso/conf.py:879 +#: awx/sso/conf.py:913 msgid "Azure AD OAuth2 Team Map" msgstr "" -#: awx/sso/conf.py:904 +#: awx/sso/conf.py:938 msgid "SAML Assertion Consumer Service (ACS) URL" msgstr "" -#: awx/sso/conf.py:905 +#: awx/sso/conf.py:939 msgid "" "Register Tower as a service provider (SP) with each identity provider (IdP) " "you have configured. Provide your SP Entity ID and this ACS URL for your " "application." msgstr "" -#: awx/sso/conf.py:908 awx/sso/conf.py:922 awx/sso/conf.py:936 -#: awx/sso/conf.py:951 awx/sso/conf.py:965 awx/sso/conf.py:978 -#: awx/sso/conf.py:999 awx/sso/conf.py:1017 awx/sso/conf.py:1036 -#: awx/sso/conf.py:1070 awx/sso/conf.py:1083 awx/sso/models.py:16 +#: awx/sso/conf.py:942 awx/sso/conf.py:956 awx/sso/conf.py:970 +#: awx/sso/conf.py:985 awx/sso/conf.py:999 awx/sso/conf.py:1012 +#: awx/sso/conf.py:1033 awx/sso/conf.py:1051 awx/sso/conf.py:1070 +#: awx/sso/conf.py:1106 awx/sso/conf.py:1138 awx/sso/conf.py:1152 +#: awx/sso/conf.py:1169 awx/sso/conf.py:1182 awx/sso/conf.py:1195 +#: awx/sso/conf.py:1211 awx/sso/models.py:16 msgid "SAML" msgstr "" -#: awx/sso/conf.py:919 +#: awx/sso/conf.py:953 msgid "SAML Service Provider Metadata URL" msgstr "" -#: awx/sso/conf.py:920 +#: awx/sso/conf.py:954 msgid "" "If your identity provider (IdP) allows uploading an XML metadata file, you " "can download one from this URL." 
msgstr "" -#: awx/sso/conf.py:932 +#: awx/sso/conf.py:966 msgid "SAML Service Provider Entity ID" msgstr "" -#: awx/sso/conf.py:933 +#: awx/sso/conf.py:967 msgid "" "The application-defined unique identifier used as the audience of the SAML " "service provider (SP) configuration. This is usually the URL for Tower." msgstr "" -#: awx/sso/conf.py:948 +#: awx/sso/conf.py:982 msgid "SAML Service Provider Public Certificate" msgstr "" -#: awx/sso/conf.py:949 +#: awx/sso/conf.py:983 msgid "" "Create a keypair for Tower to use as a service provider (SP) and include the " "certificate content here." msgstr "" -#: awx/sso/conf.py:962 +#: awx/sso/conf.py:996 msgid "SAML Service Provider Private Key" msgstr "" -#: awx/sso/conf.py:963 +#: awx/sso/conf.py:997 msgid "" "Create a keypair for Tower to use as a service provider (SP) and include the " "private key content here." msgstr "" -#: awx/sso/conf.py:975 +#: awx/sso/conf.py:1009 msgid "SAML Service Provider Organization Info" msgstr "" -#: awx/sso/conf.py:976 +#: awx/sso/conf.py:1010 msgid "" "Provide the URL, display name, and the name of your app. Refer to the " "Ansible Tower documentation for example syntax." msgstr "" -#: awx/sso/conf.py:995 +#: awx/sso/conf.py:1029 msgid "SAML Service Provider Technical Contact" msgstr "" -#: awx/sso/conf.py:996 +#: awx/sso/conf.py:1030 msgid "" "Provide the name and email address of the technical contact for your service " "provider. Refer to the Ansible Tower documentation for example syntax." msgstr "" -#: awx/sso/conf.py:1013 +#: awx/sso/conf.py:1047 msgid "SAML Service Provider Support Contact" msgstr "" -#: awx/sso/conf.py:1014 +#: awx/sso/conf.py:1048 msgid "" "Provide the name and email address of the support contact for your service " "provider. Refer to the Ansible Tower documentation for example syntax." msgstr "" -#: awx/sso/conf.py:1030 +#: awx/sso/conf.py:1064 msgid "SAML Enabled Identity Providers" msgstr "" -#: awx/sso/conf.py:1031 +#: awx/sso/conf.py:1065 msgid "" "Configure the Entity ID, SSO URL and certificate for each identity provider " "(IdP) in use. Multiple SAML IdPs are supported. Some IdPs may provide user " @@ -4206,126 +4689,165 @@ msgid "" "additional details and syntax." msgstr "" -#: awx/sso/conf.py:1068 +#: awx/sso/conf.py:1102 +msgid "SAML Security Config" +msgstr "" + +#: awx/sso/conf.py:1103 +msgid "" +"A dict of key value pairs that are passed to the underlying python-saml " +"security setting https://github.com/onelogin/python-saml#settings" +msgstr "" + +#: awx/sso/conf.py:1135 +msgid "SAML Service Provider extra configuration data" +msgstr "" + +#: awx/sso/conf.py:1136 +msgid "" +"A dict of key value pairs to be passed to the underlying python-saml Service " +"Provider configuration setting." +msgstr "" + +#: awx/sso/conf.py:1149 +msgid "SAML IDP to extra_data attribute mapping" +msgstr "" + +#: awx/sso/conf.py:1150 +msgid "" +"A list of tuples that maps IDP attributes to extra_attributes. Each " +"attribute will be a list of values, even if only 1 value." +msgstr "" + +#: awx/sso/conf.py:1167 msgid "SAML Organization Map" msgstr "" -#: awx/sso/conf.py:1081 +#: awx/sso/conf.py:1180 msgid "SAML Team Map" msgstr "" -#: awx/sso/fields.py:123 +#: awx/sso/conf.py:1193 +msgid "SAML Organization Attribute Mapping" +msgstr "" + +#: awx/sso/conf.py:1194 +msgid "Used to translate user organization membership into Tower." 
+msgstr "" + +#: awx/sso/conf.py:1209 +msgid "SAML Team Attribute Mapping" +msgstr "" + +#: awx/sso/conf.py:1210 +msgid "Used to translate user team membership into Tower." +msgstr "" + +#: awx/sso/fields.py:183 #, python-brace-format msgid "Invalid connection option(s): {invalid_options}." msgstr "" -#: awx/sso/fields.py:194 +#: awx/sso/fields.py:254 msgid "Base" msgstr "" -#: awx/sso/fields.py:195 +#: awx/sso/fields.py:255 msgid "One Level" msgstr "" -#: awx/sso/fields.py:196 +#: awx/sso/fields.py:256 msgid "Subtree" msgstr "" -#: awx/sso/fields.py:214 +#: awx/sso/fields.py:274 #, python-brace-format msgid "Expected a list of three items but got {length} instead." msgstr "" -#: awx/sso/fields.py:215 +#: awx/sso/fields.py:275 #, python-brace-format msgid "Expected an instance of LDAPSearch but got {input_type} instead." msgstr "" -#: awx/sso/fields.py:251 +#: awx/sso/fields.py:311 #, python-brace-format msgid "" "Expected an instance of LDAPSearch or LDAPSearchUnion but got {input_type} " "instead." msgstr "" -#: awx/sso/fields.py:289 +#: awx/sso/fields.py:349 #, python-brace-format msgid "Invalid user attribute(s): {invalid_attrs}." msgstr "" -#: awx/sso/fields.py:306 +#: awx/sso/fields.py:366 #, python-brace-format msgid "Expected an instance of LDAPGroupType but got {input_type} instead." msgstr "" -#: awx/sso/fields.py:334 -#, python-brace-format -msgid "Invalid user flag: \"{invalid_flag}\"." -msgstr "" - -#: awx/sso/fields.py:350 awx/sso/fields.py:517 -#, python-brace-format -msgid "" -"Expected None, True, False, a string or list of strings but got {input_type} " -"instead." -msgstr "" - -#: awx/sso/fields.py:386 -#, python-brace-format -msgid "Missing key(s): {missing_keys}." -msgstr "" - -#: awx/sso/fields.py:387 +#: awx/sso/fields.py:406 awx/sso/fields.py:453 #, python-brace-format msgid "Invalid key(s): {invalid_keys}." msgstr "" -#: awx/sso/fields.py:436 awx/sso/fields.py:553 +#: awx/sso/fields.py:431 +#, python-brace-format +msgid "Invalid user flag: \"{invalid_flag}\"." +msgstr "" + +#: awx/sso/fields.py:452 +#, python-brace-format +msgid "Missing key(s): {missing_keys}." +msgstr "" + +#: awx/sso/fields.py:502 awx/sso/fields.py:619 #, python-brace-format msgid "Invalid key(s) for organization map: {invalid_keys}." msgstr "" -#: awx/sso/fields.py:454 +#: awx/sso/fields.py:520 #, python-brace-format msgid "Missing required key for team map: {invalid_keys}." msgstr "" -#: awx/sso/fields.py:455 awx/sso/fields.py:572 +#: awx/sso/fields.py:521 awx/sso/fields.py:638 #, python-brace-format msgid "Invalid key(s) for team map: {invalid_keys}." msgstr "" -#: awx/sso/fields.py:571 +#: awx/sso/fields.py:637 #, python-brace-format msgid "Missing required key for team map: {missing_keys}." msgstr "" -#: awx/sso/fields.py:589 +#: awx/sso/fields.py:655 #, python-brace-format msgid "Missing required key(s) for org info record: {missing_keys}." msgstr "" -#: awx/sso/fields.py:602 +#: awx/sso/fields.py:668 #, python-brace-format msgid "Invalid language code(s) for org info: {invalid_lang_codes}." msgstr "" -#: awx/sso/fields.py:621 +#: awx/sso/fields.py:687 #, python-brace-format msgid "Missing required key(s) for contact: {missing_keys}." msgstr "" -#: awx/sso/fields.py:633 +#: awx/sso/fields.py:699 #, python-brace-format msgid "Missing required key(s) for IdP: {missing_keys}." 
msgstr "" -#: awx/sso/pipeline.py:24 +#: awx/sso/pipeline.py:31 #, python-brace-format msgid "An account cannot be found for {0}" msgstr "" -#: awx/sso/pipeline.py:30 +#: awx/sso/pipeline.py:37 msgid "Your account is inactive" msgstr "" @@ -4352,68 +4874,48 @@ msgstr "" msgid "AWX" msgstr "" -#: awx/templates/rest_framework/api.html:39 +#: awx/templates/rest_framework/api.html:42 msgid "Ansible Tower API Guide" msgstr "" -#: awx/templates/rest_framework/api.html:40 +#: awx/templates/rest_framework/api.html:43 msgid "Back to Ansible Tower" msgstr "" -#: awx/templates/rest_framework/api.html:41 +#: awx/templates/rest_framework/api.html:44 msgid "Resize" msgstr "" +#: awx/templates/rest_framework/base.html:37 +msgid "navbar" +msgstr "" + +#: awx/templates/rest_framework/base.html:75 +msgid "content" +msgstr "" + #: awx/templates/rest_framework/base.html:78 -#: awx/templates/rest_framework/base.html:92 -#, python-format -msgid "Make a GET request on the %(name)s resource" +msgid "request form" msgstr "" -#: awx/templates/rest_framework/base.html:80 -msgid "Specify a format for the GET request" -msgstr "" - -#: awx/templates/rest_framework/base.html:86 -#, python-format -msgid "" -"Make a GET request on the %(name)s resource with the format set to `" -"%(format)s`" -msgstr "" - -#: awx/templates/rest_framework/base.html:100 -#, python-format -msgid "Make an OPTIONS request on the %(name)s resource" -msgstr "" - -#: awx/templates/rest_framework/base.html:106 -#, python-format -msgid "Make a DELETE request on the %(name)s resource" -msgstr "" - -#: awx/templates/rest_framework/base.html:113 +#: awx/templates/rest_framework/base.html:134 msgid "Filters" msgstr "" -#: awx/templates/rest_framework/base.html:172 -#: awx/templates/rest_framework/base.html:186 -#, python-format -msgid "Make a POST request on the %(name)s resource" +#: awx/templates/rest_framework/base.html:139 +msgid "main content" msgstr "" -#: awx/templates/rest_framework/base.html:216 -#: awx/templates/rest_framework/base.html:230 -#, python-format -msgid "Make a PUT request on the %(name)s resource" +#: awx/templates/rest_framework/base.html:155 +msgid "request info" msgstr "" -#: awx/templates/rest_framework/base.html:233 -#, python-format -msgid "Make a PATCH request on the %(name)s resource" +#: awx/templates/rest_framework/base.html:159 +msgid "response info" msgstr "" #: awx/ui/apps.py:9 awx/ui/conf.py:22 awx/ui/conf.py:36 awx/ui/conf.py:51 -#: awx/ui/conf.py:63 +#: awx/ui/conf.py:63 awx/ui/conf.py:73 msgid "UI" msgstr "" @@ -4461,12 +4963,22 @@ msgid "" msgstr "" #: awx/ui/conf.py:60 -msgid "Max Job Events Retreived by UI" +msgid "Max Job Events Retrieved by UI" msgstr "" #: awx/ui/conf.py:61 msgid "" -"Maximum number of job events for the UI to retreive within a single request." +"Maximum number of job events for the UI to retrieve within a single request." +msgstr "" + +#: awx/ui/conf.py:70 +msgid "Enable Live Updates in the UI" +msgstr "" + +#: awx/ui/conf.py:71 +msgid "" +"If disabled, the page will not refresh when events are received. Reloading " +"the page will be required to get the latest details." msgstr "" #: awx/ui/fields.py:29 @@ -4478,69 +4990,3 @@ msgstr "" #: awx/ui/fields.py:30 msgid "Invalid base64-encoded data in data URL." msgstr "" - -#: awx/ui/templates/ui/index.html:31 -msgid "" -"Your session will expire in 60 seconds, would you like to continue?" 
-msgstr "" - -#: awx/ui/templates/ui/index.html:46 -msgid "CANCEL" -msgstr "" - -#: awx/ui/templates/ui/index.html:98 -msgid "Set how many days of data should be retained." -msgstr "" - -#: awx/ui/templates/ui/index.html:104 -msgid "" -"Please enter an integer that is not " -"negative that is lower than 9999." -msgstr "" - -#: awx/ui/templates/ui/index.html:109 -msgid "" -"For facts collected older than the time period specified, save one fact scan " -"(snapshot) per time window (frequency). For example, facts older than 30 " -"days are purged, while one weekly fact scan is kept.\n" -"
\n" -"
CAUTION: Setting both numerical variables to \"0\" " -"will delete all facts.\n" -"
\n" -"
" -msgstr "" - -#: awx/ui/templates/ui/index.html:118 -msgid "Select a time period after which to remove old facts" -msgstr "" - -#: awx/ui/templates/ui/index.html:132 -msgid "" -"Please enter an integer " -"that is not negative " -"that is lower than 9999." -msgstr "" - -#: awx/ui/templates/ui/index.html:137 -msgid "Select a frequency for snapshot retention" -msgstr "" - -#: awx/ui/templates/ui/index.html:151 -msgid "" -"Please enter an integer that is not negative that is " -"lower than 9999." -msgstr "" - -#: awx/ui/templates/ui/index.html:157 -msgid "working..." -msgstr "" diff --git a/awx/main/access.py b/awx/main/access.py index ef2577d695..67bb01905d 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1366,6 +1366,7 @@ class JobTemplateAccess(BaseAccess): 'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch', 'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', + 'custom_virtualenv', 'diff_mode', # These fields are ignored, but it is convenient for QA to allow clients to post them 'last_job_run', 'created', 'modified', @@ -1814,13 +1815,14 @@ class WorkflowJobTemplateAccess(BaseAccess): missing_credentials = [] missing_inventories = [] qs = obj.workflow_job_template_nodes - qs = qs.prefetch_related('unified_job_template', 'inventory__use_role', 'credential__use_role') + qs = qs.prefetch_related('unified_job_template', 'inventory__use_role', 'credentials__use_role') for node in qs.all(): node_errors = {} if node.inventory and self.user not in node.inventory.use_role: missing_inventories.append(node.inventory.name) - if node.credential and self.user not in node.credential.use_role: - missing_credentials.append(node.credential.name) + for cred in node.credentials.all(): + if self.user not in cred.use_role: + missing_credentials.append(cred.name) ujt = node.unified_job_template if ujt and not self.user.can_access(UnifiedJobTemplate, 'start', ujt, validate_license=False): missing_ujt.append(ujt.name) @@ -1924,7 +1926,7 @@ class WorkflowJobAccess(BaseAccess): return self.can_recreate(obj) def can_recreate(self, obj): - node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credential', 'unified_job_template') + node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template') node_access = WorkflowJobNodeAccess(user=self.user) wj_add_perm = True for node in node_qs: diff --git a/awx/main/fields.py b/awx/main/fields.py index 14e1cc6ad0..d63eb54002 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -193,8 +193,10 @@ def update_role_parentage_for_instance(instance): ''' for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'): cur_role = getattr(instance, implicit_role_field.name) + original_parents = set(json.loads(cur_role.implicit_parents)) new_parents = implicit_role_field._resolve_parent_roles(instance) - cur_role.parents.set(new_parents) + cur_role.parents.remove(*list(original_parents - new_parents)) + cur_role.parents.add(*list(new_parents - original_parents)) new_parents_list = list(new_parents) new_parents_list.sort() new_parents_json = json.dumps(new_parents_list) @@ -802,23 +804,33 @@ class CredentialTypeInjectorField(JSONSchemaField): for field in model_instance.defined_fields ) + class ExplodingNamespace: + def __unicode__(self): + raise UndefinedError(_('Must define unnamed file injector in order to reference `tower.filename`.')) + class TowerNamespace: - filename = None 
+ def __init__(self): + self.filename = ExplodingNamespace() + + def __unicode__(self): + raise UndefinedError(_('Cannot directly reference reserved `tower` namespace container.')) + valid_namespace['tower'] = TowerNamespace() # ensure either single file or multi-file syntax is used (but not both) template_names = [x for x in value.get('file', {}).keys() if x.startswith('template')] - if 'template' in template_names and len(template_names) > 1: - raise django_exceptions.ValidationError( - _('Must use multi-file syntax when injecting multiple files'), - code='invalid', - params={'value': value}, - ) - if 'template' not in template_names: - valid_namespace['tower'].filename = TowerNamespace() + if 'template' in template_names: + valid_namespace['tower'].filename = 'EXAMPLE_FILENAME' + if len(template_names) > 1: + raise django_exceptions.ValidationError( + _('Must use multi-file syntax when injecting multiple files'), + code='invalid', + params={'value': value}, + ) + elif template_names: for template_name in template_names: template_name = template_name.split('.')[1] - setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE') + setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE_FILENAME') for type_, injector in value.items(): for key, tmpl in injector.items(): diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 3a03e66c53..021064a46b 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -135,8 +135,7 @@ class AnsibleInventoryLoader(object): self.tmp_private_dir = build_proot_temp_dir() logger.debug("Using fresh temporary directory '{}' for isolation.".format(self.tmp_private_dir)) kwargs['proot_temp_dir'] = self.tmp_private_dir - # Run from source's location so that custom script contents are in `show_paths` - cwd = functioning_dir(self.source) + kwargs['proot_show_paths'] = [functioning_dir(self.source)] logger.debug("Running from `{}` working directory.".format(cwd)) return wrap_args_with_proot(cmd, cwd, **kwargs) diff --git a/awx/main/management/commands/list_instances.py b/awx/main/management/commands/list_instances.py index 2e6af440f7..d2f11b6e14 100644 --- a/awx/main/management/commands/list_instances.py +++ b/awx/main/management/commands/list_instances.py @@ -3,6 +3,7 @@ from awx.main.models import Instance, InstanceGroup from django.core.management.base import BaseCommand +import six class Command(BaseCommand): @@ -13,10 +14,11 @@ class Command(BaseCommand): super(Command, self).__init__() for instance in Instance.objects.all(): - print("hostname: {}; created: {}; heartbeat: {}; capacity: {}".format(instance.hostname, instance.created, - instance.modified, instance.capacity)) + print(six.text_type( + "hostname: {0.hostname}; created: {0.created}; " + "heartbeat: {0.modified}; capacity: {0.capacity}").format(instance)) for instance_group in InstanceGroup.objects.all(): - print("Instance Group: {}; created: {}; capacity: {}; members: {}".format(instance_group.name, - instance_group.created, - instance_group.capacity, - [x.hostname for x in instance_group.instances.all()])) + print(six.text_type( + "Instance Group: {0.name}; created: {0.created}; " + "capacity: {0.capacity}; members: {1}").format(instance_group, + [x.hostname for x in instance_group.instances.all()])) diff --git a/awx/main/management/commands/register_queue.py b/awx/main/management/commands/register_queue.py index 85c7842381..5f252a4637 100644 --- 
a/awx/main/management/commands/register_queue.py +++ b/awx/main/management/commands/register_queue.py @@ -19,11 +19,11 @@ class InstanceNotFound(Exception): class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument('--queuename', dest='queuename', type=str, + parser.add_argument('--queuename', dest='queuename', type=lambda s: six.text_type(s, 'utf8'), help='Queue to create/update') - parser.add_argument('--hostnames', dest='hostnames', type=str, + parser.add_argument('--hostnames', dest='hostnames', type=lambda s: six.text_type(s, 'utf8'), help='Comma-Delimited Hosts to add to the Queue') - parser.add_argument('--controller', dest='controller', type=str, + parser.add_argument('--controller', dest='controller', type=lambda s: six.text_type(s, 'utf8'), default='', help='The controlling group (makes this an isolated group)') parser.add_argument('--instance_percent', dest='instance_percent', type=int, default=0, help='The percentage of active instances that will be assigned to this group'), @@ -96,7 +96,7 @@ class Command(BaseCommand): if options.get('hostnames'): hostname_list = options.get('hostnames').split(",") - with advisory_lock('instance_group_registration_%s' % queuename): + with advisory_lock(six.text_type('instance_group_registration_{}').format(queuename)): (ig, created, changed) = self.get_create_update_instance_group(queuename, inst_per, inst_min) if created: print(six.text_type("Creating instance group {}".format(ig.name))) diff --git a/awx/main/management/commands/replay_job_events.py b/awx/main/management/commands/replay_job_events.py index 47ff723678..74bd9eb09f 100644 --- a/awx/main/management/commands/replay_job_events.py +++ b/awx/main/management/commands/replay_job_events.py @@ -95,7 +95,7 @@ class ReplayJobEvents(): raise RuntimeError("Job is of type {} and replay is not yet supported.".format(type(job))) sys.exit(1) - def run(self, job_id, speed=1.0, verbosity=0): + def run(self, job_id, speed=1.0, verbosity=0, skip=0): stats = { 'events_ontime': { 'total': 0, @@ -126,7 +126,10 @@ class ReplayJobEvents(): sys.exit(1) je_previous = None - for je_current in job_events: + for n, je_current in enumerate(job_events): + if n < skip: + continue + if not je_previous: stats['recording_start'] = je_current.created self.start(je_current.created) @@ -163,21 +166,25 @@ class ReplayJobEvents(): stats['events_total'] += 1 je_previous = je_current - - stats['replay_end'] = self.now() - stats['replay_duration'] = (stats['replay_end'] - stats['replay_start']).total_seconds() - stats['replay_start'] = stats['replay_start'].isoformat() - stats['replay_end'] = stats['replay_end'].isoformat() - stats['recording_end'] = je_current.created - stats['recording_duration'] = (stats['recording_end'] - stats['recording_start']).total_seconds() - stats['recording_start'] = stats['recording_start'].isoformat() - stats['recording_end'] = stats['recording_end'].isoformat() + if stats['events_total'] > 2: + stats['replay_end'] = self.now() + stats['replay_duration'] = (stats['replay_end'] - stats['replay_start']).total_seconds() + stats['replay_start'] = stats['replay_start'].isoformat() + stats['replay_end'] = stats['replay_end'].isoformat() + + stats['recording_end'] = je_current.created + stats['recording_duration'] = (stats['recording_end'] - stats['recording_start']).total_seconds() + stats['recording_start'] = stats['recording_start'].isoformat() + stats['recording_end'] = stats['recording_end'].isoformat() + + stats['events_ontime']['percentage'] = 
(stats['events_ontime']['total'] / float(stats['events_total'])) * 100.00 + stats['events_late']['percentage'] = (stats['events_late']['total'] / float(stats['events_total'])) * 100.00 + stats['events_distance_average'] = stats['events_distance_total'] / stats['events_total'] + stats['events_late']['lateness_average'] = stats['events_late']['lateness_total'] / stats['events_late']['total'] + else: + stats = {'events_total': stats['events_total']} - stats['events_ontime']['percentage'] = (stats['events_ontime']['total'] / float(stats['events_total'])) * 100.00 - stats['events_late']['percentage'] = (stats['events_late']['total'] / float(stats['events_total'])) * 100.00 - stats['events_distance_average'] = stats['events_distance_total'] / stats['events_total'] - stats['events_late']['lateness_average'] = stats['events_late']['lateness_total'] / stats['events_late']['total'] if verbosity >= 2: print(json.dumps(stats, indent=4, sort_keys=True)) @@ -191,11 +198,14 @@ class Command(BaseCommand): help='Id of the job to replay (job or adhoc)') parser.add_argument('--speed', dest='speed', type=int, metavar='s', help='Speedup factor.') + parser.add_argument('--skip', dest='skip', type=int, metavar='k', + help='Number of events to skip.') def handle(self, *args, **options): job_id = options.get('job_id') speed = options.get('speed') or 1 verbosity = options.get('verbosity') or 0 + skip = options.get('skip') or 0 replayer = ReplayJobEvents() - replayer.run(job_id, speed, verbosity) + replayer.run(job_id, speed, verbosity, skip) diff --git a/awx/main/managers.py b/awx/main/managers.py index 274a0ef774..d2af95e2b8 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -77,7 +77,7 @@ class InstanceManager(models.Manager): def me(self): """Return the currently active instance.""" # If we are running unit tests, return a stub record. 
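The managers.py change just below pairs with the pytest_configure/pytest_unconfigure hooks added to awx/main/tests/conftest.py later in this patch: the test session plants a sentinel attribute on the sys module, and runtime code can cheaply detect it without importing pytest. A standalone sketch of the pattern (the function name current_instance is illustrative, not part of AWX):

    # conftest.py -- pytest runs these hooks once per test session
    import sys

    def pytest_configure(config):
        sys._called_from_test = True      # sentinel visible to all imported code

    def pytest_unconfigure(config):
        del sys._called_from_test

    # runtime code -- detect "running under pytest" without importing pytest
    def current_instance():
        if hasattr(sys, '_called_from_test'):
            return 'stub-instance'        # skip database access under test
        return 'real-instance-lookup'
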
- if settings.IS_TESTING(sys.argv): + if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'): return self.model(id=1, hostname='localhost', uuid='00000000-0000-0000-0000-000000000000') diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 0bbbc08254..6541a8f9ed 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -3,6 +3,7 @@ # Django from django.conf import settings # noqa +from django.db.models.signals import pre_delete # noqa # AWX from awx.main.models.base import * # noqa @@ -58,6 +59,18 @@ User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('accessible_objects', user_accessible_objects) +def cleanup_created_modified_by(sender, **kwargs): + # work around a bug in django-polymorphic that doesn't properly + # handle cascades for reverse foreign keys on the polymorphic base model + # https://github.com/django-polymorphic/django-polymorphic/issues/229 + for cls in (UnifiedJobTemplate, UnifiedJob): + cls.objects.filter(created_by=kwargs['instance']).update(created_by=None) + cls.objects.filter(modified_by=kwargs['instance']).update(modified_by=None) + + +pre_delete.connect(cleanup_created_modified_by, sender=User) + + @property def user_get_organizations(user): return Organization.objects.filter(member_role__members=user) @@ -169,3 +182,9 @@ activity_stream_registrar.connect(OAuth2AccessToken) # prevent API filtering on certain Django-supplied sensitive fields prevent_search(User._meta.get_field('password')) +prevent_search(OAuth2AccessToken._meta.get_field('token')) +prevent_search(RefreshToken._meta.get_field('token')) +prevent_search(OAuth2Application._meta.get_field('client_secret')) +prevent_search(OAuth2Application._meta.get_field('client_id')) +prevent_search(Grant._meta.get_field('code')) + diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 3913a4ace7..3549bb2a41 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -153,7 +153,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): return reverse('api:ad_hoc_command_detail', kwargs={'pk': self.pk}, request=request) def get_ui_url(self): - return urljoin(settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk)) + return urljoin(settings.TOWER_URL_BASE, "/#/jobs/command/{}".format(self.pk)) @property def notification_templates(self): diff --git a/awx/main/models/events.py b/awx/main/models/events.py index 21dcd90a24..a6e2c67c74 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -4,9 +4,11 @@ import logging from django.conf import settings from django.db import models, DatabaseError from django.utils.dateparse import parse_datetime +from django.utils.text import Truncator from django.utils.timezone import utc from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import force_text +import six from awx.api.versioning import reverse from awx.main.fields import JSONField @@ -22,6 +24,22 @@ __all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent', 'InventoryUpdateEvent', 'SystemJobEvent'] +def sanitize_event_keys(kwargs, valid_keys): + # Sanity check: Don't honor keys that we don't recognize. 
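For reference on the truncation loop in the sanitize_event_keys helper being added here: Django's Truncator counts the appended ellipsis against the character budget, so truncated values come back at 1024 characters or fewer. A quick standalone check, not part of the patch (settings.configure() appears only because this sketch runs outside a Django project):

    import django
    from django.conf import settings

    settings.configure()                      # minimal config for standalone use
    django.setup()

    from django.utils.text import Truncator

    long_value = 'x' * 2000
    shortened = Truncator(long_value).chars(1024)
    assert len(shortened) <= 1024             # the ellipsis counts toward the limit
    assert shortened.endswith(u'\u2026')      # default truncation marker
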
+ for key in kwargs.keys(): + if key not in valid_keys: + kwargs.pop(key) + + # Truncate certain values over 1k + for key in [ + 'play', 'role', 'task', 'playbook' + ]: + if isinstance(kwargs.get(key), six.string_types): + if len(kwargs[key]) > 1024: + kwargs[key] = Truncator(kwargs[key]).chars(1024) + + + class BasePlaybookEvent(CreatedModifiedModel): ''' An event/message logged from a playbook callback for each host. @@ -257,7 +275,7 @@ class BasePlaybookEvent(CreatedModifiedModel): return updated_fields @classmethod - def create_from_data(self, **kwargs): + def create_from_data(cls, **kwargs): pk = None for key in ('job_id', 'project_update_id'): if key in kwargs: @@ -279,12 +297,8 @@ class BasePlaybookEvent(CreatedModifiedModel): except (KeyError, ValueError): kwargs.pop('created', None) - # Sanity check: Don't honor keys that we don't recognize. - for key in kwargs.keys(): - if key not in self.VALID_KEYS: - kwargs.pop(key) - - job_event = self.objects.create(**kwargs) + sanitize_event_keys(kwargs, cls.VALID_KEYS) + job_event = cls.objects.create(**kwargs) analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=job_event))) return job_event @@ -551,7 +565,7 @@ class BaseCommandEvent(CreatedModifiedModel): return u'%s @ %s' % (self.get_event_display(), self.created.isoformat()) @classmethod - def create_from_data(self, **kwargs): + def create_from_data(cls, **kwargs): # Convert the datetime for the event's creation # appropriately, and include a time zone for it. # @@ -565,12 +579,8 @@ class BaseCommandEvent(CreatedModifiedModel): except (KeyError, ValueError): kwargs.pop('created', None) - # Sanity check: Don't honor keys that we don't recognize. - for key in kwargs.keys(): - if key not in self.VALID_KEYS: - kwargs.pop(key) - - return self.objects.create(**kwargs) + sanitize_event_keys(kwargs, cls.VALID_KEYS) + return cls.objects.create(**kwargs) def get_event_display(self): ''' diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index c748f841f3..24d0e9a1c5 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1646,7 +1646,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, return reverse('api:inventory_update_detail', kwargs={'pk': self.pk}, request=request) def get_ui_url(self): - return urljoin(settings.TOWER_URL_BASE, "/#/inventory_sync/{}".format(self.pk)) + return urljoin(settings.TOWER_URL_BASE, "/#/jobs/inventory/{}".format(self.pk)) def get_actual_source_path(self): '''Alias to source_path that combines with project path for for SCM file based sources''' diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 51394aa830..80280636cc 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -530,7 +530,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana return reverse('api:job_detail', kwargs={'pk': self.pk}, request=request) def get_ui_url(self): - return urljoin(settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk)) + return urljoin(settings.TOWER_URL_BASE, "/#/jobs/playbook/{}".format(self.pk)) @property def ansible_virtualenv_path(self): @@ -1192,7 +1192,7 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin): return reverse('api:system_job_detail', kwargs={'pk': self.pk}, request=request) def get_ui_url(self): - return urljoin(settings.TOWER_URL_BASE, "/#/management_jobs/{}".format(self.pk)) + return urljoin(settings.TOWER_URL_BASE, "/#/jobs/system/{}".format(self.pk)) @property def 
event_class(self): diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index d302aa0973..3bad19c8eb 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -241,6 +241,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')] FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials'] FIELDS_TO_DISCARD_AT_COPY = ['local_path'] + FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type']) class Meta: app_label = 'main' @@ -323,6 +324,11 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn ['name', 'description', 'schedule'] ) + def __init__(self, *args, **kwargs): + r = super(Project, self).__init__(*args, **kwargs) + self._prior_values_store = self._current_sensitive_fields() + return r + def save(self, *args, **kwargs): new_instance = not bool(self.pk) # If update_fields has been specified, add our field names to it, @@ -354,9 +360,22 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn with disable_activity_stream(): self.save(update_fields=update_fields) # If we just created a new project with SCM, start the initial update. - if new_instance and self.scm_type and not skip_update: + # also update if certain fields have changed + relevant_change = False + new_values = self._current_sensitive_fields() + if hasattr(self, '_prior_values_store') and self._prior_values_store != new_values: + relevant_change = True + self._prior_values_store = new_values + if (relevant_change or new_instance) and (not skip_update) and self.scm_type: self.update() + def _current_sensitive_fields(self): + new_values = {} + for attr, val in self.__dict__.items(): + if attr in Project.FIELDS_TRIGGER_UPDATE: + new_values[attr] = val + return new_values + def _get_current_status(self): if self.scm_type: if self.current_job and self.current_job.status: @@ -533,7 +552,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage return reverse('api:project_update_detail', kwargs={'pk': self.pk}, request=request) def get_ui_url(self): - return urlparse.urljoin(settings.TOWER_URL_BASE, "/#/scm_update/{}".format(self.pk)) + return urlparse.urljoin(settings.TOWER_URL_BASE, "/#/jobs/project/{}".format(self.pk)) def _update_parent_instance(self): parent_instance = self._get_parent_instance() diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index ae8c5e8705..7ce8567e88 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -172,7 +172,7 @@ class Role(models.Model): elif accessor.__class__.__name__ == 'Team': return self.ancestors.filter(pk=accessor.member_role.id).exists() elif type(accessor) == Role: - return self.ancestors.filter(pk=accessor).exists() + return self.ancestors.filter(pk=accessor.pk).exists() else: accessor_type = ContentType.objects.get_for_model(accessor) roles = Role.objects.filter(content_type__pk=accessor_type.id, diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index 71efa702c6..55cd7f2053 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -1,15 +1,19 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
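The Project.save() logic above is a small dirty-field pattern: snapshot the sensitive attributes when the object is constructed, then diff them on save and only kick off an SCM update when one of them actually changed. The same idea reduced to plain Python (the class name Watched and the print stand-in are illustrative, not AWX code):

    class Watched(object):
        FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type'])

        def __init__(self, **fields):
            self.__dict__.update(fields)
            self._prior_values_store = self._current_sensitive_fields()

        def _current_sensitive_fields(self):
            return {k: v for k, v in self.__dict__.items()
                    if k in self.FIELDS_TRIGGER_UPDATE}

        def save(self):
            new_values = self._current_sensitive_fields()
            relevant_change = new_values != self._prior_values_store
            self._prior_values_store = new_values
            if relevant_change:
                print('scm field changed; kicking off a project update')

    w = Watched(scm_url='git://example.invalid/repo.git', scm_type='git')
    w.save()                   # nothing changed since load; no update
    w.scm_branch = 'devel'
    w.save()                   # sensitive field changed; triggers an update
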
-import logging import datetime +import logging +import re + import dateutil.rrule -from dateutil.tz import datetime_exists +import dateutil.parser +from dateutil.tz import datetime_exists, tzutc +from dateutil.zoneinfo import get_zonefile_instance # Django from django.db import models from django.db.models.query import QuerySet -from django.utils.timezone import now +from django.utils.timezone import now, make_aware from django.utils.translation import ugettext_lazy as _ # AWX @@ -27,6 +31,9 @@ logger = logging.getLogger('awx.main.models.schedule') __all__ = ['Schedule'] +UTC_TIMEZONES = {x: tzutc() for x in dateutil.parser.parserinfo().UTCZONE} + + class ScheduleFilterMethods(object): def enabled(self, enabled=True): @@ -94,13 +101,98 @@ class Schedule(CommonModel, LaunchTimeConfig): help_text=_("The next time that the scheduled action will run.") ) + @classmethod + def get_zoneinfo(self): + return sorted(get_zonefile_instance().zones) + + @property + def timezone(self): + utc = tzutc() + all_zones = Schedule.get_zoneinfo() + all_zones.sort(key = lambda x: -len(x)) + for r in Schedule.rrulestr(self.rrule)._rrule: + if r._dtstart: + tzinfo = r._dtstart.tzinfo + if tzinfo is utc: + return 'UTC' + fname = tzinfo._filename + for zone in all_zones: + if fname.endswith(zone): + return zone + logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule)) + return '' + + @property + def until(self): + # The UNTIL= datestamp (if any) coerced from UTC to the local naive time + # of the DTSTART + for r in Schedule.rrulestr(self.rrule)._rrule: + if r._until: + local_until = r._until.astimezone(r._dtstart.tzinfo) + naive_until = local_until.replace(tzinfo=None) + return naive_until.isoformat() + return '' + + @classmethod + def coerce_naive_until(cls, rrule): + # + # RFC5545 specifies that the UNTIL rule part MUST ALWAYS be a date + # with UTC time. This is extra work for API implementers because + # it requires them to perform DTSTART local -> UTC datetime coercion on + # POST and UTC -> DTSTART local coercion on GET. + # + # This block of code is a departure from the RFC. If you send an + # rrule like this to the API (without a Z on the UNTIL): + # + # DTSTART;TZID=America/New_York:20180502T150000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20180502T180000 + # + # ...we'll assume that the naive UNTIL is intended to match the DTSTART + # timezone (America/New_York), and so we'll coerce to UTC _for you_ + # automatically. 
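A concrete check of the conversion described in this comment block, as a standalone dateutil sketch rather than patch code: America/New_York is UTC-4 on 2018-05-02, so a naive UNTIL of 18:00 local must come out as 22:00 Zulu.

    import datetime
    from dateutil import tz

    eastern = tz.gettz('America/New_York')
    naive_until = datetime.datetime.strptime('20180502T180000', '%Y%m%dT%H%M%S')
    localized_until = naive_until.replace(tzinfo=eastern)   # interpret in DTSTART's zone
    utc_until = localized_until.astimezone(tz.tzutc())
    print(utc_until.strftime('UNTIL=%Y%m%dT%H%M%SZ'))       # UNTIL=20180502T220000Z
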
+        #
+        if 'until=' in rrule.lower():
+            # if DTSTART;TZID= is used, coerce "naive" UNTIL values
+            # to the proper UTC date
+            match_until = re.match(".*?(?P<until>UNTIL\=[0-9]+T[0-9]+)(?P<utcflag>Z?)", rrule)
+            if not len(match_until.group('utcflag')):
+                # rrule = DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
+
+                # Find the UNTIL=N part of the string
+                # naive_until = UNTIL=20200601T170000
+                naive_until = match_until.group('until')
+
+                # What is the DTSTART timezone for:
+                # DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000Z
+                # local_tz = tzfile('/usr/share/zoneinfo/America/New_York')
+                local_tz = dateutil.rrule.rrulestr(
+                    rrule.replace(naive_until, naive_until + 'Z'),
+                    tzinfos=UTC_TIMEZONES
+                )._dtstart.tzinfo
+
+                # Make a datetime object with tzinfo=
+                # localized_until = datetime.datetime(2020, 6, 1, 17, 0, tzinfo=tzfile('/usr/share/zoneinfo/America/New_York'))
+                localized_until = make_aware(
+                    datetime.datetime.strptime(re.sub('^UNTIL=', '', naive_until), "%Y%m%dT%H%M%S"),
+                    local_tz
+                )
+
+                # Coerce the datetime to UTC and format it as a string w/ Zulu format
+                # utc_until = UNTIL=20200601T220000Z
+                utc_until = 'UNTIL=' + localized_until.astimezone(tzutc()).strftime('%Y%m%dT%H%M%SZ')
+
+                # rrule was: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T170000
+                # rrule is now: DTSTART;TZID=America/New_York:20200601T120000 RRULE:...;UNTIL=20200601T220000Z
+                rrule = rrule.replace(naive_until, utc_until)
+        return rrule
+
     @classmethod
     def rrulestr(cls, rrule, **kwargs):
         """
         Apply our own custom rrule parsing requirements
         """
+        rrule = Schedule.coerce_naive_until(rrule)
         kwargs['forceset'] = True
-        x = dateutil.rrule.rrulestr(rrule, **kwargs)
+        x = dateutil.rrule.rrulestr(rrule, tzinfos=UTC_TIMEZONES, **kwargs)

         for r in x._rrule:
             if r._dtstart and r._dtstart.tzinfo is None:
@@ -158,4 +250,5 @@ class Schedule(CommonModel, LaunchTimeConfig):

     def save(self, *args, **kwargs):
         self.update_computed_fields()
+        self.rrule = Schedule.coerce_naive_until(self.rrule)
         super(Schedule, self).save(*args, **kwargs)
diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py
index 943956f7ac..f758a230a9 100644
--- a/awx/main/models/unified_jobs.py
+++ b/awx/main/models/unified_jobs.py
@@ -838,8 +838,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                 setattr(unified_job, fd, val)
         unified_job.save()

-        # Labels coppied here
-        copy_m2m_relationships(self, unified_job, fields)
+        # Labels copied here
+        from awx.main.signals import disable_activity_stream
+        with disable_activity_stream():
+            copy_m2m_relationships(self, unified_job, fields)
+
         return unified_job

     def launch_prompts(self):
diff --git a/awx/main/signals.py b/awx/main/signals.py
index a702fd2aaa..86c798795d 100644
--- a/awx/main/signals.py
+++ b/awx/main/signals.py
@@ -205,7 +205,7 @@ def set_original_organization(sender, instance, **kwargs):
     pre-save organization, so we can later determine if the organization field is dirty.
''' - instance.__original_org = instance.organization + instance.__original_org_id = instance.organization_id def save_related_job_templates(sender, instance, **kwargs): @@ -217,7 +217,7 @@ def save_related_job_templates(sender, instance, **kwargs): if sender not in (Project, Inventory): raise ValueError('This signal callback is only intended for use with Project or Inventory') - if instance.__original_org != instance.organization: + if instance.__original_org_id != instance.organization_id: jtq = JobTemplate.objects.filter(**{sender.__name__.lower(): instance}) for jt in jtq: update_role_parentage_for_instance(jt) @@ -494,6 +494,8 @@ def activity_stream_delete(sender, instance, **kwargs): return changes = model_to_dict(instance) object1 = camelcase_to_underscore(instance.__class__.__name__) + if type(instance) == OAuth2AccessToken: + changes['token'] = TOKEN_CENSOR activity_entry = ActivityStream( operation='delete', changes=json.dumps(changes), diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 31edcc38d7..68dd21e804 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -29,7 +29,7 @@ except Exception: # Celery from celery import Task, shared_task, Celery -from celery.signals import celeryd_init, worker_process_init, worker_shutdown, worker_ready, celeryd_after_setup +from celery.signals import celeryd_init, worker_shutdown, worker_ready, celeryd_after_setup # Django from django.conf import settings @@ -49,6 +49,7 @@ from crum import impersonate # AWX from awx import __version__ as awx_application_version from awx.main.constants import CLOUD_PROVIDERS, PRIVILEGE_ESCALATION_METHODS +from awx.main.access import access_registry from awx.main.models import * # noqa from awx.main.constants import ACTIVE_STATES from awx.main.exceptions import AwxTaskError @@ -59,13 +60,15 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, wrap_args_with_proot, OutputEventFilter, OutputVerboseFilter, ignore_inventory_computed_fields, ignore_inventory_group_removal, get_type_for_model, extract_ansible_vars) from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja -from awx.main.utils.reload import restart_local_services, stop_local_services +from awx.main.utils.reload import stop_local_services from awx.main.utils.pglock import advisory_lock -from awx.main.utils.ha import update_celery_worker_routes, register_celery_worker_queues -from awx.main.utils.handlers import configure_external_logger +from awx.main.utils.ha import register_celery_worker_queues from awx.main.consumers import emit_channel_notification from awx.conf import settings_registry +from rest_framework.exceptions import PermissionDenied + + __all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate', 'RunAdHocCommand', 'handle_work_error', 'handle_work_success', 'apply_cluster_membership_policies', 'update_inventory_computed_fields', 'update_host_smart_inventory_memberships', @@ -117,15 +120,6 @@ def celery_startup(conf=None, **kwargs): logger.exception(six.text_type("Failed to rebuild schedule {}.").format(sch)) -@worker_process_init.connect -def task_set_logger_pre_run(*args, **kwargs): - try: - cache.close() - configure_external_logger(settings, is_startup=False) - except Exception: - logger.exception('Encountered error on initial log configuration.') - - @worker_shutdown.connect def inform_cluster_of_shutdown(*args, **kwargs): try: @@ -152,7 +146,7 @@ def apply_cluster_membership_policies(self): # Process policy instance list first, these will represent manually managed 
instances # that will not go through automatic policy determination for ig in InstanceGroup.objects.all(): - logger.info(six.text_type("Considering group {}").format(ig.name)) + logger.info(six.text_type("Applying cluster membership policies to Group {}").format(ig.name)) ig.instances.clear() group_actual = Group(obj=ig, instances=[]) for i in ig.policy_instance_list: @@ -160,7 +154,7 @@ def apply_cluster_membership_policies(self): if not inst.exists(): continue inst = inst[0] - logger.info(six.text_type("Policy List, adding {} to {}").format(inst.hostname, ig.name)) + logger.info(six.text_type("Policy List, adding Instance {} to Group {}").format(inst.hostname, ig.name)) group_actual.instances.append(inst.id) ig.instances.add(inst) filtered_instances.append(inst) @@ -173,7 +167,7 @@ def apply_cluster_membership_policies(self): for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): if len(g.instances) >= g.obj.policy_instance_minimum: break - logger.info(six.text_type("Policy minimum, adding {} to {}").format(i.obj.hostname, g.obj.name)) + logger.info(six.text_type("Policy minimum, adding Instance {} to Group {}").format(i.obj.hostname, g.obj.name)) g.obj.instances.add(i.obj) g.instances.append(i.obj.id) i.groups.append(g.obj.id) @@ -182,14 +176,14 @@ def apply_cluster_membership_policies(self): for i in sorted(actual_instances, cmp=lambda x,y: len(x.groups) - len(y.groups)): if 100 * float(len(g.instances)) / len(actual_instances) >= g.obj.policy_instance_percentage: break - logger.info(six.text_type("Policy percentage, adding {} to {}").format(i.obj.hostname, g.obj.name)) + logger.info(six.text_type("Policy percentage, adding Instance {} to Group {}").format(i.obj.hostname, g.obj.name)) g.instances.append(i.obj.id) g.obj.instances.add(i.obj) i.groups.append(g.obj.id) handle_ha_toplogy_changes.apply([]) -@shared_task(queue='tower_broadcast_all', bind=True) +@shared_task(exchange='tower_broadcast_all', bind=True) def handle_setting_changes(self, setting_keys): orig_len = len(setting_keys) for i in range(orig_len): @@ -200,15 +194,9 @@ def handle_setting_changes(self, setting_keys): cache_keys = set(setting_keys) logger.debug('cache delete_many(%r)', cache_keys) cache.delete_many(cache_keys) - for key in cache_keys: - if key.startswith('LOG_AGGREGATOR_'): - restart_local_services(['uwsgi', 'celery', 'beat', 'callback']) - break - elif key == 'OAUTH2_PROVIDER': - restart_local_services(['uwsgi']) -@shared_task(bind=True, queue='tower_broadcast_all') +@shared_task(bind=True, exchange='tower_broadcast_all') def handle_ha_toplogy_changes(self): (changed, instance) = Instance.objects.get_or_register() if changed: @@ -217,39 +205,24 @@ def handle_ha_toplogy_changes(self): awx_app = Celery('awx') awx_app.config_from_object('django.conf:settings') instances, removed_queues, added_queues = register_celery_worker_queues(awx_app, self.request.hostname) - for instance in instances: - logger.info(six.text_type("Workers on tower node '{}' removed from queues {} and added to queues {}") - .format(instance.hostname, removed_queues, added_queues)) - updated_routes = update_celery_worker_routes(instance, settings) - logger.info(six.text_type("Worker on tower node '{}' updated celery routes {} all routes are now {}") - .format(instance.hostname, updated_routes, self.app.conf.CELERY_ROUTES)) + if len(removed_queues) + len(added_queues) > 0: + logger.info(six.text_type("Workers on tower node(s) '{}' removed from queues {} and added to queues {}") + .format([i.hostname for i in 
instances], removed_queues, added_queues)) @worker_ready.connect def handle_ha_toplogy_worker_ready(sender, **kwargs): logger.debug(six.text_type("Configure celeryd queues task on host {}").format(sender.hostname)) instances, removed_queues, added_queues = register_celery_worker_queues(sender.app, sender.hostname) - for instance in instances: - logger.info(six.text_type("Workers on tower node '{}' unsubscribed from queues {} and subscribed to queues {}") - .format(instance.hostname, removed_queues, added_queues)) + if len(removed_queues) + len(added_queues) > 0: + logger.info(six.text_type("Workers on tower node(s) '{}' removed from queues {} and added to queues {}") + .format([i.hostname for i in instances], removed_queues, added_queues)) # Expedite the first hearbeat run so a node comes online quickly. cluster_node_heartbeat.apply([]) apply_cluster_membership_policies.apply([]) -@celeryd_init.connect -def handle_update_celery_routes(sender=None, conf=None, **kwargs): - conf = conf if conf else sender.app.conf - logger.debug(six.text_type("Registering celery routes for {}").format(sender)) - (changed, instance) = Instance.objects.get_or_register() - if changed: - logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname)) - added_routes = update_celery_worker_routes(instance, conf) - logger.info(six.text_type("Workers on tower node '{}' added routes {} all routes are now {}") - .format(instance.hostname, added_routes, conf.CELERY_ROUTES)) - - @celeryd_after_setup.connect def handle_update_celery_hostname(sender, instance, **kwargs): (changed, tower_instance) = Instance.objects.get_or_register() @@ -282,7 +255,10 @@ def send_notifications(notification_list, job_id=None): notification.error = smart_str(e) update_fields.append('error') finally: - notification.save(update_fields=update_fields) + try: + notification.save(update_fields=update_fields) + except Exception as e: + logger.exception(six.text_type('Error saving notification {} result.').format(notification.id)) @shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE) @@ -426,6 +402,13 @@ def awx_periodic_scheduler(self): for schedule in old_schedules: schedule.save() schedules = Schedule.objects.enabled().between(last_run, run_now) + + invalid_license = False + try: + access_registry[Job](None).check_license() + except PermissionDenied as e: + invalid_license = e + for schedule in schedules: template = schedule.unified_job_template schedule.save() # To update next_run timestamp. 
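On the queue= to exchange= switch above: tower_broadcast_all is a fanout exchange, so tasks like handle_setting_changes have to be routed by exchange rather than pinned to one named queue if every node is to receive a copy. A minimal sketch with kombu's Broadcast helper; the broker URL and task name are placeholders, and the upper-case setting name follows the old-style Celery configuration this codebase uses:

    from celery import Celery
    from kombu.common import Broadcast

    app = Celery('sketch', broker='amqp://guest@localhost//')
    # Broadcast() gives every worker its own auto-named queue bound to the
    # fanout exchange, so a single publish reaches all of them.
    app.conf.CELERY_QUEUES = (Broadcast('tower_broadcast_all'),)

    @app.task(exchange='tower_broadcast_all')
    def invalidate_cache(setting_keys):
        # Runs on each worker that consumes from the broadcast queue.
        print('invalidating cache keys: %r' % (setting_keys,))
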
@@ -435,6 +418,13 @@ def awx_periodic_scheduler(self): try: job_kwargs = schedule.get_job_kwargs() new_unified_job = schedule.unified_job_template.create_unified_job(**job_kwargs) + + if invalid_license: + new_unified_job.status = 'failed' + new_unified_job.job_explanation = str(invalid_license) + new_unified_job.save(update_fields=['status', 'job_explanation']) + new_unified_job.websocket_emit_status("failed") + raise invalid_license can_start = new_unified_job.signal_start() except Exception: logger.exception('Error spawning scheduled job.') @@ -561,6 +551,8 @@ def delete_inventory(self, inventory_id, user_id): with ignore_inventory_computed_fields(), ignore_inventory_group_removal(), impersonate(user): try: i = Inventory.objects.get(id=inventory_id) + for host in i.hosts.iterator(): + host.job_events_as_primary_host.update(host=None) i.delete() emit_channel_notification( 'inventories-status_changed', @@ -1677,7 +1669,13 @@ class RunProjectUpdate(BaseTask): raise try: + start_time = time.time() fcntl.flock(self.lock_fd, fcntl.LOCK_EX) + waiting_time = time.time() - start_time + if waiting_time > 1.0: + logger.info(six.text_type( + '{} spent {} waiting to acquire lock for local source tree ' + 'for path {}.').format(instance.log_format, waiting_time, lock_path)) except IOError as e: os.close(self.lock_fd) logger.error(six.text_type("I/O error({0}) while trying to aquire lock on file [{1}]: {2}").format(e.errno, lock_path, e.strerror)) @@ -1725,6 +1723,10 @@ class RunInventoryUpdate(BaseTask): event_model = InventoryUpdateEvent event_data_key = 'inventory_update_id' + @property + def proot_show_paths(self): + return [self.get_path_to('..', 'plugins', 'inventory')] + def build_private_data(self, inventory_update, **kwargs): """ Return private data needed for inventory update. 
@@ -2080,6 +2082,8 @@ class RunInventoryUpdate(BaseTask): return args def build_cwd(self, inventory_update, **kwargs): + if inventory_update.source == 'scm' and inventory_update.source_project_update: + return inventory_update.source_project_update.get_project_path(check_if_exists=False) return self.get_path_to('..', 'plugins', 'inventory') def get_idle_timeout(self): @@ -2331,6 +2335,9 @@ def _reconstruct_relationships(copy_mapping): setattr(new_obj, field_name, related_obj) elif field.many_to_many: for related_obj in getattr(old_obj, field_name).all(): + logger.debug(six.text_type('Deep copy: Adding {} to {}({}).{} relationship').format( + related_obj, new_obj, model, field_name + )) getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj)) new_obj.save() @@ -2352,7 +2359,7 @@ def deep_copy_model_obj( except ObjectDoesNotExist: logger.warning("Object or user no longer exists.") return - with transaction.atomic(): + with transaction.atomic(), ignore_inventory_computed_fields(): copy_mapping = {} for sub_obj_setup in sub_obj_list: sub_model = getattr(importlib.import_module(sub_obj_setup[0]), @@ -2372,3 +2379,5 @@ def deep_copy_model_obj( importlib.import_module(permission_check_func[0]), permission_check_func[1] ), permission_check_func[2]) permission_check_func(creater, copy_mapping.values()) + if isinstance(new_obj, Inventory): + update_inventory_computed_fields.delay(new_obj.id, True) diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py index 679577858f..0c3cb513ec 100644 --- a/awx/main/tests/conftest.py +++ b/awx/main/tests/conftest.py @@ -15,6 +15,16 @@ from awx.main.tests.factories import ( ) +def pytest_configure(config): + import sys + sys._called_from_test = True + + +def pytest_unconfigure(config): + import sys + del sys._called_from_test + + @pytest.fixture def mock_access(): @contextmanager @@ -96,3 +106,21 @@ def get_ssh_version(mocker): @pytest.fixture def job_template_with_survey_passwords_unit(job_template_with_survey_passwords_factory): return job_template_with_survey_passwords_factory(persisted=False) + + +@pytest.fixture +def mock_cache(): + class MockCache(object): + cache = {} + + def get(self, key, default=None): + return self.cache.get(key, default) + + def set(self, key, value, timeout=60): + self.cache[key] = value + + def delete(self, key): + del self.cache[key] + + return MockCache() + diff --git a/awx/main/tests/functional/__init__.py b/awx/main/tests/functional/__init__.py index 405018d1a4..262a142790 100644 --- a/awx/main/tests/functional/__init__.py +++ b/awx/main/tests/functional/__init__.py @@ -1,6 +1,7 @@ from django.db import connection from django.db.models.signals import post_migrate from django.apps import apps +from django.conf import settings def app_post_migration(sender, app_config, **kwargs): @@ -17,7 +18,8 @@ def app_post_migration(sender, app_config, **kwargs): ) -post_migrate.connect(app_post_migration, sender=apps.get_app_config('main')) +if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3': + post_migrate.connect(app_post_migration, sender=apps.get_app_config('main')) diff --git a/awx/main/tests/functional/api/test_generic.py b/awx/main/tests/functional/api/test_generic.py index f445ee73f7..68ca294027 100644 --- a/awx/main/tests/functional/api/test_generic.py +++ b/awx/main/tests/functional/api/test_generic.py @@ -94,10 +94,16 @@ class TestDeleteViews: @pytest.mark.django_db -def test_non_filterable_field(options, instance, admin_user): +def test_filterable_fields(options, instance, 
admin_user): r = options( url=instance.get_absolute_url(), user=admin_user ) - field_info = r.data['actions']['GET']['percent_capacity_remaining'] - assert 'filterable' in field_info + + filterable_info = r.data['actions']['GET']['created'] + non_filterable_info = r.data['actions']['GET']['percent_capacity_remaining'] + + assert 'filterable' in filterable_info + assert filterable_info['filterable'] is True + + assert 'filterable' not in non_filterable_info diff --git a/awx/main/tests/functional/api/test_instance_group.py b/awx/main/tests/functional/api/test_instance_group.py index 3dfd554f11..cd78d0de33 100644 --- a/awx/main/tests/functional/api/test_instance_group.py +++ b/awx/main/tests/functional/api/test_instance_group.py @@ -87,7 +87,7 @@ def test_delete_instance_group_jobs_running(delete, instance_group_jobs_running, @pytest.mark.django_db -def test_modify_delete_tower_instance_group_prevented(delete, options, tower_instance_group, user, patch, put): +def test_delete_rename_tower_instance_group_prevented(delete, options, tower_instance_group, instance_group, user, patch): url = reverse("api:instance_group_detail", kwargs={'pk': tower_instance_group.pk}) super_user = user('bob', True) @@ -99,6 +99,13 @@ def test_modify_delete_tower_instance_group_prevented(delete, options, tower_ins assert 'GET' in resp.data['actions'] assert 'PUT' in resp.data['actions'] + # Rename 'tower' instance group denied + patch(url, {'name': 'tower_prime'}, super_user, expect=400) + + # Rename, other instance group OK + url = reverse("api:instance_group_detail", kwargs={'pk': instance_group.pk}) + patch(url, {'name': 'foobar'}, super_user, expect=200) + @pytest.mark.django_db def test_prevent_delete_iso_and_control_groups(delete, isolated_instance_group, admin): diff --git a/awx/main/tests/functional/api/test_inventory.py b/awx/main/tests/functional/api/test_inventory.py index c96bb8057c..2e4b7df63e 100644 --- a/awx/main/tests/functional/api/test_inventory.py +++ b/awx/main/tests/functional/api/test_inventory.py @@ -126,9 +126,8 @@ def test_list_cannot_order_by_unsearchable_field(get, organization, alice, order ) custom_script.admin_role.members.add(alice) - response = get(reverse('api:inventory_script_list'), alice, - QUERY_STRING='order_by=%s' % order_by, status=400) - assert response.status_code == 400 + get(reverse('api:inventory_script_list'), alice, + QUERY_STRING='order_by=%s' % order_by, expect=403) @pytest.mark.parametrize("role_field,expected_status_code", [ diff --git a/awx/main/tests/functional/api/test_job_template.py b/awx/main/tests/functional/api/test_job_template.py index 3bd337f10f..0d9d1c8985 100644 --- a/awx/main/tests/functional/api/test_job_template.py +++ b/awx/main/tests/functional/api/test_job_template.py @@ -625,17 +625,31 @@ def test_save_survey_passwords_on_migration(job_template_with_survey_passwords): @pytest.mark.django_db -def test_job_template_custom_virtualenv(get, patch, organization_factory, job_template_factory): +@pytest.mark.parametrize('access', ["superuser", "admin", "peon"]) +def test_job_template_custom_virtualenv(get, patch, organization_factory, job_template_factory, alice, access): objs = organization_factory("org", superusers=['admin']) jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template + user = alice + if access == "superuser": + user = objs.superusers.admin + elif access == "admin": + jt.admin_role.members.add(alice) + else: + jt.read_role.members.add(alice) + with 
TemporaryDirectory(dir=settings.BASE_VENV_PATH) as temp_dir:
-        admin = objs.superusers.admin
         os.makedirs(os.path.join(temp_dir, 'bin', 'activate'))
         url = reverse('api:job_template_detail', kwargs={'pk': jt.id})
-        patch(url, {'custom_virtualenv': temp_dir}, user=admin, expect=200)
-        assert get(url, user=admin).data['custom_virtualenv'] == os.path.join(temp_dir, '')
+
+        if access == "peon":
+            patch(url, {'custom_virtualenv': temp_dir}, user=user, expect=403)
+            assert 'custom_virtualenv' not in get(url, user=user).data
+            assert JobTemplate.objects.get(pk=jt.id).custom_virtualenv is None
+        else:
+            patch(url, {'custom_virtualenv': temp_dir}, user=user, expect=200)
+            assert get(url, user=user).data['custom_virtualenv'] == os.path.join(temp_dir, '')
 
 
 @pytest.mark.django_db
diff --git a/awx/main/tests/functional/api/test_oauth.py b/awx/main/tests/functional/api/test_oauth.py
index 4110701e6a..7e745213c8 100644
--- a/awx/main/tests/functional/api/test_oauth.py
+++ b/awx/main/tests/functional/api/test_oauth.py
@@ -172,3 +172,12 @@ def test_oauth_application_delete(oauth_application, post, delete, admin):
     assert Application.objects.filter(client_id=oauth_application.client_id).count() == 0
     assert RefreshToken.objects.filter(application=oauth_application).count() == 0
     assert AccessToken.objects.filter(application=oauth_application).count() == 0
+
+
+@pytest.mark.django_db
+def test_oauth_list_user_tokens(oauth_application, post, get, admin, alice):
+    for user in (admin, alice):
+        url = reverse('api:o_auth2_token_list', kwargs={'pk': user.pk})
+        post(url, {'scope': 'read'}, user, expect=201)
+        response = get(url, admin, expect=200)
+        assert response.data['count'] == 1
diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py
index 5974a1a935..97effe0fa3 100644
--- a/awx/main/tests/functional/api/test_settings.py
+++ b/awx/main/tests/functional/api/test_settings.py
@@ -14,7 +14,7 @@ import mock
 # AWX
 from awx.api.versioning import reverse
 from awx.conf.models import Setting
-from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
+from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
 
 import six
 
@@ -217,7 +217,7 @@ def test_logging_aggregrator_connection_test_bad_request(get, post, admin, key):
 
 @pytest.mark.django_db
 def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
-    with mock.patch.object(BaseHTTPSHandler, 'perform_test') as perform_test:
+    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
         url = reverse('api:setting_logging_test')
         user_data = {
             'LOG_AGGREGATOR_TYPE': 'logstash',
@@ -227,7 +227,8 @@ def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
             'LOG_AGGREGATOR_PASSWORD': 'mcstash'
         }
         post(url, user_data, user=admin, expect=200)
-        create_settings = perform_test.call_args[0][0]
+        args, kwargs = perform_test.call_args_list[0]
+        create_settings = kwargs['custom_settings']
         for k, v in user_data.items():
             assert hasattr(create_settings, k)
             assert getattr(create_settings, k) == v
@@ -238,7 +239,7 @@ def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch,
     url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
     patch(url, user=admin, data={'LOG_AGGREGATOR_PASSWORD': 'password123'}, expect=200)
 
-    with mock.patch.object(BaseHTTPSHandler, 'perform_test') as perform_test:
+    with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test:
         url = reverse('api:setting_logging_test')
user_data = { 'LOG_AGGREGATOR_TYPE': 'logstash', @@ -248,13 +249,14 @@ def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch, 'LOG_AGGREGATOR_PASSWORD': '$encrypted$' } post(url, user_data, user=admin, expect=200) - create_settings = perform_test.call_args[0][0] + args, kwargs = perform_test.call_args_list[0] + create_settings = kwargs['custom_settings'] assert getattr(create_settings, 'LOG_AGGREGATOR_PASSWORD') == 'password123' @pytest.mark.django_db def test_logging_aggregrator_connection_test_invalid(mocker, get, post, admin): - with mock.patch.object(BaseHTTPSHandler, 'perform_test') as perform_test: + with mock.patch.object(AWXProxyHandler, 'perform_test') as perform_test: perform_test.side_effect = LoggingConnectivityException('404: Not Found') url = reverse('api:setting_logging_test') resp = post(url, { diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py index f51920fd41..3f1a5760fc 100644 --- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py +++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py @@ -8,6 +8,7 @@ import tempfile from django.conf import settings from django.db.backends.sqlite3.base import SQLiteCursorWrapper +import mock import pytest from awx.api.versioning import reverse @@ -184,6 +185,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'], ]) @pytest.mark.parametrize('fmt', ['txt', 'ansi']) +@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin): job = Parent() job.save() @@ -231,6 +233,7 @@ def test_legacy_result_stdout_text_fallback(Cls, view, fmt, get, admin): [_mk_inventory_update, 'api:inventory_update_stdout'] ]) @pytest.mark.parametrize('fmt', ['txt', 'ansi']) +@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin): job = Cls() job.save() @@ -282,7 +285,7 @@ def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin): url = reverse( 'api:job_stdout', kwargs={'pk': job.pk} - ) + '?format=json&content_encoding=base64&content_format=ansi' + ) + '?format=json&content_encoding=base64' response = get(url, user=admin, expect=200) content = base64.b64decode(json.loads(response.content)['content']) diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index d5b45cf728..28d7b65564 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -9,7 +9,6 @@ from six.moves import xrange # Django from django.core.urlresolvers import resolve -from django.core.cache import cache from django.utils.six.moves.urllib.parse import urlparse from django.utils import timezone from django.contrib.auth.models import User @@ -57,14 +56,6 @@ def swagger_autogen(requests=__SWAGGER_REQUESTS__): return requests -@pytest.fixture(autouse=True) -def clear_cache(): - ''' - Clear cache (local memory) for each test to prevent using cached settings. 
- ''' - cache.clear() - - @pytest.fixture(scope="session", autouse=True) def celery_memory_broker(): ''' diff --git a/awx/main/tests/functional/models/test_project.py b/awx/main/tests/functional/models/test_project.py new file mode 100644 index 0000000000..71352ed633 --- /dev/null +++ b/awx/main/tests/functional/models/test_project.py @@ -0,0 +1,33 @@ +import pytest +import mock + +from awx.main.models import Project + + +@pytest.mark.django_db +def test_project_initial_update(): + with mock.patch.object(Project, "update") as mock_update: + Project.objects.create(name='foo', scm_type='git') + mock_update.assert_called_once_with() + + +@pytest.mark.django_db +def test_does_not_update_nonsensitive_change(project): + with mock.patch.object(Project, "update") as mock_update: + project.scm_update_on_launch = not project.scm_update_on_launch + project.save() + mock_update.assert_not_called() + + +@pytest.mark.django_db +def test_sensitive_change_triggers_update(project): + with mock.patch.object(Project, "update") as mock_update: + project.scm_url = 'https://foo.invalid' + project.save() + mock_update.assert_called_once_with() + # test other means of initialization + project = Project.objects.get(pk=project.pk) + with mock.patch.object(Project, "update") as mock_update: + project.scm_url = 'https://foo2.invalid' + project.save() + mock_update.assert_called_once_with() diff --git a/awx/main/tests/functional/models/test_schedule.py b/awx/main/tests/functional/models/test_schedule.py index 101afa8b99..d18e848d97 100644 --- a/awx/main/tests/functional/models/test_schedule.py +++ b/awx/main/tests/functional/models/test_schedule.py @@ -1,5 +1,6 @@ from datetime import datetime +from django.utils.timezone import now import mock import pytest import pytz @@ -131,31 +132,19 @@ def test_utc_until(job_template, until, dtend): @pytest.mark.django_db @pytest.mark.parametrize('dtstart, until', [ - ['20180601T120000Z', '20180602T170000'], - ['TZID=America/New_York:20180601T120000', '20180602T170000'], + ['DTSTART:20380601T120000Z', '20380601T170000'], # noon UTC to 5PM UTC + ['DTSTART;TZID=America/New_York:20380601T120000', '20380601T170000'], # noon EST to 5PM EST ]) def test_tzinfo_naive_until(job_template, dtstart, until): - rrule = 'DTSTART;{} RRULE:FREQ=DAILY;INTERVAL=1;UNTIL={}'.format(dtstart, until) # noqa + rrule = '{} RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL={}'.format(dtstart, until) # noqa s = Schedule( name='Some Schedule', rrule=rrule, unified_job_template=job_template ) - with pytest.raises(ValueError): - s.save() - - -@pytest.mark.django_db -def test_until_must_be_utc(job_template): - rrule = 'DTSTART;TZID=America/New_York:20180601T120000 RRULE:FREQ=DAILY;INTERVAL=1;UNTIL=20180602T000000' # noqa the Z is required - s = Schedule( - name='Some Schedule', - rrule=rrule, - unified_job_template=job_template - ) - with pytest.raises(ValueError) as e: - s.save() - assert 'RRULE UNTIL values must be specified in UTC' in str(e) + s.save() + gen = Schedule.rrulestr(s.rrule).xafter(now(), count=20) + assert len(list(gen)) == 6 # noon, 1PM, 2, 3, 4, 5PM @pytest.mark.django_db @@ -203,3 +192,85 @@ def test_beginning_of_time(job_template): ) with pytest.raises(ValueError): s.save() + + +@pytest.mark.django_db +@pytest.mark.parametrize('rrule, tz', [ + ['DTSTART:20300112T210000Z RRULE:FREQ=DAILY;INTERVAL=1', 'UTC'], + ['DTSTART;TZID=America/New_York:20300112T210000 RRULE:FREQ=DAILY;INTERVAL=1', 'America/New_York'] +]) +def test_timezone_property(job_template, rrule, tz): + s = Schedule( + name='Some Schedule', + 
rrule=rrule,
+        unified_job_template=job_template
+    )
+    assert s.timezone == tz
+
+
+@pytest.mark.django_db
+def test_utc_until_property(job_template):
+    rrule = 'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000Z'
+    s = Schedule(
+        name='Some Schedule',
+        rrule=rrule,
+        unified_job_template=job_template
+    )
+    s.save()
+
+    assert s.rrule.endswith('20380601T170000Z')
+    assert s.until == '2038-06-01T17:00:00'
+
+
+@pytest.mark.django_db
+def test_localized_until_property(job_template):
+    rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T220000Z'
+    s = Schedule(
+        name='Some Schedule',
+        rrule=rrule,
+        unified_job_template=job_template
+    )
+    s.save()
+
+    assert s.rrule.endswith('20380601T220000Z')
+    assert s.until == '2038-06-01T17:00:00'
+
+
+@pytest.mark.django_db
+def test_utc_naive_coercion(job_template):
+    rrule = 'DTSTART:20380601T120000Z RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000'
+    s = Schedule(
+        name='Some Schedule',
+        rrule=rrule,
+        unified_job_template=job_template
+    )
+    s.save()
+
+    assert s.rrule.endswith('20380601T170000Z')
+    assert s.until == '2038-06-01T17:00:00'
+
+
+@pytest.mark.django_db
+def test_est_naive_coercion(job_template):
+    rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1;UNTIL=20380601T170000'
+    s = Schedule(
+        name='Some Schedule',
+        rrule=rrule,
+        unified_job_template=job_template
+    )
+    s.save()
+
+    assert s.rrule.endswith('20380601T220000Z')  # 5PM EDT = 10PM UTC
+    assert s.until == '2038-06-01T17:00:00'
+
+
+@pytest.mark.django_db
+def test_empty_until_property(job_template):
+    rrule = 'DTSTART;TZID=America/New_York:20380601T120000 RRULE:FREQ=HOURLY;INTERVAL=1'
+    s = Schedule(
+        name='Some Schedule',
+        rrule=rrule,
+        unified_job_template=job_template
+    )
+    s.save()
+    assert s.until == ''
diff --git a/awx/main/tests/functional/task_management/test_scheduler.py b/awx/main/tests/functional/task_management/test_scheduler.py
index 1d87417b17..82625533c7 100644
--- a/awx/main/tests/functional/task_management/test_scheduler.py
+++ b/awx/main/tests/functional/task_management/test_scheduler.py
@@ -113,7 +113,7 @@ def test_single_job_dependencies_project_launch(default_instance_group, job_temp
     p.scm_update_cache_timeout = 0
     p.scm_type = "git"
     p.scm_url = "http://github.com/ansible/ansible.git"
-    p.save()
+    p.save(skip_update=True)
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         tm = TaskManager()
         with mock.patch.object(TaskManager, "create_project_update", wraps=tm.create_project_update) as mock_pu:
@@ -241,15 +241,16 @@ def test_shared_dependencies_launch(default_instance_group, job_template_factory
 
 
 @pytest.mark.django_db
-def test_cleanup_interval():
-    assert cache.get('last_celery_task_cleanup') is None
+def test_cleanup_interval(mock_cache):
+    with mock.patch.multiple('awx.main.scheduler.task_manager.cache', get=mock_cache.get, set=mock_cache.set):
+        assert mock_cache.get('last_celery_task_cleanup') is None
 
-    TaskManager().cleanup_inconsistent_celery_tasks()
-    last_cleanup = cache.get('last_celery_task_cleanup')
-    assert isinstance(last_cleanup, datetime)
+        TaskManager().cleanup_inconsistent_celery_tasks()
+        last_cleanup = mock_cache.get('last_celery_task_cleanup')
+        assert isinstance(last_cleanup, datetime)
 
-    TaskManager().cleanup_inconsistent_celery_tasks()
-    assert cache.get('last_celery_task_cleanup') == last_cleanup
+        TaskManager().cleanup_inconsistent_celery_tasks()
+        assert mock_cache.get('last_celery_task_cleanup') == last_cleanup
 
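
[Editorial note, not part of the patch: the rewritten test_cleanup_interval above leans on the new
mock_cache fixture added in awx/main/tests/conftest.py. Below is a minimal, self-contained sketch
of that stubbing pattern, assuming only the `mock` library; `SharedCache` and `record_last_cleanup`
are hypothetical stand-ins for the module-level cache object and the code under test, not AWX code.
The key point: `mock.patch.multiple` rebinds the `get`/`set` attributes on the one shared cache
object, so any module that already did `from django.core.cache import cache` transparently reads
and writes the test's dict for the duration of the block.

    import mock


    class MockCache(object):
        '''Dict-backed stand-in for a Django cache backend (timeout is ignored).'''

        def __init__(self):
            self.cache = {}

        def get(self, key, default=None):
            return self.cache.get(key, default)

        def set(self, key, value, timeout=60):
            self.cache[key] = value


    class SharedCache(object):
        '''Plays the role of the shared cache object that production code imports.'''

        def get(self, key, default=None):
            raise AssertionError('real cache backend must not be hit in tests')

        def set(self, key, value, timeout=60):
            raise AssertionError('real cache backend must not be hit in tests')


    cache = SharedCache()  # akin to `from django.core.cache import cache` at module scope


    def record_last_cleanup(stamp):
        # hypothetical code under test: records a marker through the shared cache
        cache.set('last_celery_task_cleanup', stamp)


    mock_cache = MockCache()
    # Patching attributes on the shared object (rather than rebinding a module-level
    # name) means every importer of `cache` sees the stub, and the original methods
    # come back when the context manager exits.
    with mock.patch.multiple(cache, get=mock_cache.get, set=mock_cache.set):
        record_last_cleanup('2038-06-01T12:00:00Z')
        assert mock_cache.get('last_celery_task_cleanup') == '2038-06-01T12:00:00Z'

AWX's test passes a dotted path string ('awx.main.scheduler.task_manager.cache') instead of the
object itself; mock resolves the string to the same shared instance, so the effect is identical.
End of note.]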
class TestReaper(): @@ -326,7 +327,8 @@ class TestReaper(): @pytest.mark.django_db @mock.patch.object(JobNotificationMixin, 'send_notification_templates') @mock.patch.object(TaskManager, 'get_active_tasks', lambda self: ([], [])) - def test_cleanup_inconsistent_task(self, notify, active_tasks, considered_jobs, reapable_jobs, running_tasks, waiting_tasks, mocker): + def test_cleanup_inconsistent_task(self, notify, active_tasks, considered_jobs, reapable_jobs, running_tasks, waiting_tasks, mocker, settings): + settings.AWX_INCONSISTENT_TASK_INTERVAL = 0 tm = TaskManager() tm.get_running_tasks = mocker.Mock(return_value=(running_tasks, waiting_tasks)) diff --git a/awx/main/tests/functional/test_rbac_core.py b/awx/main/tests/functional/test_rbac_core.py index bae3b61dc0..abec03da45 100644 --- a/awx/main/tests/functional/test_rbac_core.py +++ b/awx/main/tests/functional/test_rbac_core.py @@ -5,6 +5,7 @@ from awx.main.models import ( Organization, Project, ) +from awx.main.fields import update_role_parentage_for_instance @pytest.mark.django_db @@ -202,3 +203,11 @@ def test_auto_parenting(): assert org1.admin_role.is_ancestor_of(prj2.admin_role) is False assert org2.admin_role.is_ancestor_of(prj1.admin_role) assert org2.admin_role.is_ancestor_of(prj2.admin_role) + + +@pytest.mark.django_db +def test_update_parents_keeps_teams(team, project): + project.update_role.parents.add(team.member_role) + assert team.member_role in project.update_role # test prep sanity check + update_role_parentage_for_instance(project) + assert team.member_role in project.update_role # actual assertion diff --git a/awx/main/tests/functional/test_rbac_oauth.py b/awx/main/tests/functional/test_rbac_oauth.py index 35b915f94d..757c55e12b 100644 --- a/awx/main/tests/functional/test_rbac_oauth.py +++ b/awx/main/tests/functional/test_rbac_oauth.py @@ -102,21 +102,21 @@ class TestOAuth2Application: assert access.can_delete(app) is can_access - def test_superuser_can_always_create(self, admin, org_admin, org_member, alice): + def test_superuser_can_always_create(self, admin, org_admin, org_member, alice, organization): access = OAuth2ApplicationAccess(admin) for user in [admin, org_admin, org_member, alice]: assert access.can_add({ 'name': 'test app', 'user': user.pk, 'client_type': 'confidential', - 'authorization_grant_type': 'password', 'organization': 1 + 'authorization_grant_type': 'password', 'organization': organization.id }) - def test_normal_user_cannot_create(self, admin, org_admin, org_member, alice): + def test_normal_user_cannot_create(self, admin, org_admin, org_member, alice, organization): for access_user in [org_member, alice]: access = OAuth2ApplicationAccess(access_user) for user in [admin, org_admin, org_member, alice]: assert not access.can_add({ 'name': 'test app', 'user': user.pk, 'client_type': 'confidential', - 'authorization_grant_type': 'password', 'organization': 1 + 'authorization_grant_type': 'password', 'organization': organization.id }) diff --git a/awx/main/tests/test_env.py b/awx/main/tests/test_env.py new file mode 100644 index 0000000000..135c90d99b --- /dev/null +++ b/awx/main/tests/test_env.py @@ -0,0 +1,6 @@ + + +# Ensure that our autouse overwrites are working +def test_cache(settings): + assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache' + assert settings.CACHES['default']['LOCATION'].startswith('unique-') diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py index 12ff3663a5..cc53234e97 100644 --- 
a/awx/main/tests/unit/api/test_filters.py +++ b/awx/main/tests/unit/api/test_filters.py @@ -3,14 +3,19 @@ import pytest from rest_framework.exceptions import PermissionDenied, ParseError -from awx.api.filters import FieldLookupBackend +from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path from awx.main.models import (AdHocCommand, ActivityStream, CustomInventoryScript, Credential, Job, JobTemplate, SystemJob, UnifiedJob, User, WorkflowJob, WorkflowJobTemplate, - WorkflowJobOptions, InventorySource) + WorkflowJobOptions, InventorySource, + JobEvent) +from awx.main.models.oauth import OAuth2Application from awx.main.models.jobs import JobOptions +# Django +from django.db.models.fields import FieldDoesNotExist + def test_related(): field_lookup = FieldLookupBackend() @@ -20,6 +25,27 @@ def test_related(): print(new_lookup) +def test_invalid_filter_key(): + field_lookup = FieldLookupBackend() + # FieldDoesNotExist is caught and converted to ParseError by filter_queryset + with pytest.raises(FieldDoesNotExist) as excinfo: + field_lookup.value_to_python(JobEvent, 'event_data.task_action', 'foo') + assert 'has no field named' in str(excinfo) + + +def test_invalid_field_hop(): + with pytest.raises(ParseError) as excinfo: + get_field_from_path(Credential, 'organization__description__user') + assert 'No related model for' in str(excinfo) + + +def test_invalid_order_by_key(): + field_order_by = OrderByBackend() + with pytest.raises(ParseError) as excinfo: + [f for f in field_order_by._validate_ordering_fields(JobEvent, ('event_data.task_action',))] + assert 'has no field named' in str(excinfo) + + @pytest.mark.parametrize(u"empty_value", [u'', '']) def test_empty_in(empty_value): field_lookup = FieldLookupBackend() @@ -57,7 +83,6 @@ def test_filter_on_password_field(password_field, lookup_suffix): (User, 'password__icontains'), (User, 'settings__value__icontains'), (User, 'main_oauth2accesstoken__token__gt'), - (User, 'main_oauth2application__name__gt'), (UnifiedJob, 'job_args__icontains'), (UnifiedJob, 'job_env__icontains'), (UnifiedJob, 'start_args__icontains'), @@ -70,8 +95,8 @@ def test_filter_on_password_field(password_field, lookup_suffix): (JobTemplate, 'survey_spec__icontains'), (WorkflowJobTemplate, 'survey_spec__icontains'), (CustomInventoryScript, 'script__icontains'), - (ActivityStream, 'o_auth2_access_token__gt'), - (ActivityStream, 'o_auth2_application__gt') + (ActivityStream, 'o_auth2_application__client_secret__gt'), + (OAuth2Application, 'grant__code__gt') ]) def test_filter_sensitive_fields_and_relations(model, query): field_lookup = FieldLookupBackend() diff --git a/awx/main/tests/unit/api/test_views.py b/awx/main/tests/unit/api/test_views.py index 4c767043bb..fe1c4974dc 100644 --- a/awx/main/tests/unit/api/test_views.py +++ b/awx/main/tests/unit/api/test_views.py @@ -16,6 +16,9 @@ from awx.api.views import ( from awx.main.models import ( Host, ) +from awx.main.views import handle_error + +from rest_framework.test import APIRequestFactory @pytest.fixture @@ -25,6 +28,12 @@ def mock_response_new(mocker): return m +def test_handle_error(): + # Assure that templating of error does not raise errors + request = APIRequestFactory().get('/fooooo/') + handle_error(request) + + class TestApiRootView: def test_get_endpoints(self, mocker, mock_response_new): endpoints = [ diff --git a/awx/main/tests/unit/conftest.py b/awx/main/tests/unit/conftest.py index 7a5ae40f79..2307b3a47d 100644 --- a/awx/main/tests/unit/conftest.py +++ b/awx/main/tests/unit/conftest.py @@ 
-1,4 +1,5 @@ import pytest +import logging from mock import PropertyMock @@ -7,3 +8,16 @@ from mock import PropertyMock def _disable_database_settings(mocker): m = mocker.patch('awx.conf.settings.SettingsWrapper.all_supported_settings', new_callable=PropertyMock) m.return_value = [] + + +@pytest.fixture() +def dummy_log_record(): + return logging.LogRecord( + 'awx', # logger name + 20, # loglevel INFO + './awx/some/module.py', # pathname + 100, # lineno + 'User joe logged in', # msg + tuple(), # args, + None # exc_info + ) diff --git a/awx/main/tests/unit/expect/test_expect.py b/awx/main/tests/unit/expect/test_expect.py index 3978cb14c5..a43775ad33 100644 --- a/awx/main/tests/unit/expect/test_expect.py +++ b/awx/main/tests/unit/expect/test_expect.py @@ -90,7 +90,7 @@ def test_cancel_callback_error(): extra_fields = {} status, rc = run.run_pexpect( - ['ls', '-la'], + ['sleep', '2'], HERE, {}, stdout, diff --git a/awx/main/tests/unit/models/test_events.py b/awx/main/tests/unit/models/test_events.py index 71be98a167..47a7f30c55 100644 --- a/awx/main/tests/unit/models/test_events.py +++ b/awx/main/tests/unit/models/test_events.py @@ -44,3 +44,18 @@ def test_playbook_event_strip_invalid_keys(job_identifier, cls): 'extra_key': 'extra_value' }) manager.create.assert_called_with(**{job_identifier: 123}) + + +@pytest.mark.parametrize('field', [ + 'play', 'role', 'task', 'playbook' +]) +def test_really_long_event_fields(field): + with mock.patch.object(JobEvent, 'objects') as manager: + JobEvent.create_from_data(**{ + 'job_id': 123, + field: 'X' * 4096 + }) + manager.create.assert_called_with(**{ + 'job_id': 123, + field: 'X' * 1021 + '...' + }) diff --git a/awx/main/tests/unit/models/test_survey_models.py b/awx/main/tests/unit/models/test_survey_models.py index 6ce19d2060..3bc06edc87 100644 --- a/awx/main/tests/unit/models/test_survey_models.py +++ b/awx/main/tests/unit/models/test_survey_models.py @@ -1,8 +1,9 @@ import tempfile import json import yaml - import pytest +from itertools import count + from awx.main.utils.encryption import encrypt_value from awx.main.tasks import RunJob from awx.main.models import ( @@ -16,6 +17,15 @@ from awx.main.utils.safe_yaml import SafeLoader ENCRYPTED_SECRET = encrypt_value('secret') +class DistinctParametrize(object): + + def __init__(self): + self._gen = count(0) + + def __call__(self, value): + return str(next(self._gen)) + + @pytest.mark.survey class SurveyVariableValidation: @@ -243,7 +253,7 @@ def test_optional_survey_question_defaults( ('password', 'foo', 5, {'extra_vars': {'x': ''}}, {'x': ''}), ('password', ENCRYPTED_SECRET, 5, {'extra_vars': {'x': '$encrypted$'}}, {}), ('password', ENCRYPTED_SECRET, 10, {'extra_vars': {'x': '$encrypted$'}}, {'x': ENCRYPTED_SECRET}), -]) +], ids=DistinctParametrize()) def test_survey_encryption_defaults(survey_spec_factory, question_type, default, maxlen, kwargs, expected): spec = survey_spec_factory([ { diff --git a/awx/main/tests/unit/test_access.py b/awx/main/tests/unit/test_access.py index 6b20aaaed9..02e863f47f 100644 --- a/awx/main/tests/unit/test_access.py +++ b/awx/main/tests/unit/test_access.py @@ -158,7 +158,7 @@ def test_jt_existing_values_are_nonsensitive(job_template_with_ids, user_unit): """Assure that permission checks are not required if submitted data is identical to what the job template already has.""" - data = model_to_dict(job_template_with_ids) + data = model_to_dict(job_template_with_ids, exclude=['unifiedjobtemplate_ptr']) access = JobTemplateAccess(user_unit) assert 
access.changes_are_non_sensitive(job_template_with_ids, data)
diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py
index bec0c4de2f..79a163b840 100644
--- a/awx/main/tests/unit/test_fields.py
+++ b/awx/main/tests/unit/test_fields.py
@@ -96,10 +96,26 @@ def test_cred_type_input_schema_validity(input_, valid):
     ({'invalid-injector': {}}, False),
     ({'file': 123}, False),
     ({'file': {}}, True),
+    # Uses credential inputs inside of unnamed file contents
     ({'file': {'template': '{{username}}'}}, True),
+    # Uses named file
     ({'file': {'template.username': '{{username}}'}}, True),
+    # Uses multiple named files
     ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
+    # Use of unnamed file mutually exclusive with use of named files
     ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
+    # References non-existent named file
+    ({'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, False),
+    # References unnamed file, but a file was never defined
+    ({'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
+    # Cannot reference tower namespace itself (what would this return??)
+    ({'env': {'FROM_FILE': "{{tower}}"}}, False),
+    # References filename of a named file
+    ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename.cert}}"}}, True),
+    # With named files, `tower.filename` is another namespace, so it cannot be referenced
+    ({'file': {'template.cert': '{{awx_secret}}'}, 'env': {'FROM_FILE': "{{tower.filename}}"}}, False),
+    # With an unnamed file, `tower.filename` is just the filename
+    ({'file': {'template': '{{awx_secret}}'}, 'env': {'THE_FILENAME': "{{tower.filename}}"}}, True),
     ({'file': {'foo': 'bar'}}, False),
     ({'env': 123}, False),
     ({'env': {}}, True),
diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py
index 39a4d52de4..c4e3abe9b9 100644
--- a/awx/main/tests/unit/test_tasks.py
+++ b/awx/main/tests/unit/test_tasks.py
@@ -2155,7 +2155,7 @@ def test_aquire_lock_open_fail_logged(logging_getLogger, os_open):
 
     ProjectUpdate = tasks.RunProjectUpdate()
 
-    with pytest.raises(OSError, errno=3, strerror='dummy message'):
+    with pytest.raises(OSError, message='dummy message'):
         ProjectUpdate.acquire_lock(instance)
     assert logger.err.called_with("I/O error({0}) while trying to open lock file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message'))
 
@@ -2181,7 +2181,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_flock, logging_getLogger, os_
 
     ProjectUpdate = tasks.RunProjectUpdate()
 
-    with pytest.raises(IOError, errno=3, strerror='dummy message'):
+    with pytest.raises(IOError, message='dummy message'):
         ProjectUpdate.acquire_lock(instance)
     os_close.assert_called_with(3)
     assert logger.err.called_with("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message'))
diff --git a/awx/main/tests/unit/test_views.py b/awx/main/tests/unit/test_views.py
index 6e3572dc7d..f1bad79400 100644
--- a/awx/main/tests/unit/test_views.py
+++ b/awx/main/tests/unit/test_views.py
@@ -3,6 +3,10 @@ import mock
 
 # Django REST Framework
 from rest_framework import exceptions
+from rest_framework.generics import ListAPIView
+
+# Django
+from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
 
 # AWX
 from awx.main.views import ApiErrorView
@@ -52,3 +56,44 @@ def test_disable_post_on_v1_inventory_source_list(version, supports_post):
     inv_source_list.request = mock.MagicMock()
     with 
mock.patch('awx.api.views.get_request_version', return_value=version): assert ('POST' in inv_source_list.allowed_methods) == supports_post + + +def test_views_have_search_fields(): + from awx.api.urls import urlpatterns as api_patterns + patterns = set([]) + url_views = set([]) + # Add recursive URL patterns + unprocessed = set(api_patterns) + while unprocessed: + to_process = unprocessed.copy() + unprocessed = set([]) + for pattern in to_process: + if hasattr(pattern, 'lookup_str') and not pattern.lookup_str.startswith('awx.api'): + continue + patterns.add(pattern) + if isinstance(pattern, RegexURLResolver): + for sub_pattern in pattern.url_patterns: + if sub_pattern not in patterns: + unprocessed.add(sub_pattern) + # Get view classes + for pattern in patterns: + if isinstance(pattern, RegexURLPattern) and hasattr(pattern.callback, 'view_class'): + cls = pattern.callback.view_class + if issubclass(cls, ListAPIView): + url_views.add(pattern.callback.view_class) + + # Gather any views that don't have search fields defined + views_missing_search = [] + for View in url_views: + view = View() + if not hasattr(view, 'search_fields') or len(view.search_fields) == 0: + views_missing_search.append(view) + + if views_missing_search: + raise Exception('{} views do not have search fields defined:\n{}'.format( + len(views_missing_search), + '\n'.join([ + v.__class__.__name__ + ' (model: {})'.format(getattr(v, 'model', type(None)).__name__) + for v in views_missing_search + ])) + ) diff --git a/awx/main/tests/unit/utils/test_common.py b/awx/main/tests/unit/utils/test_common.py index fa4a038037..bb98b18bec 100644 --- a/awx/main/tests/unit/utils/test_common.py +++ b/awx/main/tests/unit/utils/test_common.py @@ -7,10 +7,10 @@ import pytest from uuid import uuid4 import json import yaml +import mock from backports.tempfile import TemporaryDirectory from django.conf import settings -from django.core.cache import cache from rest_framework.exceptions import ParseError @@ -26,14 +26,6 @@ from awx.main.models import ( ) -@pytest.fixture(autouse=True) -def clear_cache(): - ''' - Clear cache (local memory) for each test to prevent using cached settings. 
- ''' - cache.clear() - - @pytest.mark.parametrize('input_, output', [ ({"foo": "bar"}, {"foo": "bar"}), ('{"foo": "bar"}', {"foo": "bar"}), @@ -114,46 +106,48 @@ def test_get_type_for_model(model, name): @pytest.fixture -def memoized_function(mocker): - @common.memoize(track_function=True) - def myfunction(key, value): - if key not in myfunction.calls: - myfunction.calls[key] = 0 +def memoized_function(mocker, mock_cache): + with mock.patch('awx.main.utils.common.get_memoize_cache', return_value=mock_cache): + @common.memoize(track_function=True) + def myfunction(key, value): + if key not in myfunction.calls: + myfunction.calls[key] = 0 - myfunction.calls[key] += 1 + myfunction.calls[key] += 1 - if myfunction.calls[key] == 1: - return value - else: - return '%s called %s times' % (value, myfunction.calls[key]) - myfunction.calls = dict() - return myfunction + if myfunction.calls[key] == 1: + return value + else: + return '%s called %s times' % (value, myfunction.calls[key]) + myfunction.calls = dict() + return myfunction -def test_memoize_track_function(memoized_function): +def test_memoize_track_function(memoized_function, mock_cache): assert memoized_function('scott', 'scotterson') == 'scotterson' - assert cache.get('myfunction') == {u'scott-scotterson': 'scotterson'} + assert mock_cache.get('myfunction') == {u'scott-scotterson': 'scotterson'} assert memoized_function('scott', 'scotterson') == 'scotterson' assert memoized_function.calls['scott'] == 1 assert memoized_function('john', 'smith') == 'smith' - assert cache.get('myfunction') == {u'scott-scotterson': 'scotterson', u'john-smith': 'smith'} + assert mock_cache.get('myfunction') == {u'scott-scotterson': 'scotterson', u'john-smith': 'smith'} assert memoized_function('john', 'smith') == 'smith' - + assert memoized_function.calls['john'] == 1 -def test_memoize_delete(memoized_function): +def test_memoize_delete(memoized_function, mock_cache): assert memoized_function('john', 'smith') == 'smith' assert memoized_function('john', 'smith') == 'smith' assert memoized_function.calls['john'] == 1 - assert cache.get('myfunction') == {u'john-smith': 'smith'} + assert mock_cache.get('myfunction') == {u'john-smith': 'smith'} - common.memoize_delete('myfunction') + with mock.patch('awx.main.utils.common.memoize_delete', side_effect=mock_cache.delete): + common.memoize_delete('myfunction') - assert cache.get('myfunction') is None + assert mock_cache.get('myfunction') is None assert memoized_function('john', 'smith') == 'smith called 2 times' assert memoized_function.calls['john'] == 2 diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py index c0b38d294c..a8127fdfb6 100644 --- a/awx/main/tests/unit/utils/test_filters.py +++ b/awx/main/tests/unit/utils/test_filters.py @@ -5,7 +5,7 @@ import mock from collections import namedtuple # AWX -from awx.main.utils.filters import SmartFilter +from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled # Django from django.db.models import Q @@ -13,6 +13,37 @@ from django.db.models import Q import six +@pytest.mark.parametrize('params, logger_name, expected', [ + # skip all records if enabled_flag = False + ({'enabled_flag': False}, 'awx.main', False), + # skip all records if the host is undefined + ({'enabled_flag': True}, 'awx.main', False), + # skip all records if underlying logger is used by handlers themselves + ({'enabled_flag': True}, 'awx.main.utils.handlers', False), + ({'enabled_flag': True, 'enabled_loggers': ['awx']}, 'awx.main', True), + 
({'enabled_flag': True, 'enabled_loggers': ['abc']}, 'awx.analytics.xyz', False), + ({'enabled_flag': True, 'enabled_loggers': ['xyz']}, 'awx.analytics.xyz', True), +]) +def test_base_logging_handler_skip_log(params, logger_name, expected, dummy_log_record): + filter = ExternalLoggerEnabled(**params) + dummy_log_record.name = logger_name + assert filter.filter(dummy_log_record) is expected, (params, logger_name) + + +@pytest.mark.parametrize('level, expect', [ + (30, True), # warning + (20, False) # info +]) +def test_log_configurable_severity(level, expect, dummy_log_record): + dummy_log_record.levelno = level + filter = ExternalLoggerEnabled( + enabled_flag=True, + enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'], + lvl='WARNING' + ) + assert filter.filter(dummy_log_record) is expect + + Field = namedtuple('Field', 'name') Meta = namedtuple('Meta', 'fields') diff --git a/awx/main/tests/unit/utils/test_ha.py b/awx/main/tests/unit/utils/test_ha.py index edd44b7958..94cb7d3606 100644 --- a/awx/main/tests/unit/utils/test_ha.py +++ b/awx/main/tests/unit/utils/test_ha.py @@ -6,22 +6,15 @@ # python import pytest import mock +from contextlib import nested # AWX from awx.main.utils.ha import ( _add_remove_celery_worker_queues, - update_celery_worker_routes, + AWXCeleryRouter, ) -@pytest.fixture -def conf(): - class Conf(): - CELERY_ROUTES = dict() - CELERYBEAT_SCHEDULE = dict() - return Conf() - - class TestAddRemoveCeleryWorkerQueues(): @pytest.fixture def instance_generator(self, mocker): @@ -47,54 +40,54 @@ class TestAddRemoveCeleryWorkerQueues(): app.control.cancel_consumer = mocker.MagicMock() return app - @pytest.mark.parametrize("static_queues,_worker_queues,groups,hostname,added_expected,removed_expected", [ - (['east', 'west'], ['east', 'west', 'east-1'], [], 'east-1', [], []), - ([], ['east', 'west', 'east-1'], ['east', 'west'], 'east-1', [], []), - ([], ['east', 'west'], ['east', 'west'], 'east-1', ['east-1'], []), - ([], [], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], []), - ([], ['china', 'russia'], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], ['china', 'russia']), + @pytest.mark.parametrize("broadcast_queues,static_queues,_worker_queues,groups,hostname,added_expected,removed_expected", [ + (['tower_broadcast_all'], ['east', 'west'], ['east', 'west', 'east-1'], [], 'east-1', ['tower_broadcast_all_east-1'], []), + ([], [], ['east', 'west', 'east-1'], ['east', 'west'], 'east-1', [], []), + ([], [], ['east', 'west'], ['east', 'west'], 'east-1', ['east-1'], []), + ([], [], [], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], []), + ([], [], ['china', 'russia'], ['east', 'west'], 'east-1', ['east', 'west', 'east-1'], ['china', 'russia']), ]) def test__add_remove_celery_worker_queues_noop(self, mock_app, - instance_generator, - worker_queues_generator, - static_queues, _worker_queues, + instance_generator, + worker_queues_generator, + broadcast_queues, + static_queues, _worker_queues, groups, hostname, added_expected, removed_expected): instance = instance_generator(groups=groups, hostname=hostname) worker_queues = worker_queues_generator(_worker_queues) - with mock.patch('awx.main.utils.ha.settings.AWX_CELERY_QUEUES_STATIC', static_queues): + with nested( + mock.patch('awx.main.utils.ha.settings.AWX_CELERY_QUEUES_STATIC', static_queues), + mock.patch('awx.main.utils.ha.settings.AWX_CELERY_BCAST_QUEUES_STATIC', broadcast_queues), + mock.patch('awx.main.utils.ha.settings.CLUSTER_HOST_ID', hostname)): (added_queues, removed_queues) = 
_add_remove_celery_worker_queues(mock_app, [instance], worker_queues, hostname) assert set(added_queues) == set(added_expected) assert set(removed_queues) == set(removed_expected) -class TestUpdateCeleryWorkerRoutes(): +class TestUpdateCeleryWorkerRouter(): @pytest.mark.parametrize("is_controller,expected_routes", [ - (False, { + (False, { 'awx.main.tasks.cluster_node_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, 'awx.main.tasks.purge_old_stdout_files': {'queue': 'east-1', 'routing_key': 'east-1'} }), - (True, { + (True, { 'awx.main.tasks.cluster_node_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, 'awx.main.tasks.purge_old_stdout_files': {'queue': 'east-1', 'routing_key': 'east-1'}, 'awx.main.tasks.awx_isolated_heartbeat': {'queue': 'east-1', 'routing_key': 'east-1'}, }), ]) - def test_update_celery_worker_routes(self, mocker, conf, is_controller, expected_routes): - instance = mocker.MagicMock() - instance.hostname = 'east-1' - instance.is_controller = mocker.MagicMock(return_value=is_controller) + def test_update_celery_worker_routes(self, mocker, is_controller, expected_routes): + def get_or_register(): + instance = mock.MagicMock() + instance.hostname = 'east-1' + instance.is_controller = mock.MagicMock(return_value=is_controller) + return (False, instance) - assert update_celery_worker_routes(instance, conf) == expected_routes - assert conf.CELERY_ROUTES == expected_routes + with mock.patch('awx.main.models.Instance.objects.get_or_register', get_or_register): + router = AWXCeleryRouter() - def test_update_celery_worker_routes_deleted(self, mocker, conf): - instance = mocker.MagicMock() - instance.hostname = 'east-1' - instance.is_controller = mocker.MagicMock(return_value=False) - conf.CELERY_ROUTES = {'awx.main.tasks.awx_isolated_heartbeat': 'foobar'} - - update_celery_worker_routes(instance, conf) - assert 'awx.main.tasks.awx_isolated_heartbeat' not in conf.CELERY_ROUTES + for k,v in expected_routes.iteritems(): + assert router.route_for_task(k) == v diff --git a/awx/main/tests/unit/utils/test_handlers.py b/awx/main/tests/unit/utils/test_handlers.py index 693d024281..f57d86158d 100644 --- a/awx/main/tests/unit/utils/test_handlers.py +++ b/awx/main/tests/unit/utils/test_handlers.py @@ -1,7 +1,6 @@ # -*- coding: utf-8 -*- import base64 import cStringIO -import json import logging import socket import datetime @@ -10,7 +9,6 @@ from uuid import uuid4 import mock -from django.conf import settings from django.conf import LazySettings import pytest import requests @@ -18,23 +16,11 @@ from requests_futures.sessions import FuturesSession from awx.main.utils.handlers import (BaseHandler, BaseHTTPSHandler as HTTPSHandler, TCPHandler, UDPHandler, _encode_payload_for_socket, - PARAM_NAMES, LoggingConnectivityException) + PARAM_NAMES, LoggingConnectivityException, + AWXProxyHandler) from awx.main.utils.formatters import LogstashFormatter -@pytest.fixture() -def dummy_log_record(): - return logging.LogRecord( - 'awx', # logger name - 20, # loglevel INFO - './awx/some/module.py', # pathname - 100, # lineno - 'User joe logged in', # msg - tuple(), # args, - None # exc_info - ) - - @pytest.fixture() def http_adapter(): class FakeHTTPAdapter(requests.adapters.HTTPAdapter): @@ -80,105 +66,91 @@ def test_https_logging_handler_requests_async_implementation(): def test_https_logging_handler_has_default_http_timeout(): - handler = HTTPSHandler.from_django_settings(settings) + handler = TCPHandler() assert handler.tcp_timeout == 5 -@pytest.mark.parametrize('param', PARAM_NAMES.keys()) 
+@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts']) def test_base_logging_handler_defaults(param): handler = BaseHandler() assert hasattr(handler, param) and getattr(handler, param) is None -@pytest.mark.parametrize('param', PARAM_NAMES.keys()) +@pytest.mark.parametrize('param', ['host', 'port', 'indv_facts']) def test_base_logging_handler_kwargs(param): handler = BaseHandler(**{param: 'EXAMPLE'}) assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE' -@pytest.mark.parametrize('param, django_settings_name', PARAM_NAMES.items()) -def test_base_logging_handler_from_django_settings(param, django_settings_name): +@pytest.mark.parametrize('params', [ + { + 'LOG_AGGREGATOR_HOST': 'https://server.invalid', + 'LOG_AGGREGATOR_PORT': 22222, + 'LOG_AGGREGATOR_TYPE': 'loggly', + 'LOG_AGGREGATOR_USERNAME': 'foo', + 'LOG_AGGREGATOR_PASSWORD': 'bar', + 'LOG_AGGREGATOR_INDIVIDUAL_FACTS': True, + 'LOG_AGGREGATOR_TCP_TIMEOUT': 96, + 'LOG_AGGREGATOR_VERIFY_CERT': False, + 'LOG_AGGREGATOR_PROTOCOL': 'https' + }, + { + 'LOG_AGGREGATOR_HOST': 'https://server.invalid', + 'LOG_AGGREGATOR_PORT': 22222, + 'LOG_AGGREGATOR_PROTOCOL': 'udp' + } +]) +def test_real_handler_from_django_settings(params): + settings = LazySettings() + settings.configure(**params) + handler = AWXProxyHandler().get_handler(custom_settings=settings) + # need the _reverse_ dictionary from PARAM_NAMES + attr_lookup = {} + for attr_name, setting_name in PARAM_NAMES.items(): + attr_lookup[setting_name] = attr_name + for setting_name, val in params.items(): + attr_name = attr_lookup[setting_name] + if attr_name == 'protocol': + continue + assert hasattr(handler, attr_name) + + +def test_invalid_kwarg_to_real_handler(): settings = LazySettings() settings.configure(**{ - django_settings_name: 'EXAMPLE' + 'LOG_AGGREGATOR_HOST': 'https://server.invalid', + 'LOG_AGGREGATOR_PORT': 22222, + 'LOG_AGGREGATOR_PROTOCOL': 'udp', + 'LOG_AGGREGATOR_VERIFY_CERT': False # setting not valid for UDP handler }) - handler = BaseHandler.from_django_settings(settings) - assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE' + handler = AWXProxyHandler().get_handler(custom_settings=settings) + assert not hasattr(handler, 'verify_cert') -@pytest.mark.parametrize('params, logger_name, expected', [ - # skip all records if enabled_flag = False - ({'enabled_flag': False}, 'awx.main', True), - # skip all records if the host is undefined - ({'host': '', 'enabled_flag': True}, 'awx.main', True), - # skip all records if underlying logger is used by handlers themselves - ({'host': '127.0.0.1', 'enabled_flag': True}, 'awx.main.utils.handlers', True), - ({'host': '127.0.0.1', 'enabled_flag': True}, 'awx.main', False), - ({'host': '127.0.0.1', 'enabled_flag': True, 'enabled_loggers': ['abc']}, 'awx.analytics.xyz', True), - ({'host': '127.0.0.1', 'enabled_flag': True, 'enabled_loggers': ['xyz']}, 'awx.analytics.xyz', False), -]) -def test_base_logging_handler_skip_log(params, logger_name, expected): - handler = BaseHandler(**params) - assert handler._skip_log(logger_name) is expected - - -def test_base_logging_handler_emit(dummy_log_record): - handler = BaseHandler(host='127.0.0.1', enabled_flag=True, - message_type='logstash', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) +def test_base_logging_handler_emit_system_tracking(dummy_log_record): + handler = BaseHandler(host='127.0.0.1', indv_facts=True) handler.setFormatter(LogstashFormatter()) - sent_payloads = handler.emit(dummy_log_record) - 
- assert len(sent_payloads) == 1 - body = json.loads(sent_payloads[0]) - - assert body['level'] == 'INFO' - assert body['logger_name'] == 'awx' - assert body['message'] == 'User joe logged in' - - -def test_base_logging_handler_ignore_low_severity_msg(dummy_log_record): - handler = BaseHandler(host='127.0.0.1', enabled_flag=True, - message_type='logstash', lvl='WARNING', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) - handler.setFormatter(LogstashFormatter()) - sent_payloads = handler.emit(dummy_log_record) - assert len(sent_payloads) == 0 - - -def test_base_logging_handler_emit_system_tracking(): - handler = BaseHandler(host='127.0.0.1', enabled_flag=True, - message_type='logstash', indv_facts=True, lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) - handler.setFormatter(LogstashFormatter()) - record = logging.LogRecord( - 'awx.analytics.system_tracking', # logger name - 20, # loglevel INFO - './awx/some/module.py', # pathname - 100, # lineno - None, # msg - tuple(), # args, - None # exc_info - ) - record.inventory_id = 11 - record.host_name = 'my_lucky_host' - record.job_id = 777 - record.ansible_facts = { + dummy_log_record.name = 'awx.analytics.system_tracking' + dummy_log_record.msg = None + dummy_log_record.inventory_id = 11 + dummy_log_record.host_name = 'my_lucky_host' + dummy_log_record.job_id = 777 + dummy_log_record.ansible_facts = { "ansible_kernel": "4.4.66-boot2docker", "ansible_machine": "x86_64", "ansible_swapfree_mb": 4663, } - record.ansible_facts_modified = datetime.datetime.now(tzutc()).isoformat() - sent_payloads = handler.emit(record) + dummy_log_record.ansible_facts_modified = datetime.datetime.now(tzutc()).isoformat() + sent_payloads = handler.emit(dummy_log_record) assert len(sent_payloads) == 1 - assert sent_payloads[0]['ansible_facts'] == record.ansible_facts - assert sent_payloads[0]['ansible_facts_modified'] == record.ansible_facts_modified + assert sent_payloads[0]['ansible_facts'] == dummy_log_record.ansible_facts + assert sent_payloads[0]['ansible_facts_modified'] == dummy_log_record.ansible_facts_modified assert sent_payloads[0]['level'] == 'INFO' assert sent_payloads[0]['logger_name'] == 'awx.analytics.system_tracking' - assert sent_payloads[0]['job_id'] == record.job_id - assert sent_payloads[0]['inventory_id'] == record.inventory_id - assert sent_payloads[0]['host_name'] == record.host_name + assert sent_payloads[0]['job_id'] == dummy_log_record.job_id + assert sent_payloads[0]['inventory_id'] == dummy_log_record.inventory_id + assert sent_payloads[0]['host_name'] == dummy_log_record.host_name @pytest.mark.parametrize('host, port, normalized, hostname_only', [ @@ -236,16 +208,18 @@ def test_https_logging_handler_connectivity_test(http_adapter, status, reason, e def emit(self, record): return super(FakeHTTPSHandler, self).emit(record) - if exc: - with pytest.raises(exc) as e: - FakeHTTPSHandler.perform_test(settings) - assert str(e).endswith('%s: %s' % (status, reason)) - else: - assert FakeHTTPSHandler.perform_test(settings) is None + with mock.patch.object(AWXProxyHandler, 'get_handler_class') as mock_get_class: + mock_get_class.return_value = FakeHTTPSHandler + if exc: + with pytest.raises(exc) as e: + AWXProxyHandler().perform_test(settings) + assert str(e).endswith('%s: %s' % (status, reason)) + else: + assert AWXProxyHandler().perform_test(settings) is None def test_https_logging_handler_logstash_auth_info(): - handler = HTTPSHandler(message_type='logstash', username='bob', 
password='ansible', lvl='INFO') + handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible') handler._add_auth_information() assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth) assert handler.session.auth.username == 'bob' @@ -261,9 +235,7 @@ def test_https_logging_handler_splunk_auth_info(): def test_https_logging_handler_connection_error(connection_error_adapter, dummy_log_record): - handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True, - message_type='logstash', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = HTTPSHandler(host='127.0.0.1', message_type='logstash') handler.setFormatter(LogstashFormatter()) handler.session.mount('http://', connection_error_adapter) @@ -289,9 +261,7 @@ def test_https_logging_handler_connection_error(connection_error_adapter, @pytest.mark.parametrize('message_type', ['logstash', 'splunk']) def test_https_logging_handler_emit_without_cred(http_adapter, dummy_log_record, message_type): - handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True, - message_type=message_type, lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = HTTPSHandler(host='127.0.0.1', message_type=message_type) handler.setFormatter(LogstashFormatter()) handler.session.mount('http://', http_adapter) async_futures = handler.emit(dummy_log_record) @@ -312,10 +282,9 @@ def test_https_logging_handler_emit_without_cred(http_adapter, dummy_log_record, def test_https_logging_handler_emit_logstash_with_creds(http_adapter, dummy_log_record): - handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True, + handler = HTTPSHandler(host='127.0.0.1', username='user', password='pass', - message_type='logstash', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + message_type='logstash') handler.setFormatter(LogstashFormatter()) handler.session.mount('http://', http_adapter) async_futures = handler.emit(dummy_log_record) @@ -328,9 +297,8 @@ def test_https_logging_handler_emit_logstash_with_creds(http_adapter, def test_https_logging_handler_emit_splunk_with_creds(http_adapter, dummy_log_record): - handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True, - password='pass', message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = HTTPSHandler(host='127.0.0.1', + password='pass', message_type='splunk') handler.setFormatter(LogstashFormatter()) handler.session.mount('http://', http_adapter) async_futures = handler.emit(dummy_log_record) @@ -351,9 +319,7 @@ def test_encode_payload_for_socket(payload, encoded_payload): def test_udp_handler_create_socket_at_init(): - handler = UDPHandler(host='127.0.0.1', port=4399, - enabled_flag=True, message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = UDPHandler(host='127.0.0.1', port=4399) assert hasattr(handler, 'socket') assert isinstance(handler.socket, socket.socket) assert handler.socket.family == socket.AF_INET @@ -361,9 +327,7 @@ def test_udp_handler_create_socket_at_init(): def test_udp_handler_send(dummy_log_record): - handler = UDPHandler(host='127.0.0.1', port=4399, - enabled_flag=True, message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = UDPHandler(host='127.0.0.1', port=4399) handler.setFormatter(LogstashFormatter()) with 
mock.patch('awx.main.utils.handlers._encode_payload_for_socket', return_value="des") as encode_mock,\ mock.patch.object(handler, 'socket') as socket_mock: @@ -373,9 +337,7 @@ def test_udp_handler_send(dummy_log_record): def test_tcp_handler_send(fake_socket, dummy_log_record): - handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5, - enabled_flag=True, message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5) handler.setFormatter(LogstashFormatter()) with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\ mock.patch('select.select', return_value=([], [fake_socket], [])): @@ -388,9 +350,7 @@ def test_tcp_handler_send(fake_socket, dummy_log_record): def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record): - handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5, - enabled_flag=True, message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5) handler.setFormatter(LogstashFormatter()) with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\ mock.patch('select.select', return_value=([], [], [])): @@ -403,9 +363,7 @@ def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record) def test_tcp_handler_log_exception(fake_socket, dummy_log_record): - handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5, - enabled_flag=True, message_type='splunk', lvl='INFO', - enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking']) + handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5) handler.setFormatter(LogstashFormatter()) with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\ mock.patch('select.select', return_value=([], [], [])),\ diff --git a/awx/main/tests/unit/utils/test_reload.py b/awx/main/tests/unit/utils/test_reload.py index 6d09d6105b..1820f2724a 100644 --- a/awx/main/tests/unit/utils/test_reload.py +++ b/awx/main/tests/unit/utils/test_reload.py @@ -13,31 +13,3 @@ def test_produce_supervisor_command(mocker): ['supervisorctl', 'restart', 'tower-processes:receiver',], stderr=-1, stdin=-1, stdout=-1) - -def test_routing_of_service_restarts_works(mocker): - ''' - This tests that the parent restart method will call the appropriate - service restart methods, depending on which services are given in args - ''' - with mocker.patch.object(reload, '_uwsgi_fifo_command'),\ - mocker.patch.object(reload, '_reset_celery_thread_pool'),\ - mocker.patch.object(reload, '_supervisor_service_command'): - reload.restart_local_services(['uwsgi', 'celery', 'flower', 'daphne']) - reload._uwsgi_fifo_command.assert_called_once_with(uwsgi_command="c") - reload._reset_celery_thread_pool.assert_called_once_with() - reload._supervisor_service_command.assert_called_once_with(['flower', 'daphne'], command="restart") - - - -def test_routing_of_service_restarts_diables(mocker): - ''' - Test that methods are not called if not in the args - ''' - with mocker.patch.object(reload, '_uwsgi_fifo_command'),\ - mocker.patch.object(reload, '_reset_celery_thread_pool'),\ - mocker.patch.object(reload, '_supervisor_service_command'): - reload.restart_local_services(['flower']) - reload._uwsgi_fifo_command.assert_not_called() - reload._reset_celery_thread_pool.assert_not_called() - 
reload._supervisor_service_command.assert_called_once_with(['flower'], command="restart")
-
diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py
index a024e7d649..c9914cd1c3 100644
--- a/awx/main/utils/common.py
+++ b/awx/main/utils/common.py
@@ -127,12 +127,16 @@ class IllegalArgumentError(ValueError):
     pass
 
 
+def get_memoize_cache():
+    from django.core.cache import cache
+    return cache
+
+
 def memoize(ttl=60, cache_key=None, track_function=False):
     '''
     Decorator to wrap a function and cache its result.
    '''
-    from django.core.cache import cache
-
+    cache = get_memoize_cache()
 
     def _memoizer(f, *args, **kwargs):
         if cache_key and track_function:
@@ -160,8 +164,7 @@ def memoize_delete(function_name):
-    from django.core.cache import cache
-
+    cache = get_memoize_cache()
     return cache.delete(function_name)
diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py
index 81f91a0b0a..9563eb6c34 100644
--- a/awx/main/utils/filters.py
+++ b/awx/main/utils/filters.py
@@ -8,14 +8,106 @@ from pyparsing import (
     CharsNotIn,
     ParseException,
 )
+from logging import Filter, _levelNames
 
 import six
 
-import django
+from django.apps import apps
+from django.db import models
+from django.conf import settings
 
 from awx.main.utils.common import get_search_fields
 
 
-__all__ = ['SmartFilter']
+__all__ = ['SmartFilter', 'ExternalLoggerEnabled']
+
+
+class FieldFromSettings(object):
+    """
+    Field interface - defaults to getting value from setting
+    if otherwise set, provided value will take precedence
+    over value in settings
+    """
+
+    def __init__(self, setting_name):
+        self.setting_name = setting_name
+
+    def __get__(self, instance, type=None):
+        if self.setting_name in getattr(instance, 'settings_override', {}):
+            return instance.settings_override[self.setting_name]
+        return getattr(settings, self.setting_name, None)
+
+    def __set__(self, instance, value):
+        if value is None:
+            if hasattr(instance, 'settings_override'):
+                instance.settings_override.pop(self.setting_name, None)
+        else:
+            if not hasattr(instance, 'settings_override'):
+                instance.settings_override = {}
+            instance.settings_override[self.setting_name] = value
+
+
+class ExternalLoggerEnabled(Filter):
+
+    # Prevents recursive logging loops from swamping the server
+    LOGGER_BLACKLIST = (
+        # loggers that may be called in process of emitting a log
+        'awx.main.utils.handlers',
+        'awx.main.utils.formatters',
+        'awx.main.utils.filters',
+        'awx.main.utils.encryption',
+        'awx.main.utils.log',
+        # loggers that may be called getting logging settings
+        'awx.conf'
+    )
+
+    lvl = FieldFromSettings('LOG_AGGREGATOR_LEVEL')
+    enabled_loggers = FieldFromSettings('LOG_AGGREGATOR_LOGGERS')
+    enabled_flag = FieldFromSettings('LOG_AGGREGATOR_ENABLED')
+
+    def __init__(self, **kwargs):
+        super(ExternalLoggerEnabled, self).__init__()
+        for field_name, field_value in kwargs.items():
+            if not isinstance(ExternalLoggerEnabled.__dict__.get(field_name, None), FieldFromSettings):
+                raise Exception('%s is not a valid kwarg' % field_name)
+            if field_value is None:
+                continue
+            setattr(self, field_name, field_value)
+
+    def filter(self, record):
+        """
+        Uses the database settings to determine if the current
+        external log configuration says that this particular record
+        should be sent to the external log aggregator
+
+        False - should not be logged
+        True - should be logged
+        """
+        # Logger exceptions
+        for logger_name in self.LOGGER_BLACKLIST:
+            if record.name.startswith(logger_name):
+                return False
+        # General 
enablement + if not self.enabled_flag: + return False + + # Level enablement + if record.levelno < _levelNames[self.lvl]: + # logging._levelNames -> logging._nameToLevel in python 3 + return False + + # Logger type enablement + loggers = self.enabled_loggers + if not loggers: + return False + if record.name.startswith('awx.analytics'): + base_path, headline_name = record.name.rsplit('.', 1) + return bool(headline_name in loggers) + else: + if '.' in record.name: + base_name, trailing_path = record.name.split('.', 1) + else: + base_name = record.name + return bool(base_name in loggers) def string_to_type(t): @@ -36,7 +128,7 @@ def string_to_type(t): def get_model(name): - return django.apps.apps.get_model('main', name) + return apps.get_model('main', name) class SmartFilter(object): @@ -52,7 +144,7 @@ class SmartFilter(object): search_kwargs = self._expand_search(k, v) if search_kwargs: kwargs.update(search_kwargs) - q = reduce(lambda x, y: x | y, [django.db.models.Q(**{u'%s__contains' % _k:_v}) for _k, _v in kwargs.items()]) + q = reduce(lambda x, y: x | y, [models.Q(**{u'%s__contains' % _k:_v}) for _k, _v in kwargs.items()]) self.result = Host.objects.filter(q) else: kwargs[k] = v diff --git a/awx/main/utils/formatters.py b/awx/main/utils/formatters.py index c867912be4..f83dc3887c 100644 --- a/awx/main/utils/formatters.py +++ b/awx/main/utils/formatters.py @@ -9,6 +9,8 @@ import logging import six +from django.conf import settings + class TimeFormatter(logging.Formatter): ''' @@ -20,15 +22,6 @@ class TimeFormatter(logging.Formatter): class LogstashFormatter(LogstashFormatterVersion1): - def __init__(self, **kwargs): - settings_module = kwargs.pop('settings_module', None) - ret = super(LogstashFormatter, self).__init__(**kwargs) - if settings_module: - self.host_id = getattr(settings_module, 'CLUSTER_HOST_ID', None) - if hasattr(settings_module, 'LOG_AGGREGATOR_TOWER_UUID'): - self.tower_uuid = settings_module.LOG_AGGREGATOR_TOWER_UUID - self.message_type = getattr(settings_module, 'LOG_AGGREGATOR_TYPE', 'other') - return ret def reformat_data_for_log(self, raw_data, kind=None): ''' @@ -147,6 +140,15 @@ class LogstashFormatter(LogstashFormatterVersion1): if record.name.startswith('awx.analytics'): log_kind = record.name[len('awx.analytics.'):] fields = self.reformat_data_for_log(fields, kind=log_kind) + # General AWX metadata + for log_name, setting_name in [ + ('type', 'LOG_AGGREGATOR_TYPE'), + ('cluster_host_id', 'CLUSTER_HOST_ID'), + ('tower_uuid', 'LOG_AGGREGATOR_TOWER_UUID')]: + if hasattr(settings, setting_name): + fields[log_name] = getattr(settings, setting_name, None) + elif log_name == 'type': + fields[log_name] = 'other' return fields def format(self, record): @@ -158,18 +160,12 @@ class LogstashFormatter(LogstashFormatterVersion1): '@timestamp': self.format_timestamp(record.created), 'message': record.getMessage(), 'host': self.host, - 'type': self.message_type, # Extra Fields 'level': record.levelname, 'logger_name': record.name, } - if getattr(self, 'tower_uuid', None): - message['tower_uuid'] = self.tower_uuid - if getattr(self, 'host_id', None): - message['cluster_host_id'] = self.host_id - # Add extra fields message.update(self.get_extra_fields(record)) diff --git a/awx/main/utils/ha.py b/awx/main/utils/ha.py index 93a7f8dd24..49421ad4cb 100644 --- a/awx/main/utils/ha.py +++ b/awx/main/utils/ha.py @@ -10,6 +10,10 @@ from django.conf import settings from awx.main.models import Instance +def construct_bcast_queue_name(common_name): + return common_name.encode('utf8') + '_' + 
settings.CLUSTER_HOST_ID + + def _add_remove_celery_worker_queues(app, controlled_instances, worker_queues, worker_name): removed_queues = [] added_queues = [] @@ -19,17 +23,15 @@ def _add_remove_celery_worker_queues(app, controlled_instances, worker_queues, w ig_names.update(instance.rampart_groups.values_list('name', flat=True)) worker_queue_names = set([q['name'] for q in worker_queues]) + bcast_queue_names = set([construct_bcast_queue_name(n) for n in settings.AWX_CELERY_BCAST_QUEUES_STATIC]) all_queue_names = ig_names | hostnames | set(settings.AWX_CELERY_QUEUES_STATIC) + desired_queues = bcast_queue_names | (all_queue_names if instance.enabled else set()) - # Remove queues that aren't in the instance group - for queue in worker_queues: - if queue['name'] in settings.AWX_CELERY_QUEUES_STATIC or \ - queue['alias'] in settings.AWX_CELERY_BCAST_QUEUES_STATIC: - continue - - if queue['name'] not in all_queue_names or not instance.enabled: - app.control.cancel_consumer(queue['name'].encode("utf8"), reply=True, destination=[worker_name]) - removed_queues.append(queue['name'].encode("utf8")) + # Remove queues + for queue_name in worker_queue_names: + if queue_name not in desired_queues: + app.control.cancel_consumer(queue_name.encode("utf8"), reply=True, destination=[worker_name]) + removed_queues.append(queue_name.encode("utf8")) # Add queues for instance and instance groups for queue_name in all_queue_names: @@ -37,27 +39,35 @@ def _add_remove_celery_worker_queues(app, controlled_instances, worker_queues, w app.control.add_consumer(queue_name.encode("utf8"), reply=True, destination=[worker_name]) added_queues.append(queue_name.encode("utf8")) + # Add stable-named broadcast queues + for queue_name in settings.AWX_CELERY_BCAST_QUEUES_STATIC: + bcast_queue_name = construct_bcast_queue_name(queue_name) + if bcast_queue_name not in worker_queue_names: + app.control.add_consumer(bcast_queue_name, + exchange=queue_name.encode("utf8"), + exchange_type='fanout', + routing_key=queue_name.encode("utf8"), + reply=True) + added_queues.append(bcast_queue_name) + return (added_queues, removed_queues) -def update_celery_worker_routes(instance, conf): - tasks = [ - 'awx.main.tasks.cluster_node_heartbeat', - 'awx.main.tasks.purge_old_stdout_files', - ] - routes_updated = {} - # Instance is, effectively, a controller node - if instance.is_controller(): - tasks.append('awx.main.tasks.awx_isolated_heartbeat') - else: - if 'awx.main.tasks.awx_isolated_heartbeat' in conf.CELERY_ROUTES: - del conf.CELERY_ROUTES['awx.main.tasks.awx_isolated_heartbeat'] +class AWXCeleryRouter(object): + def route_for_task(self, task, args=None, kwargs=None): + (changed, instance) = Instance.objects.get_or_register() + tasks = [ + 'awx.main.tasks.cluster_node_heartbeat', + 'awx.main.tasks.purge_old_stdout_files', + ] + isolated_tasks = [ + 'awx.main.tasks.awx_isolated_heartbeat', + ] + if task in tasks: + return {'queue': instance.hostname.encode("utf8"), 'routing_key': instance.hostname.encode("utf8")} - for t in tasks: - conf.CELERY_ROUTES[t] = {'queue': instance.hostname.encode("utf8"), 'routing_key': instance.hostname.encode("utf8")} - routes_updated[t] = conf.CELERY_ROUTES[t] - - return routes_updated + if instance.is_controller() and task in isolated_tasks: + return {'queue': instance.hostname.encode("utf8"), 'routing_key': instance.hostname.encode("utf8")} def register_celery_worker_queues(app, celery_worker_name): diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index 8ed1127292..214c40ff11 100644 --- 
a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -13,40 +13,35 @@ import six from concurrent.futures import ThreadPoolExecutor from requests.exceptions import RequestException -# loggly -import traceback - +# Django from django.conf import settings + +# requests futures, a dependency used by these handlers from requests_futures.sessions import FuturesSession # AWX from awx.main.utils.formatters import LogstashFormatter -__all__ = ['HTTPSNullHandler', 'BaseHTTPSHandler', 'TCPHandler', 'UDPHandler', - 'configure_external_logger'] +__all__ = ['BaseHTTPSHandler', 'TCPHandler', 'UDPHandler', + 'AWXProxyHandler'] logger = logging.getLogger('awx.main.utils.handlers') -# AWX external logging handler, generally designed to be used -# with the accompanying LogstashHandler, derives from python-logstash library -# Non-blocking request accomplished by FuturesSession, similar -# to the loggly-python-handler library (not used) # Translation of parameter names to names in Django settings +# logging settings category, only those related to handler / log emission PARAM_NAMES = { 'host': 'LOG_AGGREGATOR_HOST', 'port': 'LOG_AGGREGATOR_PORT', 'message_type': 'LOG_AGGREGATOR_TYPE', 'username': 'LOG_AGGREGATOR_USERNAME', 'password': 'LOG_AGGREGATOR_PASSWORD', - 'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS', 'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS', - 'enabled_flag': 'LOG_AGGREGATOR_ENABLED', 'tcp_timeout': 'LOG_AGGREGATOR_TCP_TIMEOUT', 'verify_cert': 'LOG_AGGREGATOR_VERIFY_CERT', - 'lvl': 'LOG_AGGREGATOR_LEVEL', + 'protocol': 'LOG_AGGREGATOR_PROTOCOL' } @@ -58,13 +53,6 @@ class LoggingConnectivityException(Exception): pass -class HTTPSNullHandler(logging.NullHandler): - "Placeholder null handler to allow loading without database access" - - def __init__(self, *args, **kwargs): - return super(HTTPSNullHandler, self).__init__() - - class VerboseThreadPoolExecutor(ThreadPoolExecutor): last_log_emit = 0 @@ -91,32 +79,25 @@ class VerboseThreadPoolExecutor(ThreadPoolExecutor): **kwargs) -LEVEL_MAPPING = { - 'DEBUG': logging.DEBUG, - 'INFO': logging.INFO, - 'WARNING': logging.WARNING, - 'ERROR': logging.ERROR, - 'CRITICAL': logging.CRITICAL, -} +class SocketResult: + ''' + A class to be the return type of methods that send data over a socket + allows object to be used in the same way as a request futures object + ''' + def __init__(self, ok, reason=None): + self.ok = ok + self.reason = reason + + def result(self): + return self class BaseHandler(logging.Handler): - def __init__(self, **kwargs): + def __init__(self, host=None, port=None, indv_facts=None, **kwargs): super(BaseHandler, self).__init__() - for fd in PARAM_NAMES: - setattr(self, fd, kwargs.get(fd, None)) - - @classmethod - def from_django_settings(cls, settings, *args, **kwargs): - for param, django_setting_name in PARAM_NAMES.items(): - kwargs[param] = getattr(settings, django_setting_name, None) - return cls(*args, **kwargs) - - def get_full_message(self, record): - if record.exc_info: - return '\n'.join(traceback.format_exception(*record.exc_info)) - else: - return record.getMessage() + self.host = host + self.port = port + self.indv_facts = indv_facts def _send(self, payload): """Actually send message to log aggregator. @@ -128,26 +109,11 @@ class BaseHandler(logging.Handler): return [self._send(json.loads(self.format(record)))] return [self._send(self.format(record))] - def _skip_log(self, logger_name): - if self.host == '' or (not self.enabled_flag): - return True - # Don't send handler-related records. 
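Note: the gating deleted here and below (the LEVEL_MAPPING table, the enablement and level checks in emit, and _skip_log) is not lost; it moves into the ExternalLoggerEnabled logging.Filter added earlier in this patch in awx/main/utils/filters.py, whose fields are settings-backed descriptors. A minimal, self-contained sketch of that descriptor pattern follows; StubSettings is a stand-in for django.conf.settings and is an assumption of the sketch, not part of the patch:

    import logging


    class StubSettings(object):
        # stand-in for django.conf.settings (assumption of this sketch)
        LOG_AGGREGATOR_ENABLED = True
        LOG_AGGREGATOR_LEVEL = 'WARNING'


    settings = StubSettings()


    class FieldFromSettings(object):
        # Descriptor: falls back to settings unless an override was set;
        # assigning None clears the override.
        def __init__(self, setting_name):
            self.setting_name = setting_name

        def __get__(self, instance, type=None):
            if self.setting_name in getattr(instance, 'settings_override', {}):
                return instance.settings_override[self.setting_name]
            return getattr(settings, self.setting_name, None)

        def __set__(self, instance, value):
            if value is None:
                getattr(instance, 'settings_override', {}).pop(self.setting_name, None)
            else:
                if not hasattr(instance, 'settings_override'):
                    instance.settings_override = {}
                instance.settings_override[self.setting_name] = value


    class ExternalLoggerEnabled(logging.Filter):
        enabled_flag = FieldFromSettings('LOG_AGGREGATOR_ENABLED')
        lvl = FieldFromSettings('LOG_AGGREGATOR_LEVEL')

        def filter(self, record):
            # gate on the enablement flag first, then on level
            if not self.enabled_flag:
                return False
            return record.levelno >= logging.getLevelName(self.lvl)


    f = ExternalLoggerEnabled()
    assert f.lvl == 'WARNING'   # falls through to settings
    f.lvl = 'ERROR'             # explicit value takes precedence
    assert f.lvl == 'ERROR'
    f.lvl = None                # clears the override
    assert f.lvl == 'WARNING'

Because the fields are read from settings on every access, a settings change takes effect on the next filtered record without re-wiring the logging tree.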
- if logger_name == logger.name: - return True - # AWX log emission is only turned off by enablement setting - if not logger_name.startswith('awx.analytics'): - return False - return self.enabled_loggers is None or logger_name[len('awx.analytics.'):] not in self.enabled_loggers - def emit(self, record): """ Emit a log record. Returns a list of zero or more implementation-specific objects for tests. """ - if not record.name.startswith('awx.analytics') and record.levelno < LEVEL_MAPPING[self.lvl]: - return [] - if self._skip_log(record.name): - return [] try: return self._format_and_send_record(record) except (KeyboardInterrupt, SystemExit): @@ -181,6 +147,11 @@ class BaseHandler(logging.Handler): class BaseHTTPSHandler(BaseHandler): + ''' + Originally derived from python-logstash library + Non-blocking request accomplished by FuturesSession, similar + to the loggly-python-handler library + ''' def _add_auth_information(self): if self.message_type == 'logstash': if not self.username: @@ -196,39 +167,20 @@ class BaseHTTPSHandler(BaseHandler): } self.session.headers.update(headers) - def __init__(self, fqdn=False, **kwargs): + def __init__(self, fqdn=False, message_type=None, username=None, password=None, + tcp_timeout=5, verify_cert=True, **kwargs): self.fqdn = fqdn + self.message_type = message_type + self.username = username + self.password = password + self.tcp_timeout = tcp_timeout + self.verify_cert = verify_cert super(BaseHTTPSHandler, self).__init__(**kwargs) self.session = FuturesSession(executor=VerboseThreadPoolExecutor( max_workers=2 # this is the default used by requests_futures )) self._add_auth_information() - @classmethod - def perform_test(cls, settings): - """ - Tests logging connectivity for the current logging settings. - @raises LoggingConnectivityException - """ - handler = cls.from_django_settings(settings) - handler.enabled_flag = True - handler.setFormatter(LogstashFormatter(settings_module=settings)) - logger = logging.getLogger(__file__) - fn, lno, func = logger.findCaller() - record = logger.makeRecord('awx', 10, fn, lno, - 'AWX Connection Test', tuple(), - None, func) - futures = handler.emit(record) - for future in futures: - try: - resp = future.result() - if not resp.ok: - raise LoggingConnectivityException( - ': '.join([str(resp.status_code), resp.reason or '']) - ) - except RequestException as e: - raise LoggingConnectivityException(str(e)) - def _get_post_kwargs(self, payload_input): if self.message_type == 'splunk': # Splunk needs data nested under key "event" @@ -265,6 +217,10 @@ def _encode_payload_for_socket(payload): class TCPHandler(BaseHandler): + def __init__(self, tcp_timeout=5, **kwargs): + self.tcp_timeout = tcp_timeout + super(TCPHandler, self).__init__(**kwargs) + def _send(self, payload): payload = _encode_payload_for_socket(payload) sok = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -273,39 +229,32 @@ class TCPHandler(BaseHandler): sok.setblocking(0) _, ready_to_send, _ = select.select([], [sok], [], float(self.tcp_timeout)) if len(ready_to_send) == 0: - logger.warning("Socket currently busy, failed to send message") - sok.close() - return - sok.send(payload) + ret = SocketResult(False, "Socket currently busy, failed to send message") + logger.warning(ret.reason) + else: + sok.send(payload) + ret = SocketResult(True) # success! 
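SocketResult, constructed above on the success path, gives the socket-based handlers the same shape as the request futures returned by BaseHTTPSHandler: .result() resolves to an object exposing .ok and .reason. A small sketch of how a caller can consume both uniformly; check() is a hypothetical consumer, not part of the patch:

    class SocketResult(object):
        # same interface as in the patch: acts as its own completed "future"
        def __init__(self, ok, reason=None):
            self.ok = ok
            self.reason = reason

        def result(self):
            # mirrors concurrent.futures.Future.result(), so callers need not
            # know whether the send was a socket write or an async HTTP post
            return self


    def check(futures):
        # hypothetical consumer: collect the reasons for any failed sends
        failures = []
        for future in futures:
            resp = future.result()
            if not resp.ok:
                failures.append(resp.reason or 'unknown error')
        return failures


    assert check([SocketResult(True)]) == []
    assert check([SocketResult(False, 'Socket currently busy')]) == ['Socket currently busy']

This is what lets AWXProxyHandler.perform_test (added below) iterate over emit()'s return value without branching on the transport.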
         except Exception as e:
-            logger.exception("Error sending message from %s: %s" %
-                             (TCPHandler.__name__, e.message))
-            sok.close()
+            ret = SocketResult(False, "Error sending message from %s: %s" %
+                                      (TCPHandler.__name__,
+                                       ' '.join(six.text_type(arg) for arg in e.args)))
+            logger.exception(ret.reason)
+        finally:
+            sok.close()
+        return ret
 
 
 class UDPHandler(BaseHandler):
+    message = "Cannot determine if UDP messages are received."
+
     def __init__(self, **kwargs):
         super(UDPHandler, self).__init__(**kwargs)
         self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
 
     def _send(self, payload):
         payload = _encode_payload_for_socket(payload)
-        return self.socket.sendto(payload, (self._get_host(hostname_only=True), self.port or 0))
-
-    @classmethod
-    def perform_test(cls, settings):
-        """
-        Tests logging connectivity for the current logging settings.
-        """
-        handler = cls.from_django_settings(settings)
-        handler.enabled_flag = True
-        handler.setFormatter(LogstashFormatter(settings_module=settings))
-        logger = logging.getLogger(__file__)
-        fn, lno, func = logger.findCaller()
-        record = logger.makeRecord('awx', 10, fn, lno,
-                                   'AWX Connection Test', tuple(),
-                                   None, func)
-        handler.emit(_encode_payload_for_socket(record))
+        self.socket.sendto(payload, (self._get_host(hostname_only=True), self.port or 0))
+        return SocketResult(True, reason=self.message)
 
 
 HANDLER_MAPPING = {
@@ -315,6 +264,88 @@ HANDLER_MAPPING = {
 }
+
+class AWXProxyHandler(logging.Handler):
+    '''
+    Handler specific to the AWX external logging feature
+
+    Will dynamically create a handler specific to the configured
+    protocol, and will create a new one automatically on setting change
+
+    Managing parameters:
+    All parameters get their value from settings as a default;
+    if a parameter was either provided on init or set manually,
+    that value takes precedence.
+    Parameters match the same-named parameters in the actualized handler classes.
+    '''
+
+    def __init__(self, **kwargs):
+        # TODO: process 'level' kwarg
+        super(AWXProxyHandler, self).__init__(**kwargs)
+        self._handler = None
+        self._old_kwargs = {}
+
+    def get_handler_class(self, protocol):
+        return HANDLER_MAPPING[protocol]
+
+    def get_handler(self, custom_settings=None, force_create=False):
+        new_kwargs = {}
+        use_settings = custom_settings or settings
+        for field_name, setting_name in PARAM_NAMES.items():
+            val = getattr(use_settings, setting_name, None)
+            if val is None:
+                continue
+            new_kwargs[field_name] = val
+        if new_kwargs == self._old_kwargs and self._handler and (not force_create):
+            # avoids re-creating session objects, and other such things
+            return self._handler
+        self._old_kwargs = new_kwargs.copy()
+        # TODO: remove any kwargs not applicable to that particular handler
+        protocol = new_kwargs.pop('protocol', None)
+        HandlerClass = self.get_handler_class(protocol)
+        # cleanup old handler and make new one
+        if self._handler:
+            self._handler.close()
+        logger.debug('Creating external log handler due to startup or settings change.')
+        self._handler = HandlerClass(**new_kwargs)
+        if self.formatter:
+            # self.format(record) is called inside of emit method
+            # so not safe to assume this can be handled within self
+            self._handler.setFormatter(self.formatter)
+        return self._handler
+
+    def emit(self, record):
+        actual_handler = self.get_handler()
+        return actual_handler.emit(record)
+
+    def perform_test(self, custom_settings):
+        """
+        Tests logging connectivity for the given settings module.
+ @raises LoggingConnectivityException + """ + handler = self.get_handler(custom_settings=custom_settings, force_create=True) + handler.setFormatter(LogstashFormatter()) + logger = logging.getLogger(__file__) + fn, lno, func = logger.findCaller() + record = logger.makeRecord('awx', 10, fn, lno, + 'AWX Connection Test', tuple(), + None, func) + futures = handler.emit(record) + for future in futures: + try: + resp = future.result() + if not resp.ok: + if isinstance(resp, SocketResult): + raise LoggingConnectivityException( + 'Socket error: {}'.format(resp.reason or '') + ) + else: + raise LoggingConnectivityException( + ': '.join([str(resp.status_code), resp.reason or '']) + ) + except RequestException as e: + raise LoggingConnectivityException(str(e)) + + ColorHandler = logging.StreamHandler if settings.COLOR_LOGS is True: @@ -340,41 +371,3 @@ if settings.COLOR_LOGS is True: except ImportError: # logutils is only used for colored logs in the dev environment pass - - -def _add_or_remove_logger(address, instance): - specific_logger = logging.getLogger(address) - for i, handler in enumerate(specific_logger.handlers): - if isinstance(handler, (HTTPSNullHandler, BaseHTTPSHandler)): - specific_logger.handlers[i] = instance or HTTPSNullHandler() - break - else: - if instance is not None: - specific_logger.handlers.append(instance) - - -def configure_external_logger(settings_module, is_startup=True): - is_enabled = settings_module.LOG_AGGREGATOR_ENABLED - if is_startup and (not is_enabled): - # Pass-through if external logging not being used - return - - instance = None - if is_enabled: - handler_class = HANDLER_MAPPING[settings_module.LOG_AGGREGATOR_PROTOCOL] - instance = handler_class.from_django_settings(settings_module) - - # Obtain the Formatter class from settings to maintain customizations - configurator = logging.config.DictConfigurator(settings_module.LOGGING) - formatter_config = settings_module.LOGGING['formatters']['json'].copy() - formatter_config['settings_module'] = settings_module - formatter = configurator.configure_custom(formatter_config) - - instance.setFormatter(formatter) - - awx_logger_instance = instance - if is_enabled and 'awx' not in settings_module.LOG_AGGREGATOR_LOGGERS: - awx_logger_instance = None - - _add_or_remove_logger('awx.analytics', instance) - _add_or_remove_logger('awx', awx_logger_instance) diff --git a/awx/main/utils/reload.py b/awx/main/utils/reload.py index 6da8b0b4f7..8da1fb0096 100644 --- a/awx/main/utils/reload.py +++ b/awx/main/utils/reload.py @@ -8,29 +8,9 @@ import logging # Django from django.conf import settings -# Celery -from celery import Celery - logger = logging.getLogger('awx.main.utils.reload') -def _uwsgi_fifo_command(uwsgi_command): - # http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands - logger.warn('Initiating uWSGI chain reload of server') - TRIGGER_COMMAND = uwsgi_command - with open(settings.UWSGI_FIFO_LOCATION, 'w') as awxfifo: - awxfifo.write(TRIGGER_COMMAND) - - -def _reset_celery_thread_pool(): - # Do not use current_app because of this outstanding issue: - # https://github.com/celery/celery/issues/4410 - app = Celery('awx') - app.config_from_object('django.conf:settings') - app.control.broadcast('pool_restart', arguments={'reload': True}, - destination=['celery@{}'.format(settings.CLUSTER_HOST_ID)], reply=False) - - def _supervisor_service_command(service_internal_names, command, communicate=True): ''' Service internal name options: @@ -68,21 +48,6 @@ def 
_supervisor_service_command(service_internal_names, command, communicate=Tru logger.info('Submitted supervisorctl {} command, not waiting for result'.format(command)) -def restart_local_services(service_internal_names): - logger.warn('Restarting services {} on this node in response to user action'.format(service_internal_names)) - if 'uwsgi' in service_internal_names: - _uwsgi_fifo_command(uwsgi_command='c') - service_internal_names.remove('uwsgi') - restart_celery = False - if 'celery' in service_internal_names: - restart_celery = True - service_internal_names.remove('celery') - _supervisor_service_command(service_internal_names, command='restart') - if restart_celery: - # Celery restarted last because this probably includes current process - _reset_celery_thread_pool() - - def stop_local_services(service_internal_names, communicate=True): logger.warn('Stopping services {} on this node in response to user action'.format(service_internal_names)) _supervisor_service_command(service_internal_names, command='stop', communicate=communicate) diff --git a/awx/network_ui/consumers.py b/awx/network_ui/consumers.py index 36cbb24803..9cf8c72982 100644 --- a/awx/network_ui/consumers.py +++ b/awx/network_ui/consumers.py @@ -1,6 +1,6 @@ # Copyright (c) 2017 Red Hat, Inc -from channels import Group -from channels.sessions import channel_session +import channels +from channels.auth import channel_session_user, channel_session_user_from_http from awx.network_ui.models import Topology, Device, Link, Client, Interface from awx.network_ui.models import TopologyInventory import urlparse @@ -22,6 +22,10 @@ def parse_inventory_id(data): inventory_id = int(inventory_id[0]) except ValueError: inventory_id = None + except IndexError: + inventory_id = None + except TypeError: + inventory_id = None if not inventory_id: inventory_id = None return inventory_id @@ -42,10 +46,10 @@ class NetworkingEvents(object): message_type = data.pop(0) message_value = data.pop(0) if isinstance(message_value, list): - logger.error("Message has no sender") + logger.warning("Message has no sender") return None, None if isinstance(message_value, dict) and client_id != message_value.get('sender'): - logger.error("client_id mismatch expected: %s actual %s", client_id, message_value.get('sender')) + logger.warning("client_id mismatch expected: %s actual %s", client_id, message_value.get('sender')) return None, None return message_type, message_value else: @@ -58,11 +62,19 @@ class NetworkingEvents(object): of name onX where X is the message type. 
''' topology_id = message.get('topology') - assert topology_id is not None, "No topology_id" + if topology_id is None: + logger.warning("Unsupported message %s: no topology", message) + return client_id = message.get('client') - assert client_id is not None, "No client_id" + if client_id is None: + logger.warning("Unsupported message %s: no client", message) + return + if 'text' not in message: + logger.warning("Unsupported message %s: no data", message) + return message_type, message_value = self.parse_message_text(message['text'], client_id) if message_type is None: + logger.warning("Unsupported message %s: no message type", message) return handler = self.get_handler(message_type) if handler is not None: @@ -98,9 +110,6 @@ class NetworkingEvents(object): def onDeviceMove(self, device, topology_id, client_id): Device.objects.filter(topology_id=topology_id, cid=device['id']).update(x=device['x'], y=device['y']) - def onDeviceInventoryUpdate(self, device, topology_id, client_id): - Device.objects.filter(topology_id=topology_id, cid=device['id']).update(host_id=device['host_id']) - def onDeviceLabelEdit(self, device, topology_id, client_id): logger.debug("Device label edited %s", device) Device.objects.filter(topology_id=topology_id, cid=device['id']).update(name=device['name']) @@ -132,6 +141,12 @@ class NetworkingEvents(object): device_map = dict(Device.objects .filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']]) .values_list('cid', 'pk')) + if link['from_device_id'] not in device_map: + logger.warning('Device not found') + return + if link['to_device_id'] not in device_map: + logger.warning('Device not found') + return Link.objects.get_or_create(cid=link['id'], name=link['name'], from_device_id=device_map[link['from_device_id']], @@ -150,8 +165,10 @@ class NetworkingEvents(object): .filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']]) .values_list('cid', 'pk')) if link['from_device_id'] not in device_map: + logger.warning('Device not found') return if link['to_device_id'] not in device_map: + logger.warning('Device not found') return Link.objects.filter(cid=link['id'], from_device_id=device_map[link['from_device_id']], @@ -189,8 +206,15 @@ class NetworkingEvents(object): networking_events_dispatcher = NetworkingEvents() -@channel_session +@channel_session_user_from_http def ws_connect(message): + if not message.user.is_authenticated(): + logger.error("Request user is not authenticated to use websocket.") + message.reply_channel.send({"close": True}) + return + else: + message.reply_channel.send({"accept": True}) + data = urlparse.parse_qs(message.content['query_string']) inventory_id = parse_inventory_id(data) topology_ids = list(TopologyInventory.objects.filter(inventory_id=inventory_id).values_list('pk', flat=True)) @@ -205,11 +229,11 @@ def ws_connect(message): TopologyInventory(inventory_id=inventory_id, topology_id=topology.pk).save() topology_id = topology.pk message.channel_session['topology_id'] = topology_id - Group("topology-%s" % topology_id).add(message.reply_channel) + channels.Group("topology-%s" % topology_id).add(message.reply_channel) client = Client() client.save() message.channel_session['client_id'] = client.pk - Group("client-%s" % client.pk).add(message.reply_channel) + channels.Group("client-%s" % client.pk).add(message.reply_channel) message.reply_channel.send({"text": json.dumps(["id", client.pk])}) message.reply_channel.send({"text": json.dumps(["topology_id", topology_id])}) topology_data = 
transform_dict(dict(id='topology_id', @@ -268,18 +292,18 @@ def send_snapshot(channel, topology_id): channel.send({"text": json.dumps(["Snapshot", snapshot])}) -@channel_session +@channel_session_user def ws_message(message): # Send to all clients editing the topology - Group("topology-%s" % message.channel_session['topology_id']).send({"text": message['text']}) + channels.Group("topology-%s" % message.channel_session['topology_id']).send({"text": message['text']}) # Send to networking_events handler networking_events_dispatcher.handle({"text": message['text'], "topology": message.channel_session['topology_id'], "client": message.channel_session['client_id']}) -@channel_session +@channel_session_user def ws_disconnect(message): if 'topology_id' in message.channel_session: - Group("topology-%s" % message.channel_session['topology_id']).discard(message.reply_channel) + channels.Group("topology-%s" % message.channel_session['topology_id']).discard(message.reply_channel) diff --git a/awx/network_ui/routing.py b/awx/network_ui/routing.py index 66553e23d2..0a9d07635d 100644 --- a/awx/network_ui/routing.py +++ b/awx/network_ui/routing.py @@ -3,7 +3,7 @@ from channels.routing import route from awx.network_ui.consumers import ws_connect, ws_message, ws_disconnect channel_routing = [ - route("websocket.connect", ws_connect, path=r"^/network_ui/topology"), - route("websocket.receive", ws_message, path=r"^/network_ui/topology"), - route("websocket.disconnect", ws_disconnect, path=r"^/network_ui/topology"), + route("websocket.connect", ws_connect, path=r"^/network_ui/topology/"), + route("websocket.receive", ws_message, path=r"^/network_ui/topology/"), + route("websocket.disconnect", ws_disconnect, path=r"^/network_ui/topology/"), ] diff --git a/awx/network_ui/tests/__init__.py b/awx/network_ui/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/network_ui/tests/conftest.py b/awx/network_ui/tests/conftest.py new file mode 100644 index 0000000000..ca4f2f1cda --- /dev/null +++ b/awx/network_ui/tests/conftest.py @@ -0,0 +1,9 @@ +import pytest +from mock import PropertyMock + + +@pytest.fixture(autouse=True) +def _disable_database_settings(mocker): + m = mocker.patch('awx.conf.settings.SettingsWrapper.all_supported_settings', new_callable=PropertyMock) + m.return_value = [] + diff --git a/awx/network_ui/tests/unit/__init__.py b/awx/network_ui/tests/unit/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/awx/network_ui/tests/unit/test_consumers.py b/awx/network_ui/tests/unit/test_consumers.py new file mode 100644 index 0000000000..de5c79e105 --- /dev/null +++ b/awx/network_ui/tests/unit/test_consumers.py @@ -0,0 +1,240 @@ + +import mock +import logging +import json +import imp +from mock import patch +patch('channels.auth.channel_session_user', lambda x: x).start() +patch('channels.auth.channel_session_user_from_http', lambda x: x).start() + +from awx.network_ui.consumers import parse_inventory_id, networking_events_dispatcher, send_snapshot # noqa +from awx.network_ui.models import Topology, Device, Link, Interface, TopologyInventory, Client # noqa +import awx # noqa +import awx.network_ui # noqa +import awx.network_ui.consumers # noqa +imp.reload(awx.network_ui.consumers) + + +def test_parse_inventory_id(): + assert parse_inventory_id({}) is None + assert parse_inventory_id({'inventory_id': ['1']}) == 1 + assert parse_inventory_id({'inventory_id': ['0']}) is None + assert parse_inventory_id({'inventory_id': ['X']}) is None + assert 
parse_inventory_id({'inventory_id': []}) is None + assert parse_inventory_id({'inventory_id': 'x'}) is None + assert parse_inventory_id({'inventory_id': '12345'}) == 1 + assert parse_inventory_id({'inventory_id': 1}) is None + + +def test_network_events_handle_message_incomplete_message1(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle({}) + log_mock.assert_called_once_with( + 'Unsupported message %s: no topology', {}) + + +def test_network_events_handle_message_incomplete_message2(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle({'topology': [0]}) + log_mock.assert_called_once_with( + 'Unsupported message %s: no client', {'topology': [0]}) + + +def test_network_events_handle_message_incomplete_message3(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle({'topology': [1]}) + log_mock.assert_called_once_with( + 'Unsupported message %s: no client', {'topology': [1]}) + + +def test_network_events_handle_message_incomplete_message4(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle({'topology': 1, 'client': 1}) + log_mock.assert_called_once_with('Unsupported message %s: no data', { + 'client': 1, 'topology': 1}) + + +def test_network_events_handle_message_incomplete_message5(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + message = ['DeviceCreate'] + networking_events_dispatcher.handle( + {'topology': 1, 'client': 1, 'text': json.dumps(message)}) + log_mock.assert_called_once_with('Unsupported message %s: no message type', { + 'text': '["DeviceCreate"]', 'client': 1, 'topology': 1}) + + +def test_network_events_handle_message_incomplete_message6(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + message = ['DeviceCreate', []] + networking_events_dispatcher.handle( + {'topology': 1, 'client': 1, 'text': json.dumps(message)}) + log_mock.assert_has_calls([ + mock.call('Message has no sender'), + mock.call('Unsupported message %s: no message type', {'text': '["DeviceCreate", []]', 'client': 1, 'topology': 1})]) + + +def test_network_events_handle_message_incomplete_message7(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + message = ['DeviceCreate', {}] + networking_events_dispatcher.handle( + {'topology': 1, 'client': 1, 'text': json.dumps(message)}) + log_mock.assert_has_calls([ + mock.call('client_id mismatch expected: %s actual %s', 1, None), + mock.call('Unsupported message %s: no message type', {'text': '["DeviceCreate", {}]', 'client': 1, 'topology': 1})]) + + +def test_network_events_handle_message_incomplete_message8(): + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock: + message = ['Unsupported', {'sender': 1}] + networking_events_dispatcher.handle( + {'topology': 1, 'client': 1, 'text': json.dumps(message)}) + log_mock.assert_called_once_with( + 'Unsupported message %s: no handler', u'Unsupported') + + +def test_send_snapshot_empty(): + channel = mock.MagicMock() + logger = 
logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects'),\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects'),\ + mock.patch.object(Topology, 'objects'): + send_snapshot(channel, 1) + log_mock.assert_not_called() + channel.send.assert_called_once_with( + {'text': '["Snapshot", {"links": [], "devices": [], "sender": 0}]'}) + + +def test_send_snapshot_single(): + channel = mock.MagicMock() + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects') as interface_objects_mock: + + interface_objects_mock.filter.return_value.values.return_value = [ + dict(cid=1, device_id=1, id=1, name="eth0")] + device_objects_mock.filter.return_value.values.return_value = [ + dict(cid=1, id=1, device_type="host", name="host1", x=0, y=0, + interface_id_seq=1, host_id=1)] + send_snapshot(channel, 1) + device_objects_mock.filter.assert_called_once_with(topology_id=1) + device_objects_mock.filter.return_value.values.assert_called_once_with() + interface_objects_mock.filter.assert_called_once_with( + device__topology_id=1) + interface_objects_mock.filter.return_value.values.assert_called_once_with() + log_mock.assert_not_called() + channel.send.assert_called_once_with( + {'text': '''["Snapshot", {"links": [], "devices": [{"interface_id_seq": 1, \ +"name": "host1", "interfaces": [{"id": 1, "device_id": 1, "name": "eth0", "interface_id": 1}], \ +"device_type": "host", "host_id": 1, "y": 0, "x": 0, "id": 1, "device_id": 1}], "sender": 0}]'''}) + + +def test_ws_disconnect(): + message = mock.MagicMock() + message.channel_session = dict(topology_id=1) + message.reply_channel = 'foo' + with mock.patch('channels.Group') as group_mock: + awx.network_ui.consumers.ws_disconnect(message) + group_mock.assert_called_once_with('topology-1') + group_mock.return_value.discard.assert_called_once_with('foo') + + +def test_ws_disconnect_no_topology(): + message = mock.MagicMock() + with mock.patch('channels.Group') as group_mock: + awx.network_ui.consumers.ws_disconnect(message) + group_mock.assert_not_called() + + +def test_ws_message(): + message = mock.MagicMock() + message.channel_session = dict(topology_id=1, client_id=1) + message.__getitem__.return_value = json.dumps([]) + print (message['text']) + with mock.patch('channels.Group') as group_mock: + awx.network_ui.consumers.ws_message(message) + group_mock.assert_called_once_with('topology-1') + group_mock.return_value.send.assert_called_once_with({'text': '[]'}) + + +def test_ws_connect_unauthenticated(): + message = mock.MagicMock() + message.user.is_authenticated.return_value = False + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch.object(logger, 'error') as log_mock: + awx.network_ui.consumers.ws_connect(message) + log_mock.assert_called_once_with('Request user is not authenticated to use websocket.') + + +def test_ws_connect_new_topology(): + message = mock.MagicMock() + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch('awx.network_ui.consumers.Client') as client_mock,\ + mock.patch('awx.network_ui.consumers.Topology') as topology_mock,\ + mock.patch('channels.Group'),\ + mock.patch('awx.network_ui.consumers.send_snapshot') as send_snapshot_mock,\ + mock.patch.object(logger, 'warning'),\ + 
mock.patch.object(TopologyInventory, 'objects'),\ + mock.patch.object(TopologyInventory, 'save'),\ + mock.patch.object(Topology, 'save'),\ + mock.patch.object(Topology, 'objects'),\ + mock.patch.object(Device, 'objects'),\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects'): + client_mock.return_value.pk = 777 + topology_mock.return_value = Topology( + name="topology", scale=1.0, panX=0, panY=0, pk=999) + awx.network_ui.consumers.ws_connect(message) + message.reply_channel.send.assert_has_calls([ + mock.call({'text': '["id", 777]'}), + mock.call({'text': '["topology_id", 999]'}), + mock.call( + {'text': '["Topology", {"scale": 1.0, "name": "topology", "device_id_seq": 0, "panY": 0, "panX": 0, "topology_id": 999, "link_id_seq": 0}]'}), + ]) + send_snapshot_mock.assert_called_once_with(message.reply_channel, 999) + + +def test_ws_connect_existing_topology(): + message = mock.MagicMock() + logger = logging.getLogger('awx.network_ui.consumers') + with mock.patch('awx.network_ui.consumers.Client') as client_mock,\ + mock.patch('awx.network_ui.consumers.send_snapshot') as send_snapshot_mock,\ + mock.patch('channels.Group'),\ + mock.patch.object(logger, 'warning'),\ + mock.patch.object(TopologyInventory, 'objects') as topology_inventory_objects_mock,\ + mock.patch.object(TopologyInventory, 'save'),\ + mock.patch.object(Topology, 'save'),\ + mock.patch.object(Topology, 'objects') as topology_objects_mock,\ + mock.patch.object(Device, 'objects'),\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects'): + topology_inventory_objects_mock.filter.return_value.values_list.return_value = [ + 1] + client_mock.return_value.pk = 888 + topology_objects_mock.get.return_value = Topology(pk=1001, + id=1, + name="topo", + panX=0, + panY=0, + scale=1.0, + link_id_seq=1, + device_id_seq=1) + awx.network_ui.consumers.ws_connect(message) + message.reply_channel.send.assert_has_calls([ + mock.call({'text': '["id", 888]'}), + mock.call({'text': '["topology_id", 1001]'}), + mock.call( + {'text': '["Topology", {"scale": 1.0, "name": "topo", "device_id_seq": 1, "panY": 0, "panX": 0, "topology_id": 1001, "link_id_seq": 1}]'}), + ]) + send_snapshot_mock.assert_called_once_with(message.reply_channel, 1001) diff --git a/awx/network_ui/tests/unit/test_models.py b/awx/network_ui/tests/unit/test_models.py new file mode 100644 index 0000000000..e392662a99 --- /dev/null +++ b/awx/network_ui/tests/unit/test_models.py @@ -0,0 +1,15 @@ + + +from awx.network_ui.models import Device, Topology, Interface + + +def test_device(): + assert str(Device(name="foo")) == "foo" + + +def test_topology(): + assert str(Topology(name="foo")) == "foo" + + +def test_interface(): + assert str(Interface(name="foo")) == "foo" diff --git a/awx/network_ui/tests/unit/test_network_events.py b/awx/network_ui/tests/unit/test_network_events.py new file mode 100644 index 0000000000..d4ce60c7ae --- /dev/null +++ b/awx/network_ui/tests/unit/test_network_events.py @@ -0,0 +1,451 @@ +import mock +import json +import logging + +from awx.network_ui.consumers import networking_events_dispatcher +from awx.network_ui.models import Topology, Device, Link, Interface + + +def message(message): + def wrapper(fn): + fn.tests_message = message + return fn + return wrapper + + +@message('DeviceMove') +def test_network_events_handle_message_DeviceMove(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceMove', dict( + msg_type='DeviceMove', + sender=1, + id=1, + x=100, + y=100, + 
previous_x=0, + previous_y=0 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock: + networking_events_dispatcher.handle(message) + device_objects_mock.filter.assert_called_once_with( + cid=1, topology_id=1) + device_objects_mock.filter.return_value.update.assert_called_once_with( + x=100, y=100) + log_mock.assert_not_called() + + +@message('DeviceCreate') +def test_network_events_handle_message_DeviceCreate(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceCreate', dict(msg_type='DeviceCreate', + sender=1, + id=1, + x=0, + y=0, + name="test_created", + type='host', + host_id=None)] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Topology.objects, 'filter') as topology_objects_mock,\ + mock.patch.object(Device.objects, 'get_or_create') as device_objects_mock: + device_mock = mock.MagicMock() + filter_mock = mock.MagicMock() + device_objects_mock.return_value = [device_mock, True] + topology_objects_mock.return_value = filter_mock + networking_events_dispatcher.handle(message) + device_objects_mock.assert_called_once_with( + cid=1, + defaults={'name': u'test_created', 'cid': 1, 'device_type': u'host', + 'x': 0, 'y': 0, 'host_id': None}, + topology_id=1) + device_mock.save.assert_called_once_with() + topology_objects_mock.assert_called_once_with( + device_id_seq__lt=1, pk=1) + filter_mock.update.assert_called_once_with(device_id_seq=1) + log_mock.assert_not_called() + + +@message('DeviceLabelEdit') +def test_network_events_handle_message_DeviceLabelEdit(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceLabelEdit', dict( + msg_type='DeviceLabelEdit', + sender=1, + id=1, + name='test_changed', + previous_name='test_created' + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device.objects, 'filter') as device_objects_filter_mock: + networking_events_dispatcher.handle(message) + device_objects_filter_mock.assert_called_once_with( + cid=1, topology_id=1) + log_mock.assert_not_called() + + +@message('DeviceSelected') +def test_network_events_handle_message_DeviceSelected(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceSelected', dict( + msg_type='DeviceSelected', + sender=1, + id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_not_called() + + +@message('DeviceUnSelected') +def test_network_events_handle_message_DeviceUnSelected(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceUnSelected', dict( + msg_type='DeviceUnSelected', + sender=1, + id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_not_called() + + +@message('DeviceDestroy') +def test_network_events_handle_message_DeviceDestory(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['DeviceDestroy', dict( + msg_type='DeviceDestroy', + sender=1, + id=1, + previous_x=0, + previous_y=0, + previous_name="", + 
previous_type="host", + previous_host_id="1")] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock: + networking_events_dispatcher.handle(message) + device_objects_mock.filter.assert_called_once_with( + cid=1, topology_id=1) + device_objects_mock.filter.return_value.delete.assert_called_once_with() + log_mock.assert_not_called() + + +@message('InterfaceCreate') +def test_network_events_handle_message_InterfaceCreate(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['InterfaceCreate', dict( + msg_type='InterfaceCreate', + sender=1, + device_id=1, + id=1, + name='eth0' + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Interface, 'objects') as interface_objects_mock: + device_objects_mock.get.return_value.pk = 99 + networking_events_dispatcher.handle(message) + device_objects_mock.get.assert_called_once_with(cid=1, topology_id=1) + device_objects_mock.filter.assert_called_once_with( + cid=1, interface_id_seq__lt=1, topology_id=1) + interface_objects_mock.get_or_create.assert_called_once_with( + cid=1, defaults={'name': u'eth0'}, device_id=99) + log_mock.assert_not_called() + + +@message('InterfaceLabelEdit') +def test_network_events_handle_message_InterfaceLabelEdit(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['InterfaceLabelEdit', dict( + msg_type='InterfaceLabelEdit', + sender=1, + id=1, + device_id=1, + name='new name', + previous_name='old name' + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Interface, 'objects') as interface_objects_mock: + networking_events_dispatcher.handle(message) + interface_objects_mock.filter.assert_called_once_with( + cid=1, device__cid=1, device__topology_id=1) + interface_objects_mock.filter.return_value.update.assert_called_once_with( + name=u'new name') + log_mock.assert_not_called() + + +@message('LinkLabelEdit') +def test_network_events_handle_message_LinkLabelEdit(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkLabelEdit', dict( + msg_type='LinkLabelEdit', + sender=1, + id=1, + name='new name', + previous_name='old name' + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Link, 'objects') as link_objects_mock: + networking_events_dispatcher.handle(message) + link_objects_mock.filter.assert_called_once_with( + cid=1, from_device__topology_id=1) + link_objects_mock.filter.return_value.update.assert_called_once_with( + name=u'new name') + log_mock.assert_not_called() + + +@message('LinkCreate') +def test_network_events_handle_message_LinkCreate(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkCreate', dict( + msg_type='LinkCreate', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Link, 'objects') as link_objects_mock,\ + 
mock.patch.object(Interface, 'objects') as interface_objects_mock,\ + mock.patch.object(Topology, 'objects') as topology_objects_mock: + values_list_mock = mock.MagicMock() + values_list_mock.values_list.return_value = [(1,1), (2,2)] + interface_objects_mock.get.return_value = mock.MagicMock() + interface_objects_mock.get.return_value.pk = 7 + device_objects_mock.filter.return_value = values_list_mock + topology_objects_mock.filter.return_value = mock.MagicMock() + networking_events_dispatcher.handle(message) + device_objects_mock.filter.assert_called_once_with( + cid__in=[1, 2], topology_id=1) + values_list_mock.values_list.assert_called_once_with('cid', 'pk') + link_objects_mock.get_or_create.assert_called_once_with( + cid=1, from_device_id=1, from_interface_id=7, name=u'', + to_device_id=2, to_interface_id=7) + topology_objects_mock.filter.assert_called_once_with( + link_id_seq__lt=1, pk=1) + topology_objects_mock.filter.return_value.update.assert_called_once_with( + link_id_seq=1) + log_mock.assert_not_called() + + +@message('LinkCreate') +def test_network_events_handle_message_LinkCreate_bad_device1(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkCreate', dict( + msg_type='LinkCreate', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects') as interface_objects_mock,\ + mock.patch.object(Topology, 'objects') as topology_objects_mock: + values_list_mock = mock.MagicMock() + values_list_mock.values_list.return_value = [(9,1), (2,2)] + interface_objects_mock.get.return_value = mock.MagicMock() + interface_objects_mock.get.return_value.pk = 7 + device_objects_mock.filter.return_value = values_list_mock + topology_objects_mock.filter.return_value = mock.MagicMock() + networking_events_dispatcher.handle(message) + device_objects_mock.filter.assert_called_once_with( + cid__in=[1, 2], topology_id=1) + values_list_mock.values_list.assert_called_once_with('cid', 'pk') + log_mock.assert_called_once_with('Device not found') + + +@message('LinkCreate') +def test_network_events_handle_message_LinkCreate_bad_device2(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkCreate', dict( + msg_type='LinkCreate', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Link, 'objects'),\ + mock.patch.object(Interface, 'objects') as interface_objects_mock,\ + mock.patch.object(Topology, 'objects') as topology_objects_mock: + values_list_mock = mock.MagicMock() + values_list_mock.values_list.return_value = [(1,1), (9,2)] + interface_objects_mock.get.return_value = mock.MagicMock() + interface_objects_mock.get.return_value.pk = 7 + device_objects_mock.filter.return_value = values_list_mock + topology_objects_mock.filter.return_value = mock.MagicMock() + networking_events_dispatcher.handle(message) + device_objects_mock.filter.assert_called_once_with( + cid__in=[1, 2], topology_id=1) + 
values_list_mock.values_list.assert_called_once_with('cid', 'pk') + log_mock.assert_called_once_with('Device not found') + + +@message('LinkDestroy') +def test_network_events_handle_message_LinkDestroy(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkDestroy', dict( + msg_type='LinkDestroy', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device.objects, 'filter') as device_filter_mock,\ + mock.patch.object(Link.objects, 'filter') as link_filter_mock,\ + mock.patch.object(Interface.objects, 'get') as interface_get_mock: + values_mock = mock.MagicMock() + interface_get_mock.return_value = mock.MagicMock() + interface_get_mock.return_value.pk = 7 + device_filter_mock.return_value = values_mock + values_mock.values_list.return_value = [(1,1), (2,2)] + networking_events_dispatcher.handle(message) + device_filter_mock.assert_called_once_with( + cid__in=[1, 2], topology_id=1) + values_mock.values_list.assert_called_once_with('cid', 'pk') + link_filter_mock.assert_called_once_with( + cid=1, from_device_id=1, from_interface_id=7, to_device_id=2, to_interface_id=7) + log_mock.assert_not_called() + + +@message('LinkDestroy') +def test_network_events_handle_message_LinkDestroy_bad_device_map1(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkDestroy', dict( + msg_type='LinkDestroy', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device.objects, 'filter') as device_filter_mock,\ + mock.patch.object(Link.objects, 'filter'),\ + mock.patch.object(Interface.objects, 'get') as interface_get_mock: + values_mock = mock.MagicMock() + interface_get_mock.return_value = mock.MagicMock() + interface_get_mock.return_value.pk = 7 + device_filter_mock.return_value = values_mock + values_mock.values_list.return_value = [(9,1), (2,2)] + networking_events_dispatcher.handle(message) + log_mock.assert_called_once_with('Device not found') + + +@message('LinkDestroy') +def test_network_events_handle_message_LinkDestroy_bad_device_map2(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkDestroy', dict( + msg_type='LinkDestroy', + id=1, + sender=1, + name="", + from_device_id=1, + to_device_id=2, + from_interface_id=1, + to_interface_id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock,\ + mock.patch.object(Device.objects, 'filter') as device_filter_mock,\ + mock.patch.object(Link.objects, 'filter'),\ + mock.patch.object(Interface.objects, 'get') as interface_get_mock: + values_mock = mock.MagicMock() + interface_get_mock.return_value = mock.MagicMock() + interface_get_mock.return_value.pk = 7 + device_filter_mock.return_value = values_mock + values_mock.values_list.return_value = [(1,1), (9,2)] + networking_events_dispatcher.handle(message) + log_mock.assert_called_once_with('Device not found') + + +@message('LinkSelected') +def test_network_events_handle_message_LinkSelected(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkSelected', dict( + msg_type='LinkSelected', + 
sender=1, + id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_not_called() + + +@message('LinkUnSelected') +def test_network_events_handle_message_LinkUnSelected(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['LinkUnSelected', dict( + msg_type='LinkUnSelected', + sender=1, + id=1 + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_not_called() + + +@message('MultipleMessage') +def test_network_events_handle_message_MultipleMessage_unsupported_message(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['MultipleMessage', dict( + msg_type='MultipleMessage', + sender=1, + messages=[dict(msg_type="Unsupported")] + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_called_once_with( + 'Unsupported message %s', u'Unsupported') + + +@message('MultipleMessage') +def test_network_events_handle_message_MultipleMessage(): + logger = logging.getLogger('awx.network_ui.consumers') + message_data = ['MultipleMessage', dict( + msg_type='MultipleMessage', + sender=1, + messages=[dict(msg_type="DeviceSelected")] + )] + message = {'topology': 1, 'client': 1, 'text': json.dumps(message_data)} + with mock.patch.object(logger, 'warning') as log_mock: + networking_events_dispatcher.handle(message) + log_mock.assert_not_called() diff --git a/awx/network_ui/tests/unit/test_routing.py b/awx/network_ui/tests/unit/test_routing.py new file mode 100644 index 0000000000..d1d7a741dd --- /dev/null +++ b/awx/network_ui/tests/unit/test_routing.py @@ -0,0 +1,9 @@ + +import awx.network_ui.routing + + +def test_routing(): + ''' + Tests that the number of routes in awx.network_ui.routing is 3. 
+ ''' + assert len(awx.network_ui.routing.channel_routing) == 3 diff --git a/awx/network_ui/tests/unit/test_views.py b/awx/network_ui/tests/unit/test_views.py new file mode 100644 index 0000000000..9b55ad72d4 --- /dev/null +++ b/awx/network_ui/tests/unit/test_views.py @@ -0,0 +1,65 @@ + +import mock + +from awx.network_ui.views import topology_data, NetworkAnnotatedInterface, json_topology_data, yaml_topology_data +from awx.network_ui.models import Topology, Device, Link, Interface + + + +def test_topology_data(): + with mock.patch.object(Topology, 'objects'),\ + mock.patch.object(Device, 'objects') as device_objects_mock,\ + mock.patch.object(Link, 'objects') as link_objects_mock,\ + mock.patch.object(Interface, 'objects'),\ + mock.patch.object(NetworkAnnotatedInterface, 'filter'): + device_objects_mock.filter.return_value.order_by.return_value = [ + Device(pk=1), Device(pk=2)] + link_objects_mock.filter.return_value = [Link(from_device=Device(name='from', cid=1), + to_device=Device( + name='to', cid=2), + from_interface=Interface( + name="eth0", cid=1), + to_interface=Interface( + name="eth0", cid=1), + name="", + pk=1 + )] + data = topology_data(1) + assert len(data['devices']) == 2 + assert len(data['links']) == 1 + + +def test_json_topology_data(): + request = mock.MagicMock() + request.GET = dict(topology_id=1) + with mock.patch('awx.network_ui.views.topology_data') as topology_data_mock: + topology_data_mock.return_value = dict() + json_topology_data(request) + topology_data_mock.assert_called_once_with(1) + + +def test_yaml_topology_data(): + request = mock.MagicMock() + request.GET = dict(topology_id=1) + with mock.patch('awx.network_ui.views.topology_data') as topology_data_mock: + topology_data_mock.return_value = dict() + yaml_topology_data(request) + topology_data_mock.assert_called_once_with(1) + + +def test_json_topology_data_no_topology_id(): + request = mock.MagicMock() + request.GET = dict() + with mock.patch('awx.network_ui.views.topology_data') as topology_data_mock: + topology_data_mock.return_value = dict() + json_topology_data(request) + topology_data_mock.assert_not_called() + + +def test_yaml_topology_data_no_topology_id(): + request = mock.MagicMock() + request.GET = dict() + with mock.patch('awx.network_ui.views.topology_data') as topology_data_mock: + topology_data_mock.return_value = dict() + yaml_topology_data(request) + topology_data_mock.assert_not_called() diff --git a/awx/network_ui/urls.py b/awx/network_ui/urls.py index d08b0a448a..2101eff59f 100644 --- a/awx/network_ui/urls.py +++ b/awx/network_ui/urls.py @@ -5,6 +5,6 @@ from awx.network_ui import views app_name = 'network_ui' urlpatterns = [ - url(r'^topology.json$', views.json_topology_data, name='json_topology_data'), - url(r'^topology.yaml$', views.yaml_topology_data, name='yaml_topology_data'), + url(r'^topology.json/?$', views.json_topology_data, name='json_topology_data'), + url(r'^topology.yaml/?$', views.yaml_topology_data, name='yaml_topology_data'), ] diff --git a/awx/network_ui/views.py b/awx/network_ui/views.py index e80f03c320..b9cd476bcc 100644 --- a/awx/network_ui/views.py +++ b/awx/network_ui/views.py @@ -1,11 +1,9 @@ # Copyright (c) 2017 Red Hat, Inc -from django.shortcuts import render from django import forms from django.http import JsonResponse, HttpResponseBadRequest, HttpResponse from awx.network_ui.models import Topology, Device, Link, Interface from django.db.models import Q import yaml -import json NetworkAnnotatedInterface = Interface.objects.values('name', 'cid', @@ -63,18 
+61,6 @@ def topology_data(topology_id): return data -def yaml_serialize_topology(topology_id): - return yaml.safe_dump(topology_data(topology_id), default_flow_style=False) - - -def json_serialize_topology(topology_id): - return json.dumps(topology_data(topology_id)) - - -def index(request): - return render(request, "network_ui/index.html", dict(topologies=Topology.objects.all().order_by('-pk'))) - - class TopologyForm(forms.Form): topology_id = forms.IntegerField() @@ -82,7 +68,10 @@ def json_topology_data(request): form = TopologyForm(request.GET) if form.is_valid(): - return JsonResponse(topology_data(form.cleaned_data['topology_id'])) + response = JsonResponse(topology_data(form.cleaned_data['topology_id']), + content_type='application/force-download') + response['Content-Disposition'] = 'attachment; filename="{}"'.format('topology.json') + return response else: return HttpResponseBadRequest(form.errors) @@ -90,9 +79,11 @@ def yaml_topology_data(request): form = TopologyForm(request.GET) if form.is_valid(): - return HttpResponse(yaml.safe_dump(topology_data(form.cleaned_data['topology_id']), - default_flow_style=False), - content_type='application/yaml') + response = HttpResponse(yaml.safe_dump(topology_data(form.cleaned_data['topology_id']), + default_flow_style=False), + content_type='application/force-download') + response['Content-Disposition'] = 'attachment; filename="{}"'.format('topology.yaml') + return response else: return HttpResponseBadRequest(form.errors) diff --git a/awx/plugins/inventory/tower.py b/awx/plugins/inventory/tower.py index 52a63a0bab..ff14f6b731 100755 --- a/awx/plugins/inventory/tower.py +++ b/awx/plugins/inventory/tower.py @@ -90,6 +90,7 @@ def read_tower_inventory(tower_host, tower_user, tower_pass, inventory, license_ tower_host = "https://{}".format(tower_host) inventory_url = urljoin(tower_host, "/api/v2/inventories/{}/script/?hostvars=1&towervars=1&all=1".format(inventory.replace('/', ''))) config_url = urljoin(tower_host, "/api/v2/config/") + reason = None try: if license_type != "open": config_response = requests.get(config_url, @@ -106,14 +107,16 @@ def read_tower_inventory(tower_host, tower_user, tower_pass, inventory, license_ response = requests.get(inventory_url, auth=HTTPBasicAuth(tower_user, tower_pass), verify=not ignore_ssl) + try: + json_response = response.json() + except (ValueError, TypeError) as e: + reason = "Failed to parse json from host: {}".format(e) - if response.ok: - return response.json() - json_reason = response.json() - reason = json_reason.get('detail', 'Retrieving Tower Inventory Failed') + # Only return the body if it actually parsed as JSON; otherwise fall + # through and raise with the parse error instead of a NameError. + if response.ok and not reason: + return json_response + if not reason: + reason = json_response.get('detail', 'Retrieving Tower Inventory Failed') except requests.ConnectionError as e: reason = "Connection to remote host failed: {}".format(e) - except json.JSONDecodeError as e: - reason = "Failed to parse json from host: {}".format(e) raise RuntimeError(reason) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 354c2b9e74..978cbf05c0 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -4,13 +4,10 @@ import os import re # noqa import sys -import ldap import djcelery import six from datetime import timedelta -from kombu.common import Broadcast - # global settings from django.conf import global_settings # ugettext lazy @@ -41,6 +38,13 @@ def IS_TESTING(argv=None): return is_testing(argv) +if "pytest" in sys.modules: + import mock + with
mock.patch('__main__.__builtins__.dir', return_value=[]): + import ldap +else: + import ldap + DEBUG = True SQL_DEBUG = DEBUG @@ -456,6 +460,9 @@ BROKER_POOL_LIMIT = None BROKER_URL = 'amqp://guest:guest@localhost:5672//' CELERY_EVENT_QUEUE_TTL = 5 CELERY_DEFAULT_QUEUE = 'awx_private_queue' +CELERY_DEFAULT_EXCHANGE = 'awx_private_queue' +CELERY_DEFAULT_ROUTING_KEY = 'awx_private_queue' +CELERY_DEFAULT_EXCHANGE_TYPE = 'direct' CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_ACCEPT_CONTENT = ['json'] @@ -466,10 +473,8 @@ CELERYD_POOL_RESTARTS = True CELERYD_AUTOSCALER = 'awx.main.utils.autoscale:DynamicAutoScaler' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' CELERY_IMPORTS = ('awx.main.scheduler.tasks',) -CELERY_QUEUES = ( - Broadcast('tower_broadcast_all'), -) -CELERY_ROUTES = {} +CELERY_QUEUES = () +CELERY_ROUTES = ('awx.main.utils.ha.AWXCeleryRouter',) def log_celery_failure(*args): @@ -532,19 +537,12 @@ ASGI_AMQP = { } # Django Caching Configuration -if is_testing(): - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - }, - } -else: - CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', - 'LOCATION': 'memcached:11211', - }, - } +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', + 'LOCATION': 'memcached:11211', + }, +} # Social Auth configuration. SOCIAL_AUTH_STRATEGY = 'social_django.strategy.DjangoStrategy' @@ -1005,6 +1003,9 @@ LOGGING = { 'require_debug_true_or_test': { '()': 'awx.main.utils.RequireDebugTrueOrTest', }, + 'external_log_enabled': { + '()': 'awx.main.utils.filters.ExternalLoggerEnabled' + }, }, 'formatters': { 'simple': { @@ -1038,11 +1039,10 @@ LOGGING = { 'class': 'logging.NullHandler', 'formatter': 'simple', }, - 'http_receiver': { - 'class': 'awx.main.utils.handlers.HTTPSNullHandler', - 'level': 'DEBUG', + 'external_logger': { + 'class': 'awx.main.utils.handlers.AWXProxyHandler', 'formatter': 'json', - 'host': '', + 'filters': ['external_log_enabled'], }, 'mail_admins': { 'level': 'ERROR', @@ -1135,7 +1135,7 @@ LOGGING = { 'handlers': ['console'], }, 'awx': { - 'handlers': ['console', 'file', 'tower_warnings'], + 'handlers': ['console', 'file', 'tower_warnings', 'external_logger'], 'level': 'DEBUG', }, 'awx.conf': { @@ -1160,16 +1160,13 @@ LOGGING = { 'propagate': False }, 'awx.main.tasks': { - 'handlers': ['task_system'], + 'handlers': ['task_system', 'external_logger'], 'propagate': False }, 'awx.main.scheduler': { - 'handlers': ['task_system'], + 'handlers': ['task_system', 'external_logger'], 'propagate': False }, - 'awx.main.consumers': { - 'handlers': ['null'] - }, 'awx.main.access': { 'handlers': ['null'], 'propagate': False, @@ -1183,7 +1180,7 @@ LOGGING = { 'propagate': False, }, 'awx.analytics': { - 'handlers': ['http_receiver'], + 'handlers': ['external_logger'], 'level': 'INFO', 'propagate': False }, diff --git a/awx/settings/development.py b/awx/settings/development.py index eec3689ee7..0061157ec4 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -9,6 +9,7 @@ import socket import copy import sys import traceback +import uuid # Centos-7 doesn't include the svg mime type # /usr/lib64/python/mimetypes.py @@ -20,6 +21,15 @@ from split_settings.tools import optional, include # Load default settings. 
from defaults import * # NOQA +# don't use memcache when running tests +if "pytest" in sys.modules: + CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'unique-{}'.format(str(uuid.uuid4())), + }, + } + # awx-manage shell_plus --notebook NOTEBOOK_ARGUMENTS = [ '--NotebookApp.token=', @@ -103,13 +113,6 @@ if 'django_jenkins' in INSTALLED_APPS: INSTALLED_APPS += ('rest_framework_swagger',) -# Much faster than the default -# https://docs.djangoproject.com/en/1.6/topics/auth/passwords/#how-django-stores-passwords -PASSWORD_HASHERS = ( - 'django.contrib.auth.hashers.MD5PasswordHasher', - 'django.contrib.auth.hashers.PBKDF2PasswordHasher', -) - # Configure a default UUID for development only. SYSTEM_UUID = '00000000-0000-0000-0000-000000000000' @@ -149,8 +152,6 @@ SERVICE_NAME_DICT = { "uwsgi": "uwsgi", "daphne": "daphne", "nginx": "nginx"} -# Used for sending commands in automatic restart -UWSGI_FIFO_LOCATION = '/awxfifo' try: socket.gethostbyname('docker.for.mac.internal') diff --git a/awx/settings/local_settings.py.docker_compose b/awx/settings/local_settings.py.docker_compose index 7381345b25..94d9ba0214 100644 --- a/awx/settings/local_settings.py.docker_compose +++ b/awx/settings/local_settings.py.docker_compose @@ -13,6 +13,7 @@ ############################################################################### import os import urllib +import sys def patch_broken_pipe_error(): @@ -66,7 +67,7 @@ DATABASES = { # Use SQLite for unit tests instead of PostgreSQL. If the lines below are # commented out, Django will create the test_awx-dev database in PostgreSQL to # run unit tests. -if is_testing(sys.argv): +if "pytest" in sys.modules: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', @@ -195,11 +196,10 @@ LOGGING['handlers']['syslog'] = { LOGGING['loggers']['django.request']['handlers'] = ['console'] LOGGING['loggers']['rest_framework.request']['handlers'] = ['console'] -LOGGING['loggers']['awx']['handlers'] = ['console'] +LOGGING['loggers']['awx']['handlers'] = ['console', 'external_logger'] LOGGING['loggers']['awx.main.commands.run_callback_receiver']['handlers'] = ['console'] -LOGGING['loggers']['awx.main.commands.inventory_import']['handlers'] = ['console'] -LOGGING['loggers']['awx.main.tasks']['handlers'] = ['console'] -LOGGING['loggers']['awx.main.scheduler']['handlers'] = ['console'] +LOGGING['loggers']['awx.main.tasks']['handlers'] = ['console', 'external_logger'] +LOGGING['loggers']['awx.main.scheduler']['handlers'] = ['console', 'external_logger'] LOGGING['loggers']['django_auth_ldap']['handlers'] = ['console'] LOGGING['loggers']['social']['handlers'] = ['console'] LOGGING['loggers']['system_tracking_migrations']['handlers'] = ['console'] diff --git a/awx/settings/production.py b/awx/settings/production.py index d1d5baa3ad..ff7d966138 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -68,8 +68,6 @@ SERVICE_NAME_DICT = { "channels": "awx-channels-worker", "uwsgi": "awx-uwsgi", "daphne": "awx-daphne"} -# Used for sending commands in automatic restart -UWSGI_FIFO_LOCATION = '/var/lib/awx/awxfifo' # Store a snapshot of default settings at this point before loading any # customizable config files. 
diff --git a/awx/sso/tests/test_env.py b/awx/sso/tests/test_env.py new file mode 100644 index 0000000000..135c90d99b --- /dev/null +++ b/awx/sso/tests/test_env.py @@ -0,0 +1,6 @@ + + +# Ensure that our autouse settings overrides are working +def test_cache(settings): + assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache' + assert settings.CACHES['default']['LOCATION'].startswith('unique-') diff --git a/awx/templates/rest_framework/base.html b/awx/templates/rest_framework/base.html index cbd761cf33..313bbec9a3 100644 --- a/awx/templates/rest_framework/base.html +++ b/awx/templates/rest_framework/base.html @@ -157,8 +157,8 @@
-HTTP {{ response.status_code }} {{ response.status_text }}{% autoescape off %}{% for key, val in response_headers|items %}
-{{ key }}: {{ val|break_long_headers|urlize_quoted_links }}{% endfor %}
+HTTP {{ response.status_code }} {{ response.status_text }}{% autoescape off %}{% if response_headers %}{% for key, val in response_headers|items %}
+{{ key }}: {{ val|break_long_headers|urlize_quoted_links }}{% endfor %}{% endif %}
 {# Original line below had the side effect of also escaping content: #}
 {#   {{ content|urlize_quoted_links }}
{% endautoescape %} #} {# For AWX, disable automatic URL creation and move content outside of autoescape off block. #} diff --git a/awx/ui/.eslintrc.js b/awx/ui/.eslintrc.js index 217601f5f9..9a42d34ab2 100644 --- a/awx/ui/.eslintrc.js +++ b/awx/ui/.eslintrc.js @@ -54,6 +54,18 @@ module.exports = { 'no-multiple-empty-lines': ['error', { max: 1 }], 'object-curly-newline': 'off', 'space-before-function-paren': ['error', 'always'], - 'no-trailing-spaces': ['error'] - } + 'no-trailing-spaces': ['error'], + 'prefer-destructuring': ['error', { + 'VariableDeclarator': { + 'array': false, + 'object': true + }, + 'AssignmentExpression': { + 'array': false, + 'object': true + } + }, { + 'enforceForRenamedProperties': false + }] + } }; diff --git a/awx/ui/client/features/_index.less b/awx/ui/client/features/_index.less index 59e8e4630b..04be5b31bb 100644 --- a/awx/ui/client/features/_index.less +++ b/awx/ui/client/features/_index.less @@ -1,3 +1,2 @@ -@import 'credentials/_index'; @import 'output/_index'; @import 'users/tokens/_index'; diff --git a/awx/ui/client/features/applications/add-applications.controller.js b/awx/ui/client/features/applications/add-applications.controller.js index 173460157e..c848bcd345 100644 --- a/awx/ui/client/features/applications/add-applications.controller.js +++ b/awx/ui/client/features/applications/add-applications.controller.js @@ -3,10 +3,8 @@ function AddApplicationsController (models, $state, strings) { const { application, me, organization } = models; const omit = [ - 'authorization_grant_type', 'client_id', 'client_secret', - 'client_type', 'created', 'modified', 'related', @@ -44,19 +42,16 @@ function AddApplicationsController (models, $state, strings) { vm.form.organization._resource = 'organization'; vm.form.organization._route = 'applications.add.organization'; vm.form.organization._model = organization; - vm.form.organization._placeholder = strings.get('SELECT AN ORGANIZATION'); + vm.form.organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); vm.form.name.required = true; vm.form.organization.required = true; - vm.form.redirect_uris.required = true; delete vm.form.name.help_text; vm.form.save = data => { const hiddenData = { - authorization_grant_type: 'implicit', - user: me.get('id'), - client_type: 'public' + user: me.get('id') }; const payload = _.merge(data, hiddenData); diff --git a/awx/ui/client/features/applications/add-edit-applications.view.html b/awx/ui/client/features/applications/add-edit-applications.view.html index a8d8d68e6c..a7f2a87580 100644 --- a/awx/ui/client/features/applications/add-edit-applications.view.html +++ b/awx/ui/client/features/applications/add-edit-applications.view.html @@ -14,7 +14,9 @@ - + + + diff --git a/awx/ui/client/features/applications/applications.strings.js b/awx/ui/client/features/applications/applications.strings.js index 18158c4d4e..8dfcf521a2 100644 --- a/awx/ui/client/features/applications/applications.strings.js +++ b/awx/ui/client/features/applications/applications.strings.js @@ -16,6 +16,10 @@ function ApplicationsStrings (BaseString) { USERS: t.s('Tokens') }; + ns.tooltips = { + ADD: t.s('Create a new Application') + }; + ns.add = { PANEL_TITLE: t.s('NEW APPLICATION') }; @@ -25,6 +29,10 @@ function ApplicationsStrings (BaseString) { ROW_ITEM_LABEL_ORGANIZATION: t.s('ORG'), ROW_ITEM_LABEL_MODIFIED: t.s('LAST MODIFIED') }; + + ns.inputs = { + ORGANIZATION_PLACEHOLDER: t.s('SELECT AN ORGANIZATION') + }; } ApplicationsStrings.$inject = ['BaseStringService']; diff --git 
a/awx/ui/client/features/applications/edit-applications.controller.js b/awx/ui/client/features/applications/edit-applications.controller.js index 6279b642ee..1bf6b8c91b 100644 --- a/awx/ui/client/features/applications/edit-applications.controller.js +++ b/awx/ui/client/features/applications/edit-applications.controller.js @@ -4,10 +4,8 @@ function EditApplicationsController (models, $state, strings, $scope) { const { me, application, organization } = models; const omit = [ - 'authorization_grant_type', 'client_id', 'client_secret', - 'client_type', 'created', 'modified', 'related', @@ -54,45 +52,30 @@ function EditApplicationsController (models, $state, strings, $scope) { vm.form.disabled = !isEditable; + vm.form.name.required = true; + const isOrgAdmin = _.some(me.get('related.admin_of_organizations.results'), (org) => org.id === organization.get('id')); const isSuperuser = me.get('is_superuser'); const isCurrentAuthor = Boolean(application.get('summary_fields.created_by.id') === me.get('id')); - - vm.form.organization = { - type: 'field', - label: 'Organization', - id: 'organization' - }; - vm.form.description = { - type: 'String', - label: 'Description', - id: 'description' - }; - - vm.form.organization._resource = 'organization'; - vm.form.organization._route = 'applications.edit.organization'; - vm.form.organization._model = organization; - vm.form.organization._placeholder = strings.get('SELECT AN ORGANIZATION'); - - // TODO: org not returned via api endpoint, check on this - vm.form.organization._value = application.get('organization'); - vm.form.organization._disabled = true; + if (isSuperuser || isOrgAdmin || (application.get('organization') === null && isCurrentAuthor)) { vm.form.organization._disabled = false; } - vm.form.name.required = true; + vm.form.organization._resource = 'organization'; + vm.form.organization._model = organization; + vm.form.organization._route = 'applications.edit.organization'; + vm.form.organization._value = application.get('summary_fields.organization.id'); + vm.form.organization._displayValue = application.get('summary_fields.organization.name'); + vm.form.organization._placeholder = strings.get('inputs.ORGANIZATION_PLACEHOLDER'); vm.form.organization.required = true; - vm.form.redirect_uris.required = true; delete vm.form.name.help_text; vm.form.save = data => { const hiddenData = { - authorization_grant_type: 'implicit', - user: me.get('id'), - client_type: 'public' + user: me.get('id') }; const payload = _.merge(data, hiddenData); diff --git a/awx/ui/client/features/applications/index.js b/awx/ui/client/features/applications/index.js index ca70be6165..af4783884f 100644 --- a/awx/ui/client/features/applications/index.js +++ b/awx/ui/client/features/applications/index.js @@ -62,8 +62,7 @@ function ApplicationsRun ($stateExtender, strings) { }, data: { activityStream: true, - // TODO: double-check activity stream works - activityStreamTarget: 'application' + activityStreamTarget: 'o_auth2_application' }, views: { '@': { @@ -111,8 +110,7 @@ function ApplicationsRun ($stateExtender, strings) { }, data: { activityStream: true, - // TODO: double-check activity stream works - activityStreamTarget: 'application' + activityStreamTarget: 'o_auth2_application' }, views: { 'add@applications': { @@ -134,7 +132,7 @@ function ApplicationsRun ($stateExtender, strings) { }, data: { activityStream: true, - activityStreamTarget: 'application', + activityStreamTarget: 'o_auth2_application', activityStreamId: 'application_id' }, views: { @@ -264,8 +262,7 @@ function 
ApplicationsRun ($stateExtender, strings) { }, data: { activityStream: true, - // TODO: double-check activity stream works - activityStreamTarget: 'application' + activityStreamTarget: 'o_auth2_application' }, views: { 'userList@applications.edit': { diff --git a/awx/ui/client/features/applications/list-applications.controller.js b/awx/ui/client/features/applications/list-applications.controller.js index fd67589a08..b9353f8d88 100644 --- a/awx/ui/client/features/applications/list-applications.controller.js +++ b/awx/ui/client/features/applications/list-applications.controller.js @@ -38,6 +38,10 @@ function ListApplicationsController ( vm.applicationsCount = dataset.count; }); + vm.tooltips = { + add: strings.get('tooltips.ADD') + }; + vm.getModified = app => { const modified = _.get(app, 'modified'); @@ -74,7 +78,7 @@ function ListApplicationsController ( } if (parseInt($state.params.application_id, 10) === app.id) { - $state.go('^', reloadListStateParams, { reload: true }); + $state.go('applications', reloadListStateParams, { reload: true }); } else { $state.go('.', reloadListStateParams, { reload: true }); } diff --git a/awx/ui/client/features/applications/list-applications.view.html b/awx/ui/client/features/applications/list-applications.view.html index 1803e5ab66..c1ab24bd75 100644 --- a/awx/ui/client/features/applications/list-applications.view.html +++ b/awx/ui/client/features/applications/list-applications.view.html @@ -23,6 +23,9 @@ type="button" ui-sref="applications.add" class="at-Button--add" + id="button-add" + aw-tool-tip="{{vm.tooltips.add}}" + data-placement="top" aria-haspopup="true" aria-expanded="false"> diff --git a/awx/ui/client/features/credentials/_index.less b/awx/ui/client/features/credentials/_index.less deleted file mode 100644 index 87f746b2c3..0000000000 --- a/awx/ui/client/features/credentials/_index.less +++ /dev/null @@ -1,3 +0,0 @@ -.at-CredentialsPermissions { - margin-top: 50px; -} diff --git a/awx/ui/client/features/credentials/legacy.credentials.js b/awx/ui/client/features/credentials/legacy.credentials.js index 07cee1329d..27428feb7b 100644 --- a/awx/ui/client/features/credentials/legacy.credentials.js +++ b/awx/ui/client/features/credentials/legacy.credentials.js @@ -69,8 +69,8 @@ function LegacyCredentialsService () { ngClick: '$state.go(\'.add\')', label: 'Add', awToolTip: N_('Add a permission'), - actionClass: 'btn List-buttonSubmit', - buttonContent: `+ ${N_('ADD')}`, + actionClass: 'at-Button--add', + actionId: 'button-add', ngShow: '(credential_obj.summary_fields.user_capabilities.edit || canAdd)' } }, diff --git a/awx/ui/client/features/jobs/index.js b/awx/ui/client/features/jobs/index.js index c26f2b9d8f..8bb692f0c8 100644 --- a/awx/ui/client/features/jobs/index.js +++ b/awx/ui/client/features/jobs/index.js @@ -1,5 +1,6 @@ import JobsStrings from './jobs.strings'; import jobsRoute from './routes/jobs.route'; +import { jobsSchedulesRoute, jobsSchedulesEditRoute } from '../../src/scheduler/schedules.route'; const MODULE_NAME = 'at.features.jobs'; @@ -8,6 +9,8 @@ angular .service('JobsStrings', JobsStrings) .run(['$stateExtender', ($stateExtender) => { $stateExtender.addState(jobsRoute); + $stateExtender.addState(jobsSchedulesRoute); + $stateExtender.addState(jobsSchedulesEditRoute); }]); export default MODULE_NAME; diff --git a/awx/ui/client/features/jobs/index.view.html b/awx/ui/client/features/jobs/index.view.html index 054e26c2ba..2328e24261 100644 --- a/awx/ui/client/features/jobs/index.view.html +++ 
b/awx/ui/client/features/jobs/index.view.html @@ -1,13 +1,14 @@
[index.view.html hunk: the surrounding markup was stripped during extraction; only the JOBS and SCHEDULES tab labels survive from the reworked tab navigation]
diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index 4959d41b5e..77be97e72d 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ b/awx/ui/client/features/jobs/jobsList.controller.js @@ -3,8 +3,7 @@ * * All Rights Reserved ************************************************ */ -const mapChoices = choices => Object - .assign(...choices.map(([k, v]) => ({ [k]: v }))); +const mapChoices = choices => Object.assign(...choices.map(([k, v]) => ({ [k]: v }))); function ListJobsController ( $scope, @@ -61,37 +60,36 @@ function ListJobsController ( vm.emptyListReason = strings.get('list.NO_RUNNING'); } - vm.jobTypes = mapChoices(unifiedJob - .options('actions.GET.type.choices')); + vm.jobTypes = mapChoices(unifiedJob.options('actions.GET.type.choices')); - vm.getLink = ({ type, id }) => { - let link; + vm.getSref = ({ type, id }) => { + let sref; switch (type) { case 'job': - link = `/#/jobz/playbook/${id}`; + sref = `output({type: 'playbook', id: ${id}})`; break; case 'ad_hoc_command': - link = `/#/jobz/command/${id}`; + sref = `output({type: 'command', id: ${id}})`; break; case 'system_job': - link = `/#/jobz/system/${id}`; + sref = `output({type: 'system', id: ${id}})`; break; case 'project_update': - link = `/#/jobz/project/${id}`; + sref = `output({type: 'project', id: ${id}})`; break; case 'inventory_update': - link = `/#/jobz/inventory/${id}`; + sref = `output({type: 'inventory', id: ${id}})`; break; case 'workflow_job': - link = `/#/workflows/${id}`; + sref = `workflowResults({id: ${id}})`; break; default: - link = ''; + sref = ''; break; } - return link; + return sref; }; vm.deleteJob = (job) => { @@ -132,7 +130,7 @@ function ListJobsController ( resourceName: $filter('sanitize')(job.name), body: deleteModalBody, action, - actionText: 'DELETE' + actionText: strings.get('DELETE'), }); }; @@ -174,7 +172,8 @@ function ListJobsController ( resourceName: $filter('sanitize')(job.name), body: deleteModalBody, action, - actionText: strings.get('CANCEL') + actionText: strings.get('cancelJob.CANCEL_JOB'), + cancelText: strings.get('cancelJob.RETURN') }); }; diff --git a/awx/ui/client/features/jobs/jobsList.view.html b/awx/ui/client/features/jobs/jobsList.view.html index d339ee75f1..e550cd879f 100644 --- a/awx/ui/client/features/jobs/jobsList.view.html +++ b/awx/ui/client/features/jobs/jobsList.view.html @@ -21,7 +21,7 @@ status="{{ job.status }}" status-tip="Job {{job.status}}. Click for details." header-value="{{ job.name }}" - header-link="{{ vm.getLink(job) }}" + header-state="{{ vm.getSref(job) }}" header-tag="{{ vm.jobTypes[job.type] }}">
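The getLink-to-getSref change above swaps hard-coded URL fragments for UI-Router sref expressions, which the list template now binds through header-state instead of header-link. As a quick illustration of that contract, here is a minimal Jasmine-style sketch; the spec file and controller bootstrapping are assumptions for illustration and are not part of this diff:

// Hypothetical spec: verifies the job-type-to-sref mapping introduced above,
// assuming vm is the instantiated ListJobsController view-model.
describe('ListJobsController.vm.getSref', () => {
    it('maps unified job types to sref expressions', () => {
        expect(vm.getSref({ type: 'job', id: 42 }))
            .toEqual("output({type: 'playbook', id: 42})");
        expect(vm.getSref({ type: 'workflow_job', id: 7 }))
            .toEqual('workflowResults({id: 7})');
    });

    it('falls back to an empty string for unknown types', () => {
        expect(vm.getSref({ type: 'mystery', id: 1 })).toEqual('');
    });
});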
diff --git a/awx/ui/client/features/jobs/routes/jobs.route.js b/awx/ui/client/features/jobs/routes/jobs.route.js index c3ee0b4fef..9129fed3d1 100644 --- a/awx/ui/client/features/jobs/routes/jobs.route.js +++ b/awx/ui/client/features/jobs/routes/jobs.route.js @@ -22,6 +22,8 @@ export default { } }, data: { + activityStream: true, + activityStreamTarget: 'job', socket: { groups: { jobs: ['status_changed'], diff --git a/awx/ui/client/features/jobs/routes/portalModeMyJobs.route.js b/awx/ui/client/features/jobs/routes/portalModeMyJobs.route.js index 07506a553f..4d0909d66b 100644 --- a/awx/ui/client/features/jobs/routes/portalModeMyJobs.route.js +++ b/awx/ui/client/features/jobs/routes/portalModeMyJobs.route.js @@ -5,6 +5,9 @@ const jobsListTemplate = require('~features/jobs/jobsList.view.html'); export default { name: 'portalMode.myJobs', url: '/myjobs?{job_search:queryset}', + ncyBreadcrumb: { + skip: true + }, params: { job_search: { value: { diff --git a/awx/ui/client/features/jobs/routes/workflowJobTemplateCompletedJobs.route.js b/awx/ui/client/features/jobs/routes/workflowJobTemplateCompletedJobs.route.js new file mode 100644 index 0000000000..8970ef8bc0 --- /dev/null +++ b/awx/ui/client/features/jobs/routes/workflowJobTemplateCompletedJobs.route.js @@ -0,0 +1,59 @@ +import { N_ } from '../../../src/i18n'; +import jobsListController from '../jobsList.controller'; + +const jobsListTemplate = require('~features/jobs/jobsList.view.html'); + +export default { + url: '/completed_jobs', + name: 'templates.editWorkflowJobTemplate.completed_jobs', + params: { + job_search: { + value: { + page_size: '20', + workflow_job__workflow_job_template: '', + order_by: '-id' + }, + dynamic: true, + squash: '' + } + }, + ncyBreadcrumb: { + label: N_('COMPLETED JOBS') + }, + views: { + related: { + templateUrl: jobsListTemplate, + controller: jobsListController, + controllerAs: 'vm' + } + }, + resolve: { + resolvedModels: [ + 'UnifiedJobModel', + (UnifiedJob) => { + const models = [ + new UnifiedJob(['options']), + ]; + return Promise.all(models); + }, + ], + Dataset: [ + '$stateParams', + 'Wait', + 'GetBasePath', + 'QuerySet', + ($stateParams, Wait, GetBasePath, qs) => { + const templateId = $stateParams.workflow_job_template_id; + + const searchParam = _.assign($stateParams + .job_search, { workflow_job__workflow_job_template: templateId }); + + const searchPath = GetBasePath('unified_jobs'); + + Wait('start'); + return qs.search(searchPath, searchParam) + .finally(() => Wait('stop')); + } + ] + } +}; diff --git a/awx/ui/client/features/output/_index.less b/awx/ui/client/features/output/_index.less index 4238573fb3..cb8e2940fd 100644 --- a/awx/ui/client/features/output/_index.less +++ b/awx/ui/client/features/output/_index.less @@ -6,6 +6,7 @@ border-top-left-radius: 4px; border-top-right-radius: 4px; border-bottom: none; + margin-top: 15px; & > div { user-select: none; @@ -123,7 +124,7 @@ &-container { font-family: monospace; - height: calc(~"100vh - 240px"); + height: 100%; overflow-y: scroll; font-size: 15px; border: 1px solid @at-gray-b7; @@ -134,6 +135,7 @@ border-radius: 0; border-bottom-right-radius: 4px; border-bottom-left-radius: 4px; + max-height: ~"calc(100vh - 350px)"; & > table { table-layout: fixed; @@ -143,6 +145,11 @@ } } } + + &--fullscreen { + grid-column-start: 1; + grid-column-end: 3; + } } .at-mixin-event() { @@ -201,18 +208,18 @@ flex-wrap: wrap; } - // Status Bar ----------------------------------------------------------------------------- .HostStatusBar { display: flex; flex: 0 0 
auto; width: 100%; + margin-bottom: 15px; } .HostStatusBar-ok, .HostStatusBar-changed, .HostStatusBar-dark, -.HostStatusBar-failed, +.HostStatusBar-failures, .HostStatusBar-skipped, .HostStatusBar-noData { height: 15px; @@ -236,7 +243,7 @@ flex: 0 0 auto; } -.HostStatusBar-failed { +.HostStatusBar-failures { background-color: @default-err; flex: 0 0 auto; } @@ -282,49 +289,29 @@ } +.HostStatusBar-tooltip.top { + margin-top: 4px; +} + // Job Details --------------------------------------------------------------------------------- @breakpoint-md: 1200px; -.JobResults { - .OnePlusTwo-container(100%, @breakpoint-md); +.JobResults-container { + display: grid; + grid-gap: 20px; + grid-template-columns: minmax(300px, 1fr) minmax(500px, 2fr); + grid-template-rows: minmax(500px, ~"calc(100vh - 130px)"); - &.fullscreen { - .JobResults-rightSide { - max-width: 100%; - } + .at-Panel { + min-width: 0; + overflow-y: auto; } } -.JobResults-leftSide { - .OnePlusTwo-left--panel(100%, @breakpoint-md); - max-width: 30%; - height: ~"calc(100vh - 177px)"; - - @media screen and (max-width: @breakpoint-md) { - max-width: 100%; - } -} - -.JobResults-rightSide { - .OnePlusTwo-right--panel(100%, @breakpoint-md); - height: ~"calc(100vh - 177px)"; - - @media (max-width: @breakpoint-md - 1px) { - padding-right: 15px; - } -} - -.JobResults-detailsPanel{ - overflow-y: scroll; -} - -.JobResults-stdoutActionButton--active { - display: none; - visibility: hidden; - flex:none; - width:0px; - padding-right: 0px; +.JobResults-detailsPanel { + display: flex; + flex-direction: column; } .JobResults-panelHeader { @@ -352,8 +339,8 @@ flex-wrap: wrap; } -.JobResults-codeMirrorResultRowLabel{ - font-size: 12px; +.JobResults-resultRow #cm-variables-container { + width: 100%; } .JobResults-resultRowLabel { @@ -416,109 +403,10 @@ padding-right: 10px; } -.JobResults-badgeRow { - display: flex; - align-items: center; - margin-right: 5px; -} - -.JobResults-badgeTitle{ - color: @default-interface-txt; - font-size: 14px; - margin-right: 10px; - font-weight: normal; - text-transform: uppercase; - margin-left: 20px; -} - -@media (max-width: @breakpoint-md) { - .JobResults-detailsPanel { - overflow-y: auto; - } - - .JobResults-rightSide { - height: inherit; - } -} - -.JobResults-timeBadge { - float:right; - font-size: 11px; - font-weight: normal; - padding: 1px 10px; - height: 14px; - margin: 3px 15px; - width: 80px; - background-color: @default-bg; - border-radius: 5px; - color: @default-interface-txt; - margin-right: -5px; -} - -.JobResults-panelRight { - display: flex; - flex-direction: column; -} - -.JobResults-panelRight .SmartSearch-bar { - width: 100%; -} - -.JobResults-panelRightTitle{ - flex-wrap: wrap; -} - -.JobResults-panelRightTitleText{ - word-wrap: break-word; - word-break: break-all; - max-width: 100%; -} - -.JobResults-badgeAndActionRow{ - display:flex; - flex: 1 0 auto; - justify-content: flex-end; - flex-wrap: wrap; - max-width: 100%; -} - .StandardOut-panelHeader { flex: initial; } -.StandardOut-panelHeader--jobIsRunning { - margin-bottom: 20px; -} - -host-status-bar { - flex: initial; - margin-bottom: 20px; -} - -smart-search { - flex: initial; -} - -job-results-standard-out { - flex: 1; - flex-basis: auto; - height: ~"calc(100% - 800px)"; - display: flex; - border: 1px solid @d7grey; - border-radius: 5px; - margin-top: 20px; -} -@media screen and (max-width: @breakpoint-md) { - job-results-standard-out { - height: auto; - } -} - -.JobResults-extraVarsHelp { - margin-left: 10px; - color: @default-icon; -} - 
.JobResults-seeMoreLess { color: #337AB7; margin: 4px 0px; @@ -528,3 +416,10 @@ job-results-standard-out { border-radius: 5px; font-size: 11px; } + +@media screen and (max-width: @breakpoint-md) { + .JobResults-container { + display: flex; + flex-direction: column; + } +} \ No newline at end of file diff --git a/awx/ui/client/features/output/api.events.service.js b/awx/ui/client/features/output/api.events.service.js new file mode 100644 index 0000000000..39499eff46 --- /dev/null +++ b/awx/ui/client/features/output/api.events.service.js @@ -0,0 +1,132 @@ +const PAGE_LIMIT = 5; +const PAGE_SIZE = 50; + +const BASE_PARAMS = { + order_by: 'start_line', + page_size: PAGE_SIZE, +}; + +const merge = (...objs) => _.merge({}, ...objs); + +const getInitialState = params => ({ + results: [], + count: 0, + previous: 1, + page: 1, + next: 1, + last: 1, + params: merge(BASE_PARAMS, params), +}); + +function JobEventsApiService ($http, $q) { + this.init = (endpoint, params) => { + this.keys = []; + this.cache = {}; + this.pageSizes = {}; + this.endpoint = endpoint; + this.state = getInitialState(params); + }; + + this.getLastPage = count => Math.ceil(count / this.state.params.page_size); + + this.fetch = () => { + delete this.cache; + delete this.keys; + delete this.pageSizes; + + this.cache = {}; + this.keys = []; + this.pageSizes = {}; + + return this.getPage(1).then(() => this); + }; + + this.getPage = number => { + if (number < 1 || number > this.state.last) { + return $q.resolve(); + } + + if (this.cache[number]) { + if (this.pageSizes[number] === PAGE_SIZE) { + return this.cache[number]; + } + + delete this.pageSizes[number]; + delete this.cache[number]; + + this.keys.splice(this.keys.indexOf(number)); + } + + const { params } = this.state; + + delete params.page; + + params.page = number; + + const promise = $http.get(this.endpoint, { params }) + .then(({ data }) => { + const { results, count } = data; + + this.state.results = results; + this.state.count = count; + this.state.page = number; + this.state.last = this.getLastPage(count); + this.state.previous = Math.max(1, number - 1); + this.state.next = Math.min(this.state.last, number + 1); + + this.pageSizes[number] = results.length; + + return { results, page: number }; + }); + + this.cache[number] = promise; + this.keys.push(number); + + if (this.keys.length > PAGE_LIMIT) { + delete this.cache[this.keys.shift()]; + } + + return promise; + }; + + this.first = () => this.getPage(1); + this.next = () => this.getPage(this.state.next); + this.previous = () => this.getPage(this.state.previous); + + this.last = () => { + const params = merge({}, this.state.params); + + delete params.page; + delete params.order_by; + + params.page = 1; + params.order_by = '-start_line'; + + const promise = $http.get(this.endpoint, { params }) + .then(({ data }) => { + const { results, count } = data; + const lastPage = this.getLastPage(count); + + results.reverse(); + const shifted = results.splice(count % PAGE_SIZE); + + this.state.results = shifted; + this.state.count = count; + this.state.page = lastPage; + this.state.next = lastPage; + this.state.last = lastPage; + this.state.previous = Math.max(1, this.state.page - 1); + + return { results: shifted, page: lastPage }; + }); + + return promise; + }; +} + +JobEventsApiService.$inject = [ + '$http', + '$q' +]; + +export default JobEventsApiService; diff --git a/awx/ui/client/features/output/details.directive.js b/awx/ui/client/features/output/details.component.js similarity index 71% rename from 
awx/ui/client/features/output/details.directive.js rename to awx/ui/client/features/output/details.component.js index eb2b7277c7..84a97ed050 100644 --- a/awx/ui/client/features/output/details.directive.js +++ b/awx/ui/client/features/output/details.component.js @@ -2,7 +2,6 @@ const templateUrl = require('~features/output/details.partial.html'); let $http; let $filter; -let $scope; let $state; let error; @@ -10,7 +9,6 @@ let parse; let prompt; let resource; let strings; -let status; let wait; let vm; @@ -137,6 +135,98 @@ function getJobTemplateDetails () { return { label, link, value, tooltip }; } +function getInventoryJobNameDetails () { + if (resource.model.get('type') !== 'inventory_update') { + return null; + } + + const jobArgs = resource.model.get('job_args'); + + if (!jobArgs) { + return null; + } + + let parsedJobArgs; + + try { + parsedJobArgs = JSON.parse(jobArgs); + } catch (e) { + return null; + } + + if (!Array.isArray(parsedJobArgs)) { + return null; + } + + const jobArgIndex = parsedJobArgs.indexOf('--inventory-id'); + const inventoryId = parsedJobArgs[jobArgIndex + 1]; + + if (jobArgIndex < 0) { + return null; + } + + if (!Number.isInteger(parseInt(inventoryId, 10))) { + return null; + } + + const name = resource.model.get('name'); + const id = resource.model.get('id'); + + const label = 'Name'; + const tooltip = strings.get('resourceTooltips.INVENTORY'); + const value = `${id} - ${$filter('sanitize')(name)}`; + const link = `/#/inventories/inventory/${inventoryId}`; + + return { label, link, tooltip, value }; +} + +function getInventorySourceDetails () { + if (!resource.model.has('summary_fields.inventory_source.source')) { + return null; + } + + const { source } = resource.model.get('summary_fields.inventory_source'); + const choices = mapChoices(resource.model.options('actions.GET.source.choices')); + + const label = 'Source'; + const value = choices[source]; + + return { label, value }; +} + +function getOverwriteDetails () { + if (!resource.model.has('overwrite')) { + return null; + } + + const label = 'Overwrite'; + const value = resource.model.get('overwrite'); + + return { label, value }; +} + +function getOverwriteVarsDetails () { + if (!resource.model.has('overwrite_vars')) { + return null; + } + + const label = 'Overwrite Vars'; + const value = resource.model.get('overwrite_vars'); + + return { label, value }; +} + +function getLicenseErrorDetails () { + if (!resource.model.has('license_error')) { + return null; + } + + const label = 'License Error'; + const value = resource.model.get('license_error'); + + return { label, value }; +} + function getLaunchedByDetails () { const createdBy = resource.model.get('summary_fields.created_by'); const jobTemplate = resource.model.get('summary_fields.job_template'); @@ -227,7 +317,7 @@ function getProjectUpdateDetails (updateId) { return null; } - const link = `/#/jobz/project/${jobId}`; + const link = `/#/jobs/project/${jobId}`; const tooltip = strings.get('resourceTooltips.PROJECT_UPDATE'); return { link, tooltip }; @@ -290,7 +380,7 @@ function getResultTracebackDetails () { } const limit = 150; - const label = 'Results Traceback'; + const label = 'Error Details'; const more = traceback; const less = $filter('limitTo')(more, limit); @@ -367,25 +457,48 @@ function getInstanceGroupDetails () { } function getJobTagDetails () { - const label = 'Job Tags'; - const value = resource.model.get('job_tags'); + const tagString = resource.model.get('job_tags'); - if (!value) { + let jobTags; + + if (tagString) { + jobTags = 
tagString.split(',').filter(tag => tag !== ''); + } else { + jobTags = []; + } + + if (jobTags.length < 1) { return null; } - return { label, value }; + const label = 'Job Tags'; + const more = false; + + const value = jobTags.map($filter('sanitize')); + + return { label, more, value }; } function getSkipTagDetails () { - const label = 'Skip Tags'; - const value = resource.model.get('skip_tags'); + const tagString = resource.model.get('skip_tags'); - if (!value) { + let skipTags; + + if (tagString) { + skipTags = tagString.split(',').filter(tag => tag !== ''); + } else { + skipTags = []; + } + + if (skipTags.length < 1) { return null; } - return { label, value }; + const label = 'Skip Tags'; + const more = false; + const value = skipTags.map($filter('sanitize')); + + return { label, more, value }; } function getExtraVarsDetails () { @@ -423,28 +536,51 @@ function createErrorHandler (path, action) { const hdr = strings.get('error.HEADER'); const msg = strings.get('error.CALL', { path, action, status: res.status }); - error($scope, res.data, res.status, null, { hdr, msg }); + error(null, res.data, res.status, null, { hdr, msg }); }; } const ELEMENT_LABELS = '#job-results-labels'; +const ELEMENT_JOB_TAGS = '#job-results-job-tags'; +const ELEMENT_SKIP_TAGS = '#job-results-skip-tags'; const ELEMENT_PROMPT_MODAL = '#prompt-modal'; -const LABELS_SLIDE_DISTANCE = 200; +const TAGS_SLIDE_DISTANCE = 200; function toggleLabels () { if (!this.labels.more) { - $(ELEMENT_LABELS).slideUp(LABELS_SLIDE_DISTANCE); + $(ELEMENT_LABELS).slideUp(TAGS_SLIDE_DISTANCE); this.labels.more = true; } else { - $(ELEMENT_LABELS).slideDown(LABELS_SLIDE_DISTANCE); + $(ELEMENT_LABELS).slideDown(TAGS_SLIDE_DISTANCE); this.labels.more = false; } } +function toggleJobTags () { + if (!this.jobTags.more) { + $(ELEMENT_JOB_TAGS).slideUp(TAGS_SLIDE_DISTANCE); + this.jobTags.more = true; + } else { + $(ELEMENT_JOB_TAGS).slideDown(TAGS_SLIDE_DISTANCE); + this.jobTags.more = false; + } +} + +function toggleSkipTags () { + if (!this.skipTags.more) { + $(ELEMENT_SKIP_TAGS).slideUp(TAGS_SLIDE_DISTANCE); + this.skipTags.more = true; + } else { + $(ELEMENT_SKIP_TAGS).slideDown(TAGS_SLIDE_DISTANCE); + this.skipTags.more = false; + } +} + function cancelJob () { - const actionText = strings.get('warnings.CANCEL_ACTION'); - const hdr = strings.get('warnings.CANCEL_HEADER'); - const warning = strings.get('warnings.CANCEL_BODY'); + const actionText = strings.get('cancelJob.CANCEL_JOB'); + const hdr = strings.get('cancelJob.HEADER'); + const warning = strings.get('cancelJob.SUBMIT_REQUEST'); + const cancelText = strings.get('cancelJob.RETURN'); const id = resource.model.get('id'); const name = $filter('sanitize')(resource.model.get('name')); @@ -467,13 +603,13 @@ function cancelJob () { }); }; - prompt({ hdr, resourceName, body, actionText, action }); + prompt({ hdr, resourceName, body, actionText, action, cancelText }); } function deleteJob () { const actionText = strings.get('DELETE'); - const hdr = strings.get('warnings.DELETE_HEADER'); - const warning = strings.get('warnings.DELETE_BODY'); + const hdr = strings.get('deleteResource.HEADER'); + const warning = strings.get('deleteResource.CONFIRM', 'job'); const id = resource.model.get('id'); const name = $filter('sanitize')(resource.model.get('name')); @@ -500,34 +636,33 @@ function deleteJob () { prompt({ hdr, resourceName, body, actionText, action }); } -function AtJobDetailsController ( +function JobDetailsController ( _$http_, _$filter_, _$state_, _error_, _prompt_, _strings_, - _status_, 
_wait_, - ParseTypeChange, - ParseVariableString, + _parse_, + { subscribe }, ) { vm = this || {}; $http = _$http_; $filter = _$filter_; $state = _$state_; - error = _error_; - parse = ParseVariableString; + + parse = _parse_; prompt = _prompt_; strings = _strings_; - status = _status_; wait = _wait_; - vm.init = _$scope_ => { - $scope = _$scope_; - resource = $scope.resource; // eslint-disable-line prefer-destructuring + let unsubscribe; + + vm.$onInit = () => { + resource = this.resource; // eslint-disable-line prefer-destructuring vm.status = getStatusDetails(); vm.started = getStartDetails(); @@ -554,56 +689,54 @@ function AtJobDetailsController ( vm.skipTags = getSkipTagDetails(); vm.extraVars = getExtraVarsDetails(); vm.labels = getLabelDetails(); + vm.inventoryJobName = getInventoryJobNameDetails(); + vm.inventorySource = getInventorySourceDetails(); + vm.overwrite = getOverwriteDetails(); + vm.overwriteVars = getOverwriteVarsDetails(); + vm.licenseError = getLicenseErrorDetails(); // Relaunch and Delete Components - vm.job = _.get(resource.model, 'model.GET', {}); + vm.job = angular.copy(_.get(resource.model, 'model.GET', {})); vm.canDelete = resource.model.get('summary_fields.user_capabilities.delete'); vm.cancelJob = cancelJob; vm.deleteJob = deleteJob; + vm.toggleJobTags = toggleJobTags; + vm.toggleSkipTags = toggleSkipTags; vm.toggleLabels = toggleLabels; - const observe = (getter, transform, key) => { - $scope.$watch(getter, value => { vm[key] = transform(value); }); - }; + unsubscribe = subscribe(({ status, started, finished, scm }) => { + vm.started = getStartDetails(started); + vm.finished = getFinishDetails(finished); + vm.projectUpdate = getProjectUpdateDetails(scm.id); + vm.projectStatus = getProjectStatusDetails(scm.status); + vm.status = getStatusDetails(status); + vm.job.status = status; + }); + }; - observe(status.getStarted, getStartDetails, 'started'); - observe(status.getJobStatus, getStatusDetails, 'status'); - observe(status.getFinished, getFinishDetails, 'finished'); - observe(status.getProjectUpdateId, getProjectUpdateDetails, 'projectUpdate'); - observe(status.getProjectStatus, getProjectStatusDetails, 'projectStatus'); + vm.$onDestroy = () => { + unsubscribe(); }; } -AtJobDetailsController.$inject = [ +JobDetailsController.$inject = [ '$http', '$filter', '$state', 'ProcessErrors', 'Prompt', 'JobStrings', - 'JobStatusService', 'Wait', - 'ParseTypeChange', 'ParseVariableString', + 'JobStatusService', ]; -function atJobDetailsLink (scope, el, attrs, controllers) { - const [atDetailsController] = controllers; - - atDetailsController.init(scope); -} - -function atJobDetails () { - return { - templateUrl, - restrict: 'E', - require: ['atJobDetails'], - controllerAs: 'vm', - link: atJobDetailsLink, - controller: AtJobDetailsController, - scope: { resource: '=', }, - }; -} - -export default atJobDetails; +export default { + templateUrl, + controller: JobDetailsController, + controllerAs: 'vm', + bindings: { + resource: '<' + }, +}; diff --git a/awx/ui/client/features/output/details.partial.html b/awx/ui/client/features/output/details.partial.html index d873c4fbb5..1bc8acc7ee 100644 --- a/awx/ui/client/features/output/details.partial.html +++ b/awx/ui/client/features/output/details.partial.html @@ -40,248 +40,328 @@
[details.partial.html hunk: the template markup was stripped during extraction and cannot be reconstructed. From the surviving text, the rewritten partial reorders the detail rows and adds rows for license error ({{ vm.licenseError.value }}), inventory source ({{ vm.inventorySource.value }}), overwrite ({{ vm.overwrite.value }}), and overwrite vars ({{ vm.overwriteVars.value }}) alongside the existing status, job explanation, started, finished, module args, result traceback, job type, launched by, playbook, forks, limit, verbosity, instance group, and label rows; long values such as the job explanation and result traceback keep their Show More / Show Less toggles.]
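The details component above injects JobStatusService as { subscribe } and relies on subscribe returning a deregistration function, which $onDestroy invokes. The service implementation is not part of this diff; the following is only a hedged sketch of the assumed publish/subscribe contract, with illustrative names:

// Hypothetical stand-in for the subscribe/dispatch contract used by
// JobDetailsController; the real JobStatusService is defined elsewhere.
function createStatusBus () {
    const callbacks = [];

    return {
        // Register a listener; returns a function that removes it again,
        // which is what the component's $onDestroy hook calls.
        subscribe (callback) {
            callbacks.push(callback);
            return () => {
                const index = callbacks.indexOf(callback);
                if (index !== -1) {
                    callbacks.splice(index, 1);
                }
            };
        },
        // Fan a state snapshot out to every listener, e.g. an object like
        // { status, started, finished, scm } as destructured in $onInit.
        dispatch (state) {
            callbacks.forEach(callback => callback(state));
        },
    };
}

This also matches how index.controller.js below calls status.dispatch() from its onStop hook so that the details pane picks up the final status, started, finished, and scm values.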
diff --git a/awx/ui/client/features/output/engine.service.js b/awx/ui/client/features/output/engine.service.js index 1f74a90c59..2e6371520f 100644 --- a/awx/ui/client/features/output/engine.service.js +++ b/awx/ui/client/features/output/engine.service.js @@ -38,6 +38,12 @@ function JobEventEngine ($q) { }; }; + this.setMinLine = min => { + if (min > this.lines.min) { + this.lines.min = min; + } + }; + this.getBatchFactors = size => { const factors = [1]; @@ -140,10 +146,6 @@ function JobEventEngine ($q) { this.renderFrame = events => this.hooks.onEventFrame(events) .then(() => { - if (this.scroll.isLocked()) { - this.scroll.scrollToBottom(); - } - if (this.isEnding()) { const lastEvents = this.page.emptyBuffer(); diff --git a/awx/ui/client/features/output/host-event/host-event-modal.partial.html b/awx/ui/client/features/output/host-event/host-event-modal.partial.html index a79b3cde68..47676df842 100644 --- a/awx/ui/client/features/output/host-event/host-event-modal.partial.html +++ b/awx/ui/client/features/output/host-event/host-event-modal.partial.html @@ -40,19 +40,19 @@ [hunk body lost in extraction]
diff --git a/awx/ui/client/features/output/host-event/host-event.controller.js b/awx/ui/client/features/output/host-event/host-event.controller.js index a688e59a64..280bf51818 100644 --- a/awx/ui/client/features/output/host-event/host-event.controller.js +++ b/awx/ui/client/features/output/host-event/host-event.controller.js @@ -57,7 +57,7 @@ function HostEventsController ( $scope.stdout = event[0];// eslint-disable-line prefer-destructuring } // instantiate Codemirror - if ($state.current.name === 'jobz.host-event.json') { + if ($state.current.name === 'output.host-event.json') { try { if (_.has(hostEvent.event_data, 'res')) { initCodeMirror( @@ -73,14 +73,14 @@ function HostEventsController ( // element with id HostEvent-codemirror is not the view // controlled by this instance of HostEventController } - } else if ($state.current.name === 'jobz.host-event.stdout') { + } else if ($state.current.name === 'output.host-event.stdout') { try { resize(); } catch (err) { // element with id HostEvent-codemirror is not the view // controlled by this instance of HostEventController } - } else if ($state.current.name === 'jobz.host-event.stderr') { + } else if ($state.current.name === 'output.host-event.stderr') { try { resize(); } catch (err) { @@ -98,11 +98,11 @@ function HostEventsController ( }); function resize () { - if ($state.current.name === 'jobz.host-event.json') { + if ($state.current.name === 'output.host-event.json') { const editor = $('.CodeMirror')[0].CodeMirror; const height = $('.modal-dialog').height() - $('.HostEvent-header').height() - $('.HostEvent-details').height() - $('.HostEvent-nav').height() - $('.HostEvent-controls').height() - 120; editor.setSize('100%', height); - } else if ($state.current.name === 'jobz.host-event.stdout' || $state.current.name === 'jobz.host-event.stderr') { + } else if ($state.current.name === 'output.host-event.stdout' || $state.current.name === 'output.host-event.stderr') { const height = $('.modal-dialog').height() - $('.HostEvent-header').height() - $('.HostEvent-details').height() - $('.HostEvent-nav').height() - $('.HostEvent-controls').height() - 120; $('.HostEvent-stdout').width('100%'); $('.HostEvent-stdout').height(height); @@ -154,7 +154,7 @@ function HostEventsController ( // Unbind the listener so it doesn't fire when we close the modal via navigation $('#HostEvent').off('hidden.bs.modal'); $('#HostEvent').modal('hide'); - $state.go('jobz'); + $state.go('output'); } $scope.init = init; $scope.init(); diff --git a/awx/ui/client/features/output/host-event/host-event.route.js b/awx/ui/client/features/output/host-event/host-event.route.js index 105881c778..aba9273327 100644 --- a/awx/ui/client/features/output/host-event/host-event.route.js +++ b/awx/ui/client/features/output/host-event/host-event.route.js @@ -25,7 +25,7 @@ HostEventResolve.$inject = [ ]; const hostEventModal = { - name: 'jobz.host-event', + name: 'output.host-event', url: '/host-event/:eventId', controller: 'HostEventsController', templateUrl: HostEventModalTemplate, @@ -40,7 +40,7 @@ }; const hostEventJson = { - name: 'jobz.host-event.json', + name: 'output.host-event.json', url: '/json', controller: 'HostEventsController', templateUrl: HostEventCodeMirrorTemplate, @@ -50,7 +50,7 @@ }; const hostEventStdout = { - name: 'jobz.host-event.stdout', + name: 'output.host-event.stdout', url: '/stdout', controller: 'HostEventsController', templateUrl: HostEventStdoutTemplate, @@ -60,7 +60,7 @@
const hostEventStderr = { - name: 'jobz.host-event.stderr', + name: 'output.host-event.stderr', url: '/stderr', controller: 'HostEventsController', templateUrl: HostEventStderrTemplate, diff --git a/awx/ui/client/features/output/host-event/host-event.service.js b/awx/ui/client/features/output/host-event/host-event.service.js index 4454bde27b..1e0588b329 100644 --- a/awx/ui/client/features/output/host-event/host-event.service.js +++ b/awx/ui/client/features/output/host-event/host-event.service.js @@ -5,13 +5,27 @@ function HostEventService ( $rootScope ) { this.getUrl = (id, type, params) => { - let url; + const queryString = this.stringifyParams(params); + + let baseUrl; + let related; + if (type === 'playbook') { - url = `${GetBasePath('jobs')}${id}/job_events/?${this.stringifyParams(params)}`; - } else if (type === 'command') { - url = `${GetBasePath('ad_hoc_commands')}${id}/events/?${this.stringifyParams(params)}`; + baseUrl = GetBasePath('jobs'); + related = 'job_events'; } - return url; + + if (type === 'command') { + baseUrl = GetBasePath('ad_hoc_commands'); + related = 'events'; + } + + if (type === 'project') { + baseUrl = GetBasePath('project_updates'); + related = 'events'; + } + + return `${baseUrl}${id}/${related}/?${queryString}`; }; // GET events related to a job run diff --git a/awx/ui/client/features/output/index.controller.js b/awx/ui/client/features/output/index.controller.js index 488d994dc2..935d0a7c79 100644 --- a/awx/ui/client/features/output/index.controller.js +++ b/awx/ui/client/features/output/index.controller.js @@ -9,6 +9,8 @@ let engine; let status; let vm; +let streaming; +let listeners = []; function JobsIndexController ( _resource_, @@ -38,9 +40,8 @@ function JobsIndexController ( vm.clear = devClear; // Expand/collapse - // vm.toggle = toggle; - // vm.expand = expand; - vm.isExpanded = true; + vm.expanded = false; + vm.toggleExpanded = toggleExpanded; // Panel vm.resource = resource; @@ -55,10 +56,6 @@ function JobsIndexController ( up: scrollPageUp }; - vm.fullscreen = { - isFullscreen: false - }; - render.requestAnimationFrame(() => init()); } @@ -72,7 +69,6 @@ function init () { }); render.init({ - get: () => resource.model.get(`related.${resource.related}.results`), compile: html => $compile(html)($scope), isStreamActive: engine.isActive, }); @@ -91,35 +87,58 @@ function init () { return shift().then(() => append(events, true)); }, onStart () { - status.resetCounts(); status.setJobStatus('running'); }, onStop () { + stopListening(); status.updateStats(); + status.dispatch(); } }); - $scope.$on(resource.ws.events, handleJobEvent); - $scope.$on(resource.ws.status, handleStatusEvent); - - if (!status.isRunning()) { - next(); - } + streaming = false; + return next().then(() => startListening()); } -function handleStatusEvent (scope, data) { +function stopListening () { + listeners.forEach(deregister => deregister()); + listeners = []; +} + +function startListening () { + stopListening(); + listeners.push($scope.$on(resource.ws.events, (scope, data) => handleJobEvent(data))); + listeners.push($scope.$on(resource.ws.status, (scope, data) => handleStatusEvent(data))); +} + +function handleStatusEvent (data) { status.pushStatusEvent(data); } -function handleJobEvent (scope, data) { - engine.pushJobEvent(data); - - status.pushJobEvent(data); +function handleJobEvent (data) { + streaming = streaming || attachToRunningJob(); + streaming.then(() => { + engine.pushJobEvent(data); + status.pushJobEvent(data); + }); } -function devClear (pageMode) { - init(pageMode); 
-    render.clear();
+function attachToRunningJob () {
+    if (!status.state.running) {
+        return $q.resolve();
+    }
+
+    return page.last()
+        .then(events => {
+            if (!events) {
+                return $q.resolve();
+            }
+
+            const minLine = 1 + Math.max(...events.map(event => event.end_line));
+
+            return render.clear()
+                .then(() => engine.setMinLine(minLine));
+        });
 }

 function next () {
@@ -253,11 +272,11 @@ function scrollEnd () {
             }

             return render.clear()
-                .then(() => append(events))
-                .then(() => {
-                    scroll.setScrollPosition(scroll.getScrollHeight());
-                    scroll.resume();
-                });
+                .then(() => append(events));
+        })
+        .then(() => {
+            scroll.setScrollPosition(scroll.getScrollHeight());
+            scroll.resume();
         });
 }

@@ -281,9 +300,13 @@ function scrollIsAtRest (isAtRest) {
     vm.scroll.showBackToTop = !isAtRest;
 }

-// function expand () {
-//     vm.toggle(parent, true);
-// }
+function toggleExpanded () {
+    vm.expanded = !vm.expanded;
+}
+
+function devClear () {
+    render.clear().then(() => init());
+}

 // function showHostDetails (id) {
 //     jobEvent.request('get', id)
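Note: the streamed path now resolves attachToRunningJob() before pushing anything to the
render engine. A small illustration of the minLine arithmetic (the semantics of
engine.setMinLine are assumed from its use here, not shown elsewhere in this diff):

    // if the last available page holds events ending at lines 10, 25 and 40:
    const minLine = 1 + Math.max(10, 25, 40); // 41
    // engine.setMinLine(41) -- streamed events ending at or below line 40 are
    // presumably treated as already rendered and skipped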
diff --git a/awx/ui/client/features/output/index.js b/awx/ui/client/features/output/index.js
index 3aaadc2553..be7bf49b33 100644
--- a/awx/ui/client/features/output/index.js
+++ b/awx/ui/client/features/output/index.js
@@ -8,10 +8,13 @@ import RenderService from '~features/output/render.service';
 import ScrollService from '~features/output/scroll.service';
 import EngineService from '~features/output/engine.service';
 import StatusService from '~features/output/status.service';
+import MessageService from '~features/output/message.service';
+import EventsApiService from '~features/output/api.events.service';
+import LegacyRedirect from '~features/output/legacy.route';

-import DetailsDirective from '~features/output/details.directive';
-import SearchDirective from '~features/output/search.directive';
-import StatsDirective from '~features/output/stats.directive';
+import DetailsComponent from '~features/output/details.component';
+import SearchComponent from '~features/output/search.component';
+import StatsComponent from '~features/output/stats.component';

 import HostEvent from './host-event/index';

 const Template = require('~features/output/index.view.html');
@@ -24,6 +27,7 @@
 const PAGE_SIZE = 50;
 const WS_PREFIX = 'ws';

 function resolveResource (
+    $state,
     Job,
     ProjectUpdate,
     AdHocCommand,
@@ -32,9 +36,12 @@ function resolveResource (
     InventoryUpdate,
     $stateParams,
     qs,
-    Wait
+    Wait,
+    eventsApi,
 ) {
-    const { id, type, job_event_search } = $stateParams; // eslint-disable-line camelcase
+    const { id, type, handleErrors } = $stateParams;
+    const { job_event_search } = $stateParams; // eslint-disable-line camelcase
+
     const { name, key } = getWebSocketResource(type);

     let Resource;
@@ -65,33 +72,45 @@ function resolveResource (
         return null;
     }

-    const params = { page_size: PAGE_SIZE, order_by: 'start_line' };
-    const config = { pageCache: PAGE_CACHE, pageLimit: PAGE_LIMIT, params };
+    const params = {
+        page_size: PAGE_SIZE,
+        order_by: 'start_line',
+    };
+
+    const config = {
+        params,
+        pageCache: PAGE_CACHE,
+        pageLimit: PAGE_LIMIT,
+    };

     if (job_event_search) { // eslint-disable-line camelcase
-        const queryParams = qs.encodeQuerysetObject(qs.decodeArr(job_event_search));
-
-        Object.assign(config.params, queryParams);
+        const query = qs.encodeQuerysetObject(qs.decodeArr(job_event_search));
+        Object.assign(config.params, query);
     }

-    Wait('start');
-    return new Resource(['get', 'options'], [id, id])
-        .then(model => {
-            const promises = [model.getStats()];
+    let model;

-            if (model.has('related.labels')) {
-                promises.push(model.extend('get', 'labels'));
+    Wait('start');
+    const resourcePromise = new Resource(['get', 'options'], [id, id])
+        .then(job => {
+            const endpoint = `${job.get('url')}${related}/`;
+            eventsApi.init(endpoint, config.params);
+
+            const promises = [job.getStats(), eventsApi.fetch()];
+
+            if (job.has('related.labels')) {
+                promises.push(job.extend('get', 'labels'));
             }

-            promises.push(model.extend('get', related, config));
-
+            model = job;
             return Promise.all(promises);
         })
-        .then(([stats, model]) => ({
+        .then(([stats, events]) => ({
             id,
             type,
             stats,
             model,
+            events,
             related,
             ws: {
                 events: `${WS_PREFIX}-${key}-${id}`,
@@ -102,8 +121,18 @@ function resolveResource (
                 size: PAGE_SIZE,
                 pageLimit: PAGE_LIMIT
             }
-        }))
-        .catch(({ data, status }) => qs.error(data, status))
+        }));
+
+    if (!handleErrors) {
+        return resourcePromise
+            .finally(() => Wait('stop'));
+    }
+
+    return resourcePromise
+        .catch(({ data, status }) => {
+            qs.error(data, status);
+            return $state.go($state.current, $state.params, { reload: true });
+        })
         .finally(() => Wait('stop'));
 }

@@ -125,12 +154,6 @@ function resolveWebSocketConnection ($stateParams, SocketService) {
     return SocketService.addStateResolve(state, id);
 }

-function resolveBreadcrumb (strings) {
-    return {
-        label: strings.get('state.TITLE')
-    };
-}
-
 function getWebSocketResource (type) {
     let name;
     let key;
@@ -163,24 +186,36 @@ function getWebSocketResource (type) {
     return { name, key };
 }

-function JobsRun ($stateRegistry) {
+function JobsRun ($stateRegistry, strings) {
+    const parent = 'jobs';
+    const ncyBreadcrumb = { parent, label: strings.get('state.BREADCRUMB_DEFAULT') };
+
     const state = {
-        name: 'jobz',
-        url: '/jobz/:type/:id?job_event_search',
-        route: '/jobz/:type/:id?job_event_search',
+        url: '/:type/:id?job_event_search',
+        name: 'output',
+        parent,
+        ncyBreadcrumb,
+        params: {
+            handleErrors: true,
+        },
         data: {
-            activityStream: true,
-            activityStreamTarget: 'jobs'
+            activityStream: false,
         },
         views: {
             '@': {
                 templateUrl: Template,
                 controller: Controller,
                 controllerAs: 'vm'
-            }
+            },
         },
         resolve: {
+            webSocketConnection: [
+                '$stateParams',
+                'SocketService',
+                resolveWebSocketConnection
+            ],
             resource: [
+                '$state',
                 'JobModel',
                 'ProjectUpdateModel',
                 'AdHocCommandModel',
@@ -190,16 +225,14 @@ function JobsRun ($stateRegistry) {
                 '$stateParams',
                 'QuerySet',
                 'Wait',
+                'JobEventsApiService',
                 resolveResource
             ],
-            ncyBreadcrumb: [
-                'JobStrings',
-                resolveBreadcrumb
-            ],
-            webSocketConnection: [
-                '$stateParams',
-                'SocketService',
-                resolveWebSocketConnection
+            breadcrumbLabel: [
+                'resource',
+                ({ model }) => {
+                    ncyBreadcrumb.label = `${model.get('id')} - ${model.get('name')}`;
+                }
             ],
         },
     };
@@ -207,7 +240,7 @@
     $stateRegistry.register(state);
 }

-JobsRun.$inject = ['$stateRegistry'];
+JobsRun.$inject = ['$stateRegistry', 'JobStrings'];

 angular
     .module(MODULE_NAME, [
@@ -221,9 +254,12 @@
     .service('JobRenderService', RenderService)
     .service('JobEventEngine', EngineService)
     .service('JobStatusService', StatusService)
-    .directive('atJobDetails', DetailsDirective)
-    .directive('atJobSearch', SearchDirective)
-    .directive('atJobStats', StatsDirective)
-    .run(JobsRun);
+    .service('JobMessageService', MessageService)
+    .service('JobEventsApiService', EventsApiService)
+    .component('atJobSearch', SearchComponent)
+    .component('atJobStats', StatsComponent)
+    .component('atJobDetails', DetailsComponent)
+    .run(JobsRun)
+    .run(LegacyRedirect);

 export default MODULE_NAME;
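Note: the new handleErrors route param decides where a failed resolve surfaces. By
default the route catches the rejection, reports it via qs.error, and reloads the
state; a caller that sets handleErrors to false receives the raw rejection from the
transition instead, which is what the search component later in this diff relies on:

    // assumed caller-side usage, mirroring reloadQueryset() in search.component.js
    const params = angular.copy($state.params);
    params.handleErrors = false;
    $state.transitionTo($state.current, params, { inherit: false, location: 'replace' })
        .catch(() => { /* restore the previous tags and surface a rejection message */ });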
diff --git a/awx/ui/client/features/output/index.view.html b/awx/ui/client/features/output/index.view.html
index b3bb6e95bc..cb932fafd3 100644
--- a/awx/ui/client/features/output/index.view.html
+++ b/awx/ui/client/features/output/index.view.html
@@ -1,64 +1,64 @@
[hunk body not recoverable: the template markup was stripped during extraction; only diff signs and stray text nodes survive, among them "{{ vm.title }}" and the "Back to Top" link]
diff --git a/awx/ui/client/features/output/jobs.strings.js b/awx/ui/client/features/output/jobs.strings.js
index f1f3ace11d..c581039172 100644
--- a/awx/ui/client/features/output/jobs.strings.js
+++ b/awx/ui/client/features/output/jobs.strings.js
@@ -5,15 +5,7 @@ function JobsStrings (BaseString) {
     const ns = this.jobs;

     ns.state = {
-        TITLE: t.s('JOBZ')
-    };
-
-    ns.warnings = {
-        CANCEL_ACTION: t.s('PROCEED'),
-        CANCEL_BODY: t.s('Are you sure you want to cancel this job?'),
-        CANCEL_HEADER: t.s('Cancel Job'),
-        DELETE_BODY: t.s('Are you sure you want to delete this job?'),
-        DELETE_HEADER: t.s('Delete Job'),
+        BREADCRUMB_DEFAULT: t.s('RESULTS'),
     };

     ns.status = {
diff --git a/awx/ui/client/features/output/legacy.route.js b/awx/ui/client/features/output/legacy.route.js
new file mode 100644
index 0000000000..4abf991dbb
--- /dev/null
+++ b/awx/ui/client/features/output/legacy.route.js
@@ -0,0 +1,55 @@
+function LegacyRedirect ($stateRegistry) {
+    const destination = 'output';
+    const routes = [
+        {
+            name: 'legacyJobResult',
+            url: '/jobs/:id?job_event_search',
+            redirectTo: (trans) => {
+                const {
+                    id,
+                    job_event_search // eslint-disable-line camelcase
+                } = trans.params();
+
+                return { state: destination, params: { type: 'playbook', id, job_event_search } };
+            }
+        },
+        {
+            name: 'legacyAdHocJobStdout',
+            url: '/ad_hoc_commands/:id',
+            redirectTo: (trans) => {
+                const { id } = trans.params();
+                return { state: destination, params: { type: 'command', id } };
+            }
+        },
+        {
+            name: 'legacyInventorySyncStdout',
+            url: '/inventory_sync/:id',
+            redirectTo: (trans) => {
+                const { id } = trans.params();
+                return { state: destination, params: { type: 'inventory', id } };
+            }
+        },
+        {
+            name: 'legacyManagementJobStdout',
+            url: '/management_jobs/:id',
+            redirectTo: (trans) => {
+                const { id } = trans.params();
+                return { state: destination, params: { type: 'system', id } };
+            }
+        },
+        {
+            name: 'legacyScmUpdateStdout',
+            url: '/scm_update/:id',
+            redirectTo: (trans) => {
+                const { id } = trans.params();
+                return { state: destination, params: { type: 'project', id } };
+            }
+        },
+    ];
+
+    routes.forEach(state => $stateRegistry.register(state));
+}
+
+LegacyRedirect.$inject = ['$stateRegistry'];
+
+export default LegacyRedirect;
diff --git a/awx/ui/client/features/output/message.service.js b/awx/ui/client/features/output/message.service.js
new file mode 100644
index 0000000000..7e15ff302f
--- /dev/null
+++ b/awx/ui/client/features/output/message.service.js
@@ -0,0 +1,41 @@
+function MessageService () {
+    const listeners = {};
+    const registry = {};
+
+    this.subscribe = (key, listener) => {
+        registry[key] = registry[key] || 0;
+
+        listeners[key] = listeners[key] || {};
+        listeners[key][registry[key]] = listener;
+
+        const unsubscribe = this.createCallback(key, registry[key]);
+
+        registry[key]++;
+
+        return unsubscribe;
+    };
+
+    this.dispatch = (key, data) => {
+        if (!listeners[key]) {
+            return;
+        }
+
+        const indices = Object.keys(listeners[key]);
+
+        for (let i = 0; i < indices.length; i++) {
+            listeners[key][indices[i]](data);
+        }
+    };
+
+    this.createCallback = (key, index) => {
+        const callback = () => {
+            if (listeners[key]) {
+                delete listeners[key][index];
+            }
+        };
+
+        return callback;
+    };
+}
+
+export default MessageService;
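Note: MessageService is a minimal topic-keyed pub/sub registry; subscribe() hands back
a deregistration callback instead of exposing listener indices. A usage sketch (the
topic name and payload here are illustrative, not taken from the diff):

    const unsubscribe = messageService.subscribe('status', state => {
        console.log(state.running);
    });

    messageService.dispatch('status', { running: true }); // invokes every 'status' listener
    unsubscribe(); // removes only this listener; other subscribers stay registered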
diff --git a/awx/ui/client/features/output/page.service.js b/awx/ui/client/features/output/page.service.js
index 5f19fe921a..854bda23b0 100644
--- a/awx/ui/client/features/output/page.service.js
+++ b/awx/ui/client/features/output/page.service.js
@@ -1,6 +1,7 @@
 function JobPageService ($q) {
     this.init = ({ resource }) => {
         this.resource = resource;
+        this.api = this.resource.events;

         this.page = {
             limit: this.resource.page.pageLimit,
@@ -125,8 +126,9 @@ function JobPageService ($q) {
         number = number || reference.state.current;

         reference.state.first = number;
-        reference.state.last = number;
         reference.state.current = number;
+        reference.state.last = number;
+
         reference.cache.splice(0, reference.cache.length);
     };

@@ -203,9 +205,9 @@ function JobPageService ($q) {
     this.next = () => {
         const reference = this.getActiveReference();
-        const config = this.buildRequestConfig(reference.state.last + 1);
+        const number = reference.state.last + 1;

-        return this.resource.model.goToPage(config)
+        return this.api.getPage(number)
             .then(data => {
                 if (!data || !data.results) {
                     return $q.resolve();
@@ -219,9 +221,8 @@ function JobPageService ($q) {
     this.previous = () => {
         const reference = this.getActiveReference();
-        const config = this.buildRequestConfig(reference.state.first - 1);

-        return this.resource.model.goToPage(config)
+        return this.api.getPage(reference.state.first - 1)
             .then(data => {
                 if (!data || !data.results) {
                     return $q.resolve();
@@ -233,45 +234,29 @@ function JobPageService ($q) {
         });
     };

-    this.last = () => {
-        const config = this.buildRequestConfig('last');
+    this.last = () => this.api.last()
+        .then(data => {
+            if (!data || !data.results || !data.results.length) {
+                return $q.resolve();
+            }

-        return this.resource.model.goToPage(config)
-            .then(data => {
-                if (!data || !data.results) {
-                    return $q.resolve();
-                }
+            this.emptyCache(data.page);
+            this.addPage(data.page, [], true);

-                this.emptyCache(data.page);
-                this.addPage(data.page, [], true);
+            return data.results;
+        });

-                return data.results;
-            });
-    };
+    this.first = () => this.api.first()
+        .then(data => {
+            if (!data || !data.results) {
+                return $q.resolve();
+            }

-    this.first = () => {
-        const config = this.buildRequestConfig('first');
+            this.emptyCache(data.page);
+            this.addPage(data.page, [], false);

-        return this.resource.model.goToPage(config)
-            .then(data => {
-                if (!data || !data.results) {
-                    return $q.resolve();
-                }
-
-                this.emptyCache(data.page);
-                this.addPage(data.page, [], false);
-
-                return data.results;
-            });
-    };
-
-    this.buildRequestConfig = number => ({
-        page: number,
-        related: this.resource.related,
-        params: {
-            order_by: 'start_line'
-        }
-    });
+            return data.results;
+        });

     this.getActiveReference = () => (this.isBookmarkSet() ?
         this.getReference(true) : this.getReference());
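Note: the pager now delegates all fetching to the injected events API. From its use
here, getPage/first/last are expected to resolve payloads shaped like
{ page: <number>, results: [<event>, ...] }, and a missing or empty payload
short-circuits to $q.resolve() so callers can treat "no data" and "no more pages"
identically:

    // assumed consumer-side pattern, as in index.controller.js
    page.next().then(events => {
        if (!events) return; // nothing further to fetch
        append(events);
    });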
diff --git a/awx/ui/client/features/output/render.service.js b/awx/ui/client/features/output/render.service.js
index aa86913133..08a3498fd2 100644
--- a/awx/ui/client/features/output/render.service.js
+++ b/awx/ui/client/features/output/render.service.js
@@ -30,11 +30,11 @@ const re = new RegExp(pattern);
 const hasAnsi = input => re.test(input);

 function JobRenderService ($q, $sce, $window) {
-    this.init = ({ compile, apply, isStreamActive }) => {
+    this.init = ({ compile, isStreamActive }) => {
         this.parent = null;
         this.record = {};
         this.el = $(ELEMENT_TBODY);
-        this.hooks = { isStreamActive, compile, apply };
+        this.hooks = { isStreamActive, compile };
     };

     this.sortByLineNumber = (a, b) => {
@@ -96,6 +96,20 @@ function JobRenderService ($q, $sce, $window) {
         return { html, count };
     };

+    this.isHostEvent = (event) => {
+        if (typeof event.host === 'number') {
+            return true;
+        }
+
+        if (event.type === 'project_update_event' &&
+            event.event !== 'runner_on_skipped' &&
+            event.event_data.host) {
+            return true;
+        }
+
+        return false;
+    };
+
     this.createRecord = (ln, lines, event) => {
         if (!event.uuid) {
             return null;
@@ -109,7 +123,7 @@ function JobRenderService ($q, $sce, $window) {
             start: event.start_line,
             end: event.end_line,
             isTruncated: (event.end_line - event.start_line) > lines.length,
-            isHost: typeof event.host === 'number'
+            isHost: this.isHostEvent(event),
         };

         if (event.parent_uuid) {
@@ -169,7 +183,7 @@ function JobRenderService ($q, $sce, $window) {
         }

         if (current.isHost) {
-            tdEvent = `${content}`;
+            tdEvent = `${content}`;
         }

         if (current.time && current.line === ln) {
@@ -225,8 +239,6 @@ function JobRenderService ($q, $sce, $window) {
         return list;
     };

-    this.getEvents = () => this.hooks.get();
-
     this.insert = (events, insert) => {
         const result = this.transformEventGroup(events);
         const html = this.trustHtml(result.html);
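Note: isHostEvent widens the old numeric-host check so project update events, which
carry their host in event_data rather than as a numeric id, are also rendered as host
events; skipped runner events are deliberately excluded. Illustrative event shapes
(not taken from the diff):

    isHostEvent({ host: 42 });                          // true
    isHostEvent({ type: 'project_update_event',
                  event: 'runner_on_ok',
                  event_data: { host: 'web01' } });     // true
    isHostEvent({ type: 'project_update_event',
                  event: 'runner_on_skipped',
                  event_data: { host: 'web01' } });     // false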
diff --git a/awx/ui/client/features/output/search.component.js b/awx/ui/client/features/output/search.component.js
new file mode 100644
index 0000000000..b1f40efd3e
--- /dev/null
+++ b/awx/ui/client/features/output/search.component.js
@@ -0,0 +1,124 @@
+const templateUrl = require('~features/output/search.partial.html');
+
+const searchReloadOptions = { inherit: false, location: 'replace' };
+const searchKeyExamples = ['id:>1', 'task:set', 'created:>=2000-01-01'];
+const searchKeyFields = ['changed', 'failed', 'host_name', 'stdout', 'task', 'role', 'playbook', 'play'];
+
+const PLACEHOLDER_RUNNING = 'CANNOT SEARCH RUNNING JOB';
+const PLACEHOLDER_DEFAULT = 'SEARCH';
+const REJECT_DEFAULT = 'Failed to update search results.';
+const REJECT_INVALID = 'Invalid search filter provided.';
+
+let $state;
+let qs;
+
+let vm;
+
+function toggleSearchKey () {
+    vm.key = !vm.key;
+}
+
+function getCurrentQueryset () {
+    const { job_event_search } = $state.params; // eslint-disable-line camelcase
+
+    return qs.decodeArr(job_event_search);
+}
+
+function getSearchTags (queryset) {
+    return qs.createSearchTagsFromQueryset(queryset)
+        .filter(tag => !tag.startsWith('event'))
+        .filter(tag => !tag.startsWith('-event'))
+        .filter(tag => !tag.startsWith('page_size'))
+        .filter(tag => !tag.startsWith('order_by'));
+}
+
+function reloadQueryset (queryset, rejection = REJECT_DEFAULT) {
+    const params = angular.copy($state.params);
+    const currentTags = vm.tags;
+
+    params.handleErrors = false;
+    params.job_event_search = qs.encodeArr(queryset);
+
+    vm.disabled = true;
+    vm.message = '';
+    vm.tags = getSearchTags(queryset);
+
+    return $state.transitionTo($state.current, params, searchReloadOptions)
+        .catch(() => {
+            vm.tags = currentTags;
+            vm.message = rejection;
+            vm.rejected = true;
+            vm.disabled = false;
+        });
+}
+
+function removeSearchTag (index) {
+    const searchTerm = vm.tags[index];
+
+    const currentQueryset = getCurrentQueryset();
+    const modifiedQueryset = qs.removeTermsFromQueryset(currentQueryset, searchTerm);
+
+    reloadQueryset(modifiedQueryset);
+}
+
+function submitSearch () {
+    const currentQueryset = getCurrentQueryset();
+
+    const searchInputQueryset = qs.getSearchInputQueryset(vm.value);
+    const modifiedQueryset = qs.mergeQueryset(currentQueryset, searchInputQueryset);
+
+    reloadQueryset(modifiedQueryset, REJECT_INVALID);
+}
+
+function clearSearch () {
+    reloadQueryset();
+}
+
+function JobSearchController (_$state_, _qs_, { subscribe }) {
+    $state = _$state_;
+    qs = _qs_;
+
+    vm = this || {};
+
+    vm.examples = searchKeyExamples;
+    vm.fields = searchKeyFields;
+    vm.relatedFields = [];
+    vm.placeholder = PLACEHOLDER_DEFAULT;
+
+    vm.clearSearch = clearSearch;
+    vm.toggleSearchKey = toggleSearchKey;
+    vm.removeSearchTag = removeSearchTag;
+    vm.submitSearch = submitSearch;
+
+    let unsubscribe;
+
+    vm.$onInit = () => {
+        vm.value = '';
+        vm.message = '';
+        vm.key = false;
+        vm.rejected = false;
+        vm.disabled = true;
+        vm.tags = getSearchTags(getCurrentQueryset());
+
+        unsubscribe = subscribe(({ running }) => {
+            vm.disabled = running;
+            vm.placeholder = running ? PLACEHOLDER_RUNNING : PLACEHOLDER_DEFAULT;
+        });
+    };
+
+    vm.$onDestroy = () => {
+        unsubscribe();
+    };
+}
+
+JobSearchController.$inject = [
+    '$state',
+    'QuerySet',
+    'JobStatusService',
+];
+
+export default {
+    templateUrl,
+    controller: JobSearchController,
+    controllerAs: 'vm',
+};
diff --git a/awx/ui/client/features/output/search.directive.js b/awx/ui/client/features/output/search.directive.js
deleted file mode 100644
index 0a688f92bb..0000000000
--- a/awx/ui/client/features/output/search.directive.js
+++ /dev/null
@@ -1,129 +0,0 @@
-const templateUrl = require('~features/output/search.partial.html');
-
-const searchReloadOptions = { reload: true, inherit: false };
-const searchKeyExamples = ['id:>1', 'task:set', 'created:>=2000-01-01'];
-const searchKeyFields = ['changed', 'failed', 'host_name', 'stdout', 'task', 'role', 'playbook', 'play'];
-
-const PLACEHOLDER_RUNNING = 'CANNOT SEARCH RUNNING JOB';
-const PLACEHOLDER_DEFAULT = 'SEARCH';
-
-let $state;
-let status;
-let qs;
-
-let vm;
-
-function toggleSearchKey () {
-    vm.key = !vm.key;
-}
-
-function getCurrentQueryset () {
-    const { job_event_search } = $state.params; // eslint-disable-line camelcase
-
-    return qs.decodeArr(job_event_search);
-}
-
-function getSearchTags (queryset) {
-    return qs.createSearchTagsFromQueryset(queryset)
-        .filter(tag => !tag.startsWith('event'))
-        .filter(tag => !tag.startsWith('-event'))
-        .filter(tag => !tag.startsWith('page_size'))
-        .filter(tag => !tag.startsWith('order_by'));
-}
-
-function removeSearchTag (index) {
-    const searchTerm = vm.tags[index];
-
-    const currentQueryset = getCurrentQueryset();
-    const modifiedQueryset = qs.removeTermsFromQueryset(currentQueryset, searchTerm);
-
-    vm.tags = getSearchTags(modifiedQueryset);
-    vm.disabled = true;
-
-    $state.params.job_event_search = qs.encodeArr(modifiedQueryset);
-    $state.transitionTo($state.current, $state.params, searchReloadOptions);
-}
-
-function submitSearch () {
-    const searchInputQueryset = qs.getSearchInputQueryset(vm.value);
-
-    const currentQueryset = getCurrentQueryset();
-    const modifiedQueryset = qs.mergeQueryset(currentQueryset, searchInputQueryset);
-
-    vm.tags = getSearchTags(modifiedQueryset);
-    vm.disabled = true;
-
-    $state.params.job_event_search = qs.encodeArr(modifiedQueryset);
-    $state.transitionTo($state.current, $state.params, searchReloadOptions);
-}
-
-function clearSearch () {
-    vm.tags = [];
-    vm.disabled = true;
-
-    $state.params.job_event_search = '';
-    $state.transitionTo($state.current, $state.params, searchReloadOptions);
-}
-
-function atJobSearchLink (scope, el, attrs, controllers) {
-    const [atJobSearchController] = controllers;
-
-    atJobSearchController.init(scope);
-}
-
-function AtJobSearchController (_$state_, _status_, _qs_) {
-    $state = _$state_;
-    status = _status_;
-    qs = _qs_;
-
-    vm = this || {};
-
-    vm.value = '';
-    vm.key = false;
-    vm.rejected = false;
-    vm.disabled = true;
-    vm.tags = getSearchTags(getCurrentQueryset());
-
-    vm.clearSearch = clearSearch;
-    vm.searchKeyExamples = searchKeyExamples;
-    vm.searchKeyFields = searchKeyFields;
-    vm.toggleSearchKey = toggleSearchKey;
-    vm.removeSearchTag = removeSearchTag;
-    vm.submitSearch = submitSearch;
-
-    vm.init = scope => {
-        vm.examples = scope.examples || searchKeyExamples;
-        vm.fields = scope.fields || searchKeyFields;
-        vm.placeholder = PLACEHOLDER_DEFAULT;
-        vm.relatedFields = scope.relatedFields || [];
-
-        scope.$watch(status.isRunning, value => {
-            vm.disabled = value;
-            vm.placeholder = value ? PLACEHOLDER_RUNNING : PLACEHOLDER_DEFAULT;
-        });
-    };
-}
-
-AtJobSearchController.$inject = [
-    '$state',
-    'JobStatusService',
-    'QuerySet',
-];
-
-function atJobSearch () {
-    return {
-        templateUrl,
-        restrict: 'E',
-        require: ['atJobSearch'],
-        controllerAs: 'vm',
-        link: atJobSearchLink,
-        controller: AtJobSearchController,
-        scope: {
-            examples: '=',
-            fields: '=',
-            relatedFields: '=',
-        },
-    };
-}
-
-export default atJobSearch;
diff --git a/awx/ui/client/features/output/search.partial.html b/awx/ui/client/features/output/search.partial.html
index d7acedc3d4..c209394815 100644
--- a/awx/ui/client/features/output/search.partial.html
+++ b/awx/ui/client/features/output/search.partial.html
@@ -28,6 +28,9 @@
[hunk body partially lost: the markup was stripped during extraction; the three added lines render the new "{{ vm.message }}" rejection notice]
diff --git a/awx/ui/client/features/output/stats.component.js b/awx/ui/client/features/output/stats.component.js
new file mode 100644
index 0000000000..a7a0ec61f3
--- /dev/null
+++ b/awx/ui/client/features/output/stats.component.js
@@ -0,0 +1,74 @@
+const templateUrl = require('~features/output/stats.partial.html');
+
+let vm;
+
+function createStatsBarTooltip (key, count) {
+    const label = `${key}`;
+    const badge = `${count}`;
+
+    return `${label}${badge}`;
+}
+
+function JobStatsController (strings, { subscribe }) {
+    vm = this || {};
+
+    let unsubscribe;
+
+    vm.tooltips = {
+        running: strings.get('status.RUNNING'),
+        unavailable: strings.get('status.UNAVAILABLE'),
+    };
+
+    vm.$onInit = () => {
+        vm.download = vm.resource.model.get('related.stdout');
+        vm.toggleStdoutFullscreenTooltip = strings.get('expandCollapse.EXPAND');
+
+        unsubscribe = subscribe(({ running, elapsed, counts, stats, hosts }) => {
+            vm.plays = counts.plays;
+            vm.tasks = counts.tasks;
+            vm.hosts = counts.hosts;
+            vm.elapsed = elapsed;
+            vm.running = running;
+            vm.setHostStatusCounts(stats, hosts);
+        });
+    };
+
+    vm.$onDestroy = () => {
+        unsubscribe();
+    };
+
+    vm.setHostStatusCounts = (stats, counts) => {
+        Object.keys(counts).forEach(key => {
+            const count = counts[key];
+            const statusBarElement = $(`.HostStatusBar-${key}`);
+
+            statusBarElement.css('flex', `${count} 0 auto`);
+
+            vm.tooltips[key] = createStatsBarTooltip(key, count);
+        });
+
+        vm.statsAreAvailable = stats;
+    };
+
+    vm.toggleExpanded = () => {
+        vm.expanded = !vm.expanded;
+        vm.toggleStdoutFullscreenTooltip = vm.expanded ?
+            strings.get('expandCollapse.COLLAPSE') :
+            strings.get('expandCollapse.EXPAND');
+    };
+}
+
+JobStatsController.$inject = [
+    'JobStrings',
+    'JobStatusService',
+];
+
+export default {
+    templateUrl,
+    controller: JobStatsController,
+    controllerAs: 'vm',
+    bindings: {
+        resource: '<',
+        expanded: '=',
+    },
+};
diff --git a/awx/ui/client/features/output/stats.directive.js b/awx/ui/client/features/output/stats.directive.js
deleted file mode 100644
index 789fc29de6..0000000000
--- a/awx/ui/client/features/output/stats.directive.js
+++ /dev/null
@@ -1,91 +0,0 @@
-const templateUrl = require('~features/output/stats.partial.html');
-
-let status;
-let strings;
-
-function createStatsBarTooltip (key, count) {
-    const label = `${key}`;
-    const badge = `${count}`;
-
-    return `${label}${badge}`;
-}
-
-function atJobStatsLink (scope, el, attrs, controllers) {
-    const [atJobStatsController] = controllers;
-
-    atJobStatsController.init(scope);
-}
-
-function AtJobStatsController (_strings_, _status_) {
-    status = _status_;
-    strings = _strings_;
-
-    const vm = this || {};
-
-    vm.tooltips = {
-        running: strings.get('status.RUNNING'),
-        unavailable: strings.get('status.UNAVAILABLE'),
-    };
-
-    vm.init = scope => {
-        const { resource } = scope;
-
-        vm.fullscreen = scope.fullscreen;
-
-        vm.download = resource.model.get('related.stdout');
-
-        vm.toggleStdoutFullscreenTooltip = strings.get('expandCollapse.EXPAND');
-
-        vm.setHostStatusCounts(status.getHostStatusCounts());
-
-        scope.$watch(status.getPlayCount, value => { vm.plays = value; });
-        scope.$watch(status.getTaskCount, value => { vm.tasks = value; });
-        scope.$watch(status.getElapsed, value => { vm.elapsed = value; });
-        scope.$watch(status.getHostCount, value => { vm.hosts = value; });
-        scope.$watch(status.isRunning, value => { vm.running = value; });
-
-        scope.$watchCollection(status.getHostStatusCounts, vm.setHostStatusCounts);
-    };
-
-    vm.setHostStatusCounts = counts => {
-        Object.keys(counts).forEach(key => {
-            const count = counts[key];
-            const statusBarElement = $(`.HostStatusBar-${key}`);
-
-            statusBarElement.css('flex', `${count} 0 auto`);
-
-            vm.tooltips[key] = createStatsBarTooltip(key, count);
-        });
-
-        vm.statsAreAvailable = Boolean(status.getStatsEvent());
-    };
-
-    vm.toggleFullscreen = () => {
-        vm.fullscreen.isFullscreen = !vm.fullscreen.isFullscreen;
-        vm.toggleStdoutFullscreenTooltip = vm.fullscreen.isFullscreen ?
-            strings.get('expandCollapse.COLLAPSE') :
-            strings.get('expandCollapse.EXPAND');
-    };
-}
-
-function atJobStats () {
-    return {
-        templateUrl,
-        restrict: 'E',
-        require: ['atJobStats'],
-        controllerAs: 'vm',
-        link: atJobStatsLink,
-        controller: [
-            'JobStrings',
-            'JobStatusService',
-            '$scope',
-            AtJobStatsController
-        ],
-        scope: {
-            resource: '=',
-            fullscreen: '='
-        }
-    };
-}
-
-export default atJobStats;
diff --git a/awx/ui/client/features/output/stats.partial.html b/awx/ui/client/features/output/stats.partial.html
index 70d980ed33..c55ba4bc8d 100644
--- a/awx/ui/client/features/output/stats.partial.html
+++ b/awx/ui/client/features/output/stats.partial.html
@@ -31,8 +31,8 @@
     aw-tool-tip="{{ vm.toggleStdoutFullscreenTooltip }}"
     data-tip-watch="vm.toggleStdoutFullscreenTooltip"
     data-placement="top"
-    ng-class="{'at-Input-button--active': vm.fullscreen.isFullscreen}"
-    ng-click="vm.toggleFullscreen()">
+    ng-class="{'at-Input-button--active': vm.expanded}"
+    ng-click="vm.toggleExpanded()">
@@ -42,40 +42,47 @@
[hunk body partially lost: element tags were stripped during extraction; the surviving attribute lines show each host status segment (ok, skipped, changed, failures, dark, running, unavailable) gaining tooltip-outer-class="HostStatusBar-tooltip" alongside its existing aw-tool-tip/data-tip-watch attributes]
diff --git a/awx/ui/client/features/output/status.service.js b/awx/ui/client/features/output/status.service.js
index 638d2ff399..29558c70df 100644
--- a/awx/ui/client/features/output/status.service.js
+++ b/awx/ui/client/features/output/status.service.js
@@ -6,77 +6,123 @@
 const TASK_START = 'playbook_on_task_start';
 const HOST_STATUS_KEYS = ['dark', 'failures', 'changed', 'ok', 'skipped'];
 const FINISHED = ['successful', 'failed', 'error'];

-let moment;
-
-function JobStatusService (_moment_) {
-    moment = _moment_;
+function JobStatusService (moment, message) {
+    this.dispatch = () => message.dispatch('status', this.state);
+    this.subscribe = listener => message.subscribe('status', listener);

     this.init = ({ resource }) => {
+        const { model } = resource;
+
+        this.created = model.get('created');
+        this.job = model.get('id');
+        this.jobType = model.get('type');
+        this.project = model.get('project');
+
+        this.active = false;
+        this.latestTime = null;
         this.counter = -1;
-        this.created = resource.model.get('created');
-        this.job = resource.model.get('id');
-        this.jobType = resource.model.get('type');
-        this.project = resource.model.get('project');
-        this.elapsed = resource.model.get('elapsed');
-        this.started = resource.model.get('started');
-        this.finished = resource.model.get('finished');
-        this.jobStatus = resource.model.get('status');
-        this.projectStatus = resource.model.get('summary_fields.project_update.status');
-        this.projectUpdateId = resource.model.get('summary_fields.project_update.id');
+        this.state = {
+            running: false,
+            stats: false,
+            counts: {
+                plays: 0,
+                tasks: 0,
+                hosts: 0,
+            },
+            hosts: {},
+            status: model.get('status'),
+            elapsed: model.get('elapsed'),
+            started: model.get('started'),
+            finished: model.get('finished'),
+            scm: {
+                id: model.get('summary_fields.project_update.id'),
+                status: model.get('summary_fields.project_update.status')
+            },
+        };

-        this.latestTime = null;
-        this.playCount = null;
-        this.taskCount = null;
-        this.hostCount = null;
-        this.active = false;
-        this.hostStatusCounts = {};
-
-        this.statsEvent = resource.stats;
+        this.setStatsEvent(resource.stats);
         this.updateStats();
+        this.updateRunningState();
+
+        this.dispatch();
     };

     this.pushStatusEvent = data => {
-        const isJobEvent = (this.job === data.unified_job_id);
-        const isProjectEvent = (this.project && (this.project === data.project_id));
+        const isJobStatusEvent = (this.job === data.unified_job_id);
+        const isProjectStatusEvent = (this.project && (this.project === data.project_id));

-        if (isJobEvent) {
+        if (isJobStatusEvent) {
             this.setJobStatus(data.status);
-        } else if (isProjectEvent) {
+            this.dispatch();
+        } else if (isProjectStatusEvent) {
             this.setProjectStatus(data.status);
             this.setProjectUpdateId(data.unified_job_id);
+            this.dispatch();
         }
     };

     this.pushJobEvent = data => {
         const isLatest = ((!this.counter) || (data.counter > this.counter));

+        let changed = false;
+
         if (!this.active && !(data.event === JOB_END)) {
             this.active = true;
             this.setJobStatus('running');
+            changed = true;
         }

         if (isLatest) {
             this.counter = data.counter;
             this.latestTime = data.created;
-            this.elapsed = moment(data.created).diff(this.created, 'seconds');
+            this.setElapsed(moment(data.created).diff(this.created, 'seconds'));
+            changed = true;
         }

         if (data.event === JOB_START) {
-            this.started = this.started || data.created;
+            this.setStarted(this.state.started || data.created);
+            changed = true;
         }

         if (data.event === PLAY_START) {
-            this.playCount++;
+            this.state.counts.plays++;
+            changed = true;
         }

         if (data.event === TASK_START) {
-            this.taskCount++;
+            this.state.counts.tasks++;
+            changed = true;
         }

         if (data.event === JOB_END) {
-            this.statsEvent = data;
+            this.setStatsEvent(data);
+            changed = true;
         }
+
+        if (changed) {
+            this.dispatch();
+        }
+    };
+
+    this.isExpectingStatsEvent = () => (this.jobType === 'job') ||
+        (this.jobType === 'project_update');
+
+    this.updateStats = () => {
+        this.updateHostCounts();
+
+        if (this.statsEvent) {
+            this.state.stats = true;
+            this.setFinished(this.statsEvent.created);
+            this.setJobStatus(this.statsEvent.failed ? 'failed' : 'successful');
+        }
+    };
+
+    this.updateRunningState = () => {
+        this.state.running = (Boolean(this.state.started) && !this.state.finished) ||
+            (this.state.status === 'running') ||
+            (this.state.status === 'pending') ||
+            (this.state.status === 'waiting');
     };

     this.updateHostCounts = () => {
@@ -98,74 +144,66 @@ function JobStatusService (_moment_) {
             });
         });

-        this.hostCount = countedHostNames.length;
-        this.hostStatusCounts = counts;
+        this.state.counts.hosts = countedHostNames.length;
+        this.setHostStatusCounts(counts);
     };

-    this.updateStats = () => {
-        this.updateHostCounts();
-
-        if (this.statsEvent) {
-            this.setFinished(this.statsEvent.created);
-            this.setJobStatus(this.statsEvent.failed ? 'failed' : 'successful');
-        }
-    };
-
-    this.isRunning = () => (Boolean(this.started) && !this.finished) ||
-        (this.jobStatus === 'running') ||
-        (this.jobStatus === 'pending') ||
-        (this.jobStatus === 'waiting');
-
-    this.isExpectingStatsEvent = () => (this.jobType === 'job') ||
-        (this.jobType === 'project_update');
-
-    this.getPlayCount = () => this.playCount;
-    this.getTaskCount = () => this.taskCount;
-    this.getHostCount = () => this.hostCount;
-    this.getHostStatusCounts = () => this.hostStatusCounts || {};
-    this.getJobStatus = () => this.jobStatus;
-    this.getProjectStatus = () => this.projectStatus;
-    this.getProjectUpdateId = () => this.projectUpdateId;
-    this.getElapsed = () => this.elapsed;
-    this.getStatsEvent = () => this.statsEvent;
-    this.getStarted = () => this.started;
-    this.getFinished = () => this.finished;
-
     this.setJobStatus = status => {
-        this.jobStatus = status;
+        this.state.status = status;

         if (!this.isExpectingStatsEvent() && _.includes(FINISHED, status)) {
             if (this.latestTime) {
                 this.setFinished(this.latestTime);
-
-                if (!this.started && this.elapsed) {
-                    this.started = moment(this.latestTime).subtract(this.elapsed, 'seconds');
+                if (!this.state.started && this.state.elapsed) {
+                    this.setStarted(moment(this.latestTime)
+                        .subtract(this.state.elapsed, 'seconds'));
                 }
             }
         }
+
+        this.updateRunningState();
+    };
+
+    this.setElapsed = elapsed => {
+        this.state.elapsed = elapsed;
+    };
+
+    this.setStarted = started => {
+        this.state.started = started;
+        this.updateRunningState();
     };

     this.setProjectStatus = status => {
-        this.projectStatus = status;
+        this.state.scm.status = status;
     };

     this.setProjectUpdateId = id => {
-        this.projectUpdateId = id;
+        this.state.scm.id = id;
     };

     this.setFinished = time => {
-        this.finished = time;
+        this.state.finished = time;
+        this.updateRunningState();
+    };
+
+    this.setStatsEvent = data => {
+        this.statsEvent = data;
+    };
+
+    this.setHostStatusCounts = counts => {
+        this.state.hosts = counts;
     };

     this.resetCounts = () => {
-        this.playCount = 0;
-        this.taskCount = 0;
-        this.hostCount = 0;
+        this.state.counts.plays = 0;
+        this.state.counts.tasks = 0;
+        this.state.counts.hosts = 0;
     };
 }

 JobStatusService.$inject = [
     'moment',
+    'JobMessageService',
 ];

 export default JobStatusService;
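Note: JobStatusService now publishes a full state snapshot through JobMessageService on
every change instead of exposing one getter per field; consumers destructure what they
need from each dispatch:

    // mirrors the subscriptions in stats.component.js and search.component.js
    statusService.subscribe(({ running, elapsed, counts }) => {
        vm.running = running;
        vm.elapsed = elapsed;
        vm.plays = counts.plays;
    });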
diff --git a/awx/ui/client/features/templates/index.controller.js b/awx/ui/client/features/templates/index.controller.js
index 5f305345a9..c859255070 100644
--- a/awx/ui/client/features/templates/index.controller.js
+++ b/awx/ui/client/features/templates/index.controller.js
@@ -11,9 +11,9 @@ function IndexTemplatesController ($scope, strings, dataset) {
 }

 IndexTemplatesController.$inject = [
-    '$scope',
+    '$scope',
     'TemplatesStrings',
-    'Dataset',
+    'Dataset'
 ];

 export default IndexTemplatesController;
diff --git a/awx/ui/client/features/templates/index.view.html b/awx/ui/client/features/templates/index.view.html
index 6323fd8129..346ab0c0f1 100644
--- a/awx/ui/client/features/templates/index.view.html
+++ b/awx/ui/client/features/templates/index.view.html
@@ -1,10 +1,21 @@
[hunk body not recoverable: the markup was stripped during extraction; the surviving text nodes show one removed panel heading and two added variants, each rendering "{{:: vm.strings.get('list.PANEL_TITLE') }}" and "{{ vm.count }}"]
diff --git a/awx/ui/client/features/templates/routes/portalModeTemplatesList.route.js b/awx/ui/client/features/templates/routes/portalModeTemplatesList.route.js
new file mode 100644
index 0000000000..55187b5a98
--- /dev/null
+++ b/awx/ui/client/features/templates/routes/portalModeTemplatesList.route.js
@@ -0,0 +1,79 @@
+import { templateUrl } from '../../../src/shared/template-url/template-url.factory';
+import { N_ } from '../../../src/i18n';
+import templatesListController from '../templatesList.controller';
+
+const templatesListTemplate = require('~features/templates/templatesList.view.html');
+
+export default {
+    name: 'portalMode',
+    url: '/portal',
+    reloadOnSearch: true,
+    ncyBreadcrumb: {
+        label: N_('MY VIEW')
+    },
+    data: {
+        socket: {
+            "groups": {
+                "jobs": ["status_changed"]
+            }
+        }
+    },
+    params: {
+        template_search: {
+            dynamic: true,
+            value: {
+                type: 'workflow_job_template,job_template',
+            },
+        }
+    },
+    searchPrefix: 'template',
+    views: {
+        '@': {
+            templateUrl: templateUrl('portal-mode/portal-mode-layout'),
+            controller: ['$scope', '$state',
+                function ($scope, $state) {
+                    $scope.filterUser = function () {
+                        $state.go('portalMode.myJobs');
+                    };
+
+                    $scope.filterAll = function () {
+                        $state.go('portalMode.allJobs');
+                    };
+                }
+            ]
+        },
+        'templates@portalMode': {
+            templateUrl: templatesListTemplate,
+            controller: templatesListController,
+            controllerAs: 'vm'
+        }
+    },
+    resolve: {
+        resolvedModels: [
+            'JobTemplateModel',
+            'WorkflowJobTemplateModel',
+            (JobTemplate, WorkflowJobTemplate) => {
+                const models = [
+                    new JobTemplate(['options']),
+                    new WorkflowJobTemplate(['options']),
+                ];
+                return Promise.all(models);
+            },
+        ],
+        Dataset: [
+            '$stateParams',
+            'Wait',
+            'GetBasePath',
+            'QuerySet',
+            ($stateParams, Wait, GetBasePath, qs) => {
+                const searchParam = $stateParams.template_search;
+                const searchPath = GetBasePath('unified_job_templates');
+
+                Wait('start');
+                return qs.search(searchPath, searchParam)
+                    .finally(() => Wait('stop'));
+            }
+        ],
+    }
+};
diff --git a/awx/ui/client/features/templates/templates.strings.js b/awx/ui/client/features/templates/templates.strings.js
index 896cfd87bd..3bb2d38b66 100644
--- a/awx/ui/client/features/templates/templates.strings.js
+++ b/awx/ui/client/features/templates/templates.strings.js
@@ -10,7 +10,6 @@ function TemplatesStrings (BaseString) {

     ns.list = {
         PANEL_TITLE: t.s('TEMPLATES'),
-        ADD_BUTTON_LABEL: t.s('ADD'),
         ADD_DD_JT_LABEL: t.s('Job Template'),
         ADD_DD_WF_LABEL: t.s('Workflow Template'),
         ROW_ITEM_LABEL_ACTIVITY: t.s('Activity'),
diff --git a/awx/ui/client/features/templates/templatesList.controller.js b/awx/ui/client/features/templates/templatesList.controller.js
index 6ab8712e40..5ad7388fd4 100644
--- a/awx/ui/client/features/templates/templatesList.controller.js
+++ b/awx/ui/client/features/templates/templatesList.controller.js
@@ -98,9 +98,9 @@ function ListTemplatesController(
         }

         if (isJobTemplate(template)) {
-            $state.go('jobTemplateSchedules', { id: template.id });
+            $state.go('templates.editJobTemplate.schedules', { job_template_id: template.id });
         } else if (isWorkflowTemplate(template)) {
-            $state.go('workflowJobTemplateSchedules', { id: template.id });
+            $state.go('templates.editWorkflowJobTemplate.schedules', { workflow_job_template_id: template.id });
         } else {
             Alert(strings.get('error.UNKNOWN'), strings.get('alert.UNKNOWN_SCHEDULE'));
         }
@@ -211,6 +211,7 @@ function ListTemplatesController(
             .then(model => model.extend('get', 'copy'))
             .then(model => {
                 const action = () => {
+                    $('#prompt-modal').modal('hide');
                     Wait('start');
                     model.copy()
                         .then(({ id }) => {
diff --git a/awx/ui/client/features/templates/templatesList.view.html b/awx/ui/client/features/templates/templatesList.view.html
index e68ddef8fb..5a00912663 100644
--- a/awx/ui/client/features/templates/templatesList.view.html
+++ b/awx/ui/client/features/templates/templatesList.view.html
@@ -14,13 +14,12 @@
[hunk body not recoverable: the template markup was stripped during extraction and nothing of substance survives]