diff --git a/awx/__init__.py b/awx/__init__.py index fa3e164092..eae7df87bd 100644 --- a/awx/__init__.py +++ b/awx/__init__.py @@ -36,7 +36,6 @@ else: from django.db.backends.utils import names_digest from django.db import connection - if HAS_DJANGO is True: # See upgrade blocker note in requirements/README.md diff --git a/awx/api/authentication.py b/awx/api/authentication.py index 52b3462005..48fc00db44 100644 --- a/awx/api/authentication.py +++ b/awx/api/authentication.py @@ -6,7 +6,7 @@ import logging # Django from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # Django REST Framework from rest_framework import authentication @@ -24,7 +24,7 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication): ret = super(LoggedBasicAuthentication, self).authenticate(request) if ret: username = ret[0].username if ret[0] else '' - logger.info(smart_text(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) + logger.info(smart_str(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) return ret def authenticate_header(self, request): @@ -45,7 +45,7 @@ class LoggedOAuth2Authentication(OAuth2Authentication): user, token = ret username = user.username if user else '' logger.info( - smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) + smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) ) setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x]) return ret diff --git a/awx/api/conf.py b/awx/api/conf.py index 00c712a064..fd1467cdde 100644 --- a/awx/api/conf.py +++ b/awx/api/conf.py @@ -1,6 +1,6 @@ # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation 
import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/api/exceptions.py b/awx/api/exceptions.py index 8f2c079583..406bd5e85f 100644 --- a/awx/api/exceptions.py +++ b/awx/api/exceptions.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ValidationError @@ -13,7 +13,7 @@ class ActiveJobConflict(ValidationError): def __init__(self, active_jobs): # During APIException.__init__(), Django Rest Framework - # turn everything in self.detail into string by using force_text. + # turn everything in self.detail into string by using force_str. # Declare detail afterwards circumvent this behavior. super(ActiveJobConflict, self).__init__() self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs} diff --git a/awx/api/fields.py b/awx/api/fields.py index 6f288f2bce..c84b6327f9 100644 --- a/awx/api/fields.py +++ b/awx/api/fields.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework @@ -28,13 +28,17 @@ class NullFieldMixin(object): return (is_empty_value, data) -class BooleanNullField(NullFieldMixin, serializers.NullBooleanField): +class BooleanNullField(NullFieldMixin, serializers.BooleanField): """ Custom boolean field that allows null and empty string as False values. 
""" + def __init__(self, **kwargs): + kwargs['allow_null'] = True + super().__init__(**kwargs) + def to_internal_value(self, data): - return bool(super(BooleanNullField, self).to_internal_value(data)) + return bool(super().to_internal_value(data)) class CharNullField(NullFieldMixin, serializers.CharField): @@ -47,7 +51,7 @@ class CharNullField(NullFieldMixin, serializers.CharField): super(CharNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(CharNullField, self).to_internal_value(data or u'') + return super(CharNullField, self).to_internal_value(data or '') class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): @@ -60,7 +64,7 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): super(ChoiceNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(ChoiceNullField, self).to_internal_value(data or u'') + return super(ChoiceNullField, self).to_internal_value(data or '') class VerbatimField(serializers.Field): diff --git a/awx/api/filters.py b/awx/api/filters.py index 138478135b..10fc488006 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -7,15 +7,15 @@ import json from functools import reduce # Django -from django.core.exceptions import FieldError, ValidationError +from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist from django.db import models -from django.db.models import Q, CharField, IntegerField, BooleanField -from django.db.models.fields import FieldDoesNotExist +from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey +from django.db.models.functions import Cast from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.encoding import force_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding 
import force_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ParseError, PermissionDenied @@ -185,16 +185,14 @@ class FieldLookupBackend(BaseFilterBackend): return (field_list[-1], new_lookup) def to_python_related(self, value): - value = force_text(value) + value = force_str(value) if value.lower() in ('none', 'null'): return None else: return int(value) def value_to_python_for_field(self, field, value): - if isinstance(field, models.NullBooleanField): - return to_python_boolean(value, allow_none=True) - elif isinstance(field, models.BooleanField): + if isinstance(field, models.BooleanField): return to_python_boolean(value) elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)): try: @@ -244,6 +242,8 @@ class FieldLookupBackend(BaseFilterBackend): new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name)) return value, new_lookups, needs_distinct else: + if isinstance(field, JSONField): + new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt') value = self.value_to_python_for_field(field, value) return value, new_lookup, needs_distinct @@ -293,7 +293,7 @@ class FieldLookupBackend(BaseFilterBackend): search_filter_relation = 'AND' values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values]) for value in values: - search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_text(value)) + search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value)) assert isinstance(new_keys, list) search_filters[search_value] = new_keys # by definition, search *only* joins across relations, @@ -325,6 +325,9 @@ class FieldLookupBackend(BaseFilterBackend): value, new_key, distinct = self.value_to_python(queryset.model, key, value) if distinct: needs_distinct = True + if '_as_txt' in new_key: + fname = next(item for item in new_key.split('__') if item.endswith('_as_txt')) + 
queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())}) if q_chain: chain_filters.append((q_not, new_key, value)) elif q_or: diff --git a/awx/api/generics.py b/awx/api/generics.py index 58ed5a9801..f2faec5c47 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -10,18 +10,18 @@ import urllib.parse # Django from django.conf import settings +from django.contrib.auth import views as auth_views +from django.contrib.contenttypes.models import ContentType from django.core.cache import cache +from django.core.exceptions import FieldDoesNotExist from django.db import connection -from django.db.models.fields import FieldDoesNotExist from django.db.models.fields.related import OneToOneRel from django.http import QueryDict from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.utils.safestring import mark_safe -from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ -from django.contrib.auth import views as auth_views +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied, AuthenticationFailed, ParseError, NotAcceptable, UnsupportedMediaType @@ -93,10 +93,10 @@ class LoggedLoginView(auth_views.LoginView): ret = super(LoggedLoginView, self).post(request, *args, **kwargs) current_user = getattr(request, 'user', None) if request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) + logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) ret.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) 
+ current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) @@ -104,7 +104,7 @@ class LoggedLoginView(auth_views.LoginView): return ret else: if 'username' in self.request.POST: - logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) + logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) ret.status_code = 401 return ret @@ -392,8 +392,8 @@ class GenericAPIView(generics.GenericAPIView, APIView): if hasattr(self.model._meta, "verbose_name"): d.update( { - 'model_verbose_name': smart_text(self.model._meta.verbose_name), - 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural), + 'model_verbose_name': smart_str(self.model._meta.verbose_name), + 'model_verbose_name_plural': smart_str(self.model._meta.verbose_name_plural), } ) serializer = self.get_serializer() @@ -524,8 +524,8 @@ class SubListAPIView(ParentMixin, ListAPIView): d = super(SubListAPIView, self).get_description_context() d.update( { - 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name), - 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural), + 'parent_model_verbose_name': smart_str(self.parent_model._meta.verbose_name), + 'parent_model_verbose_name_plural': smart_str(self.parent_model._meta.verbose_name_plural), } ) return d diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 5b8cf2ccb3..b4c75d09cb 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -6,11 +6,12 @@ from uuid import UUID # Django from django.core.exceptions import PermissionDenied +from django.db.models 
import JSONField from django.db.models.fields import PositiveIntegerField, BooleanField from django.db.models.fields.related import ForeignKey from django.http import Http404 -from django.utils.encoding import force_text, smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import force_str, smart_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import exceptions @@ -22,7 +23,7 @@ from rest_framework.request import clone_request # AWX from awx.api.fields import ChoiceNullField -from awx.main.fields import JSONField, ImplicitRoleField +from awx.main.fields import ImplicitRoleField from awx.main.models import NotificationTemplate from awx.main.utils.execution_environments import get_default_pod_spec @@ -53,7 +54,7 @@ class Metadata(metadata.SimpleMetadata): for attr in text_attrs: value = getattr(field, attr, None) if value is not None and value != '': - field_info[attr] = force_text(value, strings_only=True) + field_info[attr] = force_str(value, strings_only=True) placeholder = getattr(field, 'placeholder', serializers.empty) if placeholder is not serializers.empty: @@ -77,7 +78,7 @@ class Metadata(metadata.SimpleMetadata): } if field.field_name in field_help_text: opts = serializer.Meta.model._meta.concrete_model._meta - verbose_name = smart_text(opts.verbose_name) + verbose_name = smart_str(opts.verbose_name) field_info['help_text'] = field_help_text[field.field_name].format(verbose_name) if field.field_name == 'type': diff --git a/awx/api/metrics.py b/awx/api/metrics.py index 5ed3dcabef..de66c2de33 100644 --- a/awx/api/metrics.py +++ b/awx/api/metrics.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import MetricsView -urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')] +urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')] __all__ = ['urls'] diff --git a/awx/api/parsers.py b/awx/api/parsers.py index ce18bce0af..ac06a35b81 100644 --- a/awx/api/parsers.py +++ b/awx/api/parsers.py @@ -5,7 +5,7 @@ import json # Django from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import parsers diff --git a/awx/api/serializers.py b/awx/api/serializers.py index a4ee2ef7f0..c7f7b8a242 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -25,8 +25,8 @@ from django.contrib.auth.password_validation import validate_password as django_ from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist, ValidationError as DjangoValidationError from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from django.utils.text import capfirst from django.utils.timezone import now from django.utils.functional import cached_property @@ -97,7 +97,7 @@ from awx.main.models import ( ) from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator -from awx.main.fields import ImplicitRoleField, JSONBField +from awx.main.fields import ImplicitRoleField from awx.main.utils import ( get_type_for_model, get_model_for_type, @@ -357,7 +357,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl } choices = [] for t in self.get_types(): - name = 
_(type_name_map.get(t, force_text(get_model_for_type(t)._meta.verbose_name).title())) + name = _(type_name_map.get(t, force_str(get_model_for_type(t)._meta.verbose_name).title())) choices.append((t, name)) return choices @@ -645,7 +645,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl v2.extend(e) else: v2.append(e) - d[k] = list(map(force_text, v2)) + d[k] = list(map(force_str, v2)) raise ValidationError(d) return attrs @@ -1724,7 +1724,7 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables): def validate_host_filter(self, host_filter): if host_filter: try: - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): if match == 'exact': # __exact is allowed continue @@ -1853,11 +1853,11 @@ class HostSerializer(BaseSerializerWithVariables): if port < 1 or port > 65535: raise ValueError except ValueError: - raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port)) + raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_str(port)) return name, port def validate_name(self, value): - name = force_text(value or '') + name = force_str(value or '') # Validate here only, update in main validate method. 
host, port = self._get_host_port_from_name(name) return value @@ -1871,13 +1871,13 @@ class HostSerializer(BaseSerializerWithVariables): return vars_validate_or_raise(value) def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') host, port = self._get_host_port_from_name(name) if port: attrs['name'] = host - variables = force_text(attrs.get('variables', self.instance and self.instance.variables or '')) + variables = force_str(attrs.get('variables', self.instance and self.instance.variables or '')) vars_dict = parse_yaml_or_json(variables) vars_dict['ansible_ssh_port'] = port attrs['variables'] = json.dumps(vars_dict) @@ -1950,7 +1950,7 @@ class GroupSerializer(BaseSerializerWithVariables): return res def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') if Host.objects.filter(name=name, inventory=inventory).exists(): raise serializers.ValidationError(_('A Host with that name already exists.')) @@ -2844,8 +2844,8 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer): if not project: raise serializers.ValidationError({'project': _('This field is required.')}) playbook_not_found = bool( - (project and project.scm_type and (not project.allow_override) and playbook and force_text(playbook) not in project.playbook_files) - or (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual + (project and project.scm_type and (not project.allow_override) and playbook and force_str(playbook) not in project.playbook_files) + or (project and not project.scm_type and playbook and 
force_str(playbook) not in project.playbooks) # manual ) if playbook_not_found: raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) @@ -3634,7 +3634,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) - diff_mode = serializers.NullBooleanField(required=False, default=None) + diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None) verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES) exclude_errors = () @@ -5104,7 +5104,7 @@ class ActivityStreamSerializer(BaseSerializer): try: return json.loads(obj.changes) except Exception: - logger.warn("Error deserializing activity stream json changes") + logger.warning("Error deserializing activity stream json changes") return {} def get_object_association(self, obj): diff --git a/awx/api/urls/activity_stream.py b/awx/api/urls/activity_stream.py index 907f856aa8..36b64a2de5 100644 --- a/awx/api/urls/activity_stream.py +++ b/awx/api/urls/activity_stream.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ActivityStreamList, ActivityStreamDetail urls = [ - url(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), - url(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), + re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command.py b/awx/api/urls/ad_hoc_command.py index cc1277adcf..9c723e762b 100644 --- a/awx/api/urls/ad_hoc_command.py +++ b/awx/api/urls/ad_hoc_command.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( AdHocCommandList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), - url(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), - url(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), - url(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), - url(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), - url(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), + re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), + 
re_path(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), + re_path(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), + re_path(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command_event.py b/awx/api/urls/ad_hoc_command_event.py index a38d4fc165..cbfa72b8bc 100644 --- a/awx/api/urls/ad_hoc_command_event.py +++ b/awx/api/urls/ad_hoc_command_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import AdHocCommandEventDetail urls = [ - url(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential.py b/awx/api/urls/credential.py index e041e08477..f2ae8535b9 100644 --- a/awx/api/urls/credential.py +++ b/awx/api/urls/credential.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( CredentialList, @@ -18,16 +18,16 @@ from awx.api.views import ( urls = [ - url(r'^$', CredentialList.as_view(), name='credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), - url(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), - url(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), - url(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), - url(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), - url(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), - url(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), - url(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), + re_path(r'^$', CredentialList.as_view(), name='credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), + re_path(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), + re_path(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), + re_path(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), + re_path(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), + 
re_path(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), + re_path(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_input_source.py b/awx/api/urls/credential_input_source.py index 6b365e5840..7ac4b3c4f4 100644 --- a/awx/api/urls/credential_input_source.py +++ b/awx/api/urls/credential_input_source.py @@ -1,14 +1,14 @@ # Copyright (c) 2019 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList urls = [ - url(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), - url(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), + re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), + re_path(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_type.py b/awx/api/urls/credential_type.py index ab799c8c54..8307126351 100644 --- a/awx/api/urls/credential_type.py +++ b/awx/api/urls/credential_type.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest urls = [ - url(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), - url(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), - url(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'), - url(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), + re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), + re_path(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), + re_path(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'), + re_path(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/execution_environments.py b/awx/api/urls/execution_environments.py index 99b9cb3ddc..2b4f325b1a 100644 --- a/awx/api/urls/execution_environments.py +++ b/awx/api/urls/execution_environments.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ExecutionEnvironmentList, @@ -10,11 +10,11 @@ from awx.api.views import ( urls = [ - url(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), - url(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), - url(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), 
name='execution_environment_job_template_list'), - url(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), - url(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), + re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), + re_path(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), + re_path(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), + re_path(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), + re_path(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/group.py b/awx/api/urls/group.py index 416479def6..18107cd22a 100644 --- a/awx/api/urls/group.py +++ b/awx/api/urls/group.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( GroupList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', GroupList.as_view(), name='group_list'), - url(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), - url(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), - url(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), - url(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), - url(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), - url(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), - url(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), - url(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), + re_path(r'^$', GroupList.as_view(), name='group_list'), + re_path(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), + re_path(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), + re_path(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), + re_path(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), + re_path(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', 
GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/host.py b/awx/api/urls/host.py index d06608bf86..36bbb70da4 100644 --- a/awx/api/urls/host.py +++ b/awx/api/urls/host.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( HostList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', HostList.as_view(), name='host_list'), - url(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), - url(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), - url(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), - url(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), - url(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), - url(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), - url(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), 
name='host_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), + re_path(r'^$', HostList.as_view(), name='host_list'), + re_path(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), + re_path(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), + re_path(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), + re_path(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), + re_path(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance.py b/awx/api/urls/instance.py index dd75db2b21..6c70e285c5 100644 --- a/awx/api/urls/instance.py +++ b/awx/api/urls/instance.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck urls = [ - url(r'^$', InstanceList.as_view(), name='instance_list'), - url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), - url(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), + re_path(r'^$', InstanceList.as_view(), name='instance_list'), + re_path(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), + re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance_group.py b/awx/api/urls/instance_group.py index 3bb9eabefc..de8cf8b52a 100644 --- a/awx/api/urls/instance_group.py +++ b/awx/api/urls/instance_group.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList urls = [ - url(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), - url(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), + re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), + re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory.py b/awx/api/urls/inventory.py index d323be9450..863591ba60 100644 --- a/awx/api/urls/inventory.py +++ b/awx/api/urls/inventory.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryList, @@ -26,24 +26,24 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryList.as_view(), name='inventory_list'), - url(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), - url(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), - url(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), - url(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), - url(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), - url(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), - url(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), - url(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), - url(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), - url(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), - url(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), - url(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), - url(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), - url(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), + 
re_path(r'^$', InventoryList.as_view(), name='inventory_list'), + re_path(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), + re_path(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), + re_path(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), + re_path(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), + re_path(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), + re_path(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory_source.py b/awx/api/urls/inventory_source.py index 
02cf30c408..120f5022c6 100644 --- a/awx/api/urls/inventory_source.py +++ b/awx/api/urls/inventory_source.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventorySourceList, @@ -20,26 +20,26 @@ from awx.api.views import ( urls = [ - url(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), - url(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), - url(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), - url(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), - url(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), - url(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), - url(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), - url( + re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), + re_path(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), + re_path(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), + re_path(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), + 
re_path(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), + re_path(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(), name='inventory_source_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(), name='inventory_source_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(), name='inventory_source_notification_templates_success_list', diff --git a/awx/api/urls/inventory_update.py b/awx/api/urls/inventory_update.py index 0279f8c915..6855561a72 100644 --- a/awx/api/urls/inventory_update.py +++ b/awx/api/urls/inventory_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), - url(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), - url(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), - url(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), - url(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), + re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), + re_path(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), + re_path(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), + re_path(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), + re_path(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job.py b/awx/api/urls/job.py index de45cba9aa..bea61a48a0 100644 --- a/awx/api/urls/job.py +++ b/awx/api/urls/job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( JobList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', JobList.as_view(), name='job_list'), - url(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), - url(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), - url(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), - url(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), - url(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), - url(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), - url(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), - url(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), - url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), + re_path(r'^$', JobList.as_view(), name='job_list'), + re_path(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), + re_path(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), + re_path(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), + 
re_path(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), + re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_event.py b/awx/api/urls/job_event.py index 94f3b33929..1a5c681124 100644 --- a/awx/api/urls/job_event.py +++ b/awx/api/urls/job_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobEventDetail, JobEventChildrenList urls = [ - url(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), - url(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), + re_path(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), + re_path(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_host_summary.py b/awx/api/urls/job_host_summary.py index 8252a982d0..c736c59a17 100644 --- a/awx/api/urls/job_host_summary.py +++ b/awx/api/urls/job_host_summary.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobHostSummaryDetail -urls = [url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] +urls = [re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 967f821fd1..b73be7e913 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( JobTemplateList, @@ -25,36 +25,36 @@ from awx.api.views import ( urls = [ - url(r'^$', JobTemplateList.as_view(), name='job_template_list'), - url(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), - url(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), - url(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), - url(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), - url(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), - url(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'), - url( + re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'), + re_path(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), + re_path(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), + re_path(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), + re_path(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), 
name='job_template_activity_stream_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(), name='job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(), name='job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(), name='job_template_notification_templates_success_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), - url(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), - url(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), - url(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), - url(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), + re_path(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), + re_path(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), + re_path(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/label.py b/awx/api/urls/label.py index 368fca7aa8..5fc0a4f629 100644 --- a/awx/api/urls/label.py +++ b/awx/api/urls/label.py 
@@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import LabelList, LabelDetail -urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] +urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/notification.py b/awx/api/urls/notification.py index 960318ee15..2ed27e7c3d 100644 --- a/awx/api/urls/notification.py +++ b/awx/api/urls/notification.py @@ -1,11 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import NotificationList, NotificationDetail -urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')] +urls = [ + re_path(r'^$', NotificationList.as_view(), name='notification_list'), + re_path(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'), +] __all__ = ['urls'] diff --git a/awx/api/urls/notification_template.py b/awx/api/urls/notification_template.py index 8473878922..764c571868 100644 --- a/awx/api/urls/notification_template.py +++ b/awx/api/urls/notification_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( NotificationTemplateList, @@ -13,11 +13,11 @@ from awx.api.views import ( urls = [ - url(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), - url(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), - url(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), - url(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), - url(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), + re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), + re_path(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), + re_path(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), + re_path(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), + re_path(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2.py b/awx/api/urls/oauth2.py index 720ba2416f..f613b34a0b 100644 --- a/awx/api/urls/oauth2.py +++ b/awx/api/urls/oauth2.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OAuth2ApplicationList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), - url(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), - url(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), + re_path(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), + re_path(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2_root.py b/awx/api/urls/oauth2_root.py index 61e1e15850..d15d14825e 100644 --- a/awx/api/urls/oauth2_root.py +++ b/awx/api/urls/oauth2_root.py @@ -4,7 +4,7 @@ from datetime import timedelta from 
django.utils.timezone import now from django.conf import settings -from django.conf.urls import url +from django.urls import re_path from oauthlib import oauth2 from oauth2_provider import views @@ -35,10 +35,10 @@ class TokenView(views.TokenView): urls = [ - url(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), - url(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), - url(r"^token/$", TokenView.as_view(), name="token"), - url(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), + re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), + re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), + re_path(r"^token/$", TokenView.as_view(), name="token"), + re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), ] diff --git a/awx/api/urls/organization.py b/awx/api/urls/organization.py index 9eac94da48..c841a53181 100644 --- a/awx/api/urls/organization.py +++ b/awx/api/urls/organization.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OrganizationList, @@ -30,44 +30,44 @@ from awx.api.views import ( urls = [ - url(r'^$', OrganizationList.as_view(), name='organization_list'), - url(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), - url(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), - url(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), - url(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), - url(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), - url(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), - url(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), - url(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), - url(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), - url(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), - url(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), - url( + re_path(r'^$', OrganizationList.as_view(), name='organization_list'), + re_path(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), + re_path(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), + re_path(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), + 
re_path(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), + re_path(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), + re_path(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), + re_path(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), + re_path(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), + re_path(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(), name='organization_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(), name='organization_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(), name='organization_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(), name='organization_notification_templates_approvals_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), - 
url(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), - url(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), - url(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), + re_path(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project.py b/awx/api/urls/project.py index ea356a651b..0ce6cacecb 100644 --- a/awx/api/urls/project.py +++ b/awx/api/urls/project.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectList, @@ -24,30 +24,32 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectList.as_view(), name='project_list'), - url(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), - url(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), - url(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), - url(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), - url(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), - url(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), - url(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), - url(r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'), - url( + re_path(r'^$', ProjectList.as_view(), name='project_list'), + re_path(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), + re_path(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), + re_path(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), + re_path(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), + re_path(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), + re_path(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), + re_path(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), + 
re_path(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), + re_path( + r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list' + ), + re_path( r'^(?P[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(), name='project_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(), name='project_notification_templates_started_list', ), - url(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), - url(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), + re_path(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), + re_path(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project_update.py b/awx/api/urls/project_update.py index 03356602ca..fc3e2d2d52 100644 --- a/awx/api/urls/project_update.py +++ b/awx/api/urls/project_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), - url(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), - url(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), - url(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), - url(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), + re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), + re_path(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), + re_path(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), + re_path(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), + re_path(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/role.py b/awx/api/urls/role.py index 2b6aed19b5..0ee306ef0c 100644 --- a/awx/api/urls/role.py +++ b/awx/api/urls/role.py @@ -1,18 +1,18 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList urls = [ - url(r'^$', RoleList.as_view(), name='role_list'), - url(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), - url(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), - url(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), - url(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), - url(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), + re_path(r'^$', RoleList.as_view(), name='role_list'), + re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), + re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), + re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), + re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), + re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/schedule.py b/awx/api/urls/schedule.py index c3c91f7ae0..87907eda8f 100644 --- a/awx/api/urls/schedule.py +++ b/awx/api/urls/schedule.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList urls = [ - url(r'^$', ScheduleList.as_view(), name='schedule_list'), - url(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), + re_path(r'^$', ScheduleList.as_view(), name='schedule_list'), + re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job.py b/awx/api/urls/system_job.py index 8b060a2d85..891a697006 100644 --- a/awx/api/urls/system_job.py +++ b/awx/api/urls/system_job.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList urls = [ - url(r'^$', SystemJobList.as_view(), name='system_job_list'), - url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), - url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), - url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), - url(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), + re_path(r'^$', SystemJobList.as_view(), name='system_job_list'), + re_path(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), + re_path(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), + re_path(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), + re_path(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job_template.py b/awx/api/urls/system_job_template.py index 532d35d97a..e0b68d3135 100644 --- a/awx/api/urls/system_job_template.py +++ b/awx/api/urls/system_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( SystemJobTemplateList, @@ -16,22 +16,22 @@ from awx.api.views import ( urls = [ - url(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), - url(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), - url(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), - url( + re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), + re_path(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(), name='system_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(), name='system_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(), name='system_job_template_notification_templates_success_list', diff --git a/awx/api/urls/team.py b/awx/api/urls/team.py index 185c86e42a..311a2e8009 100644 --- a/awx/api/urls/team.py +++ b/awx/api/urls/team.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( TeamList, @@ -17,15 +17,15 @@ from awx.api.views import ( urls = [ - url(r'^$', TeamList.as_view(), name='team_list'), - url(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), - url(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), - url(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), - url(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), - url(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), + re_path(r'^$', TeamList.as_view(), name='team_list'), + re_path(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), + re_path(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), + re_path(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), + re_path(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 017fa307aa..c092696d24 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals from django.conf import settings 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.generics import LoggedLoginView, LoggedLogoutView from awx.api.views import ( @@ -74,78 +74,78 @@ from .workflow_approval import urls as workflow_approval_urls v2_urls = [ - url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), - url(r'^credential_types/', include(credential_type_urls)), - url(r'^credential_input_sources/', include(credential_input_source_urls)), - url(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), - url(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), - url(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), - url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), - url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^', include(oauth2_urls)), - url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), - url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), - url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), - url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), - url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), - url(r'^auth/$', AuthView.as_view()), - url(r'^me/$', UserMeList.as_view(), name='user_me_list'), - url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), - 
url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), - url(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), - url(r'^settings/', include('awx.conf.urls')), - url(r'^instances/', include(instance_urls)), - url(r'^instance_groups/', include(instance_group_urls)), - url(r'^schedules/', include(schedule_urls)), - url(r'^organizations/', include(organization_urls)), - url(r'^users/', include(user_urls)), - url(r'^execution_environments/', include(execution_environment_urls)), - url(r'^projects/', include(project_urls)), - url(r'^project_updates/', include(project_update_urls)), - url(r'^teams/', include(team_urls)), - url(r'^inventories/', include(inventory_urls)), - url(r'^hosts/', include(host_urls)), - url(r'^groups/', include(group_urls)), - url(r'^inventory_sources/', include(inventory_source_urls)), - url(r'^inventory_updates/', include(inventory_update_urls)), - url(r'^credentials/', include(credential_urls)), - url(r'^roles/', include(role_urls)), - url(r'^job_templates/', include(job_template_urls)), - url(r'^jobs/', include(job_urls)), - url(r'^job_host_summaries/', include(job_host_summary_urls)), - url(r'^job_events/', include(job_event_urls)), - url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), - url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), - url(r'^system_job_templates/', include(system_job_template_urls)), - url(r'^system_jobs/', include(system_job_urls)), - url(r'^notification_templates/', include(notification_template_urls)), - url(r'^notifications/', include(notification_urls)), - url(r'^workflow_job_templates/', include(workflow_job_template_urls)), - url(r'^workflow_jobs/', include(workflow_job_urls)), - url(r'^labels/', include(label_urls)), - url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), - url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), - url(r'^unified_job_templates/$', 
UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), - url(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), - url(r'^activity_stream/', include(activity_stream_urls)), - url(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), - url(r'^workflow_approvals/', include(workflow_approval_urls)), + re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), + re_path(r'^credential_types/', include(credential_type_urls)), + re_path(r'^credential_input_sources/', include(credential_input_source_urls)), + re_path(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), + re_path(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), + re_path(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), + re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), + re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^', include(oauth2_urls)), + re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), + re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), + re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), + re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), + re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), + re_path(r'^auth/$', 
AuthView.as_view()), + re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'), + re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), + re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), + re_path(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), + re_path(r'^settings/', include('awx.conf.urls')), + re_path(r'^instances/', include(instance_urls)), + re_path(r'^instance_groups/', include(instance_group_urls)), + re_path(r'^schedules/', include(schedule_urls)), + re_path(r'^organizations/', include(organization_urls)), + re_path(r'^users/', include(user_urls)), + re_path(r'^execution_environments/', include(execution_environment_urls)), + re_path(r'^projects/', include(project_urls)), + re_path(r'^project_updates/', include(project_update_urls)), + re_path(r'^teams/', include(team_urls)), + re_path(r'^inventories/', include(inventory_urls)), + re_path(r'^hosts/', include(host_urls)), + re_path(r'^groups/', include(group_urls)), + re_path(r'^inventory_sources/', include(inventory_source_urls)), + re_path(r'^inventory_updates/', include(inventory_update_urls)), + re_path(r'^credentials/', include(credential_urls)), + re_path(r'^roles/', include(role_urls)), + re_path(r'^job_templates/', include(job_template_urls)), + re_path(r'^jobs/', include(job_urls)), + re_path(r'^job_host_summaries/', include(job_host_summary_urls)), + re_path(r'^job_events/', include(job_event_urls)), + re_path(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), + re_path(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), + re_path(r'^system_job_templates/', include(system_job_template_urls)), + re_path(r'^system_jobs/', include(system_job_urls)), + re_path(r'^notification_templates/', include(notification_template_urls)), + re_path(r'^notifications/', include(notification_urls)), + re_path(r'^workflow_job_templates/', include(workflow_job_template_urls)), + 
re_path(r'^workflow_jobs/', include(workflow_job_urls)), + re_path(r'^labels/', include(label_urls)), + re_path(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), + re_path(r'^workflow_job_nodes/', include(workflow_job_node_urls)), + re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), + re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), + re_path(r'^activity_stream/', include(activity_stream_urls)), + re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), + re_path(r'^workflow_approvals/', include(workflow_approval_urls)), ] app_name = 'api' urlpatterns = [ - url(r'^$', ApiRootView.as_view(), name='api_root_view'), - url(r'^(?P(v2))/', include(v2_urls)), - url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), - url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), - url(r'^o/', include(oauth2_root_urls)), + re_path(r'^$', ApiRootView.as_view(), name='api_root_view'), + re_path(r'^(?P(v2))/', include(v2_urls)), + re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), + re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), + re_path(r'^o/', include(oauth2_root_urls)), ] if settings.SETTINGS_MODULE == 'awx.settings.development': from awx.api.swagger import SwaggerSchemaView - urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] + urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] diff --git a/awx/api/urls/user.py b/awx/api/urls/user.py index 340c428ba5..39bc07aec4 100644 --- a/awx/api/urls/user.py +++ b/awx/api/urls/user.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( UserList, @@ -21,20 +21,20 @@ from awx.api.views import ( ) urls = [ - url(r'^$', UserList.as_view(), name='user_list'), - url(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), - url(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), - url(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), - url(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), - url(r'^(?P[0-9]+)/projects/$', UserProjectsList.as_view(), name='user_projects_list'), - url(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), - url(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), - url(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), - url(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), + re_path(r'^$', UserList.as_view(), name='user_list'), + re_path(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), + re_path(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), + re_path(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), + re_path(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), + re_path(r'^(?P[0-9]+)/projects/$', 
UserProjectsList.as_view(), name='user_projects_list'), + re_path(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), + re_path(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/webhooks.py b/awx/api/urls/webhooks.py index f6739a5df9..764e3dd6e2 100644 --- a/awx/api/urls/webhooks.py +++ b/awx/api/urls/webhooks.py @@ -1,10 +1,10 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver urlpatterns = [ - url(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), - url(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), - url(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), + re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), + re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), + re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), ] diff --git a/awx/api/urls/workflow_approval.py b/awx/api/urls/workflow_approval.py index a3c6454af1..640528edbd 100644 --- a/awx/api/urls/workflow_approval.py +++ b/awx/api/urls/workflow_approval.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny urls = [ - url(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), - url(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), - url(r'^(?P<pk>[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), - url(r'^(?P<pk>[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), + re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), + re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), + re_path(r'^(?P<pk>[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), + re_path(r'^(?P<pk>[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_approval_template.py b/awx/api/urls/workflow_approval_template.py index f49929b283..811ad351d2 100644 --- a/awx/api/urls/workflow_approval_template.py +++ b/awx/api/urls/workflow_approval_template.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList urls = [ - url(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), - url(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), + re_path(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job.py b/awx/api/urls/workflow_job.py index 1ecbb39373..707b7080f9 100644 --- a/awx/api/urls/workflow_job.py +++ b/awx/api/urls/workflow_job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'), - url(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), - url(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), - url(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), - url(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), - url(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), + re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), + re_path(r'^(?P[0-9]+)/$', 
WorkflowJobDetail.as_view(), name='workflow_job_detail'), + re_path(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), + re_path(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), + re_path(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), + re_path(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_node.py b/awx/api/urls/workflow_job_node.py index 809ee515f0..5b246c95b4 100644 --- a/awx/api/urls/workflow_job_node.py +++ b/awx/api/urls/workflow_job_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobNodeList, @@ -14,12 +14,12 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), - url(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), - url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), - url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), - url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), - url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), + re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), + re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), + re_path(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template.py b/awx/api/urls/workflow_job_template.py index 90b3c043fc..e2729186ca 100644 --- a/awx/api/urls/workflow_job_template.py +++ b/awx/api/urls/workflow_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( WorkflowJobTemplateList, @@ -24,39 +24,39 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), - url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), - url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), - url(r'^(?P<pk>[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), - url(r'^(?P<pk>[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), - url(r'^(?P<pk>[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), - url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'), - url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), - url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), - url( + re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), + re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), + re_path(r'^(?P<pk>[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), + re_path(r'^(?P<pk>[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), + re_path(r'^(?P<pk>[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), + re_path(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), 
name='workflow_job_template_survey_spec'), + re_path(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), + re_path( r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(), name='workflow_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(), name='workflow_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(), name='workflow_job_template_notification_templates_success_list', ), - url( + re_path( r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(), name='workflow_job_template_notification_templates_approvals_list', ), - url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), - url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), - url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), - url(r'^(?P<pk>[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), + re_path(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), + re_path(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), + re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), + re_path(r'^(?P<pk>[0-9]+)/', 
include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template_node.py b/awx/api/urls/workflow_job_template_node.py index 868c728a88..bcd61aed67 100644 --- a/awx/api/urls/workflow_job_template_node.py +++ b/awx/api/urls/workflow_job_template_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobTemplateNodeList, @@ -15,13 +15,13 @@ urls = [ - url(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), - url(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), - url(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), - url(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'), - url(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), - url(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), - url(r'^(?P<pk>[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), + re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), + re_path(r'^(?P<pk>[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), + re_path(r'^(?P<pk>[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), 
name='workflow_job_template_node_failure_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), + re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), + re_path(r'^(?P<pk>[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), ] __all__ = ['urls'] diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 34e442eb5e..37bf8cfab7 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -29,7 +29,7 @@ from django.views.decorators.csrf import csrf_exempt from django.template.loader import render_to_string from django.http import HttpResponse from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework diff --git a/awx/api/views/inventory.py b/awx/api/views/inventory.py index dfa7204f80..43815ae565 100644 --- a/awx/api/views/inventory.py +++ b/awx/api/views/inventory.py @@ -8,7 +8,7 @@ import logging from django.conf import settings from django.db.models import Q from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/mesh_visualizer.py b/awx/api/views/mesh_visualizer.py index d2c04f0962..e790069700 100644 --- a/awx/api/views/mesh_visualizer.py +++ b/awx/api/views/mesh_visualizer.py @@ -1,7 +1,7 @@ # Copyright (c) 2018 Red Hat, Inc. # All Rights Reserved. 
-from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.generics import APIView, Response from awx.api.permissions import IsSystemAdminOrAuditor diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py index 5a37092dd4..1634293cab 100644 --- a/awx/api/views/metrics.py +++ b/awx/api/views/metrics.py @@ -5,7 +5,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.response import Response diff --git a/awx/api/views/mixin.py b/awx/api/views/mixin.py index 059e1120f7..2ba254d3b3 100644 --- a/awx/api/views/mixin.py +++ b/awx/api/views/mixin.py @@ -8,7 +8,7 @@ from django.db.models import Count from django.db import transaction from django.shortcuts import get_object_or_404 from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import SAFE_METHODS from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/organization.py b/awx/api/views/organization.py index 4a12a7d599..099edcadb0 100644 --- a/awx/api/views/organization.py +++ b/awx/api/views/organization.py @@ -7,7 +7,7 @@ import logging # Django from django.db.models import Count from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.main.models import ( diff --git a/awx/api/views/root.py b/awx/api/views/root.py index 675daa2569..d879e4537e 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -8,11 +8,11 @@ import operator from collections import OrderedDict from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.utils.decorators import 
method_decorator from django.views.decorators.csrf import ensure_csrf_cookie from django.template.loader import render_to_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.response import Response @@ -205,7 +205,7 @@ class ApiV2SubscriptionView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response(validated) @@ -246,7 +246,7 @@ class ApiV2AttachView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) for sub in validated: if sub['pool_id'] == pool_id: @@ -322,7 +322,7 @@ class ApiV2ConfigView(APIView): try: data_actual = json.dumps(request.data) except Exception: - logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) + logger.info(smart_str(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) license_data = json.loads(data_actual) @@ -346,7 +346,7 @@ class ApiV2ConfigView(APIView): try: license_data_validated = get_licenser().license_from_manifest(license_data) except Exception: - logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + 
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) else: license_data_validated = get_licenser().validate() @@ -357,7 +357,7 @@ class ApiV2ConfigView(APIView): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) - logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): diff --git a/awx/api/views/webhooks.py b/awx/api/views/webhooks.py index 921839a3f5..c3d1604b0a 100644 --- a/awx/api/views/webhooks.py +++ b/awx/api/views/webhooks.py @@ -4,7 +4,7 @@ import logging import urllib.parse from django.utils.encoding import force_bytes -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt from rest_framework import status diff --git a/awx/conf/__init__.py b/awx/conf/__init__.py index 661295a685..3580b7a45a 100644 --- a/awx/conf/__init__.py +++ b/awx/conf/__init__.py @@ -7,8 +7,6 @@ from django.utils.module_loading import autodiscover_modules # AWX from .registry import settings_registry -default_app_config = 'awx.conf.apps.ConfConfig' - def register(setting, **kwargs): settings_registry.register(setting, **kwargs) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index a61e489858..811baba262 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -1,8 +1,10 @@ +import sys + # Django from django.apps import AppConfig # from django.core import checks -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class ConfConfig(AppConfig): @@ -12,6 
+14,9 @@ class ConfConfig(AppConfig): def ready(self): self.module.autodiscover() - from .settings import SettingsWrapper - SettingsWrapper.initialize() + if not set(sys.argv) & {'migrate', 'check_migrations'}: + + from .settings import SettingsWrapper + + SettingsWrapper.initialize() diff --git a/awx/conf/conf.py b/awx/conf/conf.py index 45a3442756..019bd1d068 100644 --- a/awx/conf/conf.py +++ b/awx/conf/conf.py @@ -1,6 +1,6 @@ # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields, register diff --git a/awx/conf/fields.py b/awx/conf/fields.py index 2ab3a9e8d9..7802b2a085 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -7,10 +7,10 @@ from collections import OrderedDict # Django from django.core.validators import URLValidator, _lazy_re_compile -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework -from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa +from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField # noqa from rest_framework.serializers import PrimaryKeyRelatedField # noqa # AWX @@ -65,11 +65,11 @@ class StringListBooleanField(ListField): try: if isinstance(value, (list, tuple)): return super(StringListBooleanField, self).to_representation(value) - elif value in NullBooleanField.TRUE_VALUES: + elif value in BooleanField.TRUE_VALUES: return True - elif value in NullBooleanField.FALSE_VALUES: + elif value in BooleanField.FALSE_VALUES: return False - elif value in NullBooleanField.NULL_VALUES: + elif value in BooleanField.NULL_VALUES: return None elif isinstance(value, str): return self.child.to_representation(value) @@ -82,11 +82,11 @@ class 
StringListBooleanField(ListField): try: if isinstance(data, (list, tuple)): return super(StringListBooleanField, self).to_internal_value(data) - elif data in NullBooleanField.TRUE_VALUES: + elif data in BooleanField.TRUE_VALUES: return True - elif data in NullBooleanField.FALSE_VALUES: + elif data in BooleanField.FALSE_VALUES: return False - elif data in NullBooleanField.NULL_VALUES: + elif data in BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/conf/migrations/0001_initial.py b/awx/conf/migrations/0001_initial.py index 8bb9b6bcec..b239f5e143 100644 --- a/awx/conf/migrations/0001_initial.py +++ b/awx/conf/migrations/0001_initial.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import jsonfield.fields from django.conf import settings @@ -18,7 +17,7 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('key', models.CharField(max_length=255)), - ('value', jsonfield.fields.JSONField(null=True)), + ('value', models.JSONField(null=True)), ( 'user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True), diff --git a/awx/conf/migrations/0003_v310_JSONField_changes.py b/awx/conf/migrations/0003_v310_JSONField_changes.py index 2550d2fff0..d312c40b1d 100644 --- a/awx/conf/migrations/0003_v310_JSONField_changes.py +++ b/awx/conf/migrations/0003_v310_JSONField_changes.py @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('conf', '0002_v310_copy_tower_settings')] - operations = [migrations.AlterField(model_name='setting', name='value', 
field=awx.main.fields.JSONField(null=True))] + operations = [migrations.AlterField(model_name='setting', name='value', field=models.JSONField(null=True))] diff --git a/awx/conf/migrations/_ldap_group_type.py b/awx/conf/migrations/_ldap_group_type.py index e8de5ca4aa..b6580f8cae 100644 --- a/awx/conf/migrations/_ldap_group_type.py +++ b/awx/conf/migrations/_ldap_group_type.py @@ -5,7 +5,7 @@ from django.utils.timezone import now def fill_ldap_group_type_params(apps, schema_editor): - group_type = settings.AUTH_LDAP_GROUP_TYPE + group_type = getattr(settings, 'AUTH_LDAP_GROUP_TYPE', None) Setting = apps.get_model('conf', 'Setting') group_type_params = {'name_attr': 'cn', 'member_attr': 'member'} @@ -17,7 +17,7 @@ def fill_ldap_group_type_params(apps, schema_editor): else: entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now()) - init_attrs = set(inspect.getargspec(group_type.__init__).args[1:]) + init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:]) for k in list(group_type_params.keys()): if k not in init_attrs: del group_type_params[k] diff --git a/awx/conf/models.py b/awx/conf/models.py index f64d8a2aab..05162436d1 100644 --- a/awx/conf/models.py +++ b/awx/conf/models.py @@ -9,7 +9,6 @@ from django.db import models # AWX from awx.main.models.base import CreatedModifiedModel, prevent_search -from awx.main.fields import JSONField from awx.main.utils import encrypt_field from awx.conf import settings_registry @@ -19,7 +18,7 @@ __all__ = ['Setting'] class Setting(CreatedModifiedModel): key = models.CharField(max_length=255) - value = JSONField(null=True) + value = models.JSONField(null=True) user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE)) def __str__(self): diff --git a/awx/conf/registry.py b/awx/conf/registry.py index 36f6eba6d2..da056e99db 100644 --- a/awx/conf/registry.py +++ b/awx/conf/registry.py @@ 
-8,7 +8,7 @@ import logging # Django from django.core.exceptions import ImproperlyConfigured from django.utils.text import slugify -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.conf.license import get_license diff --git a/awx/conf/tests/unit/test_registry.py b/awx/conf/tests/unit/test_registry.py index 6a817985e2..1ce4dceaaf 100644 --- a/awx/conf/tests/unit/test_registry.py +++ b/awx/conf/tests/unit/test_registry.py @@ -6,7 +6,7 @@ from uuid import uuid4 from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.fields import empty import pytest diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py index da97d41c6f..a184fa3191 100644 --- a/awx/conf/tests/unit/test_settings.py +++ b/awx/conf/tests/unit/test_settings.py @@ -11,7 +11,7 @@ import time from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import pytest from awx.conf import models, fields diff --git a/awx/conf/urls.py b/awx/conf/urls.py index 61134d20b8..69d47b6afc 100644 --- a/awx/conf/urls.py +++ b/awx/conf/urls.py @@ -1,13 +1,13 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. 
+from django.urls import re_path -from django.conf.urls import url from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest urlpatterns = [ - url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), - url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), - url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), + re_path(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), + re_path(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), + re_path(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), ] diff --git a/awx/conf/views.py b/awx/conf/views.py index a9eae07409..b2b312d834 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -13,7 +13,7 @@ from socket import SHUT_RDWR from django.db import connection from django.conf import settings from django.http import Http404 -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/main/__init__.py b/awx/main/__init__.py index f500f439b6..e484e62be1 100644 --- a/awx/main/__init__.py +++ b/awx/main/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
- -default_app_config = 'awx.main.apps.MainConfig' diff --git a/awx/main/access.py b/awx/main/access.py index c608a7aa41..ba91d290c1 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -11,7 +11,7 @@ from functools import reduce from django.conf import settings from django.db.models import Q, Prefetch from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework diff --git a/awx/main/analytics/broadcast_websocket.py b/awx/main/analytics/broadcast_websocket.py index ff4bcb4fa1..df1582c9b9 100644 --- a/awx/main/analytics/broadcast_websocket.py +++ b/awx/main/analytics/broadcast_websocket.py @@ -89,7 +89,7 @@ class BroadcastWebsocketStatsManager: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) except Exception as e: - logger.warn(e) + logger.warning(e) await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) self.start() diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index 1270c87315..ee52dece89 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -10,7 +10,7 @@ from django.db.models import Count from django.conf import settings from django.contrib.sessions.models import Session from django.utils.timezone import now, timedelta -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from psycopg2.errors import UntranslatableCharacter diff --git a/awx/main/apps.py b/awx/main/apps.py index b45b3c20f2..abd3332fd0 100644 --- a/awx/main/apps.py +++ b/awx/main/apps.py @@ -1,5 +1,5 @@ from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class MainConfig(AppConfig): diff --git a/awx/main/conf.py b/awx/main/conf.py index 
c754ecc92a..0099fbe3ad 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -2,7 +2,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/main/constants.py b/awx/main/constants.py index d87bf82983..cda6dd3a67 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -3,7 +3,7 @@ import re -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'CLOUD_PROVIDERS', diff --git a/awx/main/consumers.py b/awx/main/consumers.py index 21ebe9d771..ad1740c362 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -65,7 +65,7 @@ class WebsocketSecretAuthHelper: nonce_parsed = int(nonce_parsed) nonce_diff = now - nonce_parsed if abs(nonce_diff) > nonce_tolerance: - logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") + logger.warning(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") return True @@ -85,7 +85,7 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer): try: WebsocketSecretAuthHelper.is_authorized(self.scope) except Exception: - logger.warn(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") + logger.warning(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") await self.close() return diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index 235511f959..95bf767508 100644 --- a/awx/main/credential_plugins/aim.py +++ b/awx/main/credential_plugins/aim.py @@ -2,7 +2,7 @@ from .plugin import CredentialPlugin, CertFiles, raise_for_status from urllib.parse import quote, urlencode, urljoin -from django.utils.translation import ugettext_lazy as _ 
+from django.utils.translation import gettext_lazy as _ import requests aim_inputs = { diff --git a/awx/main/credential_plugins/azure_kv.py b/awx/main/credential_plugins/azure_kv.py index 58580edf9a..eecfde65b1 100644 --- a/awx/main/credential_plugins/azure_kv.py +++ b/awx/main/credential_plugins/azure_kv.py @@ -1,6 +1,6 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from azure.keyvault import KeyVaultClient, KeyVaultAuthentication from azure.common.credentials import ServicePrincipalCredentials from msrestazure import azure_cloud diff --git a/awx/main/credential_plugins/centrify_vault.py b/awx/main/credential_plugins/centrify_vault.py index a0be2250f4..1e05625e71 100644 --- a/awx/main/credential_plugins/centrify_vault.py +++ b/awx/main/credential_plugins/centrify_vault.py @@ -1,115 +1,115 @@ -from .plugin import CredentialPlugin, raise_for_status -from django.utils.translation import ugettext_lazy as _ -from urllib.parse import urljoin -import requests - -pas_inputs = { - 'fields': [ - { - 'id': 'url', - 'label': _('Centrify Tenant URL'), - 'type': 'string', - 'help_text': _('Centrify Tenant URL'), - 'format': 'url', - }, - { - 'id': 'client_id', - 'label': _('Centrify API User'), - 'type': 'string', - 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), - }, - { - 'id': 'client_password', - 'label': _('Centrify API Password'), - 'type': 'string', - 'help_text': _('Password of Centrify API User with necessary permissions'), - 'secret': True, - }, - { - 'id': 'oauth_application_id', - 'label': _('OAuth2 Application ID'), - 'type': 'string', - 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), - 'default': 'awx', - }, - { - 'id': 'oauth_scope', - 'label': _('OAuth2 Scope'), - 'type': 'string', - 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), - 
'default': 'awx', - }, - ], - 'metadata': [ - { - 'id': 'account-name', - 'label': _('Account Name'), - 'type': 'string', - 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'), - }, - { - 'id': 'system-name', - 'label': _('System Name'), - 'type': 'string', - 'help_text': _('Machine Name enrolled with in Centrify Portal'), - }, - ], - 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], -} - - -# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret -def handle_auth(**kwargs): - post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} - response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()['access_token'] - except KeyError: - raise RuntimeError('OAuth request to tenant was unsuccessful') - - -# fetch the ID of system with RedRock query, Input : System Name, Account Name -def get_ID(**kwargs): - endpoint = urljoin(kwargs['url'], '/Redrock/query') - name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) - query = 'Select ID from VaultAccount where {0}'.format(name) - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - result_str = response.json()["Result"]["Results"] - return result_str[0]["Row"]["ID"] - except (IndexError, KeyError): - raise RuntimeError("Error Detected!! 
Check the Inputs") - - -# CheckOut Password from Centrify Vault, Input : ID -def get_passwd(**kwargs): - endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()["Result"]["Password"] - except KeyError: - raise RuntimeError("Password Not Found") - - -def centrify_backend(**kwargs): - url = kwargs.get('url') - acc_name = kwargs.get('account-name') - system_name = kwargs.get('system-name') - client_id = kwargs.get('client_id') - client_password = kwargs.get('client_password') - app_id = kwargs.get('oauth_application_id', 'awx') - endpoint = urljoin(url, f'/oauth2/token/{app_id}') - endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} - token = handle_auth(**endpoint) - get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} - acc_id = get_ID(**get_id_args) - get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} - return get_passwd(**get_pwd_args) - - -centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) +from .plugin import CredentialPlugin, raise_for_status +from django.utils.translation import gettext_lazy as _ +from urllib.parse import urljoin +import requests + +pas_inputs = { + 'fields': [ + { + 'id': 'url', + 'label': _('Centrify Tenant URL'), + 'type': 'string', + 'help_text': _('Centrify Tenant URL'), + 'format': 'url', + }, + { + 'id': 'client_id', + 'label': _('Centrify API User'), + 'type': 'string', + 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), + }, + { + 'id': 'client_password', + 'label': 
_('Centrify API Password'), + 'type': 'string', + 'help_text': _('Password of Centrify API User with necessary permissions'), + 'secret': True, + }, + { + 'id': 'oauth_application_id', + 'label': _('OAuth2 Application ID'), + 'type': 'string', + 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + { + 'id': 'oauth_scope', + 'label': _('OAuth2 Scope'), + 'type': 'string', + 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + ], + 'metadata': [ + { + 'id': 'account-name', + 'label': _('Account Name'), + 'type': 'string', + 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'), + }, + { + 'id': 'system-name', + 'label': _('System Name'), + 'type': 'string', + 'help_text': _('Machine Name enrolled with in Centrify Portal'), + }, + ], + 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], +} + + +# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret +def handle_auth(**kwargs): + post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} + response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()['access_token'] + except KeyError: + raise RuntimeError('OAuth request to tenant was unsuccessful') + + +# fetch the ID of system with RedRock query, Input : System Name, Account Name +def get_ID(**kwargs): + endpoint = urljoin(kwargs['url'], '/Redrock/query') + name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) + query = 'Select ID from VaultAccount where {0}'.format(name) + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, 
json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + result_str = response.json()["Result"]["Results"] + return result_str[0]["Row"]["ID"] + except (IndexError, KeyError): + raise RuntimeError("Error Detected!! Check the Inputs") + + +# CheckOut Password from Centrify Vault, Input : ID +def get_passwd(**kwargs): + endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()["Result"]["Password"] + except KeyError: + raise RuntimeError("Password Not Found") + + +def centrify_backend(**kwargs): + url = kwargs.get('url') + acc_name = kwargs.get('account-name') + system_name = kwargs.get('system-name') + client_id = kwargs.get('client_id') + client_password = kwargs.get('client_password') + app_id = kwargs.get('oauth_application_id', 'awx') + endpoint = urljoin(url, f'/oauth2/token/{app_id}') + endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} + token = handle_auth(**endpoint) + get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} + acc_id = get_ID(**get_id_args) + get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} + return get_passwd(**get_pwd_args) + + +centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py index b9606d48bc..5ae6be27f3 100644 --- a/awx/main/credential_plugins/conjur.py +++ b/awx/main/credential_plugins/conjur.py @@ -3,7 +3,7 @@ from .plugin import CredentialPlugin, CertFiles, 
raise_for_status import base64 from urllib.parse import urljoin, quote -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import requests diff --git a/awx/main/credential_plugins/dsv.py b/awx/main/credential_plugins/dsv.py index d256b27647..9c89199710 100644 --- a/awx/main/credential_plugins/dsv.py +++ b/awx/main/credential_plugins/dsv.py @@ -1,7 +1,7 @@ from .plugin import CredentialPlugin from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.vault import SecretsVault diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py index c179fcd1e7..c55ac2f4e6 100644 --- a/awx/main/credential_plugins/hashivault.py +++ b/awx/main/credential_plugins/hashivault.py @@ -6,7 +6,7 @@ from urllib.parse import urljoin from .plugin import CredentialPlugin, CertFiles, raise_for_status import requests -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ base_inputs = { 'fields': [ diff --git a/awx/main/credential_plugins/tss.py b/awx/main/credential_plugins/tss.py index bf83693860..172a8aef00 100644 --- a/awx/main/credential_plugins/tss.py +++ b/awx/main/credential_plugins/tss.py @@ -1,5 +1,5 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret diff --git a/awx/main/dispatch/control.py b/awx/main/dispatch/control.py index e5952f02bf..b1eb2281c9 100644 --- a/awx/main/dispatch/control.py +++ b/awx/main/dispatch/control.py @@ -42,7 +42,7 @@ class Control(object): return f"reply_to_{str(uuid.uuid4()).replace('-','_')}" def control_with_reply(self, command, timeout=5): - logger.warn('checking {} {} for {}'.format(self.service, 
command, self.queuename)) + logger.warning('checking {} {} for {}'.format(self.service, command, self.queuename)) reply_queue = Control.generate_reply_queue_name() self.result = None diff --git a/awx/main/dispatch/periodic.py b/awx/main/dispatch/periodic.py index 9ff6dd2570..e3e7da5db9 100644 --- a/awx/main/dispatch/periodic.py +++ b/awx/main/dispatch/periodic.py @@ -6,7 +6,8 @@ from multiprocessing import Process from django.conf import settings from django.db import connections from schedule import Scheduler -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid +from django_guid.utils import generate_guid from awx.main.dispatch.worker import TaskWorker @@ -19,20 +20,20 @@ class Scheduler(Scheduler): def run(): ppid = os.getppid() - logger.warn('periodic beat started') + logger.warning('periodic beat started') while True: if os.getppid() != ppid: # if the parent PID changes, this process has been orphaned # via e.g., segfault or sigkill, we should exit too pid = os.getpid() - logger.warn(f'periodic beat exiting gracefully pid:{pid}') + logger.warning(f'periodic beat exiting gracefully pid:{pid}') raise SystemExit() try: for conn in connections.all(): # If the database connection has a hiccup, re-establish a new # connection conn.close_if_unusable_or_obsolete() - GuidMiddleware.set_guid(GuidMiddleware._generate_guid()) + set_guid(generate_guid()) self.run_pending() except Exception: logger.exception('encountered an error while scheduling periodic tasks') diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py index f1f46363f3..576f6bf799 100644 --- a/awx/main/dispatch/pool.py +++ b/awx/main/dispatch/pool.py @@ -16,7 +16,7 @@ from queue import Full as QueueFull, Empty as QueueEmpty from django.conf import settings from django.db import connection as django_connection, connections from django.core.cache import cache as django_cache -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from 
jinja2 import Template import psutil @@ -142,7 +142,7 @@ class PoolWorker(object): # when this occurs, it's _fine_ to ignore this KeyError because # the purpose of self.managed_tasks is to just track internal # state of which events are *currently* being processed. - logger.warn('Event UUID {} appears to be have been duplicated.'.format(uuid)) + logger.warning('Event UUID {} appears to be have been duplicated.'.format(uuid)) @property def current_task(self): @@ -291,8 +291,8 @@ class WorkerPool(object): pass except Exception: tb = traceback.format_exc() - logger.warn("could not write to queue %s" % preferred_queue) - logger.warn("detail: {}".format(tb)) + logger.warning("could not write to queue %s" % preferred_queue) + logger.warning("detail: {}".format(tb)) write_attempt_order.append(preferred_queue) logger.error("could not write payload to any queue, attempted order: {}".format(write_attempt_order)) return None @@ -436,7 +436,7 @@ class AutoscalePool(WorkerPool): def write(self, preferred_queue, body): if 'guid' in body: - GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) try: # when the cluster heartbeat occurs, clean up internally if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']: diff --git a/awx/main/dispatch/publish.py b/awx/main/dispatch/publish.py index 63b2890e1e..e873465155 100644 --- a/awx/main/dispatch/publish.py +++ b/awx/main/dispatch/publish.py @@ -5,7 +5,7 @@ import json from uuid import uuid4 from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid from . 
import pg_bus_conn @@ -76,7 +76,7 @@ class task: logger.error(msg) raise ValueError(msg) obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name} - guid = GuidMiddleware.get_guid() + guid = get_guid() if guid: obj['guid'] = guid obj.update(**kw) diff --git a/awx/main/dispatch/worker/base.py b/awx/main/dispatch/worker/base.py index 193fb778bb..6965416c94 100644 --- a/awx/main/dispatch/worker/base.py +++ b/awx/main/dispatch/worker/base.py @@ -60,7 +60,7 @@ class AWXConsumerBase(object): return f'listening on {self.queues}' def control(self, body): - logger.warn(f'Received control signal:\n{body}') + logger.warning(f'Received control signal:\n{body}') control = body.get('control') if control in ('status', 'running'): reply_queue = body['reply_to'] @@ -118,7 +118,7 @@ class AWXConsumerBase(object): def stop(self, signum, frame): self.should_stop = True - logger.warn('received {}, stopping'.format(signame(signum))) + logger.warning('received {}, stopping'.format(signame(signum))) self.worker.on_stop() raise SystemExit() @@ -153,7 +153,7 @@ class AWXConsumerPG(AWXConsumerBase): if self.should_stop: return except psycopg2.InterfaceError: - logger.warn("Stale Postgres message bus connection, reconnecting") + logger.warning("Stale Postgres message bus connection, reconnecting") continue diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index a88286364a..5026e72c06 100644 --- a/awx/main/dispatch/worker/callback.py +++ b/awx/main/dispatch/worker/callback.py @@ -9,7 +9,7 @@ from django.conf import settings from django.utils.timezone import now as tz_now from django.db import DatabaseError, OperationalError, connection as django_connection from django.db.utils import InterfaceError, InternalError -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid import psutil @@ -184,7 +184,7 @@ class CallbackBrokerWorker(BaseWorker): if body.get('event') == 'EOF': try: if 'guid' in body: - 
GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) final_counter = body.get('final_counter', 0) logger.info('Event processing is finished for Job {}, sending notifications'.format(job_identifier)) # EOF events are sent when stdout for the running task is @@ -208,7 +208,7 @@ class CallbackBrokerWorker(BaseWorker): logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier)) finally: self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1) - GuidMiddleware.set_guid('') + set_guid('') return skip_websocket_message = body.pop('skip_websocket_message', False) diff --git a/awx/main/dispatch/worker/task.py b/awx/main/dispatch/worker/task.py index 91ce7f47b4..e1fe196ddb 100644 --- a/awx/main/dispatch/worker/task.py +++ b/awx/main/dispatch/worker/task.py @@ -7,7 +7,7 @@ import traceback from kubernetes.config import kube_config from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from awx.main.tasks.system import dispatch_startup, inform_cluster_of_shutdown @@ -54,7 +54,7 @@ class TaskWorker(BaseWorker): args = body.get('args', []) kwargs = body.get('kwargs', {}) if 'guid' in body: - GuidMiddleware.set_guid(body.pop('guid')) + set_guid(body.pop('guid')) _call = TaskWorker.resolve_callable(task) if inspect.isclass(_call): # the callable is a class, e.g., RunJob; instantiate and diff --git a/awx/main/fields.py b/awx/main/fields.py index 95ebfbca73..83ab57f37d 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -11,7 +11,6 @@ from jinja2 import sandbox, StrictUndefined from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError # Django -from django.contrib.postgres.fields import JSONField as upstream_JSONBField from django.core import exceptions as django_exceptions from django.core.serializers.json import DjangoJSONEncoder from django.db.models.signals import ( @@ -28,17 +27,15 @@ from django.db.models.fields.related_descriptors 
import ( ReverseManyToOneDescriptor, create_forward_many_to_many_manager, ) -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str +from django.db.models import JSONField from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # jsonschema from jsonschema import Draft4Validator, FormatChecker import jsonschema.exceptions -# Django-JSONField -from jsonfield import JSONField as upstream_JSONField - # DRF from rest_framework import serializers @@ -52,9 +49,9 @@ from awx.main import utils __all__ = [ + 'JSONBlob', 'AutoOneToOneField', 'ImplicitRoleField', - 'JSONField', 'SmartFilterField', 'OrderedManyToManyField', 'update_role_parentage_for_instance', @@ -71,34 +68,9 @@ def __enum_validate__(validator, enums, instance, schema): Draft4Validator.VALIDATORS['enum'] = __enum_validate__ -class JSONField(upstream_JSONField): - def db_type(self, connection): - return 'text' - - def from_db_value(self, value, expression, connection): - if value in {'', None} and not self.null: - return {} - return super(JSONField, self).from_db_value(value, expression, connection) - - -class JSONBField(upstream_JSONBField): - def get_prep_lookup(self, lookup_type, value): - if isinstance(value, str) and value == "null": - return 'null' - return super(JSONBField, self).get_prep_lookup(lookup_type, value) - - def get_db_prep_value(self, value, connection, prepared=False): - if connection.vendor == 'sqlite': - # sqlite (which we use for tests) does not support jsonb; - return json.dumps(value, cls=DjangoJSONEncoder) - return super(JSONBField, self).get_db_prep_value(value, connection, prepared) - - def from_db_value(self, value, expression, connection): - # Work around a bug in django-jsonfield - # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos - if isinstance(value, str): - return json.loads(value) 
- return value +class JSONBlob(JSONField): + def get_internal_type(self): + return "TextField" # Based on AutoOneToOneField from django-annoying: @@ -140,7 +112,7 @@ def resolve_role_field(obj, field): # use extremely generous duck typing to accomidate all possible forms # of the model that may be used during various migrations if obj._meta.model_name != 'role' or obj._meta.app_label != 'main': - raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj)))) + raise Exception(smart_str('{} refers to a {}, not a Role'.format(field, type(obj)))) ret.append(obj.id) else: if type(obj) is ManyToManyDescriptor: @@ -385,7 +357,7 @@ class SmartFilterField(models.TextField): return super(SmartFilterField, self).get_prep_value(value) -class JSONSchemaField(JSONBField): +class JSONSchemaField(models.JSONField): """ A JSONB field that self-validates against a defined JSON schema (http://json-schema.org). This base class is intended to be overwritten by @@ -398,8 +370,13 @@ class JSONSchemaField(JSONBField): # validation empty_values = (None, '') + def __init__(self, encoder=None, decoder=None, **options): + if encoder is None: + encoder = DjangoJSONEncoder + super().__init__(encoder=encoder, decoder=decoder, **options) + def get_default(self): - return copy.deepcopy(super(JSONBField, self).get_default()) + return copy.deepcopy(super(models.JSONField, self).get_default()) def schema(self, model_instance): raise NotImplementedError() diff --git a/awx/main/management/commands/cleanup_jobs.py b/awx/main/management/commands/cleanup_jobs.py index c9c508c6e8..dec5ca6e50 100644 --- a/awx/main/management/commands/cleanup_jobs.py +++ b/awx/main/management/commands/cleanup_jobs.py @@ -11,13 +11,12 @@ import re # Django from django.core.management.base import BaseCommand, CommandError from django.db import transaction, connection +from django.db.models import Min, Max +from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed 
from django.utils.timezone import now # AWX from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification -from awx.main.signals import disable_activity_stream, disable_computed_fields - -from awx.main.utils.deletion import AWXCollector, pre_delete def unified_job_class_to_event_table_name(job_class): @@ -80,7 +79,6 @@ class DeleteMeta: ).count() def identify_excluded_partitions(self): - part_drop = {} for pk, status, created in self.jobs_qs: @@ -94,7 +92,7 @@ class DeleteMeta: # Note that parts_no_drop _may_ contain the names of partitions that don't exist # This can happen when the cleanup of _unpartitioned_* logic leaves behind jobs with status pending, waiting, running. The find_jobs_to_delete() will # pick these jobs up. - self.parts_no_drop = set([k for k, v in part_drop.items() if v is False]) + self.parts_no_drop = {k for k, v in part_drop.items() if v is False} def delete_jobs(self): if not self.dry_run: @@ -116,7 +114,7 @@ class DeleteMeta: partitions_dt = [p for p in partitions_dt if not None] # convert datetime partition back to string partition - partitions_maybe_drop = set([dt_to_partition_name(tbl_name, dt) for dt in partitions_dt]) + partitions_maybe_drop = {dt_to_partition_name(tbl_name, dt) for dt in partitions_dt} # Do not drop partition if there is a job that will not be deleted pointing at it self.parts_to_drop = partitions_maybe_drop - self.parts_no_drop @@ -164,6 +162,15 @@ class Command(BaseCommand): parser.add_argument('--notifications', dest='only_notifications', action='store_true', default=False, help='Remove notifications') parser.add_argument('--workflow-jobs', default=False, action='store_true', dest='only_workflow_jobs', help='Remove workflow jobs') + def init_logging(self): + log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) + self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') + self.logger.setLevel(log_levels.get(self.verbosity, 0)) + 
handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter('%(message)s')) + self.logger.addHandler(handler) + self.logger.propagate = False + def cleanup(self, job_class): delete_meta = DeleteMeta(self.logger, job_class, self.cutoff, self.dry_run) skipped, deleted = delete_meta.delete() @@ -193,7 +200,7 @@ class Command(BaseCommand): return (delete_meta.jobs_no_delete_count, delete_meta.jobs_to_delete_count) def _cascade_delete_job_events(self, model, pk_list): - if len(pk_list) > 0: + if pk_list: with connection.cursor() as cursor: tblname = unified_job_class_to_event_table_name(model) @@ -202,37 +209,30 @@ class Command(BaseCommand): cursor.execute(f"DELETE FROM _unpartitioned_{tblname} WHERE {rel_name} IN ({pk_list_csv})") def cleanup_jobs(self): - skipped, deleted = 0, 0 + batch_size = 100000 - batch_size = 1000000 + # Hack to avoid doing N+1 queries as each item in the Job query set does + # an individual query to get the underlying UnifiedJob. + Job.polymorphic_super_sub_accessors_replaced = True - while True: - # get queryset for available jobs to remove - qs = Job.objects.filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) - # get pk list for the first N (batch_size) objects - pk_list = qs[0:batch_size].values_list('pk', flat=True) - # You cannot delete queries with sql LIMIT set, so we must - # create a new query from this pk_list - qs_batch = Job.objects.filter(pk__in=pk_list) - just_deleted = 0 - if not self.dry_run: + skipped = (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() + + qs = Job.objects.select_related('unifiedjob_ptr').filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) + if self.dry_run: + deleted = qs.count() + return skipped, deleted + + deleted = 0 + info = qs.aggregate(min=Min('id'), max=Max('id')) + if info['min'] is not None: + for start in range(info['min'], info['max'] + 1, 
batch_size): + qs_batch = qs.filter(id__gte=start, id__lte=start + batch_size) + pk_list = qs_batch.values_list('id', flat=True) + + _, results = qs_batch.delete() + deleted += results['main.Job'] self._cascade_delete_job_events(Job, pk_list) - del_query = pre_delete(qs_batch) - collector = AWXCollector(del_query.db) - collector.collect(del_query) - _, models_deleted = collector.delete() - if models_deleted: - just_deleted = models_deleted['main.Job'] - deleted += just_deleted - else: - just_deleted = 0 # break from loop, this is dry run - deleted = qs.count() - - if just_deleted == 0: - break - - skipped += (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() return skipped, deleted def cleanup_ad_hoc_commands(self): @@ -339,15 +339,6 @@ class Command(BaseCommand): skipped += SystemJob.objects.filter(created__gte=self.cutoff).count() return skipped, deleted - def init_logging(self): - log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) - self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') - self.logger.setLevel(log_levels.get(self.verbosity, 0)) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(message)s')) - self.logger.addHandler(handler) - self.logger.propagate = False - def cleanup_workflow_jobs(self): skipped, deleted = 0, 0 workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff) @@ -398,6 +389,7 @@ class Command(BaseCommand): self.cutoff = now() - datetime.timedelta(days=self.days) except OverflowError: raise CommandError('--days specified is too large. 
Try something less than 99999 (about 270 years).') + model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs', 'notifications') models_to_cleanup = set() for m in model_names: @@ -405,18 +397,28 @@ class Command(BaseCommand): models_to_cleanup.add(m) if not models_to_cleanup: models_to_cleanup.update(model_names) - with disable_activity_stream(), disable_computed_fields(): - for m in model_names: - if m in models_to_cleanup: - skipped, deleted = getattr(self, 'cleanup_%s' % m)() - func = getattr(self, 'cleanup_%s_partition' % m, None) - if func: - skipped_partition, deleted_partition = func() - skipped += skipped_partition - deleted += deleted_partition + # Completely disconnect all signal handlers. This is very aggressive, + # but it will be ok since this command is run in its own process. The + # core of the logic is borrowed from Signal.disconnect(). + for s in (pre_save, post_save, pre_delete, post_delete, m2m_changed): + with s.lock: + del s.receivers[:] + s.sender_receivers_cache.clear() - if self.dry_run: - self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) - else: - self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) + for m in model_names: + if m not in models_to_cleanup: + continue + + skipped, deleted = getattr(self, 'cleanup_%s' % m)() + + func = getattr(self, 'cleanup_%s_partition' % m, None) + if func: + skipped_partition, deleted_partition = func() + skipped += skipped_partition + deleted += deleted_partition + + if self.dry_run: + self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) + else: + self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 0854784f10..78acec423d 100644 --- 
a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -16,7 +16,7 @@ from collections import OrderedDict from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db import connection, transaction -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # DRF error class to distinguish license exceptions from rest_framework.exceptions import PermissionDenied @@ -79,13 +79,13 @@ class AnsibleInventoryLoader(object): ee = get_default_execution_environment() if settings.IS_K8S: - logger.warn('This command is not able to run on kubernetes-based deployment. This action should be done using the API.') + logger.warning('This command is not able to run on kubernetes-based deployment. This action should be done using the API.') sys.exit(1) if ee.credential: process = subprocess.run(['podman', 'image', 'exists', ee.image], capture_output=True) if process.returncode != 0: - logger.warn( + logger.warning( f'The default execution environment (id={ee.id}, name={ee.name}, image={ee.image}) is not available on this node. ' 'The image needs to be available locally before using this command, due to registry authentication. ' 'To pull this image, either run a job on this node or manually pull the image.' 
@@ -109,8 +109,8 @@ class AnsibleInventoryLoader(object): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() - stdout = smart_text(stdout) - stderr = smart_text(stderr) + stdout = smart_str(stdout) + stderr = smart_str(stderr) if proc.returncode != 0: raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % ('ansible-inventory', proc.returncode, stdout, stderr)) @@ -224,7 +224,7 @@ class Command(BaseCommand): from_dict = instance_id if instance_id: break - return smart_text(instance_id) + return smart_str(instance_id) def _get_enabled(self, from_dict, default=None): """ diff --git a/awx/main/managers.py b/awx/main/managers.py index 404745b995..4702ad6a9e 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -1,7 +1,6 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. -import sys import logging import os from django.db import models @@ -104,10 +103,6 @@ class InstanceManager(models.Manager): def me(self): """Return the currently active instance.""" - # If we are running unit tests, return a stub record. 
- if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'): - return self.model(id=1, hostname=settings.CLUSTER_HOST_ID, uuid=UUID_DEFAULT) - node = self.filter(hostname=settings.CLUSTER_HOST_ID) if node.exists(): return node[0] @@ -247,7 +242,7 @@ class InstanceGroupManager(models.Manager): if t.controller_node: control_groups = instance_ig_mapping.get(t.controller_node, []) if not control_groups: - logger.warn(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") + logger.warning(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") if t.status == 'waiting' or (not t.execution_node and not t.is_container_group_task): # Subtract capacity from any peer groups that share instances diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 39caf4a7e7..90739aebbe 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -14,7 +14,7 @@ from django.db import connection from django.shortcuts import redirect from django.apps import apps from django.utils.deprecation import MiddlewareMixin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.urls import reverse, resolve from awx.main.utils.named_url_graph import generate_graph, GraphNode @@ -103,7 +103,7 @@ def _customize_graph(): class URLModificationMiddleware(MiddlewareMixin): - def __init__(self, get_response=None): + def __init__(self, get_response): models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')] generate_graph(models) _customize_graph() diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index 7ce9911546..c3dcbe36b7 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -7,7 +7,6 @@ from __future__ import unicode_literals from django.db import migrations, models import django.utils.timezone -import jsonfield.fields import 
django.db.models.deletion from django.conf import settings import taggit.managers @@ -70,7 +69,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('counter', models.PositiveIntegerField(default=0)), @@ -433,7 +432,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('host_name', models.CharField(default='', max_length=1024, editable=False)), @@ -623,7 +622,7 @@ class Migration(migrations.Migration): ('dtend', models.DateTimeField(default=None, null=True, editable=False)), ('rrule', models.CharField(max_length=255)), ('next_run', models.DateTimeField(default=None, null=True, editable=False)), - ('extra_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('extra_data', models.JSONField(default=dict, null=True, blank=True)), ( 'created_by', models.ForeignKey( @@ -751,7 +750,7 @@ class Migration(migrations.Migration): ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)), ('job_args', models.TextField(default='', editable=False, blank=True)), ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)), - ('job_env', jsonfield.fields.JSONField(default=dict, editable=False, blank=True)), + ('job_env', models.JSONField(default=dict, editable=False, null=True, blank=True)), ('job_explanation', models.TextField(default='', editable=False, blank=True)), ('start_args', models.TextField(default='', editable=False, blank=True)), ('result_stdout_text', 
models.TextField(default='', editable=False, blank=True)), @@ -1035,7 +1034,7 @@ class Migration(migrations.Migration): ('host_config_key', models.CharField(default='', max_length=1024, blank=True)), ('ask_variables_on_launch', models.BooleanField(default=False)), ('survey_enabled', models.BooleanField(default=False)), - ('survey_spec', jsonfield.fields.JSONField(default=dict, blank=True)), + ('survey_spec', models.JSONField(default=dict, blank=True)), ], options={ 'ordering': ('name',), diff --git a/awx/main/migrations/0002_squashed_v300_release.py b/awx/main/migrations/0002_squashed_v300_release.py index 2afdef1845..5f23ed566f 100644 --- a/awx/main/migrations/0002_squashed_v300_release.py +++ b/awx/main/migrations/0002_squashed_v300_release.py @@ -12,7 +12,6 @@ import django.db.models.deletion from django.conf import settings from django.utils.timezone import now -import jsonfield.fields import taggit.managers @@ -199,7 +198,7 @@ class Migration(migrations.Migration): ), ('recipients', models.TextField(default='', editable=False, blank=True)), ('subject', models.TextField(default='', editable=False, blank=True)), - ('body', jsonfield.fields.JSONField(default=dict, blank=True)), + ('body', models.JSONField(default=dict, null=True, blank=True)), ], options={ 'ordering': ('pk',), @@ -230,7 +229,7 @@ class Migration(migrations.Migration): ], ), ), - ('notification_configuration', jsonfield.fields.JSONField(default=dict)), + ('notification_configuration', models.JSONField(default=dict)), ( 'created_by', models.ForeignKey( @@ -324,9 +323,7 @@ class Migration(migrations.Migration): ('module', models.CharField(max_length=128)), ( 'facts', - awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), ( 'host', diff --git 
a/awx/main/migrations/0004_squashed_v310_release.py b/awx/main/migrations/0004_squashed_v310_release.py index 06fd3aeed3..c0ac0d4a04 100644 --- a/awx/main/migrations/0004_squashed_v310_release.py +++ b/awx/main/migrations/0004_squashed_v310_release.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models import awx.main.models.notifications -import jsonfield.fields import django.db.models.deletion import awx.main.models.workflow import awx.main.fields @@ -221,7 +220,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -260,7 +259,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', @@ -308,12 +307,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='job', name='artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='ancestor_artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Job timeout settings migrations.AddField( @@ -381,9 +380,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='playbook_files', - field=jsonfield.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + 
field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), # Job events to stdout migrations.AddField( @@ -539,7 +536,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplate', @@ -549,85 +546,83 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplate', name='survey_spec', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), # JSON field changes migrations.AlterField( model_name='adhoccommandevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='job', name='artifacts', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='job', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='jobevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='jobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='notification', name='body', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, 
null=True, blank=True), ), migrations.AlterField( model_name='notificationtemplate', name='notification_configuration', - field=awx.main.fields.JSONField(default=dict), + field=models.JSONField(default=dict), ), migrations.AlterField( model_name='project', name='playbook_files', - field=awx.main.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), migrations.AlterField( model_name='schedule', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='unifiedjob', name='job_env', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjob', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='ancestor_artifacts', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='workflowjobtemplatenode', name='char_prompts', - 
field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), # Job Project Update migrations.AddField( diff --git a/awx/main/migrations/0006_v320_release.py b/awx/main/migrations/0006_v320_release.py index 1f755f94ce..c05bee3eec 100644 --- a/awx/main/migrations/0006_v320_release.py +++ b/awx/main/migrations/0006_v320_release.py @@ -108,14 +108,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='fact', name='facts', - field=awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + field=models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), migrations.AddField( model_name='host', name='ansible_facts', - field=awx.main.fields.JSONBField(default=dict, help_text='Arbitrary JSON structure of most recent ansible_facts, per-host.', blank=True), + field=models.JSONField(default=dict, help_text='Arbitrary JSON structure of most recent ansible_facts, per-host.', blank=True), ), migrations.AddField( model_name='host', @@ -177,8 +175,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='inventory_files', - field=awx.main.fields.JSONField( - default=[], + field=models.JSONField( + default=list, help_text='Suggested list of content that could be Ansible inventory in the project', verbose_name='Inventory Files', editable=False, diff --git a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py index 3d69de2b33..56c86b19a8 100644 --- a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py +++ b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import 
migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): @@ -15,6 +14,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='setting', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), ] diff --git a/awx/main/migrations/0014_v330_saved_launchtime_configs.py b/awx/main/migrations/0014_v330_saved_launchtime_configs.py index d120166218..38c5d2b2f6 100644 --- a/awx/main/migrations/0014_v330_saved_launchtime_configs.py +++ b/awx/main/migrations/0014_v330_saved_launchtime_configs.py @@ -20,7 +20,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='schedule', @@ -37,7 +37,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -47,12 +47,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', @@ -62,12 +62,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='extra_data', - 
field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Run data migration before removing the old credential field migrations.RunPython(migration_utils.set_current_apps_for_migrations, migrations.RunPython.noop), @@ -85,9 +85,9 @@ class Migration(migrations.Migration): name='JobLaunchConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('extra_data', awx.main.fields.JSONField(blank=True, default=dict)), - ('survey_passwords', awx.main.fields.JSONField(blank=True, default=dict, editable=False)), - ('char_prompts', awx.main.fields.JSONField(blank=True, default=dict)), + ('extra_data', models.JSONField(blank=True, null=True, default=dict)), + ('survey_passwords', models.JSONField(blank=True, null=True, default=dict, editable=False)), + ('char_prompts', models.JSONField(blank=True, null=True, default=dict)), ('credentials', models.ManyToManyField(related_name='joblaunchconfigs', to='main.Credential')), ( 'inventory', diff --git a/awx/main/migrations/0018_v330_add_additional_stdout_events.py b/awx/main/migrations/0018_v330_add_additional_stdout_events.py index c9b026eeb5..ad399e72bb 100644 --- a/awx/main/migrations/0018_v330_add_additional_stdout_events.py +++ b/awx/main/migrations/0018_v330_add_additional_stdout_events.py @@ -2,10 +2,11 @@ # Generated by Django 1.11.7 on 2017-12-14 15:13 from __future__ import unicode_literals -import awx.main.fields from django.db import migrations, models import django.db.models.deletion +import awx.main.fields + class Migration(migrations.Migration): @@ -20,7 +21,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), @@ -84,7 +85,7 @@ class Migration(migrations.Migration): max_length=100, ), ), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), @@ -114,7 +115,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), diff --git a/awx/main/migrations/0020_v330_instancegroup_policies.py b/awx/main/migrations/0020_v330_instancegroup_policies.py index e2dc677b44..0577f14ee9 100644 --- a/awx/main/migrations/0020_v330_instancegroup_policies.py +++ b/awx/main/migrations/0020_v330_instancegroup_policies.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models from decimal import Decimal -import 
awx.main.fields class Migration(migrations.Migration): @@ -16,8 +15,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='instancegroup', name='policy_instance_list', - field=awx.main.fields.JSONField( - default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True + field=models.JSONField( + default=list, help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True ), ), migrations.AddField( diff --git a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py index cc1d1bfeba..e26571f1b9 100644 --- a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py +++ b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py @@ -29,7 +29,7 @@ class Migration(migrations.Migration): ('client_id', models.CharField(db_index=True, default=oauth2_provider.generators.generate_client_id, max_length=100, unique=True)), ( 'redirect_uris', - models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris]), + models.TextField(blank=True, help_text='Allowed URIs list, space separated'), ), ('client_type', models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], max_length=32)), ( diff --git a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py index 2f856e23f5..504fa14eb3 100644 --- a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py +++ b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py @@ -2,9 +2,7 @@ # Generated by Django 1.11.11 on 2018-05-21 19:51 from __future__ import unicode_literals -import awx.main.fields -import awx.main.models.activity_stream -from django.db import migrations +from django.db import models, migrations class 
Migration(migrations.Migration): @@ -17,6 +15,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='deleted_actor', - field=awx.main.fields.JSONField(null=True), + field=models.JSONField(null=True), ), ] diff --git a/awx/main/migrations/0053_v340_workflow_inventory.py b/awx/main/migrations/0053_v340_workflow_inventory.py index 23bede35f7..e3dd56a3b2 100644 --- a/awx/main/migrations/0053_v340_workflow_inventory.py +++ b/awx/main/migrations/0053_v340_workflow_inventory.py @@ -17,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AddField( model_name='workflowjob', diff --git a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py index 690989276b..c2c69bb440 100644 --- a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py +++ b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py @@ -4,7 +4,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import awx.main.fields import awx.main.models.notifications @@ -18,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='notificationtemplate', name='messages', - field=awx.main.fields.JSONField( + field=models.JSONField( default=awx.main.models.notifications.NotificationTemplate.default_messages, help_text='Optional custom messages for notification template.', null=True, diff --git a/awx/main/migrations/0090_v360_WFJT_prompts.py b/awx/main/migrations/0090_v360_WFJT_prompts.py index 46fb497202..fdc3b85fcc 100644 --- a/awx/main/migrations/0090_v360_WFJT_prompts.py +++ b/awx/main/migrations/0090_v360_WFJT_prompts.py @@ -24,7 +24,7 @@ class Migration(migrations.Migration): migrations.AddField( 
model_name='workflowjobtemplate', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AlterField( model_name='joblaunchconfig', diff --git a/awx/main/migrations/0129_unifiedjob_installed_collections.py b/awx/main/migrations/0129_unifiedjob_installed_collections.py index d20c9068d0..644bff4132 100644 --- a/awx/main/migrations/0129_unifiedjob_installed_collections.py +++ b/awx/main/migrations/0129_unifiedjob_installed_collections.py @@ -1,7 +1,6 @@ # Generated by Django 2.2.16 on 2021-02-16 20:27 -import awx.main.fields -from django.db import migrations +from django.db import migrations, models class Migration(migrations.Migration): @@ -14,7 +13,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='unifiedjob', name='installed_collections', - field=awx.main.fields.JSONBField( + field=models.JSONField( blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.' 
), ), diff --git a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py index 11c4b1b3f9..596d1f81f2 100644 --- a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py +++ b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py @@ -15,10 +15,10 @@ def forwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory updates to controller') + logger.warning(f'Renamed {r} tower inventory updates to controller') InventorySource.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory sources to controller') + logger.warning(f'Renamed {r} tower inventory sources to controller') CredentialType = apps.get_model('main', 'CredentialType') @@ -32,7 +32,7 @@ def forwards(apps, schema_editor): registry_type = ManagedCredentialType.registry.get('controller') if not registry_type: raise RuntimeError('Excpected to find controller credential, this may need to be edited in the future!') - logger.warn('Renaming the Ansible Tower credential type for existing install') + logger.warning('Renaming the Ansible Tower credential type for existing install') tower_type.name = registry_type.name # sensitive to translations tower_type.namespace = 'controller' # if not done, will error setup_tower_managed_defaults tower_type.save(update_fields=['name', 'namespace']) @@ -46,10 +46,10 @@ def backwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory updates to tower') + logger.warning(f'Renamed {r} controller inventory updates to tower') r = InventorySource.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory sources to tower') + logger.warning(f'Renamed {r} controller inventory sources to tower') CredentialType 
= apps.get_model('main', 'CredentialType') diff --git a/awx/main/migrations/_hg_removal.py b/awx/main/migrations/_hg_removal.py index e384ea5413..76828ef474 100644 --- a/awx/main/migrations/_hg_removal.py +++ b/awx/main/migrations/_hg_removal.py @@ -14,4 +14,4 @@ def delete_hg_scm(apps, schema_editor): update_ct = Project.objects.filter(scm_type='hg').update(scm_type='') if update_ct: - logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) + logger.warning('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py index e6a65a82d4..023a7ee072 100644 --- a/awx/main/migrations/_inventory_source.py +++ b/awx/main/migrations/_inventory_source.py @@ -1,6 +1,6 @@ import logging -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.main.utils.common import set_current_apps from awx.main.utils.common import parse_yaml_or_json @@ -19,7 +19,7 @@ def _get_instance_id(from_dict, new_id, default=''): break instance_id = from_dict.get(key, default) from_dict = instance_id - return smart_text(instance_id) + return smart_str(instance_id) def _get_instance_id_for_upgrade(host, new_id): @@ -35,7 +35,7 @@ def _get_instance_id_for_upgrade(host, new_id): return None if len(new_id) > 255: # this should never happen - logger.warn('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) + logger.warning('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) return None return new_id_value @@ -47,7 +47,7 @@ def set_new_instance_id(apps, source, new_id): id_from_settings = getattr(settings, '{}_INSTANCE_ID_VAR'.format(source.upper())) if id_from_settings != new_id: # User applied an instance ID themselves, so nope on out of there - logger.warn('You have an instance ID set for {}, not 
migrating'.format(source)) + logger.warning('You have an instance ID set for {}, not migrating'.format(source)) return logger.debug('Migrating inventory instance_id for {} to {}'.format(source, new_id)) Host = apps.get_model('main', 'Host') diff --git a/awx/main/migrations/_inventory_source_vars.py b/awx/main/migrations/_inventory_source_vars.py index 71c96403a6..12bad4e4b8 100644 --- a/awx/main/migrations/_inventory_source_vars.py +++ b/awx/main/migrations/_inventory_source_vars.py @@ -2,7 +2,7 @@ import json import re import logging -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.encoding import iri_to_uri diff --git a/awx/main/migrations/_squashed_30.py b/awx/main/migrations/_squashed_30.py index c604b95c37..90c2dd061b 100644 --- a/awx/main/migrations/_squashed_30.py +++ b/awx/main/migrations/_squashed_30.py @@ -2,7 +2,6 @@ from django.db import ( migrations, models, ) -import jsonfield.fields import awx.main.fields from awx.main.migrations import _save_password_keys @@ -30,7 +29,7 @@ SQUASHED_30 = { migrations.AddField( model_name='job', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), ], '0031_v302_migrate_survey_passwords': [ diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index ed49b98083..107c7a9418 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -3,6 +3,7 @@ # Django from django.conf import settings # noqa +from django.db import connection from django.db.models.signals import pre_delete # noqa # AWX @@ -97,6 +98,93 @@ User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('accessible_objects', user_accessible_objects) +def convert_jsonfields_to_jsonb(): + if connection.vendor != 'postgresql': + return + + # fmt: off + fields = [ # Table name, expensive 
or not, tuple of column names + ('conf_setting', False, ( + 'value', + )), + ('main_instancegroup', False, ( + 'policy_instance_list', + )), + ('main_jobtemplate', False, ( + 'survey_spec', + )), + ('main_notificationtemplate', False, ( + 'notification_configuration', + 'messages', + )), + ('main_project', False, ( + 'playbook_files', + 'inventory_files', + )), + ('main_schedule', False, ( + 'extra_data', + 'char_prompts', + 'survey_passwords', + )), + ('main_workflowjobtemplate', False, ( + 'survey_spec', + 'char_prompts', + )), + ('main_workflowjobtemplatenode', False, ( + 'char_prompts', + 'extra_data', + 'survey_passwords', + )), + ('main_activitystream', True, ( + 'setting', # NN = NOT NULL + 'deleted_actor', + )), + ('main_job', True, ( + 'survey_passwords', # NN + 'artifacts', # NN + )), + ('main_joblaunchconfig', True, ( + 'extra_data', # NN + 'survey_passwords', # NN + 'char_prompts', # NN + )), + ('main_notification', True, ( + 'body', # NN + )), + ('main_unifiedjob', True, ( + 'job_env', # NN + )), + ('main_workflowjob', True, ( + 'survey_passwords', # NN + 'char_prompts', # NN + )), + ('main_workflowjobnode', True, ( + 'char_prompts', # NN + 'ancestor_artifacts', # NN + 'extra_data', # NN + 'survey_passwords', # NN + )), + ] + # fmt: on + + with connection.cursor() as cursor: + for table, expensive, columns in fields: + cursor.execute( + """ + select count(1) from information_schema.columns + where + table_name = %s and + column_name in %s and + data_type != 'jsonb'; + """, + (table, columns), + ) + if cursor.fetchone()[0]: + from awx.main.tasks.system import migrate_json_fields + + migrate_json_fields.apply_async([table, expensive, columns]) + + def cleanup_created_modified_by(sender, **kwargs): # work around a bug in django-polymorphic that doesn't properly # handle cascades for reverse foreign keys on the polymorphic base model diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 6215e17a5a..aa0ab9d9d6 100644 --- 
a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -3,14 +3,13 @@ # AWX from awx.api.versioning import reverse -from awx.main.fields import JSONField from awx.main.models.base import accepts_json # Django from django.db import models from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = ['ActivityStream'] @@ -36,7 +35,7 @@ class ActivityStream(models.Model): operation = models.CharField(max_length=13, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = accepts_json(models.TextField(blank=True)) - deleted_actor = JSONField(null=True) + deleted_actor = models.JSONField(null=True) action_node = models.CharField( blank=True, default='', @@ -84,7 +83,7 @@ class ActivityStream(models.Model): o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True) o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True) - setting = JSONField(blank=True) + setting = models.JSONField(default=dict, null=True, blank=True) def __str__(self): operation = self.operation if 'operation' in self.__dict__ else '_delayed_' diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index f4065e473d..d0608bd652 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -9,7 +9,7 @@ from urllib.parse import urljoin from django.conf import settings from django.db import models from django.utils.text import Truncator -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # AWX diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 8cdd557a84..da12f603cb 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -4,7 +4,7 @@ # Django from django.db import models 
from django.core.exceptions import ValidationError, ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now # Django-Taggit diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 88c0eedadd..53f6ffaa1f 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -15,9 +15,9 @@ from jinja2 import sandbox # Django from django.db import models -from django.utils.translation import ugettext_lazy as _, ugettext_noop +from django.utils.translation import gettext_lazy as _, gettext_noop from django.core.exceptions import ValidationError -from django.utils.encoding import force_text +from django.utils.encoding import force_str from django.utils.functional import cached_property from django.utils.timezone import now @@ -230,7 +230,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): def display_inputs(self): field_val = self.inputs.copy() for k, v in field_val.items(): - if force_text(v).startswith('$encrypted$'): + if force_str(v).startswith('$encrypted$'): field_val[k] = '$encrypted$' return field_val @@ -579,34 +579,34 @@ class ManagedCredentialType(SimpleNamespace): ManagedCredentialType( namespace='ssh', kind='ssh', - name=ugettext_noop('Machine'), + name=gettext_noop('Machine'), inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 
'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_public_key_data', - 'label': ugettext_noop('Signed SSH Certificate'), + 'label': gettext_noop('Signed SSH Certificate'), 'type': 'string', 'multiline': True, 'secret': True, }, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 'id': 'become_method', - 'label': ugettext_noop('Privilege Escalation Method'), + 'label': gettext_noop('Privilege Escalation Method'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.' ), }, { 'id': 'become_username', - 'label': ugettext_noop('Privilege Escalation Username'), + 'label': gettext_noop('Privilege Escalation Username'), 'type': 'string', }, - {'id': 'become_password', 'label': ugettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, ], }, ) @@ -614,14 +614,14 @@ ManagedCredentialType( ManagedCredentialType( namespace='scm', kind='scm', - name=ugettext_noop('Source Control'), + name=gettext_noop('Source Control'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key 
Passphrase'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True}, ], }, ) @@ -629,17 +629,17 @@ ManagedCredentialType( ManagedCredentialType( namespace='vault', kind='vault', - name=ugettext_noop('Vault'), + name=gettext_noop('Vault'), managed=True, inputs={ 'fields': [ - {'id': 'vault_password', 'label': ugettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 'id': 'vault_id', - 'label': ugettext_noop('Vault Identifier'), + 'label': gettext_noop('Vault Identifier'), 'type': 'string', 'format': 'vault_id', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify an (optional) Vault ID. 
This is ' 'equivalent to specifying the --vault-id ' 'Ansible parameter for providing multiple Vault ' @@ -655,32 +655,32 @@ ManagedCredentialType( ManagedCredentialType( namespace='net', kind='net', - name=ugettext_noop('Network'), + name=gettext_noop('Network'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_key_unlock', - 'label': ugettext_noop('Private Key Passphrase'), + 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, }, { 'id': 'authorize', - 'label': ugettext_noop('Authorize'), + 'label': gettext_noop('Authorize'), 'type': 'boolean', }, { 'id': 'authorize_password', - 'label': ugettext_noop('Authorize Password'), + 'label': gettext_noop('Authorize Password'), 'type': 'string', 'secret': True, }, @@ -695,23 +695,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='aws', kind='cloud', - name=ugettext_noop('Amazon Web Services'), + name=gettext_noop('Amazon Web Services'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Access Key'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Secret Key'), + 'label': gettext_noop('Secret Key'), 'type': 'string', 'secret': True, }, { 'id': 'security_token', - 'label': ugettext_noop('STS Token'), + 'label': gettext_noop('STS Token'), 'type': 'string', 'secret': True, - 'help_text': 
ugettext_noop( + 'help_text': gettext_noop( 'Security Token Service (STS) is a web service ' 'that enables you to request temporary, ' 'limited-privilege credentials for AWS Identity ' @@ -726,38 +726,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='openstack', kind='cloud', - name=ugettext_noop('OpenStack'), + name=gettext_noop('OpenStack'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password (API Key)'), + 'label': gettext_noop('Password (API Key)'), 'type': 'string', 'secret': True, }, { 'id': 'host', - 'label': ugettext_noop('Host (Authentication URL)'), + 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', - 'help_text': ugettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), + 'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), }, { 'id': 'project', - 'label': ugettext_noop('Project (Tenant Name)'), + 'label': gettext_noop('Project (Tenant Name)'), 'type': 'string', }, { 'id': 'project_domain_name', - 'label': ugettext_noop('Project (Domain Name)'), + 'label': gettext_noop('Project (Domain Name)'), 'type': 'string', }, { 'id': 'domain', - 'label': ugettext_noop('Domain Name'), + 'label': gettext_noop('Domain Name'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'OpenStack domains define administrative boundaries. ' 'It is only needed for Keystone v3 authentication ' 'URLs. 
Refer to the documentation for ' @@ -766,13 +766,13 @@ ManagedCredentialType( }, { 'id': 'region', - 'label': ugettext_noop('Region Name'), + 'label': gettext_noop('Region Name'), 'type': 'string', - 'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'), + 'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -784,20 +784,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='vmware', kind='cloud', - name=ugettext_noop('VMware vCenter'), + name=gettext_noop('VMware vCenter'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('VCenter Host'), + 'label': gettext_noop('VCenter Host'), 'type': 'string', - 'help_text': ugettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), + 'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -809,20 +809,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='satellite6', kind='cloud', - name=ugettext_noop('Red Hat Satellite 6'), + name=gettext_noop('Red Hat Satellite 6'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Satellite 6 URL'), + 'label': gettext_noop('Satellite 6 URL'), 'type': 'string', - 'help_text': ugettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'), + 'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. 
For example, https://satellite.example.org'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -834,21 +834,21 @@ ManagedCredentialType( ManagedCredentialType( namespace='gce', kind='cloud', - name=ugettext_noop('Google Compute Engine'), + name=gettext_noop('Google Compute Engine'), managed=True, inputs={ 'fields': [ { 'id': 'username', - 'label': ugettext_noop('Service Account Email Address'), + 'label': gettext_noop('Service Account Email Address'), 'type': 'string', - 'help_text': ugettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), + 'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), }, { 'id': 'project', 'label': 'Project', 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'The Project ID is the GCE assigned identification. ' 'It is often constructed as three words or two words ' 'followed by a three-digit number. 
Examples: project-id-000 ' @@ -857,12 +857,12 @@ ManagedCredentialType( }, { 'id': 'ssh_key_data', - 'label': ugettext_noop('RSA Private Key'), + 'label': gettext_noop('RSA Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True, - 'help_text': ugettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), + 'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), }, ], 'required': ['username', 'ssh_key_data'], @@ -872,36 +872,36 @@ ManagedCredentialType( ManagedCredentialType( namespace='azure_rm', kind='cloud', - name=ugettext_noop('Microsoft Azure Resource Manager'), + name=gettext_noop('Microsoft Azure Resource Manager'), managed=True, inputs={ 'fields': [ { 'id': 'subscription', - 'label': ugettext_noop('Subscription ID'), + 'label': gettext_noop('Subscription ID'), 'type': 'string', - 'help_text': ugettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), + 'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'client', 'label': ugettext_noop('Client ID'), 'type': 'string'}, + {'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'}, { 'id': 'secret', - 'label': ugettext_noop('Client Secret'), + 'label': gettext_noop('Client Secret'), 'type': 'string', 'secret': True, }, - {'id': 'tenant', 'label': ugettext_noop('Tenant ID'), 'type': 'string'}, + {'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'}, { 'id': 'cloud_environment', - 'label': ugettext_noop('Azure Cloud Environment'), + 'label': gettext_noop('Azure Cloud Environment'), 
'type': 'string', - 'help_text': ugettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), + 'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), }, ], 'required': ['subscription'], @@ -911,16 +911,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='github_token', kind='token', - name=ugettext_noop('GitHub Personal Access Token'), + name=gettext_noop('GitHub Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'), } ], 'required': ['token'], @@ -930,16 +930,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='gitlab_token', kind='token', - name=ugettext_noop('GitLab Personal Access Token'), + name=gettext_noop('GitLab Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'), } ], 'required': ['token'], @@ -949,12 +949,12 @@ ManagedCredentialType( ManagedCredentialType( namespace='insights', kind='insights', - name=ugettext_noop('Insights'), + name=gettext_noop('Insights'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 
'secret': True}, ], 'required': ['username', 'password'], }, @@ -973,23 +973,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='rhv', kind='cloud', - name=ugettext_noop('Red Hat Virtualization'), + name=gettext_noop('Red Hat Virtualization'), managed=True, inputs={ 'fields': [ - {'id': 'host', 'label': ugettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': ugettext_noop('The host to authenticate with.')}, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'ca_file', - 'label': ugettext_noop('CA File'), + 'label': gettext_noop('CA File'), 'type': 'string', - 'help_text': ugettext_noop('Absolute file path to the CA file to use (optional)'), + 'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'), }, ], 'required': ['host', 'username', 'password'], @@ -1017,38 +1017,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='controller', kind='cloud', - name=ugettext_noop('Red Hat Ansible Automation Platform'), + name=gettext_noop('Red Hat Ansible Automation Platform'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Red Hat Ansible Automation Platform'), + 'label': gettext_noop('Red Hat Ansible Automation Platform'), 'type': 'string', - 'help_text': ugettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), + 'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': 
gettext_noop( 'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.' ), }, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'oauth_token', - 'label': ugettext_noop('OAuth Token'), + 'label': gettext_noop('OAuth Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), + 'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), }, - {'id': 'verify_ssl', 'label': ugettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, + {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, ], 'required': ['host'], }, @@ -1071,30 +1071,30 @@ ManagedCredentialType( ManagedCredentialType( namespace='kubernetes_bearer_token', kind='kubernetes', - name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'), + name=gettext_noop('OpenShift or Kubernetes API Bearer Token'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'), + 'label': gettext_noop('OpenShift or Kubernetes API Endpoint'), 'type': 'string', - 'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), + 'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), }, { 'id': 'bearer_token', - 'label': ugettext_noop('API authentication bearer token'), + 'label': gettext_noop('API authentication bearer token'), 'type': 'string', 'secret': True, }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, { 'id': 'ssl_ca_cert', - 'label': ugettext_noop('Certificate Authority data'), + 'label': gettext_noop('Certificate 
Authority data'), 'type': 'string', 'secret': True, 'multiline': True, @@ -1107,31 +1107,31 @@ ManagedCredentialType( ManagedCredentialType( namespace='registry', kind='registry', - name=ugettext_noop('Container Registry'), + name=gettext_noop('Container Registry'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Authentication URL'), + 'label': gettext_noop('Authentication URL'), 'type': 'string', - 'help_text': ugettext_noop('Authentication endpoint for the container registry.'), + 'help_text': gettext_noop('Authentication endpoint for the container registry.'), 'default': 'quay.io', }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', }, { 'id': 'password', - 'label': ugettext_noop('Password or Token'), + 'label': gettext_noop('Password or Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A password or token used to authenticate with'), + 'help_text': gettext_noop('A password or token used to authenticate with'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -1144,27 +1144,27 @@ ManagedCredentialType( ManagedCredentialType( namespace='galaxy_api_token', kind='galaxy', - name=ugettext_noop('Ansible Galaxy/Automation Hub API Token'), + name=gettext_noop('Ansible Galaxy/Automation Hub API Token'), inputs={ 'fields': [ { 'id': 'url', - 'label': ugettext_noop('Galaxy Server URL'), + 'label': gettext_noop('Galaxy Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of the Galaxy instance to connect to.'), + 'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'), }, { 'id': 'auth_url', - 'label': ugettext_noop('Auth Server URL'), + 'label': gettext_noop('Auth Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'), + 'help_text': gettext_noop('The URL of a 
Keycloak server token_endpoint, if using ' 'SSO auth.'), }, { 'id': 'token', - 'label': ugettext_noop('API Token'), + 'label': gettext_noop('API Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A token to use for authentication against the Galaxy instance.'), + 'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'), }, ], 'required': ['url'], diff --git a/awx/main/models/events.py b/awx/main/models/events.py index f953e7ca61..f80c23d58b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -10,13 +10,13 @@ from django.db import models, DatabaseError, connection from django.utils.dateparse import parse_datetime from django.utils.text import Truncator from django.utils.timezone import utc, now -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from awx.api.versioning import reverse from awx.main import consumers +from awx.main.fields import JSONBlob from awx.main.managers import DeferJobCreatedManager -from awx.main.fields import JSONField from awx.main.constants import MINIMAL_EVENTS from awx.main.models.base import CreatedModifiedModel from awx.main.utils import ignore_inventory_computed_fields, camelcase_to_underscore @@ -209,10 +209,7 @@ class BasePlaybookEvent(CreatedModifiedModel): max_length=100, choices=EVENT_CHOICES, ) - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) failed = models.BooleanField( default=False, editable=False, @@ -396,7 +393,7 @@ class BasePlaybookEvent(CreatedModifiedModel): connection.on_commit(_send_notifications) for field in ('playbook', 'play', 'task', 'role'): - value = force_text(event_data.get(field, '')).strip() + value = force_str(event_data.get(field, '')).strip() if value != getattr(self, field): setattr(self, field, value) if 
settings.LOG_AGGREGATOR_ENABLED: @@ -648,10 +645,7 @@ class BaseCommandEvent(CreatedModifiedModel): class Meta: abstract = True - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) uuid = models.CharField( max_length=1024, default='', diff --git a/awx/main/models/execution_environments.py b/awx/main/models/execution_environments.py index b0b3dd7579..55ce69098b 100644 --- a/awx/main/models/execution_environments.py +++ b/awx/main/models/execution_environments.py @@ -1,5 +1,5 @@ from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.versioning import reverse from awx.main.models.base import CommonModel diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 08d95bf86a..a9dc9b887d 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -9,7 +9,7 @@ from django.core.validators import MinValueValidator from django.db import models, connection from django.db.models.signals import post_save, post_delete from django.dispatch import receiver -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.conf import settings from django.utils.timezone import now, timedelta @@ -19,7 +19,6 @@ from solo.models import SingletonModel from awx import __version__ as awx_application_version from awx.api.versioning import reverse from awx.main.managers import InstanceManager, InstanceGroupManager, UUID_DEFAULT -from awx.main.fields import JSONField from awx.main.constants import JOB_FOLDER_PREFIX from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search from awx.main.models.unified_jobs import UnifiedJob @@ -253,7 +252,7 @@ class Instance(HasPolicyEditsMixin, BaseModel): if uuid is not None and self.uuid != uuid: if self.uuid is not None: - logger.warn(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') + 
logger.warning(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') self.uuid = uuid update_fields.append('uuid') @@ -328,8 +327,8 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): ) policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically assign to this group")) policy_instance_minimum = models.IntegerField(default=0, help_text=_("Static minimum number of Instances to automatically assign to this group")) - policy_instance_list = JSONField( - default=[], blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") + policy_instance_list = models.JSONField( + default=list, blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") ) POLICY_FIELDS = frozenset(('policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage')) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 0cac6602e0..3b7945c965 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -14,7 +14,7 @@ import yaml # Django from django.conf import settings from django.db import models, connection -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db import transaction from django.core.exceptions import ValidationError from django.utils.timezone import now @@ -29,7 +29,6 @@ from awx.main.constants import CLOUD_PROVIDERS from awx.main.consumers import emit_channel_notification from awx.main.fields import ( ImplicitRoleField, - JSONBField, SmartFilterField, OrderedManyToManyField, ) @@ -488,7 +487,7 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin): editable=False, help_text=_('Inventory source(s) that created or modified this host.'), ) - ansible_facts = JSONBField( + ansible_facts = models.JSONField( blank=True, default=dict, 
help_text=_('Arbitrary JSON structure of most recent ansible_facts, per-host.'), diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index e405c98596..3b22ecd02c 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -19,7 +19,7 @@ from django.db import models # from django.core.cache import cache from django.utils.encoding import smart_str from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import FieldDoesNotExist # REST Framework @@ -44,7 +44,7 @@ from awx.main.models.notifications import ( JobNotificationMixin, ) from awx.main.utils import parse_yaml_or_json, getattr_dne, NullablePromptPseudoField -from awx.main.fields import ImplicitRoleField, JSONField, AskForField +from awx.main.fields import ImplicitRoleField, AskForField from awx.main.models.mixins import ( ResourceMixin, SurveyJobTemplateMixin, @@ -546,9 +546,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana editable=False, through='JobHostSummary', ) - artifacts = JSONField( - blank=True, + artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) scm_revision = models.CharField( @@ -885,7 +886,7 @@ class LaunchTimeConfigBase(BaseModel): ) # All standard fields are stored in this dictionary field # This is a solution to the nullable CharField problem, specific to prompting - char_prompts = JSONField(blank=True, default=dict) + char_prompts = models.JSONField(default=dict, null=True, blank=True) def prompts_dict(self, display=False): data = {} @@ -938,12 +939,13 @@ class LaunchTimeConfig(LaunchTimeConfigBase): abstract = True # Special case prompting fields, even more special than the other ones - extra_data = JSONField(blank=True, default=dict) + extra_data = models.JSONField(default=dict, null=True, blank=True) survey_passwords = prevent_search( - JSONField( - blank=True, + 
models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) # Credentials needed for non-unified job / unified JT models diff --git a/awx/main/models/label.py b/awx/main/models/label.py index 18bdb2b025..7ca92d4ff2 100644 --- a/awx/main/models/label.py +++ b/awx/main/models/label.py @@ -3,7 +3,7 @@ # Django from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index 45a3cae885..94e737859b 100644 --- a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -15,7 +15,7 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models.query import QuerySet from django.utils.crypto import get_random_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.main.models.base import prevent_search @@ -24,7 +24,7 @@ from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_lice from awx.main.utils.execution_environments import get_default_execution_environment from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted from awx.main.utils.polymorphic import build_polymorphic_ctypes_map -from awx.main.fields import JSONField, AskForField +from awx.main.fields import AskForField from awx.main.constants import ACTIVE_STATES @@ -103,12 +103,7 @@ class SurveyJobTemplateMixin(models.Model): survey_enabled = models.BooleanField( default=False, ) - survey_spec = prevent_search( - JSONField( - blank=True, - default=dict, - ) - ) + survey_spec = prevent_search(models.JSONField(default=dict, blank=True)) ask_variables_on_launch = AskForField(blank=True, default=False, allows_field='extra_vars') def survey_password_variables(self): @@ -370,10 +365,11 @@ class SurveyJobMixin(models.Model): abstract = True 
survey_passwords = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 860e591e2c..9bfd1bc6b5 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -10,8 +10,8 @@ from django.db import models from django.conf import settings from django.core.mail.message import EmailMessage from django.db import connection -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str, force_str from jinja2 import sandbox, ChainableUndefined from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError @@ -28,7 +28,6 @@ from awx.main.notifications.mattermost_backend import MattermostBackend from awx.main.notifications.grafana_backend import GrafanaBackend from awx.main.notifications.rocketchat_backend import RocketChatBackend from awx.main.notifications.irc_backend import IrcBackend -from awx.main.fields import JSONField logger = logging.getLogger('awx.main.models.notifications') @@ -70,12 +69,12 @@ class NotificationTemplate(CommonModelNameNotUnique): choices=NOTIFICATION_TYPE_CHOICES, ) - notification_configuration = prevent_search(JSONField(blank=False)) + notification_configuration = prevent_search(models.JSONField(default=dict)) def default_messages(): return {'started': None, 'success': None, 'error': None, 'workflow_approval': None} - messages = JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) + messages = models.JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) def has_message(self, condition): potential_template = self.messages.get(condition, {}) @@ -187,7 +186,7 
@@ class NotificationTemplate(CommonModelNameNotUnique): def display_notification_configuration(self): field_val = self.notification_configuration.copy() for field in self.notification_class.init_parameters: - if field in field_val and force_text(field_val[field]).startswith('$encrypted$'): + if field in field_val and force_str(field_val[field]).startswith('$encrypted$'): field_val[field] = '$encrypted$' return field_val @@ -237,7 +236,7 @@ class Notification(CreatedModifiedModel): default='', editable=False, ) - body = JSONField(blank=True) + body = models.JSONField(default=dict, null=True, blank=True) def get_absolute_url(self, request=None): return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request) @@ -515,7 +514,7 @@ class JobNotificationMixin(object): try: notification_templates = self.get_notification_templates() except Exception: - logger.warn("No notification template defined for emitting notification") + logger.warning("No notification template defined for emitting notification") return if not notification_templates: diff --git a/awx/main/models/oauth.py b/awx/main/models/oauth.py index b9b4b8c217..c9927f78bd 100644 --- a/awx/main/models/oauth.py +++ b/awx/main/models/oauth.py @@ -6,7 +6,7 @@ import re from django.core.validators import RegexValidator from django.db import models, connection from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.conf import settings # Django OAuth Toolkit diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 277b33315e..30a393d72b 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -8,7 +8,7 @@ from django.db import models from django.contrib.auth.models import User from django.contrib.sessions.models import Session from django.utils.timezone import now as tz_now -from django.utils.translation import ugettext_lazy as _ +from 
django.utils.translation import gettext_lazy as _ # AWX diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index a2de97e34f..385674d7ab 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -9,8 +9,8 @@ import urllib.parse as urlparse # Django from django.conf import settings from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from django.utils.text import slugify from django.core.exceptions import ValidationError from django.utils.timezone import now, make_aware, get_default_timezone @@ -38,7 +38,6 @@ from awx.main.models.rbac import ( ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR, ) -from awx.main.fields import JSONField __all__ = ['Project', 'ProjectUpdate'] @@ -214,7 +213,7 @@ class ProjectOptions(models.Model): for filename in filenames: playbook = could_be_playbook(project_path, dirpath, filename) if playbook is not None: - results.append(smart_text(playbook)) + results.append(smart_str(playbook)) return sorted(results, key=lambda x: smart_str(x).lower()) @property @@ -230,7 +229,7 @@ class ProjectOptions(models.Model): for filename in filenames: inv_path = could_be_inventory(project_path, dirpath, filename) if inv_path is not None: - results.append(smart_text(inv_path)) + results.append(smart_str(inv_path)) if len(results) > max_inventory_listing: break if len(results) > max_inventory_listing: @@ -294,17 +293,17 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn help_text=_('The last revision fetched by a project update'), ) - playbook_files = JSONField( + playbook_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Playbook Files'), help_text=_('List of playbooks found in the project'), ) - inventory_files = JSONField( + 
inventory_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Inventory Files'), help_text=_('Suggested list of content that could be Ansible inventory in the project'), diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 485f70bd0d..8f54cc3e43 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -11,7 +11,7 @@ import re from django.db import models, transaction, connection from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index dca50d9232..c3fae526f1 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -14,7 +14,7 @@ from dateutil.zoneinfo import get_zonefile_instance from django.db import models from django.db.models.query import QuerySet from django.utils.timezone import now, make_aware -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse @@ -103,7 +103,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig): for zone in all_zones: if fname.endswith(zone): return zone - logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule)) + logger.warning('Could not detect valid zoneinfo for {}'.format(self.rrule)) return '' @property diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 489cba9799..65804c97b0 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -19,9 +19,9 @@ from collections import OrderedDict from django.conf import settings from django.db import models, connection from django.core.exceptions import NON_FIELD_ERRORS -from django.utils.translation import ugettext_lazy 
as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.contrib.contenttypes.models import ContentType # REST Framework @@ -54,7 +54,7 @@ from awx.main.utils import polymorphic from awx.main.constants import ACTIVE_STATES, CAN_CANCEL from awx.main.redact import UriCleaner, REPLACE_STR from awx.main.consumers import emit_channel_notification -from awx.main.fields import JSONField, JSONBField, AskForField, OrderedManyToManyField +from awx.main.fields import AskForField, OrderedManyToManyField __all__ = ['UnifiedJobTemplate', 'UnifiedJob', 'StdoutMaxBytesExceeded'] @@ -357,7 +357,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn validated_kwargs = kwargs.copy() if unallowed_fields: if parent_field_name is None: - logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) + logger.warning('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) for f in unallowed_fields: validated_kwargs.pop(f) @@ -653,9 +653,10 @@ class UnifiedJob( editable=False, ) job_env = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) ) @@ -704,7 +705,7 @@ class UnifiedJob( 'Credential', related_name='%(class)ss', ) - installed_collections = JSONBField( + installed_collections = models.JSONField( blank=True, default=dict, editable=False, @@ -1090,7 +1091,7 @@ class UnifiedJob( # function assume a str-based fd will be returned; decode # .write() calls on the fly to maintain this interface _write = fd.write - fd.write = lambda s: _write(smart_text(s)) + fd.write = lambda s: _write(smart_str(s)) tbl = self._meta.db_table + 'event' created_by_cond = '' if self.has_unpartitioned_events: @@ -1205,7 +1206,7 @@ class UnifiedJob( try: extra_data_dict = 
parse_yaml_or_json(extra_data, silent_failure=False) except Exception as e: - logger.warn("Exception deserializing extra vars: " + str(e)) + logger.warning("Exception deserializing extra vars: " + str(e)) evars = self.extra_vars_dict evars.update(extra_data_dict) self.update_fields(extra_vars=json.dumps(evars)) @@ -1273,7 +1274,7 @@ class UnifiedJob( id=self.id, name=self.name, url=self.get_ui_url(), - created_by=smart_text(self.created_by), + created_by=smart_str(self.created_by), started=self.started.isoformat() if self.started is not None else None, finished=self.finished.isoformat() if self.finished is not None else None, status=self.status, diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 684e25b967..197951ea05 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -11,7 +11,7 @@ from urllib.parse import urljoin # Django from django.db import connection, models from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # from django import settings as tower_settings @@ -40,7 +40,6 @@ from awx.main.models.mixins import ( from awx.main.models.jobs import LaunchTimeConfigBase, LaunchTimeConfig, JobTemplate from awx.main.models.credential import Credential from awx.main.redact import REPLACE_STR -from awx.main.fields import JSONField from awx.main.utils import schedule_task_manager @@ -232,9 +231,10 @@ class WorkflowJobNode(WorkflowNodeBase): default=None, on_delete=models.CASCADE, ) - ancestor_artifacts = JSONField( - blank=True, + ancestor_artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) do_not_run = models.BooleanField( diff --git a/awx/main/notifications/grafana_backend.py b/awx/main/notifications/grafana_backend.py index 4e9a7a6262..51a27a897e 100644 --- a/awx/main/notifications/grafana_backend.py +++ 
b/awx/main/notifications/grafana_backend.py @@ -7,8 +7,8 @@ import logging import requests import dateutil.parser as dp -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -82,9 +82,9 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): if m.body.get('finished'): grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000) except ValueError: - logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + logger.error(smart_str(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) if not self.fail_silently: - raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + raise Exception(smart_str(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) grafana_data['isRegion'] = self.isRegion grafana_data['dashboardId'] = self.dashboardId grafana_data['panelId'] = self.panelId @@ -97,8 +97,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): "{}/api/annotations".format(m.recipients()[0]), json=grafana_data, headers=grafana_headers, verify=(not self.grafana_no_verify_ssl) ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification grafana: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification 
grafana: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index d020de824d..20a5523b52 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -7,8 +7,8 @@ import logging import irc.client -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -55,7 +55,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): connect_factory=connection_factory, ) except irc.client.ServerConnectionError as e: - logger.error(smart_text(_("Exception connecting to irc server: {}").format(e))) + logger.error(smart_str(_("Exception connecting to irc server: {}").format(e))) if not self.fail_silently: raise return True diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index b9cc513ba7..c96b3e9f54 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -4,8 +4,8 @@ import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -44,8 +44,8 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase): r = requests.post("{}".format(m.recipients()[0]), json=payload, verify=(not self.mattermost_no_verify_ssl)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification mattermost: 
{}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification mattermost: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification mattermost: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index 8cde9e3cfd..cfc3073ed4 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -5,8 +5,8 @@ import json import logging import pygerduty -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -78,13 +78,13 @@ class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e))) + logger.error(smart_str(_("Exception connecting to PagerDuty: {}").format(e))) for m in messages: try: pager.trigger_incident(m.recipients()[0], description=m.subject, details=m.body, client=m.from_email) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py index 9092b90f17..67155233c7 100644 --- a/awx/main/notifications/rocketchat_backend.py +++ b/awx/main/notifications/rocketchat_backend.py @@ -5,8 +5,8 @@ import logging import 
requests import json -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import get_awx_http_client_headers @@ -44,8 +44,8 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase): ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 73364dc037..d1016526aa 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -5,8 +5,8 @@ import logging from slack_sdk import WebClient from slack_sdk.errors import SlackApiError -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -53,7 +53,7 @@ class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase): else: raise RuntimeError("Slack Notification unable to send {}: {} ({})".format(r, m.subject, response['error'])) except SlackApiError as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise 
return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 0b730a56b2..1f54d603ac 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -5,8 +5,8 @@ import logging from twilio.rest import Client -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -37,14 +37,14 @@ class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e))) + logger.error(smart_str(_("Exception connecting to Twilio: {}").format(e))) for m in messages: try: connection.messages.create(to=m.to, from_=m.from_email, body=m.subject) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index 342184ecf2..30518e0714 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -5,8 +5,8 @@ import json import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import get_awx_http_client_headers @@ -76,8 +76,8 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): verify=(not 
self.disable_ssl_verification), ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/registrar.py b/awx/main/registrar.py index 07e721a953..31133f936b 100644 --- a/awx/main/registrar.py +++ b/awx/main/registrar.py @@ -32,7 +32,7 @@ class ActivityStreamRegistrar(object): post_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_create") pre_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_update") pre_delete.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_delete") - self.models.pop(model) + self.models.remove(model) for m2mfield in model._meta.many_to_many: m2m_attr = getattr(model, m2mfield.name) diff --git a/awx/main/routing.py b/awx/main/routing.py index 6ba58e68c6..2818559428 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -1,8 +1,8 @@ import redis import logging -from django.conf.urls import url from django.conf import settings +from django.urls import re_path from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter @@ -21,14 +21,14 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter): logger.debug(f"cleaning up Redis key {k}") r.delete(k) except redis.exceptions.RedisError as e: - logger.warn("encountered an error communicating with redis.") + logger.warning("encountered an error communicating with redis.") raise e super().__init__(*args, **kwargs) websocket_urlpatterns = [ - url(r'websocket/$', consumers.EventConsumer), - url(r'websocket/broadcast/$', consumers.BroadcastConsumer), + re_path(r'websocket/$', 
consumers.EventConsumer), + re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer), ] application = AWXProtocolTypeRouter( diff --git a/awx/main/scheduler/dag_workflow.py b/awx/main/scheduler/dag_workflow.py index 39995f437c..c2afba68ad 100644 --- a/awx/main/scheduler/dag_workflow.py +++ b/awx/main/scheduler/dag_workflow.py @@ -1,5 +1,5 @@ -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str # Python from awx.main.models import ( @@ -171,7 +171,7 @@ class WorkflowDAG(SimpleDAG): parms['node_status'] = ",".join(["({},{})".format(id, status) for id, status in failed_path_nodes_id_status]) if len(failed_unified_job_template_node_ids) > 0: parms['no_ufjt'] = ",".join(failed_unified_job_template_node_ids) - return True, smart_text(s.format(**parms)) + return True, smart_str(s.format(**parms)) return False, None r''' diff --git a/awx/main/scheduler/kubernetes.py b/awx/main/scheduler/kubernetes.py index 6e36226df5..8566ca4864 100644 --- a/awx/main/scheduler/kubernetes.py +++ b/awx/main/scheduler/kubernetes.py @@ -7,7 +7,7 @@ from urllib import parse as urlparse from django.conf import settings from kubernetes import client, config from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.main.utils.common import parse_yaml_or_json, deepmerge from awx.main.utils.execution_environments import get_default_pod_spec diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index beb4840c9a..fba17917cf 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -10,7 +10,7 @@ from types import SimpleNamespace # Django from django.db import transaction, connection -from django.utils.translation import ugettext_lazy as _, gettext_noop +from 
django.utils.translation import gettext_lazy as _, gettext_noop from django.utils.timezone import now as tz_now from django.conf import settings @@ -574,7 +574,7 @@ class TaskManager: timeout_message = _("The approval node {name} ({pk}) has expired after {timeout} seconds.").format( name=task.name, pk=task.pk, timeout=task.timeout ) - logger.warn(timeout_message) + logger.warning(timeout_message) task.timed_out = True task.status = 'failed' task.send_approval_notification('timed_out') diff --git a/awx/main/tasks/__init__.py b/awx/main/tasks/__init__.py index e69de29bb2..517df4a285 100644 --- a/awx/main/tasks/__init__.py +++ b/awx/main/tasks/__init__.py @@ -0,0 +1 @@ +from . import jobs, receptor, system # noqa diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index ccd9c39815..b1a4c450e5 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -8,7 +8,7 @@ import stat # Django from django.utils.timezone import now from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid # AWX from awx.main.redact import UriCleaner @@ -25,7 +25,7 @@ class RunnerCallback: def __init__(self, model=None): self.parent_workflow_job_id = None self.host_map = {} - self.guid = GuidMiddleware.get_guid() + self.guid = get_guid() self.job_created = None self.recent_event_timings = deque(maxlen=settings.MAX_WEBSOCKET_EVENT_RATE) self.dispatcher = CallbackQueueDispatcher() @@ -154,7 +154,7 @@ class RunnerCallback: if self.instance.cancel_flag or self.instance.status == 'canceled': cancel_wait = (now() - self.instance.modified).seconds if self.instance.modified else 0 if cancel_wait > 5: - logger.warn('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) + logger.warning('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) return True return False diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py 
index 7d2057d62d..6fb1613f0f 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -81,7 +81,7 @@ from awx.main.utils.handlers import SpecialInventoryHandler from awx.main.tasks.system import handle_success_and_failure_notifications, update_smart_memberships_for_inventory, update_inventory_computed_fields from awx.main.utils.update_model import update_model from rest_framework.exceptions import PermissionDenied -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ logger = logging.getLogger('awx.main.tasks.jobs') @@ -169,7 +169,7 @@ class BaseTask(object): # mount_option validation via performed via API, but since this can be overriden via settings.py if mount_option not in CONTAINER_VOLUMES_MOUNT_TYPES: mount_option = 'z' - logger.warn(f'The path {this_path} has volume mount type {mount_option} which is not supported. Using "z" instead.') + logger.warning(f'The path {this_path} has volume mount type {mount_option} which is not supported. 
Using "z" instead.') params['container_volume_mounts'].append(f'{src}:{dest}:{mount_option}') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 5d58217b6f..c2028dbb36 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -164,7 +164,7 @@ def run_until_complete(node, timing_data=None, **kwargs): if settings.RECEPTOR_RELEASE_WORK: res = receptor_ctl.simple_command(f"work release {unit_id}") if res != {'released': unit_id}: - logger.warn(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') + logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') receptor_ctl.close() @@ -358,9 +358,9 @@ class AWXReceptorJob: logger.exception(f'An error was encountered while getting status for work unit {self.unit_id}') if 'exceeded quota' in detail: - logger.warn(detail) + logger.warning(detail) log_name = self.task.instance.log_format - logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.") + logger.warning(f"Could not launch pod for {log_name}. 
Exceeded quota.") self.task.update_model(self.task.instance.pk, status='pending') return # If ansible-runner ran, but an error occured at runtime, the traceback information @@ -380,7 +380,7 @@ class AWXReceptorJob: self.task.instance.result_traceback = detail self.task.instance.save(update_fields=['result_traceback']) else: - logger.warn(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') + logger.warning(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') except Exception: raise RuntimeError(detail) diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index 927bbbee2a..008c3bcb2f 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -1,5 +1,6 @@ # Python from collections import namedtuple +import itertools import functools import importlib import json @@ -13,15 +14,16 @@ from distutils.version import LooseVersion as Version # Django from django.conf import settings -from django.db import transaction, DatabaseError, IntegrityError +from django.db import connection, transaction, DatabaseError, IntegrityError from django.db.models.fields.related import ForeignKey from django.utils.timezone import now from django.utils.encoding import smart_str from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_noop from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist +from django.contrib.contenttypes.models import ContentType # Django-CRUM from crum import impersonate @@ -46,6 +48,7 @@ from awx.main.models import ( Inventory, SmartInventoryMembership, Job, + convert_jsonfields_to_jsonb, ) from awx.main.constants import ACTIVE_STATES from awx.main.dispatch.publish import task @@ -78,6 +81,9 @@ Try upgrading OpenSSH or providing your private key in an different format. 
\ def dispatch_startup(): startup_logger = logging.getLogger('awx.main.tasks') + + convert_jsonfields_to_jsonb() + startup_logger.debug("Syncing Schedules") for sch in Schedule.objects.all(): try: @@ -121,6 +127,123 @@ def inform_cluster_of_shutdown(): logger.exception('Encountered problem with normal shutdown signal.') +def migrate_json_fields_expensive(table, columns): + batchsize = 50000 + + ct = ContentType.objects.get_by_natural_key(*table.split('_', 1)) + model = ct.model_class() + + # Phase 1: add the new columns, making them nullable to avoid populating them + with connection.schema_editor() as schema_editor: + # See: https://docs.djangoproject.com/en/3.1/ref/schema-editor/ + + for colname in columns: + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.add_field(model, new_f) + + # Create a trigger to make sure new data automatically gets put in both fields. 
+ with connection.cursor() as cursor: + # It's a little annoying, I think this trigger will re-do + # the same work as the update query in Phase 2 + cursor.execute( + f""" + create or replace function update_{table}_{colname}() + returns trigger as $body$ + begin + new._{colname} = new.{colname}::jsonb; + return new; + end + $body$ language plpgsql; + """ + ) + cursor.execute( + f""" + create trigger {table}_{colname}_trigger + before insert or update + on {table} + for each row + execute procedure update_{table}_{colname}(); + """ + ) + + # Phase 2: copy over the data + with connection.cursor() as cursor: + rows = 0 + for i in itertools.count(0, batchsize): + cursor.execute(f"select count(1) from {table} where id >= %s;", (i,)) + if not cursor.fetchone()[0]: + break + + column_expr = ', '.join(f"_{colname} = {colname}::jsonb" for colname in columns) + cursor.execute( + f""" + update {table} + set {column_expr} + where id >= %s and id < %s; + """, + (i, i + batchsize), + ) + rows += cursor.rowcount + logger.debug(f"Batch {i} to {i + batchsize} copied on {table}.") + + logger.warning(f"Data copied for {rows} rows on {table}.") + + # Phase 3: drop the old column and rename the new one + with connection.schema_editor() as schema_editor: + + # FIXME: Grab a lock explicitly here? 
+ for colname in columns: + with connection.cursor() as cursor: + cursor.execute(f"drop trigger {table}_{colname}_trigger on {table};") + cursor.execute(f"drop function update_{table}_{colname};") + + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.remove_field(model, f) + + _, _, args, kwargs = new_f.deconstruct() + f = new_f.__class__(*args, **kwargs) + f.set_attributes_from_name(colname) + + schema_editor.alter_field(model, new_f, f) + + +@task(queue=get_local_queuename) +def migrate_json_fields(table, expensive, columns): + logger.warning(f"Migrating json fields: {table} {columns}") + + with advisory_lock(f'json_migration_{table}', wait=False) as acquired: + if not acquired: + return + + from django.db.migrations.executor import MigrationExecutor + + # If Django is currently running migrations, wait until it is done. + while True: + executor = MigrationExecutor(connection) + if not executor.migration_plan(executor.loader.graph.leaf_nodes()): + break + time.sleep(60) + + if expensive: + migrate_json_fields_expensive(table, columns) + else: + with connection.cursor() as cursor: + column_expr = ", ".join(f"ALTER {colname} TYPE jsonb USING {colname}::jsonb" for colname in columns) + cursor.execute(f"ALTER TABLE {table} {column_expr};") + + logger.warning(f"Migration of {table} to jsonb is finished") + + @task(queue=get_local_queuename) def apply_cluster_membership_policies(): from awx.main.signals import disable_activity_stream @@ -374,15 +497,15 @@ def cluster_node_health_check(node): """ Used for the health check endpoint, refreshes the status of the instance, but must be ran on target node """ if node == '': - logger.warn('Local health check incorrectly called with blank string') + logger.warning('Local health check incorrectly called with blank string') return elif node != settings.CLUSTER_HOST_ID: - logger.warn(f'Local health check for {node}
incorrectly sent to {settings.CLUSTER_HOST_ID}') + logger.warning(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}') return try: this_inst = Instance.objects.me() except Instance.DoesNotExist: - logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return this_inst.local_health_check() @@ -390,12 +513,12 @@ def cluster_node_health_check(node): @task(queue=get_local_queuename) def execution_node_health_check(node): if node == '': - logger.warn('Remote health check incorrectly called with blank string') + logger.warning('Remote health check incorrectly called with blank string') return try: instance = Instance.objects.get(hostname=node) except Instance.DoesNotExist: - logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return if instance.node_type != 'execution': @@ -416,7 +539,7 @@ def execution_node_health_check(node): if data['errors']: formatted_error = "\n".join(data["errors"]) if prior_capacity: - logger.warn(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') + logger.warning(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') else: logger.info(f'Failed to find capacity of new or lost execution node {node}, errors:\n{formatted_error}') else: @@ -440,7 +563,7 @@ def inspect_execution_nodes(instance_list): if hostname in node_lookup: instance = node_lookup[hostname] else: - logger.warn(f"Unrecognized node advertising on mesh: {hostname}") + logger.warning(f"Unrecognized node advertising on mesh: {hostname}") continue # Control-plane nodes are dealt with via local_health_check instead. 
@@ -466,7 +589,7 @@ def inspect_execution_nodes(instance_list): # if the instance *was* lost, but has appeared again, # attempt to re-establish the initial capacity and version # check - logger.warn(f'Execution node attempting to rejoin as instance {hostname}.') + logger.warning(f'Execution node attempting to rejoin as instance {hostname}.') execution_node_health_check.apply_async([hostname]) elif instance.capacity == 0 and instance.enabled: # nodes with proven connection but need remediation run health checks are reduced frequency @@ -634,7 +757,7 @@ def awx_periodic_scheduler(): template = schedule.unified_job_template schedule.update_computed_fields() # To update next_run timestamp. if template.cache_timeout_blocked: - logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) + logger.warning("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) continue try: job_kwargs = schedule.get_job_kwargs() @@ -688,7 +811,7 @@ def handle_work_error(task_id, *args, **kwargs): instance = UnifiedJob.get_instance_by_type(each_task['type'], each_task['id']) if not instance: # Unknown task type - logger.warn("Unknown task type: {}".format(each_task['type'])) + logger.warning("Unknown task type: {}".format(each_task['type'])) continue except ObjectDoesNotExist: logger.warning('Missing {} `{}` in error callback.'.format(each_task['type'], each_task['id'])) @@ -735,7 +858,7 @@ def handle_success_and_failure_notifications(job_id): time.sleep(1) uj = UnifiedJob.objects.get(pk=job_id) - logger.warn(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + logger.warning(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") @task(queue=get_local_queuename) diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py index 0400f025d2..28565901b0 100644 --- a/awx/main/tests/conftest.py +++ 
b/awx/main/tests/conftest.py @@ -3,7 +3,7 @@ import pytest from unittest import mock from contextlib import contextmanager -from awx.main.models import Credential, UnifiedJob +from awx.main.models import Credential, UnifiedJob, Instance from awx.main.tests.factories import ( create_organization, create_job_template, @@ -212,3 +212,10 @@ def mock_get_event_queryset_no_job_created(): with mock.patch.object(UnifiedJob, 'get_event_queryset', lambda self: event_qs(self)) as _fixture: yield _fixture + + +@pytest.fixture +def mock_me(): + me_mock = mock.MagicMock(return_value=Instance(id=1, hostname=settings.CLUSTER_HOST_ID, uuid='00000000-0000-0000-0000-000000000000')) + with mock.patch.object(Instance.objects, 'me', me_mock): + yield diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py index e1257cf889..658d8ad2d4 100644 --- a/awx/main/tests/docs/test_swagger_generation.py +++ b/awx/main/tests/docs/test_swagger_generation.py @@ -5,7 +5,7 @@ import re from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder from django.utils.functional import Promise -from django.utils.encoding import force_text +from django.utils.encoding import force_str from openapi_codec.encode import generate_swagger_object import pytest @@ -16,9 +16,9 @@ from awx.api.versioning import drf_reverse class i18nEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Promise): - return force_text(obj) + return force_str(obj) if type(obj) == bytes: - return force_text(obj) + return force_str(obj) return super(i18nEncoder, self).default(obj) diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 574916a84f..200fa0f195 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -180,8 +180,8 @@ def mk_job_template( jt.project = project - jt.survey_spec = spec - if jt.survey_spec is not None: + if spec is not None: + 
jt.survey_spec = spec jt.survey_enabled = True if persisted: @@ -212,8 +212,8 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization, webhook_service=webhook_service) - wfjt.survey_spec = spec - if wfjt.survey_spec: + if spec: + wfjt.survey_spec = spec wfjt.survey_enabled = True if persisted: diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py index acfc7a0459..dad55c5ba0 100644 --- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py +++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py @@ -3,11 +3,12 @@ import base64 import json import re -from datetime import datetime +from unittest import mock from django.conf import settings from django.utils.encoding import smart_str -from unittest import mock +from django.utils.timezone import now as tz_now + import pytest from awx.api.versioning import reverse @@ -146,7 +147,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge @pytest.mark.django_db def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() for i in range(3): @@ -158,7 +159,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): @pytest.mark.django_db def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -185,7 +186,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): @pytest.mark.parametrize('fmt', ['txt', 'ansi']) @mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings def 
test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin): - created = datetime.utcnow() + created = tz_now() job = Parent(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -267,7 +268,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v @pytest.mark.django_db def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = Job(created=created) job.save() for i in range(3): diff --git a/awx/main/tests/functional/api/test_user.py b/awx/main/tests/functional/api/test_user.py index a201d4e1cc..c19192c90c 100644 --- a/awx/main/tests/functional/api/test_user.py +++ b/awx/main/tests/functional/api/test_user.py @@ -1,4 +1,5 @@ from datetime import date +from unittest import mock import pytest @@ -17,7 +18,7 @@ EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", @pytest.mark.django_db def test_user_create(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 assert not response.data['is_superuser'] assert not response.data['is_system_auditor'] @@ -25,22 +26,22 @@ def test_user_create(post, admin): @pytest.mark.django_db def test_fail_double_create_user(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 400 @pytest.mark.django_db def 
test_create_delete_create_user(post, delete, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware()) + response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 204 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) print(response.data) assert response.status_code == 201 @@ -48,7 +49,7 @@ def test_create_delete_create_user(post, delete, admin): @pytest.mark.django_db def test_user_cannot_update_last_login(patch, admin): assert admin.last_login is None - patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware()) + patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware(mock.Mock())) assert User.objects.get(pk=admin.pk).last_login is None diff --git a/awx/main/tests/functional/commands/test_cleanup_jobs.py b/awx/main/tests/functional/commands/test_cleanup_jobs.py deleted file mode 100644 index 612895559a..0000000000 --- a/awx/main/tests/functional/commands/test_cleanup_jobs.py +++ /dev/null @@ -1,178 +0,0 @@ -import pytest -from datetime import datetime, timedelta -from pytz import timezone -from collections import OrderedDict -from unittest import mock - -from django.db.models.deletion import Collector, SET_NULL, CASCADE -from django.core.management import call_command - -from awx.main.management.commands import 
cleanup_jobs -from awx.main.utils.deletion import AWXCollector -from awx.main.models import JobTemplate, User, Job, Notification, WorkflowJobNode, JobHostSummary - - -@pytest.fixture -def setup_environment(inventory, project, machine_credential, host, notification_template, label): - """ - Create old jobs and new jobs, with various other objects to hit the - related fields of Jobs. This makes sure on_delete() effects are tested - properly. - """ - old_jobs = [] - new_jobs = [] - days = 10 - days_str = str(days) - - jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project) - jt.credentials.add(machine_credential) - jt_user = User.objects.create(username='jobtemplateuser') - jt.execute_role.members.add(jt_user) - - notification = Notification() - notification.notification_template = notification_template - notification.save() - - for i in range(3): - # create jobs with current time - job1 = jt.create_job() - job1.created = datetime.now(tz=timezone('UTC')) - job1.save() - # sqlite does not support partitioning so we cannot test partition-based jobevent cleanup - # JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - new_jobs.append(job1) - - # create jobs 10 days ago - job2 = jt.create_job() - job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days) - job2.save() - job2.dependent_jobs.add(job1) - # JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - old_jobs.append(job2) - - jt.last_job = job2 - jt.current_job = job2 - jt.save() - host.last_job = job2 - host.save() - notification.unifiedjob_notifications.add(job2) - label.unifiedjob_labels.add(job2) - jn = WorkflowJobNode.objects.create(job=job2) - jn.save() - jh = JobHostSummary.objects.create(job=job2) - jh.save() - - return (old_jobs, new_jobs, days_str) - - -# sqlite does not support table partitioning so we mock out the methods responsible for pruning -# job 
event partitions during the job cleanup task -# https://github.com/ansible/awx/issues/9039 -@pytest.mark.django_db -@mock.patch.object(cleanup_jobs.DeleteMeta, 'identify_excluded_partitions', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'find_partitions_to_drop', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'drop_partitions', mock.MagicMock()) -def test_cleanup_jobs(setup_environment): - (old_jobs, new_jobs, days_str) = setup_environment - - # related_fields - related = [f for f in Job._meta.get_fields(include_hidden=True) if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many)] - - job = old_jobs[-1] # last job - - # gather related objects for job - related_should_be_removed = {} - related_should_be_null = {} - for r in related: - qs = r.related_model._base_manager.using('default').filter(**{"%s__in" % r.field.name: [job.pk]}) - if qs.exists(): - if r.field.remote_field.on_delete == CASCADE: - related_should_be_removed[qs.model] = set(qs.values_list('pk', flat=True)) - if r.field.remote_field.on_delete == SET_NULL: - related_should_be_null[(qs.model, r.field.name)] = set(qs.values_list('pk', flat=True)) - - assert related_should_be_removed - assert related_should_be_null - - call_command('cleanup_jobs', '--days', days_str) - # make sure old jobs are removed - assert not Job.objects.filter(pk__in=[obj.pk for obj in old_jobs]).exists() - - # make sure new jobs are untouched - assert len(new_jobs) == Job.objects.filter(pk__in=[obj.pk for obj in new_jobs]).count() - - # make sure related objects are destroyed or set to NULL (none) - for model, values in related_should_be_removed.items(): - assert not model.objects.filter(pk__in=values).exists() - - for (model, fieldname), values in related_should_be_null.items(): - for v in values: - assert not getattr(model.objects.get(pk=v), fieldname) - - -@pytest.mark.django_db -def test_awxcollector(setup_environment): - """ - Efforts to improve the performance of cleanup_jobs 
involved - sub-classing the django Collector class. This unit test will - check for parity between the django Collector and the modified - AWXCollector class. AWXCollector is used in cleanup_jobs to - bulk-delete old jobs from the database. - - Specifically, Collector has four dictionaries to check: - .dependencies, .data, .fast_deletes, and .field_updates - - These tests will convert each dictionary from AWXCollector - (after running .collect on jobs), from querysets to sets of - objects. The final result should be a dictionary that is - equivalent to django's Collector. - """ - - (old_jobs, new_jobs, days_str) = setup_environment - collector = Collector('default') - collector.collect(old_jobs) - - awx_col = AWXCollector('default') - # awx_col accepts a queryset as input - awx_col.collect(Job.objects.filter(pk__in=[obj.pk for obj in old_jobs])) - - # check that dependencies are the same - assert awx_col.dependencies == collector.dependencies - - # check that objects to delete are the same - awx_del_dict = OrderedDict() - for model, instances in awx_col.data.items(): - awx_del_dict.setdefault(model, set()) - for inst in instances: - # .update() will put each object in a queryset into the set - awx_del_dict[model].update(inst) - assert awx_del_dict == collector.data - - # check that field updates are the same - awx_del_dict = OrderedDict() - for model, instances_for_fieldvalues in awx_col.field_updates.items(): - awx_del_dict.setdefault(model, {}) - for (field, value), instances in instances_for_fieldvalues.items(): - awx_del_dict[model].setdefault((field, value), set()) - for inst in instances: - awx_del_dict[model][(field, value)].update(inst) - - # collector field updates don't use the base (polymorphic parent) model, e.g. - # it will use JobTemplate instead of UnifiedJobTemplate. 
Therefore, - # we need to rebuild the dictionary and grab the model from the field - collector_del_dict = OrderedDict() - for model, instances_for_fieldvalues in collector.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - collector_del_dict.setdefault(field.model, {}) - collector_del_dict[field.model][(field, value)] = collector.field_updates[model][(field, value)] - assert awx_del_dict == collector_del_dict - - # check that fast deletes are the same - collector_fast_deletes = set() - for q in collector.fast_deletes: - collector_fast_deletes.update(q) - - awx_col_fast_deletes = set() - for q in awx_col.fast_deletes: - awx_col_fast_deletes.update(q) - assert collector_fast_deletes == awx_col_fast_deletes diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 7e2178ca4d..ea18b491e6 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -15,7 +15,6 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db.backends.sqlite3.base import SQLiteCursorWrapper # AWX -from awx.main.fields import JSONBField from awx.main.models.projects import Project from awx.main.models.ha import Instance @@ -755,11 +754,6 @@ def get_db_prep_save(self, value, connection, **kwargs): return value -@pytest.fixture -def monkeypatch_jsonbfield_get_db_prep_save(mocker): - JSONBField.get_db_prep_save = get_db_prep_save - - @pytest.fixture def oauth_application(admin): return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password') diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index 0d4247feb3..01b7c3e2b0 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -181,7 +181,7 @@ def create_reference_data(source_dir, 
env, content): @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) -def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory): +def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) diff --git a/awx/main/tests/functional/test_named_url.py b/awx/main/tests/functional/test_named_url.py index 7df38aa4e1..884ecd7dc0 100644 --- a/awx/main/tests/functional/test_named_url.py +++ b/awx/main/tests/functional/test_named_url.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from unittest import mock + import pytest from django.core.exceptions import ImproperlyConfigured @@ -31,7 +33,7 @@ def setup_module(module): # in unit test environment. So it is wrapped by try-except block to mute any # unwanted exceptions. try: - URLModificationMiddleware() + URLModificationMiddleware(mock.Mock()) except ImproperlyConfigured: pass diff --git a/awx/main/tests/functional/test_session.py b/awx/main/tests/functional/test_session.py index f9eb4c42a4..157000d1ab 100644 --- a/awx/main/tests/functional/test_session.py +++ b/awx/main/tests/functional/test_session.py @@ -1,16 +1,12 @@ from importlib import import_module import pytest -import re from django.conf import settings from django.test.utils import override_settings -from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sessions.models import Session from django.contrib.auth import SESSION_KEY from unittest import mock -from awx.api.versioning import reverse - class AlwaysPassBackend(object): @@ -30,26 +26,6 @@ def test_login_json_not_allowed(get, accept, status): get('/api/login/', HTTP_ACCEPT=accept, expect=status) -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_session_create_delete(admin, post, get): - AlwaysPassBackend.user = 
admin - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': admin.username, 'password': admin.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - assert 'session_id' in response.cookies - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - session = Session.objects.get(session_key=session_key) - assert int(session.get_decoded()[SESSION_KEY]) == admin.pk - response = get('/api/logout/', middleware=SessionMiddleware(), cookies={'session_id': session_key}, expect=302) - assert not Session.objects.filter(session_key=session_key).exists() - - @pytest.mark.django_db @mock.patch('awx.main.consumers.emit_channel_notification') def test_sessions_unlimited(emit, admin): @@ -81,21 +57,3 @@ def test_session_overlimit(emit, admin, alice): store = import_module(settings.SESSION_ENGINE).SessionStore() store.create_model_instance({SESSION_KEY: alice.pk}).save() assert Session.objects.count() == 4 - - -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_password_update_clears_sessions(admin, alice, post, patch): - AlwaysPassBackend.user = alice - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': alice.username, 'password': alice.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - assert Session.objects.filter(session_key=session_key).exists() - patch(reverse('api:user_detail', kwargs={'pk': alice.pk}), admin, data={'password': 'new_password'}, expect=200) - assert not 
Session.objects.filter(session_key=session_key).exists() diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py index 951767d08e..14c48fa5ff 100644 --- a/awx/main/tests/functional/test_tasks.py +++ b/awx/main/tests/functional/test_tasks.py @@ -27,7 +27,7 @@ def test_no_worker_info_on_AWX_nodes(node_type): @pytest.mark.django_db class TestDependentInventoryUpdate: - def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file): + def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = scm_inventory_source.source_project.create_project_update() @@ -36,7 +36,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') inv_update_mck.assert_called_once_with(proj_update, mock.ANY) - def test_no_unwanted_dependent_inventory_updates(self, project, scm_revision_file): + def test_no_unwanted_dependent_inventory_updates(self, project, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = project.create_project_update() @@ -45,7 +45,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') assert not inv_update_mck.called - def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group): + def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group, mock_me): task = RunProjectUpdate() scm_inventory_source.scm_last_revision = '' proj_update = ProjectUpdate.objects.create(project=scm_inventory_source.source_project) @@ -57,7 +57,7 @@ class TestDependentInventoryUpdate: iu_run_mock.assert_called_once_with(inv_update.id) assert inv_update.source_project_update_id == proj_update.pk - def test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group): + def 
test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group, mock_me): """ Test that dependent inventory updates exhibit good behavior on cancel of the source project update diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py index c523cd2650..21e651e22b 100644 --- a/awx/main/tests/unit/api/test_filters.py +++ b/awx/main/tests/unit/api/test_filters.py @@ -2,7 +2,11 @@ import pytest +# Django +from django.core.exceptions import FieldDoesNotExist + from rest_framework.exceptions import PermissionDenied, ParseError + from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path from awx.main.models import ( AdHocCommand, @@ -22,9 +26,6 @@ from awx.main.models import ( from awx.main.models.oauth import OAuth2Application from awx.main.models.jobs import JobOptions -# Django -from django.db.models.fields import FieldDoesNotExist - def test_related(): field_lookup = FieldLookupBackend() diff --git a/awx/main/tests/unit/models/test_credential.py b/awx/main/tests/unit/models/test_credential.py index 082d7df7eb..0dc8daff33 100644 --- a/awx/main/tests/unit/models/test_credential.py +++ b/awx/main/tests/unit/models/test_credential.py @@ -1,12 +1,15 @@ # -*- coding: utf-8 -*- +import pytest + from awx.main.models import Credential, CredentialType +@pytest.mark.django_db def test_unique_hash_with_unicode(): - ct = CredentialType(name=u'Väult', kind='vault') - cred = Credential(id=4, name=u'Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={u'vault_id': u'🐉🐉🐉'}, credential_type_id=42) - assert cred.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)' + ct = CredentialType.objects.create(name='Väult', kind='vault') + cred = Credential.objects.create(name='Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={'vault_id': '🐉🐉🐉'}) + assert cred.unique_hash(display=True) == 'Väult (id=🐉🐉🐉)' def test_custom_cred_with_empty_encrypted_field(): diff --git 
a/awx/main/tests/unit/scheduler/test_dag_workflow.py b/awx/main/tests/unit/scheduler/test_dag_workflow.py index 18c3d193f7..a3225b76a3 100644 --- a/awx/main/tests/unit/scheduler/test_dag_workflow.py +++ b/awx/main/tests/unit/scheduler/test_dag_workflow.py @@ -2,8 +2,8 @@ import pytest import uuid import os -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from awx.main.scheduler.dag_workflow import WorkflowDAG @@ -468,7 +468,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -484,7 +484,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]." ).format(nodes[2].id) @@ -500,7 +500,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]." ).format(nodes[0].id) @@ -512,7 +512,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -525,7 +525,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. 
Workflow job node(s)" " missing unified job template and error handling path []." diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py index 8c00a95194..da669ae47d 100644 --- a/awx/main/tests/unit/test_fields.py +++ b/awx/main/tests/unit/test_fields.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from unittest import mock import pytest from django.core.exceptions import ValidationError @@ -8,7 +9,7 @@ from django.db.models.fields.related_descriptors import ReverseManyToOneDescript from rest_framework.serializers import ValidationError as DRFValidationError -from awx.main.models import Credential, CredentialType, BaseModel +from awx.main.models import Credential, CredentialType from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDescriptor @@ -16,7 +17,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc 'schema, given, message', [ ( - { # immitates what the CredentialType injectors field is + { # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", "properties": {"extra_vars": {"additionalProperties": False, "type": "object"}}, @@ -25,7 +26,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc "list provided in relative path ['extra_vars'], expected dict", ), ( - { # immitates what the CredentialType injectors field is + { # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", }, @@ -35,7 +36,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc ], ) def test_custom_error_messages(schema, given, message): - instance = BaseModel() + instance = mock.Mock() class MockFieldSubclass(JSONSchemaField): def schema(self, model_instance): diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index f2d617abb8..69a7f03c33 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py 
@@ -166,7 +166,7 @@ def test_safe_env_returns_new_copy(): @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -206,7 +206,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -248,7 +248,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -292,7 +292,7 @@ def test_openstack_client_config_generation_with_region(mocker, source, expected @pytest.mark.parametrize("source,expected", [(False, False), (True, True)]) -def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { 
@@ -352,7 +352,7 @@ class TestExtraVarSanitation(TestJobExecution): UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}' - def test_vars_unsafe_by_default(self, job, private_data_dir): + def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me): job.created_by = User(pk=123, username='angry-spud') job.inventory = Inventory(pk=123, name='example-inv') @@ -390,7 +390,7 @@ class TestExtraVarSanitation(TestJobExecution): ]: assert not hasattr(extra_vars[safe], '__UNSAFE__') - def test_launchtime_vars_unsafe(self, job, private_data_dir): + def test_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -401,7 +401,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_launchtime_vars_unsafe(self, job, private_data_dir): + def test_nested_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': [self.UNSAFE]}}) task = jobs.RunJob() @@ -412,7 +412,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == {'a': [self.UNSAFE]} assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__') - def test_allowed_jt_extra_vars(self, job, private_data_dir): + def test_allowed_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -423,7 +423,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert not hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_allowed_vars(self, job, private_data_dir): + def test_nested_allowed_vars(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}}) job.job_template.extra_vars = job.extra_vars task = jobs.RunJob() @@ -435,7 +435,7 @@ class TestExtraVarSanitation(TestJobExecution): assert 
extra_vars['msg'] == {'a': {'b': [self.UNSAFE]}} assert not hasattr(extra_vars['msg']['a']['b'][0], '__UNSAFE__') - def test_sensitive_values_dont_leak(self, job, private_data_dir): + def test_sensitive_values_dont_leak(self, job, private_data_dir, mock_me): # JT defines `msg=SENSITIVE`, the job *should not* be able to do # `other_var=SENSITIVE` job.job_template.extra_vars = json.dumps({'msg': self.UNSAFE}) @@ -452,7 +452,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['other_var'] == self.UNSAFE assert hasattr(extra_vars['other_var'], '__UNSAFE__') - def test_overwritten_jt_extra_vars(self, job, private_data_dir): + def test_overwritten_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = json.dumps({'msg': 'SAFE'}) job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -466,7 +466,7 @@ class TestExtraVarSanitation(TestJobExecution): class TestGenericRun: - def test_generic_failure(self, patch_Job, execution_environment): + def test_generic_failure(self, patch_Job, execution_environment, mock_me): job = Job(status='running', inventory=Inventory(), project=Project(local_path='/projects/_23_foo')) job.websocket_emit_status = mock.Mock() job.execution_environment = execution_environment @@ -486,7 +486,7 @@ class TestGenericRun: assert update_model_call['status'] == 'error' assert update_model_call['emitted_events'] == 0 - def test_cancel_flag(self, job, update_model_wrapper, execution_environment): + def test_cancel_flag(self, job, update_model_wrapper, execution_environment, mock_me): job.status = 'running' job.cancel_flag = True job.websocket_emit_status = mock.Mock() @@ -506,7 +506,7 @@ class TestGenericRun: for c in [mock.call(1, status='running', start_args=''), mock.call(1, status='canceled')]: assert c in task.update_model.call_args_list - def test_event_count(self): + def test_event_count(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() 
task.runner_callback.instance = Job() @@ -516,7 +516,7 @@ class TestGenericRun: [task.runner_callback.event_handler(event_data) for i in range(20)] assert 20 == task.runner_callback.event_ct - def test_finished_callback_eof(self): + def test_finished_callback_eof(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() task.runner_callback.instance = Job(pk=1, id=1) @@ -524,7 +524,7 @@ class TestGenericRun: task.runner_callback.finished_callback(None) task.runner_callback.dispatcher.dispatch.assert_called_with({'event': 'EOF', 'final_counter': 17, 'job_id': 1, 'guid': None}) - def test_save_job_metadata(self, job, update_model_wrapper): + def test_save_job_metadata(self, job, update_model_wrapper, mock_me): class MockMe: pass @@ -542,7 +542,7 @@ class TestGenericRun: 1, job_args=json.dumps({'foo': 'bar'}), job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'} ) - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): job = Job(created_by=User(pk=123, username='angry-spud')) task = jobs.RunJob() @@ -557,7 +557,7 @@ class TestGenericRun: assert extra_vars['awx_user_id'] == 123 assert extra_vars['awx_user_name'] == "angry-spud" - def test_survey_extra_vars(self): + def test_survey_extra_vars(self, mock_me): job = Job() job.extra_vars = json.dumps({'super_secret': encrypt_value('CLASSIFIED', pk=None)}) job.survey_passwords = {'super_secret': '$encrypted$'} @@ -571,7 +571,7 @@ class TestGenericRun: private_data_dir, extra_vars, safe_dict = call_args assert extra_vars['super_secret'] == "CLASSIFIED" - def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment): + def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment, mock_me): job = Job(project=Project(), inventory=Inventory()) job.execution_environment = execution_environment @@ -586,7 +586,7 @@ class TestGenericRun: @pytest.mark.django_db class 
TestAdhocRun(TestJobExecution): - def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) @@ -611,7 +611,7 @@ class TestAdhocRun(TestJobExecution): be wrapped in unsafe ''' ''' - def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): adhoc_job.module_args = 'ls' adhoc_job.extra_vars = json.dumps({ 'foo': '{{ bar }}' @@ -630,7 +630,7 @@ class TestAdhocRun(TestJobExecution): assert extra_vars['foo'] == '{{ bar }}' ''' - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): adhoc_job = AdHocCommand(created_by=User(pk=123, username='angry-spud')) task = jobs.RunAdHocCommand() @@ -691,7 +691,7 @@ class TestJobCredentials(TestJobExecution): ] } - def test_username_jinja_usage(self, job, private_data_dir): + def test_username_jinja_usage(self, job, private_data_dir, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': '{{ ansible_ssh_pass }}'}) @@ -702,7 +702,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) @pytest.mark.parametrize("flag", ['become_username', 'become_method']) - def test_become_jinja_usage(self, job, private_data_dir, flag): + def test_become_jinja_usage(self, job, private_data_dir, flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'joe', flag: '{{ ansible_ssh_pass }}'}) @@ -713,7 +713,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) - def test_ssh_passwords(self, job, 
private_data_dir, field, password_name, expected_flag): + def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', field: 'secret'}) @@ -730,7 +730,7 @@ class TestJobCredentials(TestJobExecution): if expected_flag: assert expected_flag in ' '.join(args) - def test_net_ssh_key_unlock(self, job): + def test_net_ssh_key_unlock(self, job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'secret'}) @@ -743,7 +743,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret' in expect_passwords.values() - def test_net_first_ssh_key_unlock_wins(self, job): + def test_net_first_ssh_key_unlock_wins(self, job, mock_me): task = jobs.RunJob() for i in range(3): net = CredentialType.defaults['net']() @@ -757,7 +757,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret0' in expect_passwords.values() - def test_prefer_ssh_over_net_ssh_key_unlock(self, job): + def test_prefer_ssh_over_net_ssh_key_unlock(self, job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() net_credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'net_secret'}) @@ -776,7 +776,7 @@ class TestJobCredentials(TestJobExecution): assert 'ssh_secret' in expect_passwords.values() - def test_vault_password(self, private_data_dir, job): + def test_vault_password(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'vault-me'}) @@ -788,10 +788,10 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert 
expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'vault-me' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_vault_password_ask(self, private_data_dir, job): + def test_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'ASK'}) @@ -803,10 +803,10 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'provided-at-launch' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_multi_vault_password(self, private_data_dir, job): + def test_multi_vault_password(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod', 'dotted.name']): @@ -820,16 +820,16 @@ class TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'pass@prod' # noqa - assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'pass@dev' # noqa - assert vault_passwords['Vault password \(dotted.name\):\\s*?$'] == 'pass@dotted.name' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'pass@prod' # noqa + assert vault_passwords[r'Vault password \(dev\):\s*?$'] == 'pass@dev' # noqa + assert vault_passwords[r'Vault password \(dotted.name\):\s*?$'] == 'pass@dotted.name' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' 
# noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) assert '--vault-id dotted.name@prompt' in ' '.join(args) - def test_multi_vault_id_conflict(self, job): + def test_multi_vault_id_conflict(self, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i in range(2): @@ -842,7 +842,7 @@ class TestJobCredentials(TestJobExecution): assert 'multiple vault credentials were specified with --vault-id' in str(e.value) - def test_multi_vault_password_ask(self, private_data_dir, job): + def test_multi_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod']): @@ -855,15 +855,15 @@ class TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'provided-at-launch@prod' # noqa - assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'provided-at-launch@dev' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'provided-at-launch@prod' # noqa + assert vault_passwords[r'Vault password \(dev\):\s*?$'] == 'provided-at-launch@dev' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' # noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) @pytest.mark.parametrize("verify", (True, False)) - def test_k8s_credential(self, job, private_data_dir, verify): + def test_k8s_credential(self, job, private_data_dir, verify, mock_me): k8s = CredentialType.defaults['kubernetes_bearer_token']() inputs = { 'host': 'https://example.org/', @@ -898,7 +898,7 @@ class 
TestJobCredentials(TestJobExecution): assert safe_env['K8S_AUTH_API_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential(self, job, private_data_dir): + def test_aws_cloud_credential(self, job, private_data_dir, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -913,7 +913,7 @@ class TestJobCredentials(TestJobExecution): assert 'AWS_SECURITY_TOKEN' not in env assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job): + def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret', 'security_token': 'token'}) for key in ('password', 'security_token'): @@ -929,7 +929,7 @@ class TestJobCredentials(TestJobExecution): assert env['AWS_SECURITY_TOKEN'] == 'token' assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_gce_credentials(self, private_data_dir, job): + def test_gce_credentials(self, private_data_dir, job, mock_me): gce = CredentialType.defaults['gce']() credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) credential.inputs['ssh_key_data'] = encrypt_field(credential, 'ssh_key_data') @@ -946,7 +946,7 @@ class TestJobCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_azure_rm_with_tenant(self, private_data_dir, job): + def test_azure_rm_with_tenant(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'client': 'some-client', 'secret': 'some-secret', 'tenant': 'some-tenant', 
'subscription': 'some-subscription'} @@ -964,7 +964,7 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription' assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_with_password(self, private_data_dir, job): + def test_azure_rm_with_password(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret', 'cloud_environment': 'foobar'} @@ -982,7 +982,7 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar' assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_vmware_credentials(self, private_data_dir, job): + def test_vmware_credentials(self, private_data_dir, job, mock_me): vmware = CredentialType.defaults['vmware']() credential = Credential(pk=1, credential_type=vmware, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -997,7 +997,7 @@ class TestJobCredentials(TestJobExecution): assert env['VMWARE_HOST'] == 'https://example.org' assert safe_env['VMWARE_PASSWORD'] == HIDDEN_PASSWORD - def test_openstack_credentials(self, private_data_dir, job): + def test_openstack_credentials(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job openstack = CredentialType.defaults['openstack']() @@ -1028,7 +1028,7 @@ class TestJobCredentials(TestJobExecution): ) @pytest.mark.parametrize("ca_file", [None, '/path/to/some/file']) - def test_rhv_credentials(self, private_data_dir, job, ca_file): + def test_rhv_credentials(self, private_data_dir, job, ca_file, mock_me): rhv = CredentialType.defaults['rhv']() inputs = { 'host': 'some-ovirt-host.example.org', @@ -1065,7 +1065,7 @@ class TestJobCredentials(TestJobExecution): [None, '0'], ], ) - def test_net_credentials(self, authorize, 
expected_authorize, job, private_data_dir): + def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir, mock_me): task = jobs.RunJob() task.instance = job net = CredentialType.defaults['net']() @@ -1090,7 +1090,7 @@ class TestJobCredentials(TestJobExecution): assert open(env['ANSIBLE_NET_SSH_KEYFILE'], 'r').read() == self.EXAMPLE_PRIVATE_KEY assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir): + def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1103,7 +1103,7 @@ class TestJobCredentials(TestJobExecution): with pytest.raises(jinja2.exceptions.UndefinedError): credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir) - def test_custom_environment_injectors(self, private_data_dir): + def test_custom_environment_injectors(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1118,7 +1118,7 @@ class TestJobCredentials(TestJobExecution): assert env['MY_CLOUD_API_TOKEN'] == 'ABC123' - def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir): + def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1133,7 +1133,7 @@ class TestJobCredentials(TestJobExecution): assert env['TURBO_BUTTON'] == str(True) - def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job): + def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job some_cloud = CredentialType( @@ -1150,7 +1150,7 @@ class TestJobCredentials(TestJobExecution): assert env['JOB_ID'] == str(job.pk) - def test_custom_environment_injectors_with_secret_field(self, private_data_dir): + def 
test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1169,7 +1169,7 @@ class TestJobCredentials(TestJobExecution): assert 'SUPER-SECRET-123' not in safe_env.values() assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job): + def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1188,7 +1188,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["api_token"] == "ABC123" assert hasattr(extra_vars["api_token"], '__UNSAFE__') - def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1207,7 +1207,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "True" return ['successful', 0] - def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir): + def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1225,7 +1225,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "FAST!" 
- def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me): """ extra_vars that contain secret field values should be censored in the DB """ @@ -1247,7 +1247,7 @@ class TestJobCredentials(TestJobExecution): extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["password"] == "SUPER-SECRET-123" - def test_custom_environment_injectors_with_file(self, private_data_dir): + def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1263,7 +1263,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == '[mycloud]\nABC123' - def test_custom_environment_injectors_with_unicode_content(self, private_data_dir): + def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me): value = 'Iñtërnâtiônàlizætiøn' some_cloud = CredentialType( kind='cloud', @@ -1283,7 +1283,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == value - def test_custom_environment_injectors_with_files(self, private_data_dir): + def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1304,7 +1304,7 @@ class TestJobCredentials(TestJobExecution): assert open(cert_path, 'r').read() == '[mycert]\nCERT123' assert open(key_path, 'r').read() == '[mykey]\nKEY123' - def test_multi_cloud(self, private_data_dir): + def test_multi_cloud(self, private_data_dir, mock_me): gce = CredentialType.defaults['gce']() gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) 
gce_credential.inputs['ssh_key_data'] = encrypt_field(gce_credential, 'ssh_key_data') @@ -1332,7 +1332,7 @@ class TestJobCredentials(TestJobExecution): assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_awx_task_env(self, settings, private_data_dir, job): + def test_awx_task_env(self, settings, private_data_dir, job, mock_me): settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunJob() task.instance = job @@ -1359,7 +1359,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): ], } - def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore): + def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore, mock_me): settings.GALAXY_IGNORE_CERTS = ignore task = jobs.RunProjectUpdate() task.instance = project_update @@ -1369,7 +1369,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): else: assert 'ANSIBLE_GALAXY_IGNORE' not in env - def test_galaxy_credentials_empty(self, private_data_dir, project_update): + def test_galaxy_credentials_empty(self, private_data_dir, project_update, mock_me): class RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1388,7 +1388,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): for k in env: assert not k.startswith('ANSIBLE_GALAXY_SERVER') - def test_single_public_galaxy(self, private_data_dir, project_update): + def test_single_public_galaxy(self, private_data_dir, project_update, mock_me): class RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1418,7 +1418,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): ('ANSIBLE_GALAXY_SERVER_SERVER0_URL', 'https://galaxy.ansible.com/'), ] - def test_multiple_galaxy_endpoints(self, private_data_dir, project_update): + def test_multiple_galaxy_endpoints(self, private_data_dir, project_update, mock_me): credential_type = CredentialType.defaults['galaxy_api_token']() public_galaxy = Credential( pk=1, @@ -1479,7 +1479,7 @@ class 
TestProjectUpdateCredentials(TestJobExecution): ], } - def test_username_and_password_auth(self, project_update, scm_type): + def test_username_and_password_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1493,7 +1493,7 @@ class TestProjectUpdateCredentials(TestJobExecution): assert 'bob' in expect_passwords.values() assert 'secret' in expect_passwords.values() - def test_ssh_key_auth(self, project_update, scm_type): + def test_ssh_key_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1505,7 +1505,7 @@ class TestProjectUpdateCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) assert 'bob' in expect_passwords.values() - def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment): + def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment, mock_me): project_update.execution_environment = execution_environment settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunProjectUpdate() @@ -1522,7 +1522,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def inventory_update(self, execution_environment): return InventoryUpdate(pk=1, execution_environment=execution_environment, inventory_source=InventorySource(pk=1, inventory=Inventory(pk=1))) - def test_source_without_credential(self, mocker, inventory_update, private_data_dir): + def test_source_without_credential(self, mocker, inventory_update, private_data_dir, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update inventory_update.source = 'ec2' @@ -1535,7 +1535,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert 'AWS_ACCESS_KEY_ID' not in env assert 'AWS_SECRET_ACCESS_KEY' not in env - def 
test_ec2_source(self, private_data_dir, inventory_update, mocker): + def test_ec2_source(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update aws = CredentialType.defaults['aws']() @@ -1559,7 +1559,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_vmware_source(self, inventory_update, private_data_dir, mocker): + def test_vmware_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update vmware = CredentialType.defaults['vmware']() @@ -1587,7 +1587,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): env["VMWARE_HOST"] == "https://example.org", env["VMWARE_VALIDATE_CERTS"] == "False", - def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1623,7 +1623,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1652,7 +1652,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_gce_source(self, inventory_update, private_data_dir, mocker): + def test_gce_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1682,7 +1682,7 @@ class 
TestInventoryUpdateCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_openstack_source(self, inventory_update, private_data_dir, mocker): + def test_openstack_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update openstack = CredentialType.defaults['openstack']() @@ -1722,7 +1722,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): in shade_config ) - def test_satellite6_source(self, inventory_update, private_data_dir, mocker): + def test_satellite6_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update satellite6 = CredentialType.defaults['satellite6']() @@ -1745,7 +1745,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env["FOREMAN_PASSWORD"] == "secret" assert safe_env["FOREMAN_PASSWORD"] == HIDDEN_PASSWORD - def test_insights_source(self, inventory_update, private_data_dir, mocker): + def test_insights_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update insights = CredentialType.defaults['insights']() @@ -1774,7 +1774,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['INSIGHTS_PASSWORD'] == HIDDEN_PASSWORD @pytest.mark.parametrize('verify', [True, False]) - def test_tower_source(self, verify, inventory_update, private_data_dir, mocker): + def test_tower_source(self, verify, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1802,7 +1802,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['CONTROLLER_VERIFY_SSL'] == 'False' assert safe_env['CONTROLLER_PASSWORD'] == HIDDEN_PASSWORD - def test_tower_source_ssl_verify_empty(self, inventory_update, 
private_data_dir, mocker): + def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1830,7 +1830,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['TOWER_VERIFY_SSL'] == 'False' - def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker): + def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1869,7 +1869,7 @@ def test_fcntl_ioerror(): @mock.patch('os.open') @mock.patch('logging.getLogger') -def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): +def test_acquire_lock_open_fail_logged(logging_getLogger, os_open, mock_me): err = OSError() err.errno = 3 err.strerror = 'dummy message' @@ -1893,7 +1893,7 @@ def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): @mock.patch('os.close') @mock.patch('logging.getLogger') @mock.patch('fcntl.lockf') -def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open): +def test_acquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open, mock_me): err = IOError() err.errno = 3 err.strerror = 'dummy message' @@ -1913,7 +1913,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_ with pytest.raises(IOError): ProjectUpdate.acquire_lock(instance) os_close.assert_called_with(3) - assert logger.err.called_with("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) + assert logger.err.called_with("I/O error({0}) while trying to acquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) @pytest.mark.parametrize('injector_cls', [cls for cls in ManagedCredentialType.registry.values() 
if cls.injectors]) @@ -1947,7 +1947,7 @@ def test_notification_job_not_finished(logging_getLogger, mocker): with mocker.patch('awx.main.models.UnifiedJob.objects.get', uj): system.handle_success_and_failure_notifications(1) - assert logger.warn.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + assert logger.warning.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") def test_notification_job_finished(mocker): @@ -1958,7 +1958,7 @@ def test_notification_job_finished(mocker): uj.send_notification_templates.assert_called() -def test_job_run_no_ee(): +def test_job_run_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) job = Job(project=proj, organization=org, inventory=Inventory(pk=1)) @@ -1977,7 +1977,7 @@ def test_job_run_no_ee(): assert 'Job could not start because no Execution Environment could be found' in str(e.value) -def test_project_update_no_ee(): +def test_project_update_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) project_update = ProjectUpdate(pk=1, project=proj, scm_type='git') diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py index 52e37ab893..ef0abb80d3 100644 --- a/awx/main/tests/unit/utils/test_filters.py +++ b/awx/main/tests/unit/utils/test_filters.py @@ -4,7 +4,6 @@ from unittest import mock # AWX from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled -from awx.main.models import Host # Django from django.db.models import Q @@ -219,39 +218,6 @@ class TestSmartFilterQueryFromString: assert str(q) == str(q_expected) -class TestSmartFilterQueryFromStringNoDB: - @pytest.mark.parametrize( - "filter_string,q_expected", - [ - ( - 'ansible_facts__a="true" and ansible_facts__b="true" and ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - & Q(**{u"ansible_facts__contains": 
{u"b": u"true"}}) - & Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ( - 'ansible_facts__a="true" or ansible_facts__b="true" or ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"b": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ('search=foo', Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"}))), - ( - 'search=foo and ansible_facts__a="null"', - Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"})) & Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}}), - ), - ('name=foo or name=bar and name=foobar', Q(name="foo") | Q(name="bar") & Q(name="foobar")), - ], - ) - def test_does_not_invoke_db(self, filter_string, q_expected): - q = SmartFilter.query_from_string(filter_string) - assert str(q.query) == str(Host.objects.filter(q_expected).query) - - ''' #('"facts__quoted_val"="f\"oo"', 1), #('facts__facts__arr[]="foo"', 1), diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 49885d70c7..a88113d2e4 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -19,7 +19,7 @@ from functools import reduce, wraps # Django from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist from django.utils.dateparse import parse_datetime -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.functional import cached_property from django.db import connection from django.db.models.fields.related import ForeignObjectRel, ManyToManyField diff --git a/awx/main/utils/deletion.py b/awx/main/utils/deletion.py deleted file mode 100644 index d17bc0b710..0000000000 --- a/awx/main/utils/deletion.py +++ /dev/null @@ -1,173 +0,0 @@ -from django.contrib.contenttypes.models import ContentType -from django.db.models.deletion import ( - DO_NOTHING, - Collector, - get_candidate_relations_to_delete, -) -from 
collections import Counter, OrderedDict -from django.db import transaction -from django.db.models import sql - - -def bulk_related_objects(field, objs, using): - # This overrides the method in django.contrib.contenttypes.fields.py - """ - Return all objects related to ``objs`` via this ``GenericRelation``. - """ - return field.remote_field.model._base_manager.db_manager(using).filter( - **{ - "%s__pk" - % field.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(field.model, for_concrete_model=field.for_concrete_model).pk, - "%s__in" % field.object_id_field_name: list(objs.values_list('pk', flat=True)), - } - ) - - -def pre_delete(qs): - # taken from .delete method in django.db.models.query.py - assert qs.query.can_filter(), "Cannot use 'limit' or 'offset' with delete." - - if qs._fields is not None: - raise TypeError("Cannot call delete() after .values() or .values_list()") - - del_query = qs._chain() - - # The delete is actually 2 queries - one to find related objects, - # and one to delete. Make sure that the discovery of related - # objects is performed on the same database as the deletion. - del_query._for_write = True - - # Disable non-supported fields. - del_query.query.select_for_update = False - del_query.query.select_related = False - del_query.query.clear_ordering(force_empty=True) - return del_query - - -class AWXCollector(Collector): - def add(self, objs, source=None, nullable=False, reverse_dependency=False): - """ - Add 'objs' to the collection of objects to be deleted. If the call is - the result of a cascade, 'source' should be the model that caused it, - and 'nullable' should be set to True if the relation can be null. - - Return a list of all objects that were not already collected. 
- """ - if not objs.exists(): - return objs - model = objs.model - self.data.setdefault(model, []) - self.data[model].append(objs) - # Nullable relationships can be ignored -- they are nulled out before - # deleting, and therefore do not affect the order in which objects have - # to be deleted. - if source is not None and not nullable: - if reverse_dependency: - source, model = model, source - self.dependencies.setdefault(source._meta.concrete_model, set()).add(model._meta.concrete_model) - return objs - - def add_field_update(self, field, value, objs): - """ - Schedule a field update. 'objs' must be a homogeneous iterable - collection of model instances (e.g. a QuerySet). - """ - if not objs.exists(): - return - model = objs.model - self.field_updates.setdefault(model, {}) - self.field_updates[model].setdefault((field, value), []) - self.field_updates[model][(field, value)].append(objs) - - def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False): - """ - Add 'objs' to the collection of objects to be deleted as well as all - parent instances. 'objs' must be a homogeneous iterable collection of - model instances (e.g. a QuerySet). If 'collect_related' is True, - related objects will be handled by their respective on_delete handler. - - If the call is the result of a cascade, 'source' should be the model - that caused it and 'nullable' should be set to True, if the relation - can be null. - - If 'reverse_dependency' is True, 'source' will be deleted before the - current model, rather than after. (Needed for cascading to parent - models, the one case in which the cascade follows the forwards - direction of an FK rather than the reverse direction.) - - If 'keep_parents' is True, data of parent model's will be not deleted. 
- """ - - if hasattr(objs, 'polymorphic_disabled'): - objs.polymorphic_disabled = True - - if self.can_fast_delete(objs): - self.fast_deletes.append(objs) - return - new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency) - if not new_objs.exists(): - return - - model = new_objs.model - - if not keep_parents: - # Recursively collect concrete model's parent models, but not their - # related objects. These will be found by meta.get_fields() - concrete_model = model._meta.concrete_model - for ptr in concrete_model._meta.parents.keys(): - if ptr: - parent_objs = ptr.objects.filter(pk__in=new_objs.values_list('pk', flat=True)) - self.collect(parent_objs, source=model, collect_related=False, reverse_dependency=True) - if collect_related: - parents = model._meta.parents - for related in get_candidate_relations_to_delete(model._meta): - # Preserve parent reverse relationships if keep_parents=True. - if keep_parents and related.model in parents: - continue - field = related.field - if field.remote_field.on_delete == DO_NOTHING: - continue - related_qs = self.related_objects(related, new_objs) - if self.can_fast_delete(related_qs, from_field=field): - self.fast_deletes.append(related_qs) - elif related_qs: - field.remote_field.on_delete(self, field, related_qs, self.using) - for field in model._meta.private_fields: - if hasattr(field, 'bulk_related_objects'): - # It's something like generic foreign key. 
- sub_objs = bulk_related_objects(field, new_objs, self.using) - self.collect(sub_objs, source=model, nullable=True) - - def delete(self): - self.sort() - - # collect pk_list before deletion (once things start to delete - # queries might not be able to retreive pk list) - del_dict = OrderedDict() - for model, instances in self.data.items(): - del_dict.setdefault(model, []) - for inst in instances: - del_dict[model] += list(inst.values_list('pk', flat=True)) - - deleted_counter = Counter() - - with transaction.atomic(using=self.using, savepoint=False): - - # update fields - for model, instances_for_fieldvalues in self.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - for inst in instances: - query = sql.UpdateQuery(model) - query.update_batch(inst.values_list('pk', flat=True), {field.name: value}, self.using) - # fast deletes - for qs in self.fast_deletes: - count = qs._raw_delete(using=self.using) - deleted_counter[qs.model._meta.label] += count - - # delete instances - for model, pk_list in del_dict.items(): - query = sql.DeleteQuery(model) - count = query.delete_batch(pk_list, self.using) - deleted_counter[model._meta.label] += count - - return sum(deleted_counter.values()), dict(deleted_counter) diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py index 002ab957bd..f0d29c0d10 100644 --- a/awx/main/utils/filters.py +++ b/awx/main/utils/filters.py @@ -15,8 +15,8 @@ from django.apps import apps from django.db import models from django.conf import settings +from django_guid import get_guid from django_guid.log_filters import CorrelationId -from django_guid.middleware import GuidMiddleware from awx import MODE from awx.main.constants import LOGGER_BLOCKLIST @@ -188,13 +188,11 @@ class SmartFilter(object): ''' def _json_path_to_contains(self, k, v): - from awx.main.fields import JSONBField # avoid a circular import - if not k.startswith(SmartFilter.SEARCHABLE_RELATIONSHIP): v = 
self.strip_quotes_traditional_logic(v) return (k, v) - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): match = '__{}'.format(match) if k.endswith(match): if match == '__exact': @@ -368,7 +366,7 @@ class SmartFilter(object): class DefaultCorrelationId(CorrelationId): def filter(self, record): - guid = GuidMiddleware.get_guid() or '-' + guid = get_guid() or '-' if MODE == 'development': guid = guid[:8] record.guid = guid diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py index eeae581655..bec953f822 100644 --- a/awx/main/utils/licensing.py +++ b/awx/main/utils/licensing.py @@ -33,7 +33,7 @@ from cryptography import x509 # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ MAX_INSTANCES = 9999999 diff --git a/awx/main/utils/reload.py b/awx/main/utils/reload.py index 6651fcf44d..a7c2a1ed99 100644 --- a/awx/main/utils/reload.py +++ b/awx/main/utils/reload.py @@ -40,5 +40,5 @@ def supervisor_service_command(command, service='*', communicate=True): def stop_local_services(communicate=True): - logger.warn('Stopping services on this node in response to user action') + logger.warning('Stopping services on this node in response to user action') supervisor_service_command(command='stop', communicate=communicate) diff --git a/awx/main/validators.py b/awx/main/validators.py index 872eabafdc..751d38060b 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -6,7 +6,7 @@ import base64 import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # REST framework @@ -198,7 +198,7 @@ def vars_validate_or_raise(vars_str): def validate_container_image_name(value): - """ + r""" from 
https://github.com/distribution/distribution/blob/af8ac809336c2316c81b08605d92d94f8670ad15/reference/reference.go#L4 Grammar diff --git a/awx/main/views.py b/awx/main/views.py index bb6c43b6bf..8ff612e8ba 100644 --- a/awx/main/views.py +++ b/awx/main/views.py @@ -7,7 +7,7 @@ import json from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.html import format_html -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt # Django REST Framework diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index ec2fae5e89..47006adc9d 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -4,6 +4,7 @@ import asyncio import aiohttp from aiohttp import client_exceptions +from asgiref.sync import sync_to_async from channels.layers import get_channel_layer @@ -30,6 +31,7 @@ def unwrap_broadcast_msg(payload: dict): return (payload['group'], payload['message']) +@sync_to_async def get_broadcast_hosts(): Instance = apps.get_model('main', 'Instance') instances = ( @@ -90,7 +92,7 @@ class WebsocketTask: if attempt > 0: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS) except asyncio.CancelledError: - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled") + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled") raise uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/" @@ -107,18 +109,18 @@ class WebsocketTask: except asyncio.CancelledError: # TODO: Check if connected and disconnect # Possibly use run_until_complete() if disconnect is async - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled.") + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled.") self.stats.record_connection_lost() raise except client_exceptions.ClientConnectorError 
as e: - logger.warn(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") except asyncio.TimeoutError: - logger.warn(f"Connection from {self.name} to {self.remote_host} timed out.") + logger.warning(f"Connection from {self.name} to {self.remote_host} timed out.") except Exception as e: # Early on, this is our canary. I'm not sure what exceptions we can really encounter. - logger.warn(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") else: - logger.warn(f"Connection from {self.name} to {self.remote_host} list.") + logger.warning(f"Connection from {self.name} to {self.remote_host} list.") self.stats.record_connection_lost() self.start(attempt=attempt + 1) @@ -144,7 +146,7 @@ class BroadcastWebsocketTask(WebsocketTask): logmsg = "Failed to decode broadcast message" if logger.isEnabledFor(logging.DEBUG): logmsg = "{} {}".format(logmsg, payload) - logger.warn(logmsg) + logger.warning(logmsg) continue (group, message) = unwrap_broadcast_msg(payload) if group == "metrics": @@ -170,7 +172,7 @@ class BroadcastWebsocketManager(object): async def run_per_host_websocket(self): while True: - known_hosts = get_broadcast_hosts() + known_hosts = await get_broadcast_hosts() future_remote_hosts = known_hosts.keys() current_remote_hosts = self.broadcast_tasks.keys() deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) @@ -183,9 +185,9 @@ class BroadcastWebsocketManager(object): new_remote_hosts.add(hostname) if deleted_remote_hosts: - logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list") + logger.warning(f"Removing {deleted_remote_hosts} from websocket broadcast list") if new_remote_hosts: - logger.warn(f"Adding {new_remote_hosts} to websocket broadcast list") + logger.warning(f"Adding 
{new_remote_hosts} to websocket broadcast list") for h in deleted_remote_hosts: self.broadcast_tasks[h].cancel() diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index bc3c2549c3..b11710495e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -7,14 +7,6 @@ import re # noqa import sys from datetime import timedelta -# global settings -from django.conf import global_settings - -# Update this module's local settings from the global settings module. -this_module = sys.modules[__name__] -for setting in dir(global_settings): - if setting == setting.upper(): - setattr(this_module, setting, getattr(global_settings, setting)) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) @@ -49,6 +41,11 @@ else: DEBUG = True SQL_DEBUG = DEBUG +# FIXME: it would be nice to cycle back around and allow this to be +# BigAutoField going forward, but we'd have to be explicit about our +# existing models. +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', @@ -152,7 +149,8 @@ SITE_ID = 1 # Make this unique, and don't share it with anybody. 
if os.path.exists('/etc/tower/SECRET_KEY'): - SECRET_KEY = open('/etc/tower/SECRET_KEY', 'rb').read().strip() + with open('/etc/tower/SECRET_KEY', 'rb') as f: + SECRET_KEY = f.read().strip() else: SECRET_KEY = base64.encodebytes(os.urandom(32)).decode().rstrip() @@ -273,8 +271,8 @@ TEMPLATES = [ { 'NAME': 'default', 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'APP_DIRS': True, 'OPTIONS': { - 'debug': DEBUG, 'context_processors': [ # NOQA 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', @@ -289,13 +287,10 @@ TEMPLATES = [ 'social_django.context_processors.backends', 'social_django.context_processors.login_redirect', ], - 'loaders': [ - ('django.template.loaders.cached.Loader', ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader')) - ], 'builtins': ['awx.main.templatetags.swagger'], }, 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'ui', 'build'), os.path.join(BASE_DIR, 'ui', 'public')], - } + }, ] ROOT_URLCONF = 'awx.urls' @@ -453,7 +448,7 @@ CACHES = {'default': {'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': 'u # Social Auth configuration. 
SOCIAL_AUTH_STRATEGY = 'social_django.strategy.DjangoStrategy' SOCIAL_AUTH_STORAGE = 'social_django.models.DjangoStorage' -SOCIAL_AUTH_USER_MODEL = AUTH_USER_MODEL # noqa +SOCIAL_AUTH_USER_MODEL = 'auth.User' _SOCIAL_AUTH_PIPELINE_BASE = ( 'social_core.pipeline.social_auth.social_details', @@ -945,7 +940,7 @@ AWX_CLEANUP_PATHS = True RECEPTOR_RELEASE_WORK = True MIDDLEWARE = [ - 'django_guid.middleware.GuidMiddleware', + 'django_guid.middleware.guid_middleware', 'awx.main.middleware.TimingMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'awx.main.middleware.MigrationRanCheckMiddleware', diff --git a/awx/settings/development.py b/awx/settings/development.py index 70b64643dd..be1c115606 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -45,10 +45,6 @@ SESSION_COOKIE_SECURE = False # Disallow sending csrf cookies over insecure connections CSRF_COOKIE_SECURE = False -# Override django.template.loaders.cached.Loader in defaults.py -template = next((tpl_backend for tpl_backend in TEMPLATES if tpl_backend['NAME'] == 'default'), None) # noqa -template['OPTIONS']['loaders'] = ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader') - # Disable Pendo on the UI for development/test. # Note: This setting may be overridden by database settings. PENDO_TRACKING_STATE = "off" diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py index bb4e958844..e484e62be1 100644 --- a/awx/sso/__init__.py +++ b/awx/sso/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
- -default_app_config = 'awx.sso.apps.SSOConfig' diff --git a/awx/sso/apps.py b/awx/sso/apps.py index 45c00e871b..4d09b7acf6 100644 --- a/awx/sso/apps.py +++ b/awx/sso/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class SSOConfig(AppConfig): diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 727cacab20..e54a124560 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -13,7 +13,7 @@ from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings as django_settings from django.core.signals import setting_changed -from django.utils.encoding import force_text +from django.utils.encoding import force_str # django-auth-ldap from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings @@ -179,7 +179,7 @@ def _get_or_set_enterprise_user(username, password, provider): created = True if created or user.is_in_enterprise_category(provider): return user - logger.warn("Enterprise user %s already defined in Tower." % username) + logger.warning("Enterprise user %s already defined in Tower." 
% username) class RADIUSBackend(BaseRADIUSBackend): @@ -199,8 +199,8 @@ class RADIUSBackend(BaseRADIUSBackend): if not user.has_usable_password(): return user - def get_django_user(self, username, password=None): - return _get_or_set_enterprise_user(force_text(username), force_text(password), 'radius') + def get_django_user(self, username, password=None, groups=[], is_staff=False, is_superuser=False): + return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius') class TACACSPlusBackend(object): @@ -257,7 +257,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider): if isinstance(value, (list, tuple)): value = value[0] if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None: - logger.warn( + logger.warning( "Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.", conf_key[5:], key, @@ -370,7 +370,7 @@ def on_populate_user(sender, **kwargs): if field_len > max_len: setattr(user, field, getattr(user, field)[:max_len]) force_user_update = True - logger.warn('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) + logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) # Update organization membership based on group memberships. 
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {}) diff --git a/awx/sso/conf.py b/awx/sso/conf.py index 2faf342934..29d7f401d3 100644 --- a/awx/sso/conf.py +++ b/awx/sso/conf.py @@ -5,7 +5,7 @@ import urllib.parse as urlparse # Django from django.conf import settings from django.urls import reverse -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/sso/fields.py b/awx/sso/fields.py index e2d46d9362..9ad016f594 100644 --- a/awx/sso/fields.py +++ b/awx/sso/fields.py @@ -4,13 +4,14 @@ import inspect import json import re +import six + # Python LDAP import ldap import awx # Django -from django.utils import six -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django Auth LDAP import django_auth_ldap.config @@ -456,7 +457,7 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin): params = self.get_depends_on() or {} params_sanitized = dict() - cls_args = inspect.getargspec(cls.__init__).args[1:] + cls_args = inspect.getfullargspec(cls.__init__).args[1:] if cls_args: if not isinstance(params, dict): @@ -487,7 +488,7 @@ class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin): # Fail safe return {} - invalid_keys = set(value.keys()) - set(inspect.getargspec(group_type_cls.__init__).args[1:]) + invalid_keys = set(value.keys()) - set(inspect.getfullargspec(group_type_cls.__init__).args[1:]) if invalid_keys: invalid_keys = sorted(list(invalid_keys)) keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']') @@ -582,11 +583,11 @@ class SocialMapField(fields.ListField): def to_representation(self, value): if isinstance(value, (list, tuple)): return super(SocialMapField, self).to_representation(value) - elif value in fields.NullBooleanField.TRUE_VALUES: + elif value in fields.BooleanField.TRUE_VALUES: return True - elif value in 
fields.NullBooleanField.FALSE_VALUES: + elif value in fields.BooleanField.FALSE_VALUES: return False - elif value in fields.NullBooleanField.NULL_VALUES: + elif value in fields.BooleanField.NULL_VALUES: return None elif isinstance(value, (str, type(re.compile('')))): return self.child.to_representation(value) @@ -596,11 +597,11 @@ class SocialMapField(fields.ListField): def to_internal_value(self, data): if isinstance(data, (list, tuple)): return super(SocialMapField, self).to_internal_value(data) - elif data in fields.NullBooleanField.TRUE_VALUES: + elif data in fields.BooleanField.TRUE_VALUES: return True - elif data in fields.NullBooleanField.FALSE_VALUES: + elif data in fields.BooleanField.FALSE_VALUES: return False - elif data in fields.NullBooleanField.NULL_VALUES: + elif data in fields.BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/sso/models.py b/awx/sso/models.py index 95da1b82c7..28eb23857f 100644 --- a/awx/sso/models.py +++ b/awx/sso/models.py @@ -4,7 +4,7 @@ # Django from django.db import models from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UserEnterpriseAuth(models.Model): diff --git a/awx/sso/pipeline.py b/awx/sso/pipeline.py index 3a63391fe8..85bfd499fd 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/pipeline.py @@ -11,7 +11,7 @@ from social_core.exceptions import AuthException # Django from django.core.exceptions import ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db.models import Q diff --git a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py index 0d48c54d87..3f37b41df3 100644 --- a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py +++ 
b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py @@ -11,7 +11,7 @@ def test_fetch_user_if_exist(existing_tacacsplus_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("foo", "password", "tacacs+") mocked_logger.debug.assert_not_called() - mocked_logger.warn.assert_not_called() + mocked_logger.warning.assert_not_called() assert new_user == existing_tacacsplus_user @@ -33,5 +33,5 @@ def test_created_user_has_no_usable_password(): def test_non_enterprise_user_does_not_get_pass(existing_normal_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("alice", "password", "tacacs+") - mocked_logger.warn.assert_called_once_with(u'Enterprise user alice already defined in Tower.') + mocked_logger.warning.assert_called_once_with(u'Enterprise user alice already defined in Tower.') assert new_user is None diff --git a/awx/sso/urls.py b/awx/sso/urls.py index a32b11d6d6..93da0996c9 100644 --- a/awx/sso/urls.py +++ b/awx/sso/urls.py @@ -1,14 +1,15 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path + from awx.sso.views import sso_complete, sso_error, sso_inactive, saml_metadata app_name = 'sso' urlpatterns = [ - url(r'^complete/$', sso_complete, name='sso_complete'), - url(r'^error/$', sso_error, name='sso_error'), - url(r'^inactive/$', sso_inactive, name='sso_inactive'), - url(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), + re_path(r'^complete/$', sso_complete, name='sso_complete'), + re_path(r'^error/$', sso_error, name='sso_error'), + re_path(r'^inactive/$', sso_inactive, name='sso_inactive'), + re_path(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), ] diff --git a/awx/sso/validators.py b/awx/sso/validators.py index 821abc3b15..478b86b36f 100644 --- a/awx/sso/validators.py +++ b/awx/sso/validators.py @@ -6,7 +6,7 @@ import ldap # Django from django.core.exceptions import ValidationError -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'validate_ldap_dn', diff --git a/awx/sso/views.py b/awx/sso/views.py index 2f3a448af9..67921b2fa4 100644 --- a/awx/sso/views.py +++ b/awx/sso/views.py @@ -10,7 +10,7 @@ from django.urls import reverse from django.http import HttpResponse from django.views.generic import View from django.views.generic.base import RedirectView -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.api.serializers import UserSerializer from rest_framework.renderers import JSONRenderer from django.conf import settings @@ -40,10 +40,10 @@ class CompleteView(BaseRedirectView): def dispatch(self, request, *args, **kwargs): response = super(CompleteView, self).dispatch(request, *args, **kwargs) if self.request.user and self.request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in".format(self.request.user.username))) + logger.info(smart_str(u"User {} logged in".format(self.request.user.username))) 
response.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) + current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') response.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) diff --git a/awx/ui/__init__.py b/awx/ui/__init__.py index ac6a554356..e484e62be1 100644 --- a/awx/ui/__init__.py +++ b/awx/ui/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. - -default_app_config = 'awx.ui.apps.UIConfig' diff --git a/awx/ui/apps.py b/awx/ui/apps.py index 40943c6f53..d567e64b80 100644 --- a/awx/ui/apps.py +++ b/awx/ui/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UIConfig(AppConfig): diff --git a/awx/ui/conf.py b/awx/ui/conf.py index 34208f2339..9f1cef04fc 100644 --- a/awx/ui/conf.py +++ b/awx/ui/conf.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import register, fields diff --git a/awx/ui/fields.py b/awx/ui/fields.py index d9b46890ff..37089c0265 100644 --- a/awx/ui/fields.py +++ b/awx/ui/fields.py @@ -7,7 +7,7 @@ import binascii import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields @@ -16,7 +16,7 @@ from awx.conf import fields class PendoTrackingStateField(fields.ChoiceField): def to_internal_value(self, data): # Any false/null values get converted to 'off'. 
- if data in fields.NullBooleanField.FALSE_VALUES or data in fields.NullBooleanField.NULL_VALUES: + if data in fields.BooleanField.FALSE_VALUES or data in fields.BooleanField.NULL_VALUES: return 'off' return super(PendoTrackingStateField, self).to_internal_value(data) diff --git a/awx/ui/urls.py b/awx/ui/urls.py index 7d524d82be..6661fee280 100644 --- a/awx/ui/urls.py +++ b/awx/ui/urls.py @@ -1,5 +1,5 @@ -from django.conf.urls import url -from django.utils.translation import ugettext_lazy as _ +from django.urls import re_path +from django.utils.translation import gettext_lazy as _ from django.views.generic.base import TemplateView from awx.main.utils.licensing import server_product_name @@ -27,4 +27,4 @@ class MigrationsNotran(TemplateView): app_name = 'ui' -urlpatterns = [url(r'^$', IndexView.as_view(), name='index'), url(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran')] +urlpatterns = [re_path(r'^$', IndexView.as_view(), name='index'), re_path(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran')] diff --git a/awx/urls.py b/awx/urls.py index 4424e8328a..c99eda011c 100644 --- a/awx/urls.py +++ b/awx/urls.py @@ -1,29 +1,30 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url, include from django.conf import settings +from django.urls import re_path, include + from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect urlpatterns = [ - url(r'', include('awx.ui.urls', namespace='ui')), - url(r'^api/', include('awx.api.urls', namespace='api')), - url(r'^sso/', include('awx.sso.urls', namespace='sso')), - url(r'^sso/', include('social_django.urls', namespace='social')), - url(r'^(?:api/)?400.html$', handle_400), - url(r'^(?:api/)?403.html$', handle_403), - url(r'^(?:api/)?404.html$', handle_404), - url(r'^(?:api/)?500.html$', handle_500), - url(r'^csp-violation/', handle_csp_violation), - url(r'^login/', handle_login_redirect), + re_path(r'', include('awx.ui.urls', namespace='ui')), + re_path(r'^api/', include('awx.api.urls', namespace='api')), + re_path(r'^sso/', include('awx.sso.urls', namespace='sso')), + re_path(r'^sso/', include('social_django.urls', namespace='social')), + re_path(r'^(?:api/)?400.html$', handle_400), + re_path(r'^(?:api/)?403.html$', handle_403), + re_path(r'^(?:api/)?404.html$', handle_404), + re_path(r'^(?:api/)?500.html$', handle_500), + re_path(r'^csp-violation/', handle_csp_violation), + re_path(r'^login/', handle_login_redirect), ] if settings.SETTINGS_MODULE == 'awx.settings.development': try: import debug_toolbar - urlpatterns += [url(r'^__debug__/', include(debug_toolbar.urls))] + urlpatterns += [re_path(r'^__debug__/', include(debug_toolbar.urls))] except ImportError: pass diff --git a/awxkit/tox.ini b/awxkit/tox.ini index 73c9493e1d..3e63d73673 100644 --- a/awxkit/tox.ini +++ b/awxkit/tox.ini @@ -8,7 +8,7 @@ skip_missing_interpreters = true # skipsdist = true [testenv] -basepython = python3.8 +basepython = python3.9 passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH setenv = PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:. 
diff --git a/docs/licenses/django-jsonfield.txt b/docs/licenses/django-jsonfield.txt deleted file mode 100644 index d869f756e8..0000000000 --- a/docs/licenses/django-jsonfield.txt +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2012, Matthew Schinckel. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * The names of its contributors may not be used to endorse or promote products - derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL MATTHEW SCHINCKEL BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/docs/licenses/jaraco.classes.txt b/docs/licenses/jaraco-classes.txt similarity index 100% rename from docs/licenses/jaraco.classes.txt rename to docs/licenses/jaraco-classes.txt diff --git a/docs/licenses/jaraco.collections.txt b/docs/licenses/jaraco-collections.txt similarity index 100% rename from docs/licenses/jaraco.collections.txt rename to docs/licenses/jaraco-collections.txt diff --git a/docs/licenses/jaraco.functools.txt b/docs/licenses/jaraco-functools.txt similarity index 100% rename from docs/licenses/jaraco.functools.txt rename to docs/licenses/jaraco-functools.txt diff --git a/docs/licenses/jaraco.logging.txt b/docs/licenses/jaraco-logging.txt similarity index 100% rename from docs/licenses/jaraco.logging.txt rename to docs/licenses/jaraco-logging.txt diff --git a/docs/licenses/jaraco.stream.txt b/docs/licenses/jaraco-stream.txt similarity index 100% rename from docs/licenses/jaraco.stream.txt rename to docs/licenses/jaraco-stream.txt diff --git a/docs/licenses/jaraco.text.txt b/docs/licenses/jaraco-text.txt similarity index 100% rename from docs/licenses/jaraco.text.txt rename to docs/licenses/jaraco-text.txt diff --git a/docs/licenses/pkgconfig.txt b/docs/licenses/pkgconfig.txt deleted file mode 100644 index 716f12754d..0000000000 --- a/docs/licenses/pkgconfig.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2013 Matthias Vogelgesang - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/docs/licenses/future.txt b/docs/licenses/python-future.txt similarity index 100% rename from docs/licenses/future.txt rename to docs/licenses/python-future.txt diff --git a/docs/licenses/ruamel.yaml.txt b/docs/licenses/ruamel-yaml.txt similarity index 100% rename from docs/licenses/ruamel.yaml.txt rename to docs/licenses/ruamel-yaml.txt diff --git a/docs/licenses/ruamel.yaml.clib.txt b/docs/licenses/ruamel.yaml.clib.txt deleted file mode 100644 index 1c3e20a20e..0000000000 --- a/docs/licenses/ruamel.yaml.clib.txt +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Anthon van der Neut, Ruamel bvba - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/docs/licenses/tacacs_plus.txt b/docs/licenses/tacacs-plus.txt similarity index 100% rename from docs/licenses/tacacs_plus.txt rename to docs/licenses/tacacs-plus.txt diff --git a/docs/licenses/zope.interface.txt b/docs/licenses/zope-interface.txt similarity index 100% rename from docs/licenses/zope.interface.txt rename to docs/licenses/zope-interface.txt diff --git a/pytest.ini b/pytest.ini index d5d7273433..d4ffd2db9b 100644 --- a/pytest.ini +++ b/pytest.ini @@ -12,4 +12,5 @@ markers = job_permissions: activity_stream_access: job_runtime_vars: + fixture_args: junit_family=xunit2 diff --git a/requirements/README.md b/requirements/README.md index 69ab2d4bcf..a10d412f2c 100644 --- a/requirements/README.md +++ b/requirements/README.md @@ -58,7 +58,7 @@ Make sure to delete the old tarball if it is an upgrade. Anything pinned in `*.in` files involves additional manual work in order to upgrade. Some information related to that work is outlined here. -### django +### Django For any upgrade of Django, it must be confirmed that we don't regress on FIPS support before merging. @@ -90,13 +90,10 @@ that we have the latest version ### django-oauth-toolkit -Version 1.2.0 of this project has a bug that error when revoking tokens. -This is fixed in the master branch but is not yet released. - -When upgrading past 1.2.0 in the future, the `0025` migration needs to be -edited, just like the old migration was edited in the project: -https://github.com/jazzband/django-oauth-toolkit/commit/96538876d0d7ea0319ba5286f9bde842a906e1c5 -The field can simply have the validator method `validate_uris` removed. 
+Versions later than 1.4.1 throw an error about id_token_id, due to the +OpenID Connect work that was done in +https://github.com/jazzband/django-oauth-toolkit/pull/915. This may +be fixable by creating a migration on our end? ### azure-keyvault @@ -108,17 +105,6 @@ Upgrading to 4.0.0 causes error because imports changed. ImportError: cannot import name 'KeyVaultClient' ``` -### django-jsonfield - -Instead of calling a `loads()` operation, the returned value is casted into -a string in some cases, introduced in the change: - -https://github.com/adamchainz/django-jsonfield/pull/14 - -This breaks a very large amount of AWX code that assumes these fields -are returned as dicts. Upgrading this library will require a refactor -to accomidate this change. - ### pip and setuptools The offline installer needs to have functionality confirmed before upgrading these. diff --git a/requirements/requirements.in b/requirements/requirements.in index 94ece3085d..48f4153086 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -9,23 +9,21 @@ cryptography>=3.2 Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep daphne distro -django==2.2.24 # see UPGRADE BLOCKERs +django==3.2.12 # see UPGRADE BLOCKERs django-auth-ldap django-cors-headers>=3.5.0 django-crum django-extensions>=2.2.9 # https://github.com/ansible/awx/pull/6441 -django-guid==2.2.0 # pinned to match Django 2.2 -django-jsonfield==1.2.0 # see UPGRADE BLOCKERs -django-oauth-toolkit==1.1.3 # see UPGRADE BLOCKERs +django-guid==3.2.1 +django-oauth-toolkit==1.4.1 django-polymorphic django-pglocks django-qsstats-magic -django-radius==1.3.3 # FIX auth does not work with later versions django-redis django-solo django-split-settings django-taggit -djangorestframework>=3.12.1 +djangorestframework==3.13.1 djangorestframework-yaml GitPython>=3.1.1 # minimum to fix https://github.com/ansible/awx/issues/6119 irc @@ -40,15 +38,15 @@ psycopg2 psutil pygerduty pyparsing -python3-saml 
+python3-saml==1.13.0 python-dsv-sdk python-tss-sdk==1.0.0 python-ldap>=3.3.1 # https://github.com/python-ldap/python-ldap/issues/270 pyyaml>=5.4.1 # minimum to fix https://github.com/yaml/pyyaml/issues/478 receptorctl==1.1.1 schedule==0.6.0 -social-auth-core==3.3.1 # see UPGRADE BLOCKERs -social-auth-app-django==3.1.0 # see UPGRADE BLOCKERs +social-auth-core==4.2.0 # see UPGRADE BLOCKERs +social-auth-app-django==5.0.0 # see UPGRADE BLOCKERs redis requests slack-sdk diff --git a/requirements/requirements.txt b/requirements/requirements.txt index f05ebe3b96..1349f34ffd 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -9,11 +9,12 @@ ansiconv==1.0.0 # via -r /awx_devel/requirements/requirements.in asciichartpy==1.5.25 # via -r /awx_devel/requirements/requirements.in -asgiref==3.2.5 +asgiref==3.5.0 # via # channels # channels-redis # daphne + # django async-timeout==3.0.1 # via # aiohttp @@ -80,13 +81,12 @@ dataclasses==0.6 defusedxml==0.6.0 # via # python3-openid - # python3-saml # social-auth-core dictdiffer==0.8.1 # via openshift distro==1.5.0 # via -r /awx_devel/requirements/requirements.in -django==2.2.24 +django==3.2.12 # via # -r /awx_devel/requirements/requirements.in # channels @@ -94,49 +94,47 @@ django==2.2.24 # django-cors-headers # django-crum # django-guid - # django-jsonfield # django-oauth-toolkit # django-polymorphic + # django-solo # django-taggit # djangorestframework -django-auth-ldap==2.1.0 +django-auth-ldap==4.0.0 # via -r /awx_devel/requirements/requirements.in django-cors-headers==3.7.0 # via -r /awx_devel/requirements/requirements.in -django-crum==0.7.5 +django-crum==0.7.9 # via -r /awx_devel/requirements/requirements.in django-extensions==2.2.9 # via -r /awx_devel/requirements/requirements.in -django-guid==2.2.0 +django-guid==3.2.1 # via -r /awx_devel/requirements/requirements.in -django-jsonfield==1.2.0 - # via -r /awx_devel/requirements/requirements.in -django-oauth-toolkit==1.1.3 +django-oauth-toolkit==1.4.1 
# via -r /awx_devel/requirements/requirements.in django-pglocks==1.0.4 # via -r /awx_devel/requirements/requirements.in -django-polymorphic==2.1.2 +django-polymorphic==3.1.0 # via -r /awx_devel/requirements/requirements.in django-qsstats-magic==1.1.0 # via -r /awx_devel/requirements/requirements.in -django-radius==1.3.3 - # via -r /awx_devel/requirements/requirements.in + # via -r /awx_devel/requirements/requirements_git.txt django-redis==4.5.0 # via -r /awx_devel/requirements/requirements.in -django-solo==1.1.3 +django-solo==2.0.0 # via -r /awx_devel/requirements/requirements.in django-split-settings==1.0.0 # via -r /awx_devel/requirements/requirements.in -django-taggit==1.2.0 +django-taggit==2.1.0 # via -r /awx_devel/requirements/requirements.in -djangorestframework==3.12.1 +djangorestframework==3.13.1 # via -r /awx_devel/requirements/requirements.in -djangorestframework-yaml==1.0.3 +djangorestframework-yaml==2.0.0 # via -r /awx_devel/requirements/requirements.in docutils==0.16 # via python-daemon -future==0.16.0 - # via django-radius + # via + # -r /awx_devel/requirements/requirements_git.txt + # django-radius gitdb==4.0.2 # via gitpython gitpython==3.1.7 @@ -159,28 +157,28 @@ incremental==17.5.0 # via twisted irc==18.0.0 # via -r /awx_devel/requirements/requirements.in -isodate==0.6.0 +isodate==0.6.1 # via # msrest # python3-saml -jaraco.classes==3.1.0 - # via jaraco.collections -jaraco.collections==3.0.0 +jaraco-classes==3.1.0 + # via jaraco-collections +jaraco-collections==3.0.0 # via irc -jaraco.functools==3.0.0 +jaraco-functools==3.0.0 # via # irc - # jaraco.text + # jaraco-text # tempora -jaraco.logging==3.0.0 +jaraco-logging==3.0.0 # via irc -jaraco.stream==3.0.0 +jaraco-stream==3.0.0 # via irc -jaraco.text==3.2.0 +jaraco-text==3.2.0 # via # irc - # jaraco.collections -jinja2==2.11.2 + # jaraco-collections +jinja2==3.0.3 # via # -r /awx_devel/requirements/requirements.in # openshift @@ -192,17 +190,19 @@ kubernetes==11.0.0 # via openshift lockfile==0.12.2 
# via python-daemon -lxml==4.6.3 - # via xmlsec +lxml==4.7.0 + # via + # python3-saml + # xmlsec markdown==3.2.1 # via -r /awx_devel/requirements/requirements.in -markupsafe==1.1.1 +markupsafe==2.0.1 # via jinja2 more-itertools==8.2.0 # via # irc - # jaraco.classes - # jaraco.functools + # jaraco-classes + # jaraco-functools msgpack==1.0.0 # via channels-redis msrest==0.6.11 @@ -215,9 +215,9 @@ multidict==4.7.5 # via # aiohttp # yarl -netaddr==0.7.19 +netaddr==0.8.0 # via pyrad -oauthlib==3.1.0 +oauthlib==3.2.0 # via # django-oauth-toolkit # requests-oauthlib @@ -232,8 +232,6 @@ pexpect==4.7.0 # via # -r /awx_devel/requirements/requirements.in # ansible-runner -pkgconfig==1.5.1 - # via xmlsec prometheus-client==0.7.1 # via -r /awx_devel/requirements/requirements.in psutil==5.8.0 @@ -259,7 +257,7 @@ pygerduty==0.38.2 # via -r /awx_devel/requirements/requirements.in pyhamcrest==2.0.2 # via twisted -pyjwt==1.7.1 +pyjwt==2.3.0 # via # adal # social-auth-core @@ -270,7 +268,7 @@ pyparsing==2.4.6 # via # -r /awx_devel/requirements/requirements.in # packaging -pyrad==2.3 +pyrad==2.4 # via django-radius pyrsistent==0.15.7 # via jsonschema @@ -293,11 +291,12 @@ python-tss-sdk==1.0.0 # via -r /awx_devel/requirements/requirements.in python3-openid==3.1.0 # via social-auth-core -python3-saml==1.9.0 +python3-saml==1.13.0 # via -r /awx_devel/requirements/requirements.in -pytz==2019.3 +pytz==2021.3 # via # django + # djangorestframework # irc # tempora # twilio @@ -327,17 +326,15 @@ requests==2.26.0 # requests-oauthlib # social-auth-core # twilio -requests-oauthlib==1.3.0 +requests-oauthlib==1.3.1 # via # kubernetes # msrest # social-auth-core rsa==4.7.2 # via google-auth -ruamel.yaml==0.16.10 +ruamel-yaml==0.16.10 # via openshift -ruamel.yaml.clib==0.2.0 - # via ruamel.yaml schedule==0.6.0 # via -r /awx_devel/requirements/requirements.in service-identity==18.1.0 @@ -351,9 +348,9 @@ six==1.14.0 # django-pglocks # google-auth # isodate - # jaraco.collections - # jaraco.logging - # 
jaraco.text + # jaraco-collections + # jaraco-logging + # jaraco-text # jsonschema # kubernetes # openshift @@ -362,8 +359,6 @@ six==1.14.0 # pyrad # pyrsistent # python-dateutil - # social-auth-app-django - # social-auth-core # tacacs-plus # twilio # websocket-client @@ -371,20 +366,20 @@ slack-sdk==3.11.2 # via -r /awx_devel/requirements/requirements.in smmap==3.0.1 # via gitdb -social-auth-app-django==3.1.0 +social-auth-app-django==5.0.0 # via -r /awx_devel/requirements/requirements.in -social-auth-core==3.3.1 +social-auth-core==4.2.0 # via # -r /awx_devel/requirements/requirements.in # social-auth-app-django sqlparse==0.3.1 # via django -tacacs_plus==1.0 +tacacs-plus==1.0 # via -r /awx_devel/requirements/requirements.in tempora==2.1.0 # via # irc - # jaraco.logging + # jaraco-logging twilio==6.37.0 # via -r /awx_devel/requirements/requirements.in twisted[tls]==20.3.0 @@ -407,11 +402,11 @@ websocket-client==0.57.0 # via kubernetes wheel==0.36.2 # via -r /awx_devel/requirements/requirements.in -xmlsec==1.3.3 +xmlsec==1.3.12 # via python3-saml yarl==1.4.2 # via aiohttp -zope.interface==5.0.0 +zope-interface==5.0.0 # via twisted # The following packages are considered to be unsafe in a requirements file: @@ -426,4 +421,4 @@ setuptools==58.2.0 # kubernetes # markdown # python-daemon - # zope.interface + # zope-interface diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index 582eac7fb9..bea1e88dc2 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -1,4 +1,4 @@ -django-debug-toolbar==1.11.1 +django-debug-toolbar==3.2.4 django-rest-swagger # pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed ipython==7.21.0 diff --git a/requirements/requirements_git.txt b/requirements/requirements_git.txt index 01b668ee18..ab13d9aed2 100644 --- a/requirements/requirements_git.txt +++ b/requirements/requirements_git.txt @@ -1,3 +1,6 @@ 
git+https://github.com/ansible/system-certifi.git@devel#egg=certifi # Remove pbr from requirements.in when moving ansible-runner to requirements.in git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner +# django-radius has an aggressive pin of future==0.16.0, see https://github.com/robgolding/django-radius/pull/25 +git+https://github.com/ansible/django-radius.git@develop#egg=django-radius +git+https://github.com/PythonCharmers/python-future@master#egg=future diff --git a/requirements/updater.sh b/requirements/updater.sh index fa9ae7ddd2..01f6000d2c 100755 --- a/requirements/updater.sh +++ b/requirements/updater.sh @@ -18,7 +18,8 @@ generate_requirements() { # shellcheck disable=SC1090 source ${venv}/bin/activate - ${venv}/bin/python3 -m pip install -U pip pip-tools + # FIXME: https://github.com/jazzband/pip-tools/issues/1558 + ${venv}/bin/python3 -m pip install -U 'pip<22.0' pip-tools ${pip_compile} "${requirements_in}" "${requirements_git}" --output-file requirements.txt # consider the git requirements for purposes of resolving deps diff --git a/tools/scripts/firehose.py b/tools/scripts/firehose.py index cd5930315a..2eeeb5da7b 100755 --- a/tools/scripts/firehose.py +++ b/tools/scripts/firehose.py @@ -318,8 +318,9 @@ if __name__ == '__main__': for j_hour in range(24): time_delta = datetime.timedelta(days=i_day, hours=j_hour, seconds=0) created_job_ids = generate_jobs(jobs, batch_size=batch_size, time_delta=time_delta) - for k_id in created_job_ids: - generate_events(events, str(k_id), time_delta) + if events > 0: + for k_id in created_job_ids: + generate_events(events, str(k_id), time_delta) print(datetime.datetime.utcnow().isoformat()) conn.close()