diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99df2f7d72..01e33079c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,7 +5,7 @@ env: on: pull_request: jobs: - common_tests: + common-tests: name: ${{ matrix.tests.name }} runs-on: ubuntu-latest permissions: @@ -33,9 +33,12 @@ jobs: - name: ui-lint label: Run UI Linters command: make ui-lint - - name: ui-test - label: Run UI Tests - command: make ui-test + - name: ui-test-screens + label: Run UI Screens Tests + command: make ui-test-screens + - name: ui-test-general + label: Run UI General Tests + command: make ui-test-general steps: - uses: actions/checkout@v2 @@ -63,6 +66,36 @@ jobs: run: | docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \ --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }} + dev-env: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Get python version from Makefile + run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV + + - name: Install python ${{ env.py_version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.py_version }} + + - name: Log in to registry + run: | + echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + + - name: Pre-pull image to warm build cache + run: | + docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || : + + - name: Build image + run: | + DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build + + - name: Run smoke test + run: | + export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} + export COMPOSE_TAG=${{ env.BRANCH }} + ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v awx-operator: runs-on: ubuntu-latest diff --git a/.github/workflows/promote.yml b/.github/workflows/promote.yml index 3955d7fcd7..f21bc69672 
100644 --- a/.github/workflows/promote.yml +++ b/.github/workflows/promote.yml @@ -8,6 +8,53 @@ jobs: promote: runs-on: ubuntu-latest steps: + - name: Checkout awx + uses: actions/checkout@v2 + + - name: Get python version from Makefile + run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV + + - name: Install python ${{ env.py_version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.py_version }} + + - name: Install dependencies + run: | + python${{ env.py_version }} -m pip install wheel twine + + - name: Set official collection namespace + run: echo collection_namespace=awx >> $GITHUB_ENV + if: ${{ github.repository_owner == 'ansible' }} + + - name: Set unofficial collection namespace + run: echo collection_namespace=${{ github.repository_owner }} >> $GITHUB_ENV + if: ${{ github.repository_owner != 'ansible' }} + + - name: Build collection and publish to galaxy + run: | + COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection + ansible-galaxy collection publish \ + --token=${{ secrets.GALAXY_TOKEN }} \ + awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz + + - name: Set official pypi info + run: echo pypi_repo=pypi >> $GITHUB_ENV + if: ${{ github.repository_owner == 'ansible' }} + + - name: Set unofficial pypi info + run: echo pypi_repo=testpypi >> $GITHUB_ENV + if: ${{ github.repository_owner != 'ansible' }} + + - name: Build awxkit and upload to pypi + run: | + cd awxkit && python3 setup.py bdist_wheel + twine upload \ + -r ${{ env.pypi_repo }} \ + -u ${{ secrets.PYPI_USERNAME }} \ + -p ${{ secrets.PYPI_PASSWORD }} \ + dist/* + - name: Log in to GHCR run: | echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index 64f4e48ac7..fd319fa6d8 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -83,7 +83,8 @@ jobs: - name: Build and 
stage awx-operator working-directory: awx-operator run: | - BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }}" \ + BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \ + --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \ IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \ VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push diff --git a/Makefile b/Makefile index 6f4680399d..bc4a6589f8 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,8 @@ COMPOSE_TAG ?= $(GIT_BRANCH) MAIN_NODE_TYPE ?= hybrid # If set to true docker-compose will also start a keycloak instance KEYCLOAK ?= false +# If set to true docker-compose will also start an ldap instance +LDAP ?= false VENV_BASE ?= /var/lib/awx/venv @@ -305,7 +307,7 @@ symlink_collection: mkdir -p ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE) # in case it does not exist ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL) -build_collection: +awx_collection_build: $(shell find awx_collection -type f) ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \ -e collection_package=$(COLLECTION_PACKAGE) \ -e collection_namespace=$(COLLECTION_NAMESPACE) \ @@ -313,6 +315,8 @@ build_collection: -e '{"awx_template_version":false}' ansible-galaxy collection build awx_collection_build --force --output-path=awx_collection_build +build_collection: awx_collection_build + install_collection: build_collection rm -rf $(COLLECTION_INSTALL) ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz @@ -400,9 +404,18 @@ ui-lint: ui-test: $(NPM_BIN) --prefix awx/ui install - $(NPM_BIN) run --prefix awx/ui test + $(NPM_BIN) run --prefix awx/ui test +ui-test-screens: + $(NPM_BIN) --prefix awx/ui install + $(NPM_BIN) run --prefix awx/ui pretest + $(NPM_BIN) run --prefix awx/ui test-screens --runInBand 
+ui-test-general: + $(NPM_BIN) --prefix awx/ui install + $(NPM_BIN) run --prefix awx/ui pretest + $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand + # Build a pip-installable package into dist/ with a timestamped version number. dev_build: $(PYTHON) setup.py dev_build @@ -451,7 +464,8 @@ docker-compose-sources: .git/hooks/pre-commit -e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \ -e execution_node_count=$(EXECUTION_NODE_COUNT) \ -e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \ - -e enable_keycloak=$(KEYCLOAK) + -e enable_keycloak=$(KEYCLOAK) \ + -e enable_ldap=$(LDAP) docker-compose: awx/projects docker-compose-sources @@ -567,3 +581,6 @@ messages: . $(VENV_BASE)/awx/bin/activate; \ fi; \ $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot + +print-%: + @echo $($*) diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..2e8815a38d --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,3 @@ +For all security related bugs, email security@ansible.com instead of using this issue tracker and you will receive a prompt response. 
+ +For more information on the Ansible community's practices regarding responsible disclosure, see https://www.ansible.com/security diff --git a/awx/__init__.py b/awx/__init__.py index fa3e164092..eae7df87bd 100644 --- a/awx/__init__.py +++ b/awx/__init__.py @@ -36,7 +36,6 @@ else: from django.db.backends.utils import names_digest from django.db import connection - if HAS_DJANGO is True: # See upgrade blocker note in requirements/README.md diff --git a/awx/api/authentication.py b/awx/api/authentication.py index 52b3462005..48fc00db44 100644 --- a/awx/api/authentication.py +++ b/awx/api/authentication.py @@ -6,7 +6,7 @@ import logging # Django from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # Django REST Framework from rest_framework import authentication @@ -24,7 +24,7 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication): ret = super(LoggedBasicAuthentication, self).authenticate(request) if ret: username = ret[0].username if ret[0] else '' - logger.info(smart_text(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) + logger.info(smart_str(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) return ret def authenticate_header(self, request): @@ -45,7 +45,7 @@ class LoggedOAuth2Authentication(OAuth2Authentication): user, token = ret username = user.username if user else '' logger.info( - smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) + smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) ) setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x]) return ret diff --git a/awx/api/conf.py b/awx/api/conf.py index 00c712a064..fd1467cdde 100644 --- a/awx/api/conf.py +++ b/awx/api/conf.py @@ -1,6 +1,6 @@ # 
Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/api/exceptions.py b/awx/api/exceptions.py index 8f2c079583..406bd5e85f 100644 --- a/awx/api/exceptions.py +++ b/awx/api/exceptions.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ValidationError @@ -13,7 +13,7 @@ class ActiveJobConflict(ValidationError): def __init__(self, active_jobs): # During APIException.__init__(), Django Rest Framework - # turn everything in self.detail into string by using force_text. + # turn everything in self.detail into string by using force_str. # Declare detail afterwards circumvent this behavior. super(ActiveJobConflict, self).__init__() self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs} diff --git a/awx/api/fields.py b/awx/api/fields.py index 6f288f2bce..c84b6327f9 100644 --- a/awx/api/fields.py +++ b/awx/api/fields.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework @@ -28,13 +28,17 @@ class NullFieldMixin(object): return (is_empty_value, data) -class BooleanNullField(NullFieldMixin, serializers.NullBooleanField): +class BooleanNullField(NullFieldMixin, serializers.BooleanField): """ Custom boolean field that allows null and empty string as False values. 
""" + def __init__(self, **kwargs): + kwargs['allow_null'] = True + super().__init__(**kwargs) + def to_internal_value(self, data): - return bool(super(BooleanNullField, self).to_internal_value(data)) + return bool(super().to_internal_value(data)) class CharNullField(NullFieldMixin, serializers.CharField): @@ -47,7 +51,7 @@ class CharNullField(NullFieldMixin, serializers.CharField): super(CharNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(CharNullField, self).to_internal_value(data or u'') + return super(CharNullField, self).to_internal_value(data or '') class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): @@ -60,7 +64,7 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): super(ChoiceNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(ChoiceNullField, self).to_internal_value(data or u'') + return super(ChoiceNullField, self).to_internal_value(data or '') class VerbatimField(serializers.Field): diff --git a/awx/api/filters.py b/awx/api/filters.py index 138478135b..10fc488006 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -7,15 +7,15 @@ import json from functools import reduce # Django -from django.core.exceptions import FieldError, ValidationError +from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist from django.db import models -from django.db.models import Q, CharField, IntegerField, BooleanField -from django.db.models.fields import FieldDoesNotExist +from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey +from django.db.models.functions import Cast from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.encoding import force_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding 
import force_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ParseError, PermissionDenied @@ -185,16 +185,14 @@ class FieldLookupBackend(BaseFilterBackend): return (field_list[-1], new_lookup) def to_python_related(self, value): - value = force_text(value) + value = force_str(value) if value.lower() in ('none', 'null'): return None else: return int(value) def value_to_python_for_field(self, field, value): - if isinstance(field, models.NullBooleanField): - return to_python_boolean(value, allow_none=True) - elif isinstance(field, models.BooleanField): + if isinstance(field, models.BooleanField): return to_python_boolean(value) elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)): try: @@ -244,6 +242,8 @@ class FieldLookupBackend(BaseFilterBackend): new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name)) return value, new_lookups, needs_distinct else: + if isinstance(field, JSONField): + new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt') value = self.value_to_python_for_field(field, value) return value, new_lookup, needs_distinct @@ -293,7 +293,7 @@ class FieldLookupBackend(BaseFilterBackend): search_filter_relation = 'AND' values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values]) for value in values: - search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_text(value)) + search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value)) assert isinstance(new_keys, list) search_filters[search_value] = new_keys # by definition, search *only* joins across relations, @@ -325,6 +325,9 @@ class FieldLookupBackend(BaseFilterBackend): value, new_key, distinct = self.value_to_python(queryset.model, key, value) if distinct: needs_distinct = True + if '_as_txt' in new_key: + fname = next(item for item in new_key.split('__') if item.endswith('_as_txt')) + 
queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())}) if q_chain: chain_filters.append((q_not, new_key, value)) elif q_or: diff --git a/awx/api/generics.py b/awx/api/generics.py index b10728f32a..f2faec5c47 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -10,18 +10,18 @@ import urllib.parse # Django from django.conf import settings +from django.contrib.auth import views as auth_views +from django.contrib.contenttypes.models import ContentType from django.core.cache import cache +from django.core.exceptions import FieldDoesNotExist from django.db import connection -from django.db.models.fields import FieldDoesNotExist from django.db.models.fields.related import OneToOneRel from django.http import QueryDict from django.shortcuts import get_object_or_404 from django.template.loader import render_to_string -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.utils.safestring import mark_safe -from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ -from django.contrib.auth import views as auth_views +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied, AuthenticationFailed, ParseError, NotAcceptable, UnsupportedMediaType @@ -93,17 +93,18 @@ class LoggedLoginView(auth_views.LoginView): ret = super(LoggedLoginView, self).post(request, *args, **kwargs) current_user = getattr(request, 'user', None) if request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) + logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) ret.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) 
+ current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) + ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) return ret else: if 'username' in self.request.POST: - logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) + logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) ret.status_code = 401 return ret @@ -391,8 +392,8 @@ class GenericAPIView(generics.GenericAPIView, APIView): if hasattr(self.model._meta, "verbose_name"): d.update( { - 'model_verbose_name': smart_text(self.model._meta.verbose_name), - 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural), + 'model_verbose_name': smart_str(self.model._meta.verbose_name), + 'model_verbose_name_plural': smart_str(self.model._meta.verbose_name_plural), } ) serializer = self.get_serializer() @@ -523,8 +524,8 @@ class SubListAPIView(ParentMixin, ListAPIView): d = super(SubListAPIView, self).get_description_context() d.update( { - 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name), - 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural), + 'parent_model_verbose_name': smart_str(self.parent_model._meta.verbose_name), + 'parent_model_verbose_name_plural': smart_str(self.parent_model._meta.verbose_name_plural), } ) return d diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 5b8cf2ccb3..b4c75d09cb 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -6,11 +6,12 @@ from uuid import UUID # Django from django.core.exceptions import PermissionDenied +from django.db.models import JSONField from django.db.models.fields import 
PositiveIntegerField, BooleanField from django.db.models.fields.related import ForeignKey from django.http import Http404 -from django.utils.encoding import force_text, smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import force_str, smart_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import exceptions @@ -22,7 +23,7 @@ from rest_framework.request import clone_request # AWX from awx.api.fields import ChoiceNullField -from awx.main.fields import JSONField, ImplicitRoleField +from awx.main.fields import ImplicitRoleField from awx.main.models import NotificationTemplate from awx.main.utils.execution_environments import get_default_pod_spec @@ -53,7 +54,7 @@ class Metadata(metadata.SimpleMetadata): for attr in text_attrs: value = getattr(field, attr, None) if value is not None and value != '': - field_info[attr] = force_text(value, strings_only=True) + field_info[attr] = force_str(value, strings_only=True) placeholder = getattr(field, 'placeholder', serializers.empty) if placeholder is not serializers.empty: @@ -77,7 +78,7 @@ class Metadata(metadata.SimpleMetadata): } if field.field_name in field_help_text: opts = serializer.Meta.model._meta.concrete_model._meta - verbose_name = smart_text(opts.verbose_name) + verbose_name = smart_str(opts.verbose_name) field_info['help_text'] = field_help_text[field.field_name].format(verbose_name) if field.field_name == 'type': diff --git a/awx/api/metrics.py b/awx/api/metrics.py index 5ed3dcabef..de66c2de33 100644 --- a/awx/api/metrics.py +++ b/awx/api/metrics.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import MetricsView -urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')] +urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')] __all__ = ['urls'] diff --git a/awx/api/parsers.py b/awx/api/parsers.py index ce18bce0af..ac06a35b81 100644 --- a/awx/api/parsers.py +++ b/awx/api/parsers.py @@ -5,7 +5,7 @@ import json # Django from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import parsers diff --git a/awx/api/permissions.py b/awx/api/permissions.py index bd6328495b..3608a23d33 100644 --- a/awx/api/permissions.py +++ b/awx/api/permissions.py @@ -4,8 +4,6 @@ # Python import logging -from django.conf import settings - # Django REST Framework from rest_framework.exceptions import MethodNotAllowed, PermissionDenied from rest_framework import permissions @@ -250,13 +248,6 @@ class IsSystemAdminOrAuditor(permissions.BasePermission): return request.user.is_superuser -class InstanceGroupTowerPermission(ModelAccessPermission): - def has_object_permission(self, request, view, obj): - if request.method == 'DELETE' and obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: - return False - return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj) - - class WebhookKeyPermission(permissions.BasePermission): def has_object_permission(self, request, view, obj): return request.user.can_access(view.model, 'admin', obj, request.data) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index ff8e654f55..c7f7b8a242 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -25,8 +25,8 @@ from django.contrib.auth.password_validation import validate_password as django_ from django.contrib.contenttypes.models 
import ContentType from django.core.exceptions import ObjectDoesNotExist, ValidationError as DjangoValidationError from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from django.utils.text import capfirst from django.utils.timezone import now from django.utils.functional import cached_property @@ -97,7 +97,7 @@ from awx.main.models import ( ) from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator -from awx.main.fields import ImplicitRoleField, JSONBField +from awx.main.fields import ImplicitRoleField from awx.main.utils import ( get_type_for_model, get_model_for_type, @@ -357,7 +357,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl } choices = [] for t in self.get_types(): - name = _(type_name_map.get(t, force_text(get_model_for_type(t)._meta.verbose_name).title())) + name = _(type_name_map.get(t, force_str(get_model_for_type(t)._meta.verbose_name).title())) choices.append((t, name)) return choices @@ -645,7 +645,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl v2.extend(e) else: v2.append(e) - d[k] = list(map(force_text, v2)) + d[k] = list(map(force_str, v2)) raise ValidationError(d) return attrs @@ -1263,6 +1263,12 @@ class OAuth2ApplicationSerializer(BaseSerializer): activity_stream=self.reverse('api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}), ) ) + if obj.organization_id: + res.update( + dict( + organization=self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id}), + ) + ) return res def get_modified(self, obj): @@ -1718,7 +1724,7 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables): def validate_host_filter(self, host_filter): if host_filter: 
try: - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): if match == 'exact': # __exact is allowed continue @@ -1847,11 +1853,11 @@ class HostSerializer(BaseSerializerWithVariables): if port < 1 or port > 65535: raise ValueError except ValueError: - raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port)) + raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_str(port)) return name, port def validate_name(self, value): - name = force_text(value or '') + name = force_str(value or '') # Validate here only, update in main validate method. host, port = self._get_host_port_from_name(name) return value @@ -1865,13 +1871,13 @@ class HostSerializer(BaseSerializerWithVariables): return vars_validate_or_raise(value) def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') host, port = self._get_host_port_from_name(name) if port: attrs['name'] = host - variables = force_text(attrs.get('variables', self.instance and self.instance.variables or '')) + variables = force_str(attrs.get('variables', self.instance and self.instance.variables or '')) vars_dict = parse_yaml_or_json(variables) vars_dict['ansible_ssh_port'] = port attrs['variables'] = json.dumps(vars_dict) @@ -1944,7 +1950,7 @@ class GroupSerializer(BaseSerializerWithVariables): return res def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') if Host.objects.filter(name=name, inventory=inventory).exists(): raise serializers.ValidationError(_('A Host with that name already 
exists.')) @@ -2838,8 +2844,8 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer): if not project: raise serializers.ValidationError({'project': _('This field is required.')}) playbook_not_found = bool( - (project and project.scm_type and (not project.allow_override) and playbook and force_text(playbook) not in project.playbook_files) - or (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual + (project and project.scm_type and (not project.allow_override) and playbook and force_str(playbook) not in project.playbook_files) + or (project and not project.scm_type and playbook and force_str(playbook) not in project.playbooks) # manual ) if playbook_not_found: raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) @@ -3628,7 +3634,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) - diff_mode = serializers.NullBooleanField(required=False, default=None) + diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None) verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES) exclude_errors = () @@ -4850,6 +4856,11 @@ class InstanceSerializer(BaseSerializer): else: return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100)) + def validate(self, attrs): + if self.instance.node_type == 'hop': + raise serializers.ValidationError(_('Hop node instances may not be changed.')) + return attrs + class InstanceHealthCheckSerializer(BaseSerializer): class Meta: @@ -4936,6 +4947,9 @@ class InstanceGroupSerializer(BaseSerializer): return res def 
validate_policy_instance_list(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if self.instance.policy_instance_list != value: + raise serializers.ValidationError(_('%s instance group policy_instance_list may not be changed.' % self.instance.name)) for instance_name in value: if value.count(instance_name) > 1: raise serializers.ValidationError(_('Duplicate entry {}.').format(instance_name)) @@ -4946,6 +4960,11 @@ class InstanceGroupSerializer(BaseSerializer): return value def validate_policy_instance_percentage(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if value != self.instance.policy_instance_percentage: + raise serializers.ValidationError( + _('%s instance group policy_instance_percentage may not be changed from the initial value set by the installer.' % self.instance.name) + ) if value and self.instance and self.instance.is_container_group: raise serializers.ValidationError(_('Containerized instances may not be managed via the API')) return value @@ -4964,6 +4983,13 @@ class InstanceGroupSerializer(BaseSerializer): return value + def validate_is_container_group(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if value != self.instance.is_container_group: + raise serializers.ValidationError(_('%s instance group is_container_group may not be changed.' 
% self.instance.name)) + + return value + def validate_credential(self, value): if value and not value.kubernetes: raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group')) @@ -5078,7 +5104,7 @@ class ActivityStreamSerializer(BaseSerializer): try: return json.loads(obj.changes) except Exception: - logger.warn("Error deserializing activity stream json changes") + logger.warning("Error deserializing activity stream json changes") return {} def get_object_association(self, obj): diff --git a/awx/api/urls/activity_stream.py b/awx/api/urls/activity_stream.py index 907f856aa8..36b64a2de5 100644 --- a/awx/api/urls/activity_stream.py +++ b/awx/api/urls/activity_stream.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ActivityStreamList, ActivityStreamDetail urls = [ - url(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), - url(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), + re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command.py b/awx/api/urls/ad_hoc_command.py index cc1277adcf..9c723e762b 100644 --- a/awx/api/urls/ad_hoc_command.py +++ b/awx/api/urls/ad_hoc_command.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( AdHocCommandList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), - url(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), - url(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), - url(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), - url(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), - url(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), + re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), + re_path(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), + re_path(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command_event.py b/awx/api/urls/ad_hoc_command_event.py index a38d4fc165..cbfa72b8bc 100644 --- 
a/awx/api/urls/ad_hoc_command_event.py +++ b/awx/api/urls/ad_hoc_command_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import AdHocCommandEventDetail urls = [ - url(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential.py b/awx/api/urls/credential.py index e041e08477..f2ae8535b9 100644 --- a/awx/api/urls/credential.py +++ b/awx/api/urls/credential.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( CredentialList, @@ -18,16 +18,16 @@ from awx.api.views import ( urls = [ - url(r'^$', CredentialList.as_view(), name='credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), - url(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), - url(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), - url(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), - url(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), - url(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), - url(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), - url(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), + re_path(r'^$', CredentialList.as_view(), name='credential_list'), + 
re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), + re_path(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), + re_path(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), + re_path(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), + re_path(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), + re_path(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), + re_path(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_input_source.py b/awx/api/urls/credential_input_source.py index 6b365e5840..7ac4b3c4f4 100644 --- a/awx/api/urls/credential_input_source.py +++ b/awx/api/urls/credential_input_source.py @@ -1,14 +1,14 @@ # Copyright (c) 2019 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList urls = [ - url(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), - url(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), + re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), + re_path(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_type.py b/awx/api/urls/credential_type.py index ab799c8c54..8307126351 100644 --- a/awx/api/urls/credential_type.py +++ b/awx/api/urls/credential_type.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest urls = [ - url(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), - url(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), - url(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'), - url(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), + re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), + re_path(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), + re_path(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), 
name='credential_type_activity_stream_list'), + re_path(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/execution_environments.py b/awx/api/urls/execution_environments.py index 99b9cb3ddc..2b4f325b1a 100644 --- a/awx/api/urls/execution_environments.py +++ b/awx/api/urls/execution_environments.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ExecutionEnvironmentList, @@ -10,11 +10,11 @@ from awx.api.views import ( urls = [ - url(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), - url(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), - url(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), - url(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), - url(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), + re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), + re_path(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), + re_path(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), + re_path(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), + re_path(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/group.py b/awx/api/urls/group.py index 416479def6..18107cd22a 100644 --- a/awx/api/urls/group.py +++ b/awx/api/urls/group.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( GroupList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', GroupList.as_view(), name='group_list'), - url(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), - url(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), - url(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), - url(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), - url(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), - url(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), - url(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), - url(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), + re_path(r'^$', GroupList.as_view(), name='group_list'), + re_path(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), + re_path(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), + re_path(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), + re_path(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), + re_path(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', 
GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/host.py b/awx/api/urls/host.py index d06608bf86..36bbb70da4 100644 --- a/awx/api/urls/host.py +++ b/awx/api/urls/host.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( HostList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', HostList.as_view(), name='host_list'), - url(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), - url(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), - url(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), - url(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), - url(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), - url(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), - url(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), 
name='host_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), + re_path(r'^$', HostList.as_view(), name='host_list'), + re_path(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), + re_path(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), + re_path(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), + re_path(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), + re_path(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance.py b/awx/api/urls/instance.py index dd75db2b21..6c70e285c5 100644 --- a/awx/api/urls/instance.py +++ b/awx/api/urls/instance.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck urls = [ - url(r'^$', InstanceList.as_view(), name='instance_list'), - url(r'^(?P[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), - url(r'^(?P[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), - url(r'^(?P[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), - url(r'^(?P[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), + re_path(r'^$', InstanceList.as_view(), name='instance_list'), + re_path(r'^(?P[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), + re_path(r'^(?P[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), + re_path(r'^(?P[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance_group.py b/awx/api/urls/instance_group.py index 3bb9eabefc..de8cf8b52a 100644 --- a/awx/api/urls/instance_group.py +++ b/awx/api/urls/instance_group.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList urls = [ - url(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), - url(r'^(?P[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), - url(r'^(?P[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), - url(r'^(?P[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), + re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), + re_path(r'^(?P[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), + re_path(r'^(?P[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), + re_path(r'^(?P[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory.py b/awx/api/urls/inventory.py index d323be9450..863591ba60 100644 --- a/awx/api/urls/inventory.py +++ b/awx/api/urls/inventory.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryList, @@ -26,24 +26,24 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryList.as_view(), name='inventory_list'), - url(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), - url(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), - url(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), - url(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), - url(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), - url(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), - url(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), - url(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), - url(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), - url(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), - url(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), - url(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), - url(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), - url(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), + 
re_path(r'^$', InventoryList.as_view(), name='inventory_list'), + re_path(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), + re_path(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), + re_path(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), + re_path(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), + re_path(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), + re_path(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory_source.py b/awx/api/urls/inventory_source.py index 
02cf30c408..120f5022c6 100644 --- a/awx/api/urls/inventory_source.py +++ b/awx/api/urls/inventory_source.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventorySourceList, @@ -20,26 +20,26 @@ from awx.api.views import ( urls = [ - url(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), - url(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), - url(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), - url(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), - url(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), - url(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), - url(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), - url( + re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), + re_path(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), + re_path(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), + re_path(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), + 
re_path(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), + re_path(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(), name='inventory_source_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(), name='inventory_source_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(), name='inventory_source_notification_templates_success_list', diff --git a/awx/api/urls/inventory_update.py b/awx/api/urls/inventory_update.py index 0279f8c915..6855561a72 100644 --- a/awx/api/urls/inventory_update.py +++ b/awx/api/urls/inventory_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), - url(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), - url(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), - url(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), - url(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), + re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), + re_path(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), + re_path(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), + re_path(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), + re_path(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job.py b/awx/api/urls/job.py index de45cba9aa..bea61a48a0 100644 --- a/awx/api/urls/job.py +++ b/awx/api/urls/job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( JobList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', JobList.as_view(), name='job_list'), - url(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), - url(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), - url(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), - url(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), - url(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), - url(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), - url(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), - url(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), - url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), + re_path(r'^$', JobList.as_view(), name='job_list'), + re_path(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), + re_path(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), + re_path(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), + 
re_path(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), + re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_event.py b/awx/api/urls/job_event.py index 94f3b33929..1a5c681124 100644 --- a/awx/api/urls/job_event.py +++ b/awx/api/urls/job_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobEventDetail, JobEventChildrenList urls = [ - url(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), - url(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), + re_path(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), + re_path(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_host_summary.py b/awx/api/urls/job_host_summary.py index 8252a982d0..c736c59a17 100644 --- a/awx/api/urls/job_host_summary.py +++ b/awx/api/urls/job_host_summary.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobHostSummaryDetail -urls = [url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] +urls = [re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 967f821fd1..b73be7e913 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( JobTemplateList, @@ -25,36 +25,36 @@ from awx.api.views import ( urls = [ - url(r'^$', JobTemplateList.as_view(), name='job_template_list'), - url(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), - url(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), - url(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), - url(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), - url(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), - url(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'), - url( + re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'), + re_path(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), + re_path(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), + re_path(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), + re_path(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), 
name='job_template_activity_stream_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(), name='job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(), name='job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(), name='job_template_notification_templates_success_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), - url(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), - url(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), - url(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), - url(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), + re_path(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), + re_path(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), + re_path(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/label.py b/awx/api/urls/label.py index 368fca7aa8..5fc0a4f629 100644 --- a/awx/api/urls/label.py +++ b/awx/api/urls/label.py 
@@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import LabelList, LabelDetail -urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] +urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/notification.py b/awx/api/urls/notification.py index 960318ee15..2ed27e7c3d 100644 --- a/awx/api/urls/notification.py +++ b/awx/api/urls/notification.py @@ -1,11 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import NotificationList, NotificationDetail -urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')] +urls = [ + re_path(r'^$', NotificationList.as_view(), name='notification_list'), + re_path(r'^(?P[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'), +] __all__ = ['urls'] diff --git a/awx/api/urls/notification_template.py b/awx/api/urls/notification_template.py index 8473878922..764c571868 100644 --- a/awx/api/urls/notification_template.py +++ b/awx/api/urls/notification_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( NotificationTemplateList, @@ -13,11 +13,11 @@ from awx.api.views import ( urls = [ - url(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), - url(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), - url(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), - url(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), - url(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), + re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), + re_path(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), + re_path(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), + re_path(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), + re_path(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2.py b/awx/api/urls/oauth2.py index 720ba2416f..f613b34a0b 100644 --- a/awx/api/urls/oauth2.py +++ b/awx/api/urls/oauth2.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OAuth2ApplicationList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), - url(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), - url(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), + re_path(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), + re_path(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2_root.py b/awx/api/urls/oauth2_root.py index 61e1e15850..d15d14825e 100644 --- a/awx/api/urls/oauth2_root.py +++ b/awx/api/urls/oauth2_root.py @@ -4,7 +4,7 @@ from datetime import timedelta from 
django.utils.timezone import now from django.conf import settings -from django.conf.urls import url +from django.urls import re_path from oauthlib import oauth2 from oauth2_provider import views @@ -35,10 +35,10 @@ class TokenView(views.TokenView): urls = [ - url(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), - url(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), - url(r"^token/$", TokenView.as_view(), name="token"), - url(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), + re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), + re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), + re_path(r"^token/$", TokenView.as_view(), name="token"), + re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), ] diff --git a/awx/api/urls/organization.py b/awx/api/urls/organization.py index 9eac94da48..c841a53181 100644 --- a/awx/api/urls/organization.py +++ b/awx/api/urls/organization.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OrganizationList, @@ -30,44 +30,44 @@ from awx.api.views import ( urls = [ - url(r'^$', OrganizationList.as_view(), name='organization_list'), - url(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), - url(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), - url(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), - url(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), - url(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), - url(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), - url(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), - url(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), - url(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), - url(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), - url(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), - url( + re_path(r'^$', OrganizationList.as_view(), name='organization_list'), + re_path(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), + re_path(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), + re_path(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), + 
re_path(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), + re_path(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), + re_path(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), + re_path(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), + re_path(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), + re_path(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(), name='organization_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(), name='organization_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(), name='organization_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(), name='organization_notification_templates_approvals_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), - 
url(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), - url(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), - url(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), + re_path(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project.py b/awx/api/urls/project.py index ea356a651b..0ce6cacecb 100644 --- a/awx/api/urls/project.py +++ b/awx/api/urls/project.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectList, @@ -24,30 +24,32 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectList.as_view(), name='project_list'), - url(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), - url(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), - url(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), - url(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), - url(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), - url(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), - url(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), - url(r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'), - url( + re_path(r'^$', ProjectList.as_view(), name='project_list'), + re_path(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), + re_path(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), + re_path(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), + re_path(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), + re_path(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), + re_path(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), + re_path(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), + 
re_path(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), + re_path( + r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list' + ), + re_path( r'^(?P[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(), name='project_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(), name='project_notification_templates_started_list', ), - url(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), - url(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), + re_path(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), + re_path(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project_update.py b/awx/api/urls/project_update.py index 03356602ca..fc3e2d2d52 100644 --- a/awx/api/urls/project_update.py +++ b/awx/api/urls/project_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), - url(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), - url(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), - url(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), - url(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), + re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), + re_path(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), + re_path(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), + re_path(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), + re_path(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/role.py b/awx/api/urls/role.py index 2b6aed19b5..0ee306ef0c 100644 --- a/awx/api/urls/role.py +++ b/awx/api/urls/role.py @@ -1,18 +1,18 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList urls = [ - url(r'^$', RoleList.as_view(), name='role_list'), - url(r'^(?P[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), - url(r'^(?P[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), - url(r'^(?P[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), - url(r'^(?P[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), - url(r'^(?P[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), + re_path(r'^$', RoleList.as_view(), name='role_list'), + re_path(r'^(?P[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), + re_path(r'^(?P[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), + re_path(r'^(?P[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), + re_path(r'^(?P[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), + re_path(r'^(?P[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/schedule.py b/awx/api/urls/schedule.py index c3c91f7ae0..87907eda8f 100644 --- a/awx/api/urls/schedule.py +++ b/awx/api/urls/schedule.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList urls = [ - url(r'^$', ScheduleList.as_view(), name='schedule_list'), - url(r'^(?P[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), - url(r'^(?P[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), - url(r'^(?P[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), + re_path(r'^$', ScheduleList.as_view(), name='schedule_list'), + re_path(r'^(?P[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), + re_path(r'^(?P[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), + re_path(r'^(?P[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job.py b/awx/api/urls/system_job.py index 8b060a2d85..891a697006 100644 --- a/awx/api/urls/system_job.py +++ b/awx/api/urls/system_job.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList urls = [ - url(r'^$', SystemJobList.as_view(), name='system_job_list'), - url(r'^(?P[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), - url(r'^(?P[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), - url(r'^(?P[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), - url(r'^(?P[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), + re_path(r'^$', SystemJobList.as_view(), name='system_job_list'), + re_path(r'^(?P[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), + re_path(r'^(?P[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), + re_path(r'^(?P[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job_template.py b/awx/api/urls/system_job_template.py index 532d35d97a..e0b68d3135 100644 --- a/awx/api/urls/system_job_template.py +++ b/awx/api/urls/system_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( SystemJobTemplateList, @@ -16,22 +16,22 @@ from awx.api.views import ( urls = [ - url(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), - url(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), - url(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), - url( + re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), + re_path(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(), name='system_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(), name='system_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(), name='system_job_template_notification_templates_success_list', diff --git a/awx/api/urls/team.py b/awx/api/urls/team.py index 185c86e42a..311a2e8009 100644 --- a/awx/api/urls/team.py +++ b/awx/api/urls/team.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( TeamList, @@ -17,15 +17,15 @@ from awx.api.views import ( urls = [ - url(r'^$', TeamList.as_view(), name='team_list'), - url(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), - url(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), - url(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), - url(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), - url(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), + re_path(r'^$', TeamList.as_view(), name='team_list'), + re_path(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), + re_path(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), + re_path(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), + re_path(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 017fa307aa..c092696d24 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals from django.conf import settings 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.generics import LoggedLoginView, LoggedLogoutView from awx.api.views import ( @@ -74,78 +74,78 @@ from .workflow_approval import urls as workflow_approval_urls v2_urls = [ - url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), - url(r'^credential_types/', include(credential_type_urls)), - url(r'^credential_input_sources/', include(credential_input_source_urls)), - url(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), - url(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), - url(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), - url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), - url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^', include(oauth2_urls)), - url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), - url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), - url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), - url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), - url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), - url(r'^auth/$', AuthView.as_view()), - url(r'^me/$', UserMeList.as_view(), name='user_me_list'), - url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), - 
url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), - url(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), - url(r'^settings/', include('awx.conf.urls')), - url(r'^instances/', include(instance_urls)), - url(r'^instance_groups/', include(instance_group_urls)), - url(r'^schedules/', include(schedule_urls)), - url(r'^organizations/', include(organization_urls)), - url(r'^users/', include(user_urls)), - url(r'^execution_environments/', include(execution_environment_urls)), - url(r'^projects/', include(project_urls)), - url(r'^project_updates/', include(project_update_urls)), - url(r'^teams/', include(team_urls)), - url(r'^inventories/', include(inventory_urls)), - url(r'^hosts/', include(host_urls)), - url(r'^groups/', include(group_urls)), - url(r'^inventory_sources/', include(inventory_source_urls)), - url(r'^inventory_updates/', include(inventory_update_urls)), - url(r'^credentials/', include(credential_urls)), - url(r'^roles/', include(role_urls)), - url(r'^job_templates/', include(job_template_urls)), - url(r'^jobs/', include(job_urls)), - url(r'^job_host_summaries/', include(job_host_summary_urls)), - url(r'^job_events/', include(job_event_urls)), - url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), - url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), - url(r'^system_job_templates/', include(system_job_template_urls)), - url(r'^system_jobs/', include(system_job_urls)), - url(r'^notification_templates/', include(notification_template_urls)), - url(r'^notifications/', include(notification_urls)), - url(r'^workflow_job_templates/', include(workflow_job_template_urls)), - url(r'^workflow_jobs/', include(workflow_job_urls)), - url(r'^labels/', include(label_urls)), - url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), - url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), - url(r'^unified_job_templates/$', 
UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), - url(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), - url(r'^activity_stream/', include(activity_stream_urls)), - url(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), - url(r'^workflow_approvals/', include(workflow_approval_urls)), + re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), + re_path(r'^credential_types/', include(credential_type_urls)), + re_path(r'^credential_input_sources/', include(credential_input_source_urls)), + re_path(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), + re_path(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), + re_path(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), + re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), + re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^', include(oauth2_urls)), + re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), + re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), + re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), + re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), + re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), + re_path(r'^auth/$', 
AuthView.as_view()), + re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'), + re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), + re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), + re_path(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), + re_path(r'^settings/', include('awx.conf.urls')), + re_path(r'^instances/', include(instance_urls)), + re_path(r'^instance_groups/', include(instance_group_urls)), + re_path(r'^schedules/', include(schedule_urls)), + re_path(r'^organizations/', include(organization_urls)), + re_path(r'^users/', include(user_urls)), + re_path(r'^execution_environments/', include(execution_environment_urls)), + re_path(r'^projects/', include(project_urls)), + re_path(r'^project_updates/', include(project_update_urls)), + re_path(r'^teams/', include(team_urls)), + re_path(r'^inventories/', include(inventory_urls)), + re_path(r'^hosts/', include(host_urls)), + re_path(r'^groups/', include(group_urls)), + re_path(r'^inventory_sources/', include(inventory_source_urls)), + re_path(r'^inventory_updates/', include(inventory_update_urls)), + re_path(r'^credentials/', include(credential_urls)), + re_path(r'^roles/', include(role_urls)), + re_path(r'^job_templates/', include(job_template_urls)), + re_path(r'^jobs/', include(job_urls)), + re_path(r'^job_host_summaries/', include(job_host_summary_urls)), + re_path(r'^job_events/', include(job_event_urls)), + re_path(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), + re_path(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), + re_path(r'^system_job_templates/', include(system_job_template_urls)), + re_path(r'^system_jobs/', include(system_job_urls)), + re_path(r'^notification_templates/', include(notification_template_urls)), + re_path(r'^notifications/', include(notification_urls)), + re_path(r'^workflow_job_templates/', include(workflow_job_template_urls)), + 
re_path(r'^workflow_jobs/', include(workflow_job_urls)), + re_path(r'^labels/', include(label_urls)), + re_path(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), + re_path(r'^workflow_job_nodes/', include(workflow_job_node_urls)), + re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), + re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), + re_path(r'^activity_stream/', include(activity_stream_urls)), + re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), + re_path(r'^workflow_approvals/', include(workflow_approval_urls)), ] app_name = 'api' urlpatterns = [ - url(r'^$', ApiRootView.as_view(), name='api_root_view'), - url(r'^(?P(v2))/', include(v2_urls)), - url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), - url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), - url(r'^o/', include(oauth2_root_urls)), + re_path(r'^$', ApiRootView.as_view(), name='api_root_view'), + re_path(r'^(?P(v2))/', include(v2_urls)), + re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), + re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), + re_path(r'^o/', include(oauth2_root_urls)), ] if settings.SETTINGS_MODULE == 'awx.settings.development': from awx.api.swagger import SwaggerSchemaView - urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] + urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] diff --git a/awx/api/urls/user.py b/awx/api/urls/user.py index 340c428ba5..39bc07aec4 100644 --- a/awx/api/urls/user.py +++ b/awx/api/urls/user.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( UserList, @@ -21,20 +21,20 @@ from awx.api.views import ( ) urls = [ - url(r'^$', UserList.as_view(), name='user_list'), - url(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), - url(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), - url(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), - url(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), - url(r'^(?P[0-9]+)/projects/$', UserProjectsList.as_view(), name='user_projects_list'), - url(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), - url(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), - url(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), - url(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), + re_path(r'^$', UserList.as_view(), name='user_list'), + re_path(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), + re_path(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), + re_path(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), + re_path(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), + re_path(r'^(?P[0-9]+)/projects/$', 
UserProjectsList.as_view(), name='user_projects_list'), + re_path(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), + re_path(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/webhooks.py b/awx/api/urls/webhooks.py index f6739a5df9..764e3dd6e2 100644 --- a/awx/api/urls/webhooks.py +++ b/awx/api/urls/webhooks.py @@ -1,10 +1,10 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver urlpatterns = [ - url(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), - url(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), - url(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), + re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), + re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), + re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), ] diff --git a/awx/api/urls/workflow_approval.py b/awx/api/urls/workflow_approval.py index a3c6454af1..640528edbd 100644 --- a/awx/api/urls/workflow_approval.py +++ b/awx/api/urls/workflow_approval.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny urls = [ - url(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), - url(r'^(?P[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), - url(r'^(?P[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), - url(r'^(?P[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), + re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), + re_path(r'^(?P[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), + re_path(r'^(?P[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_approval_template.py b/awx/api/urls/workflow_approval_template.py index f49929b283..811ad351d2 100644 --- a/awx/api/urls/workflow_approval_template.py +++ b/awx/api/urls/workflow_approval_template.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList urls = [ - url(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), - url(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), + re_path(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job.py b/awx/api/urls/workflow_job.py index 1ecbb39373..707b7080f9 100644 --- a/awx/api/urls/workflow_job.py +++ b/awx/api/urls/workflow_job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'), - url(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), - url(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), - url(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), - url(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), - url(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), + re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), + re_path(r'^(?P[0-9]+)/$', 
WorkflowJobDetail.as_view(), name='workflow_job_detail'), + re_path(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), + re_path(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), + re_path(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), + re_path(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_node.py b/awx/api/urls/workflow_job_node.py index 809ee515f0..5b246c95b4 100644 --- a/awx/api/urls/workflow_job_node.py +++ b/awx/api/urls/workflow_job_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobNodeList, @@ -14,12 +14,12 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), - url(r'^(?P[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), + re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), + re_path(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), + re_path(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), + re_path(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), + re_path(r'^(?P[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template.py b/awx/api/urls/workflow_job_template.py index 90b3c043fc..e2729186ca 100644 --- a/awx/api/urls/workflow_job_template.py +++ b/awx/api/urls/workflow_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( WorkflowJobTemplateList, @@ -24,39 +24,39 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), - url(r'^(?P[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), - url(r'^(?P[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), - url(r'^(?P[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), - url(r'^(?P[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), - url(r'^(?P[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'), - url(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), - url(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), - url( + re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), + re_path(r'^(?P[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), + re_path(r'^(?P[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), + re_path(r'^(?P[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), + re_path(r'^(?P[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), 
name='workflow_job_template_survey_spec'), + re_path(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(), name='workflow_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(), name='workflow_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(), name='workflow_job_template_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(), name='workflow_job_template_notification_templates_approvals_list', ), - url(r'^(?P[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), - url(r'^(?P[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), - url(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), + re_path(r'^(?P[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), + re_path(r'^(?P[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), + re_path(r'^(?P[0-9]+)/', 
include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template_node.py b/awx/api/urls/workflow_job_template_node.py index 868c728a88..bcd61aed67 100644 --- a/awx/api/urls/workflow_job_template_node.py +++ b/awx/api/urls/workflow_job_template_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobTemplateNodeList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), - url(r'^(?P[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), - url(r'^(?P[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), + re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), + re_path(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), + re_path(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), 
name='workflow_job_template_node_failure_nodes_list'), + re_path(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), + re_path(r'^(?P[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), + re_path(r'^(?P[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), ] __all__ = ['urls'] diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 51ab4c9dd2..37bf8cfab7 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -29,7 +29,7 @@ from django.views.decorators.csrf import csrf_exempt from django.template.loader import render_to_string from django.http import HttpResponse from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework @@ -105,7 +105,6 @@ from awx.api.permissions import ( ProjectUpdatePermission, InventoryInventorySourcesUpdatePermission, UserPermission, - InstanceGroupTowerPermission, VariableDataPermission, WorkflowApprovalPermission, IsSystemAdminOrAuditor, @@ -113,7 +112,7 @@ from awx.api.permissions import ( from awx.api import renderers from awx.api import serializers from awx.api.metadata import RoleMetadata -from awx.main.constants import ACTIVE_STATES +from awx.main.constants import ACTIVE_STATES, SURVEY_TYPE_MAPPING from awx.main.scheduler.dag_workflow import WorkflowDAG from awx.api.views.mixin import ( ControlledByScmMixin, @@ -480,7 +479,6 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP name = _("Instance Group Detail") model = models.InstanceGroup serializer_class = serializers.InstanceGroupSerializer - permission_classes = (InstanceGroupTowerPermission,) def update_raw_data(self, data): if 
self.get_object().is_container_group: @@ -2468,8 +2466,6 @@ class JobTemplateSurveySpec(GenericAPIView): obj_permission_type = 'admin' serializer_class = serializers.EmptySerializer - ALLOWED_TYPES = {'text': str, 'textarea': str, 'password': str, 'multiplechoice': str, 'multiselect': str, 'integer': int, 'float': float} - def get(self, request, *args, **kwargs): obj = self.get_object() return Response(obj.display_survey_spec()) @@ -2540,17 +2536,17 @@ class JobTemplateSurveySpec(GenericAPIView): # Type-specific validation # validate question type <-> default type qtype = survey_item["type"] - if qtype not in JobTemplateSurveySpec.ALLOWED_TYPES: + if qtype not in SURVEY_TYPE_MAPPING: return Response( dict( error=_("'{survey_item[type]}' in survey question {idx} is not one of '{allowed_types}' allowed question types.").format( - allowed_types=', '.join(JobTemplateSurveySpec.ALLOWED_TYPES.keys()), **context + allowed_types=', '.join(SURVEY_TYPE_MAPPING.keys()), **context ) ), status=status.HTTP_400_BAD_REQUEST, ) if 'default' in survey_item and survey_item['default'] != '': - if not isinstance(survey_item['default'], JobTemplateSurveySpec.ALLOWED_TYPES[qtype]): + if not isinstance(survey_item['default'], SURVEY_TYPE_MAPPING[qtype]): type_label = 'string' if qtype in ['integer', 'float']: type_label = qtype diff --git a/awx/api/views/inventory.py b/awx/api/views/inventory.py index dfa7204f80..43815ae565 100644 --- a/awx/api/views/inventory.py +++ b/awx/api/views/inventory.py @@ -8,7 +8,7 @@ import logging from django.conf import settings from django.db.models import Q from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/mesh_visualizer.py b/awx/api/views/mesh_visualizer.py index 741239cbfa..e790069700 100644 --- a/awx/api/views/mesh_visualizer.py 
+++ b/awx/api/views/mesh_visualizer.py @@ -1,7 +1,7 @@ # Copyright (c) 2018 Red Hat, Inc. # All Rights Reserved. -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.generics import APIView, Response from awx.api.permissions import IsSystemAdminOrAuditor @@ -19,7 +19,7 @@ class MeshVisualizer(APIView): data = { 'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data, - 'links': InstanceLinkSerializer(InstanceLink.objects.all(), many=True).data, + 'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data, } return Response(data) diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py index 5a37092dd4..1634293cab 100644 --- a/awx/api/views/metrics.py +++ b/awx/api/views/metrics.py @@ -5,7 +5,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.response import Response diff --git a/awx/api/views/mixin.py b/awx/api/views/mixin.py index 059e1120f7..2ba254d3b3 100644 --- a/awx/api/views/mixin.py +++ b/awx/api/views/mixin.py @@ -8,7 +8,7 @@ from django.db.models import Count from django.db import transaction from django.shortcuts import get_object_or_404 from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import SAFE_METHODS from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/organization.py b/awx/api/views/organization.py index 4a12a7d599..099edcadb0 100644 --- a/awx/api/views/organization.py +++ b/awx/api/views/organization.py @@ -7,7 +7,7 @@ import logging # Django from django.db.models import Count from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from 
django.utils.translation import gettext_lazy as _ # AWX from awx.main.models import ( diff --git a/awx/api/views/root.py b/awx/api/views/root.py index 675daa2569..d879e4537e 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -8,11 +8,11 @@ import operator from collections import OrderedDict from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.utils.decorators import method_decorator from django.views.decorators.csrf import ensure_csrf_cookie from django.template.loader import render_to_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.response import Response @@ -205,7 +205,7 @@ class ApiV2SubscriptionView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response(validated) @@ -246,7 +246,7 @@ class ApiV2AttachView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) for sub in validated: if sub['pool_id'] == pool_id: @@ -322,7 +322,7 @@ class ApiV2ConfigView(APIView): try: data_actual = json.dumps(request.data) except Exception: - logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) + logger.info(smart_str(u"Invalid JSON 
submitted for license."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) license_data = json.loads(data_actual) @@ -346,7 +346,7 @@ class ApiV2ConfigView(APIView): try: license_data_validated = get_licenser().license_from_manifest(license_data) except Exception: - logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) else: license_data_validated = get_licenser().validate() @@ -357,7 +357,7 @@ class ApiV2ConfigView(APIView): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) - logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): diff --git a/awx/api/views/webhooks.py b/awx/api/views/webhooks.py index 921839a3f5..c3d1604b0a 100644 --- a/awx/api/views/webhooks.py +++ b/awx/api/views/webhooks.py @@ -4,7 +4,7 @@ import logging import urllib.parse from django.utils.encoding import force_bytes -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt from rest_framework import status diff --git a/awx/conf/__init__.py b/awx/conf/__init__.py index 661295a685..3580b7a45a 100644 --- a/awx/conf/__init__.py +++ b/awx/conf/__init__.py @@ -7,8 +7,6 @@ from django.utils.module_loading import autodiscover_modules # AWX from .registry import settings_registry -default_app_config = 'awx.conf.apps.ConfConfig' - def register(setting, 
**kwargs): settings_registry.register(setting, **kwargs) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index a61e489858..811baba262 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -1,8 +1,10 @@ +import sys + # Django from django.apps import AppConfig # from django.core import checks -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class ConfConfig(AppConfig): @@ -12,6 +14,9 @@ class ConfConfig(AppConfig): def ready(self): self.module.autodiscover() - from .settings import SettingsWrapper - SettingsWrapper.initialize() + if not set(sys.argv) & {'migrate', 'check_migrations'}: + + from .settings import SettingsWrapper + + SettingsWrapper.initialize() diff --git a/awx/conf/conf.py b/awx/conf/conf.py index 45a3442756..019bd1d068 100644 --- a/awx/conf/conf.py +++ b/awx/conf/conf.py @@ -1,6 +1,6 @@ # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields, register diff --git a/awx/conf/fields.py b/awx/conf/fields.py index 2ab3a9e8d9..7802b2a085 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -7,10 +7,10 @@ from collections import OrderedDict # Django from django.core.validators import URLValidator, _lazy_re_compile -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework -from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa +from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField # noqa from rest_framework.serializers import PrimaryKeyRelatedField # noqa # AWX @@ -65,11 +65,11 @@ class StringListBooleanField(ListField): try: if isinstance(value, (list, tuple)): return super(StringListBooleanField, 
self).to_representation(value) - elif value in NullBooleanField.TRUE_VALUES: + elif value in BooleanField.TRUE_VALUES: return True - elif value in NullBooleanField.FALSE_VALUES: + elif value in BooleanField.FALSE_VALUES: return False - elif value in NullBooleanField.NULL_VALUES: + elif value in BooleanField.NULL_VALUES: return None elif isinstance(value, str): return self.child.to_representation(value) @@ -82,11 +82,11 @@ class StringListBooleanField(ListField): try: if isinstance(data, (list, tuple)): return super(StringListBooleanField, self).to_internal_value(data) - elif data in NullBooleanField.TRUE_VALUES: + elif data in BooleanField.TRUE_VALUES: return True - elif data in NullBooleanField.FALSE_VALUES: + elif data in BooleanField.FALSE_VALUES: return False - elif data in NullBooleanField.NULL_VALUES: + elif data in BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/conf/migrations/0001_initial.py b/awx/conf/migrations/0001_initial.py index 8bb9b6bcec..b239f5e143 100644 --- a/awx/conf/migrations/0001_initial.py +++ b/awx/conf/migrations/0001_initial.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import jsonfield.fields from django.conf import settings @@ -18,7 +17,7 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('key', models.CharField(max_length=255)), - ('value', jsonfield.fields.JSONField(null=True)), + ('value', models.JSONField(null=True)), ( 'user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True), diff --git a/awx/conf/migrations/0003_v310_JSONField_changes.py b/awx/conf/migrations/0003_v310_JSONField_changes.py index 2550d2fff0..d312c40b1d 100644 --- a/awx/conf/migrations/0003_v310_JSONField_changes.py +++ 
b/awx/conf/migrations/0003_v310_JSONField_changes.py @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('conf', '0002_v310_copy_tower_settings')] - operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))] + operations = [migrations.AlterField(model_name='setting', name='value', field=models.JSONField(null=True))] diff --git a/awx/conf/migrations/_ldap_group_type.py b/awx/conf/migrations/_ldap_group_type.py index e8de5ca4aa..b6580f8cae 100644 --- a/awx/conf/migrations/_ldap_group_type.py +++ b/awx/conf/migrations/_ldap_group_type.py @@ -5,7 +5,7 @@ from django.utils.timezone import now def fill_ldap_group_type_params(apps, schema_editor): - group_type = settings.AUTH_LDAP_GROUP_TYPE + group_type = getattr(settings, 'AUTH_LDAP_GROUP_TYPE', None) Setting = apps.get_model('conf', 'Setting') group_type_params = {'name_attr': 'cn', 'member_attr': 'member'} @@ -17,7 +17,7 @@ def fill_ldap_group_type_params(apps, schema_editor): else: entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now()) - init_attrs = set(inspect.getargspec(group_type.__init__).args[1:]) + init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:]) for k in list(group_type_params.keys()): if k not in init_attrs: del group_type_params[k] diff --git a/awx/conf/models.py b/awx/conf/models.py index f64d8a2aab..05162436d1 100644 --- a/awx/conf/models.py +++ b/awx/conf/models.py @@ -9,7 +9,6 @@ from django.db import models # AWX from awx.main.models.base import CreatedModifiedModel, prevent_search -from awx.main.fields import JSONField from awx.main.utils import encrypt_field from awx.conf import settings_registry @@ -19,7 +18,7 @@ __all__ = ['Setting'] class Setting(CreatedModifiedModel): key = 
models.CharField(max_length=255) - value = JSONField(null=True) + value = models.JSONField(null=True) user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE)) def __str__(self): diff --git a/awx/conf/registry.py b/awx/conf/registry.py index 36f6eba6d2..da056e99db 100644 --- a/awx/conf/registry.py +++ b/awx/conf/registry.py @@ -8,7 +8,7 @@ import logging # Django from django.core.exceptions import ImproperlyConfigured from django.utils.text import slugify -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.conf.license import get_license diff --git a/awx/conf/tests/unit/test_registry.py b/awx/conf/tests/unit/test_registry.py index 6a817985e2..1ce4dceaaf 100644 --- a/awx/conf/tests/unit/test_registry.py +++ b/awx/conf/tests/unit/test_registry.py @@ -6,7 +6,7 @@ from uuid import uuid4 from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.fields import empty import pytest diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py index da97d41c6f..a184fa3191 100644 --- a/awx/conf/tests/unit/test_settings.py +++ b/awx/conf/tests/unit/test_settings.py @@ -11,7 +11,7 @@ import time from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import pytest from awx.conf import models, fields diff --git a/awx/conf/urls.py b/awx/conf/urls.py index 61134d20b8..69d47b6afc 100644 --- a/awx/conf/urls.py +++ b/awx/conf/urls.py @@ -1,13 +1,13 @@ # Copyright (c) 2016 
Ansible, Inc. # All Rights Reserved. +from django.urls import re_path -from django.conf.urls import url from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest urlpatterns = [ - url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), - url(r'^(?P[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), - url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), + re_path(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), + re_path(r'^(?P[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), + re_path(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), ] diff --git a/awx/conf/views.py b/awx/conf/views.py index a9eae07409..b2b312d834 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -13,7 +13,7 @@ from socket import SHUT_RDWR from django.db import connection from django.conf import settings from django.http import Http404 -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/main/__init__.py b/awx/main/__init__.py index f500f439b6..e484e62be1 100644 --- a/awx/main/__init__.py +++ b/awx/main/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
- -default_app_config = 'awx.main.apps.MainConfig' diff --git a/awx/main/access.py b/awx/main/access.py index 06b560b9ae..ba91d290c1 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -11,7 +11,7 @@ from functools import reduce from django.conf import settings from django.db.models import Q, Prefetch from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework @@ -465,7 +465,7 @@ class BaseAccess(object): if display_method == 'schedule': user_capabilities['schedule'] = user_capabilities['start'] continue - elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource, ExecutionEnvironment)): + elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource, ExecutionEnvironment, InstanceGroup)): user_capabilities['delete'] = user_capabilities['edit'] continue elif display_method == 'copy' and isinstance(obj, (Group, Host)): @@ -575,6 +575,11 @@ class InstanceGroupAccess(BaseAccess): def can_change(self, obj, data): return self.user.is_superuser + def can_delete(self, obj): + if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + return False + return self.user.is_superuser + class UserAccess(BaseAccess): """ diff --git a/awx/main/analytics/broadcast_websocket.py b/awx/main/analytics/broadcast_websocket.py index ff4bcb4fa1..df1582c9b9 100644 --- a/awx/main/analytics/broadcast_websocket.py +++ b/awx/main/analytics/broadcast_websocket.py @@ -89,7 +89,7 @@ class BroadcastWebsocketStatsManager: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) except Exception as e: - logger.warn(e) + logger.warning(e) await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) self.start() diff --git a/awx/main/analytics/collectors.py 
b/awx/main/analytics/collectors.py index f8456ca2f1..ee52dece89 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -10,7 +10,7 @@ from django.db.models import Count from django.conf import settings from django.contrib.sessions.models import Session from django.utils.timezone import now, timedelta -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from psycopg2.errors import UntranslatableCharacter @@ -337,6 +337,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create {tbl}.parent_uuid, {tbl}.event, task_action, + resolved_action, -- '-' operator listed here: -- https://www.postgresql.org/docs/12/functions-json.html -- note that operator is only supported by jsonb objects @@ -356,7 +357,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create x.duration AS duration, x.res->'warnings' AS warnings, x.res->'deprecations' AS deprecations - FROM {tbl}, jsonb_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "start" text, "end" text) + FROM {tbl}, jsonb_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "resolved_action" text, "start" text, "end" text) WHERE ({tbl}.{where_column} > '{since.isoformat()}' AND {tbl}.{where_column} <= '{until.isoformat()}')) TO STDOUT WITH CSV HEADER''' return query @@ -366,23 +367,24 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create return _copy_table(table='events', query=query(f"replace({tbl}.event_data::text, '\\u0000', '')::jsonb"), path=full_path) -@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) +@register('events_table', '1.4', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) def events_table_unpartitioned(since, full_path, until, **kwargs): return _events_table(since, full_path, until, 
'_unpartitioned_main_jobevent', 'created', **kwargs) -@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) +@register('events_table', '1.4', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) def events_table_partitioned_modified(since, full_path, until, **kwargs): return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs) -@register('unified_jobs_table', '1.2', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing) +@register('unified_jobs_table', '1.3', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing) def unified_jobs_table(since, full_path, until, **kwargs): unified_job_query = '''COPY (SELECT main_unifiedjob.id, main_unifiedjob.polymorphic_ctype_id, django_content_type.model, main_unifiedjob.organization_id, main_organization.name as organization_name, + main_executionenvironment.image as execution_environment_image, main_job.inventory_id, main_inventory.name as inventory_name, main_unifiedjob.created, @@ -407,6 +409,7 @@ def unified_jobs_table(since, full_path, until, **kwargs): LEFT JOIN main_job ON main_unifiedjob.id = main_job.unifiedjob_ptr_id LEFT JOIN main_inventory ON main_job.inventory_id = main_inventory.id LEFT JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id + LEFT JOIN main_executionenvironment ON main_executionenvironment.id = main_unifiedjob.execution_environment_id WHERE ((main_unifiedjob.created > '{0}' AND main_unifiedjob.created <= '{1}') OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}')) AND main_unifiedjob.launch_type != 'sync' @@ -417,11 +420,12 @@ def unified_jobs_table(since, full_path, until, **kwargs): return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path) -@register('unified_job_template_table', '1.0', format='csv', description=_('Data on job 
templates')) +@register('unified_job_template_table', '1.1', format='csv', description=_('Data on job templates')) def unified_job_template_table(since, full_path, **kwargs): unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id, main_unifiedjobtemplate.polymorphic_ctype_id, django_content_type.model, + main_executionenvironment.image as execution_environment_image, main_unifiedjobtemplate.created, main_unifiedjobtemplate.modified, main_unifiedjobtemplate.created_by_id, @@ -434,7 +438,8 @@ def unified_job_template_table(since, full_path, **kwargs): main_unifiedjobtemplate.next_job_run, main_unifiedjobtemplate.next_schedule_id, main_unifiedjobtemplate.status - FROM main_unifiedjobtemplate, django_content_type + FROM main_unifiedjobtemplate + LEFT JOIN main_executionenvironment ON main_executionenvironment.id = main_unifiedjobtemplate.execution_environment_id, django_content_type WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER''' return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path) diff --git a/awx/main/apps.py b/awx/main/apps.py index b45b3c20f2..abd3332fd0 100644 --- a/awx/main/apps.py +++ b/awx/main/apps.py @@ -1,5 +1,5 @@ from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class MainConfig(AppConfig): diff --git a/awx/main/conf.py b/awx/main/conf.py index 6756347b54..0099fbe3ad 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -2,7 +2,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers @@ -334,6 +334,19 @@ register( category_slug='jobs', ) +register( + 'AWX_MOUNT_ISOLATED_PATHS_ON_K8S', + field_class=fields.BooleanField, + default=False, + label=_('Expose 
host paths for Container Groups'), + help_text=_( + 'Expose paths via hostPath for the Pods created by a Container Group. ' + 'HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. ' + ), + category=_('Jobs'), + category_slug='jobs', +) + register( 'GALAXY_IGNORE_CERTS', field_class=fields.BooleanField, diff --git a/awx/main/constants.py b/awx/main/constants.py index 36209c3334..cda6dd3a67 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -3,7 +3,7 @@ import re -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'CLOUD_PROVIDERS', @@ -88,7 +88,10 @@ JOB_FOLDER_PREFIX = 'awx_%s_' # :z option tells Podman that two containers share the volume content with r/w # :O option tells Podman to mount the directory from the host as a temporary storage using the overlay file system. +# :ro or :rw option to mount a volume in read-only or read-write mode, respectively. By default, the volumes are mounted read-write. 
# see podman-run manpage for further details # /HOST-DIR:/CONTAINER-DIR:OPTIONS -CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O'] +CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O', 'ro', 'rw'] MAX_ISOLATED_PATH_COLON_DELIMITER = 2 + +SURVEY_TYPE_MAPPING = {'text': str, 'textarea': str, 'password': str, 'multiplechoice': str, 'multiselect': str, 'integer': int, 'float': (float, int)} diff --git a/awx/main/consumers.py b/awx/main/consumers.py index 21ebe9d771..ad1740c362 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -65,7 +65,7 @@ class WebsocketSecretAuthHelper: nonce_parsed = int(nonce_parsed) nonce_diff = now - nonce_parsed if abs(nonce_diff) > nonce_tolerance: - logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") + logger.warning(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") return True @@ -85,7 +85,7 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer): try: WebsocketSecretAuthHelper.is_authorized(self.scope) except Exception: - logger.warn(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") + logger.warning(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") await self.close() return diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index 235511f959..95bf767508 100644 --- a/awx/main/credential_plugins/aim.py +++ b/awx/main/credential_plugins/aim.py @@ -2,7 +2,7 @@ from .plugin import CredentialPlugin, CertFiles, raise_for_status from urllib.parse import quote, urlencode, urljoin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import requests aim_inputs = { diff --git a/awx/main/credential_plugins/azure_kv.py b/awx/main/credential_plugins/azure_kv.py index 58580edf9a..eecfde65b1 100644 --- 
a/awx/main/credential_plugins/azure_kv.py +++ b/awx/main/credential_plugins/azure_kv.py @@ -1,6 +1,6 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from azure.keyvault import KeyVaultClient, KeyVaultAuthentication from azure.common.credentials import ServicePrincipalCredentials from msrestazure import azure_cloud diff --git a/awx/main/credential_plugins/centrify_vault.py b/awx/main/credential_plugins/centrify_vault.py index a0be2250f4..1e05625e71 100644 --- a/awx/main/credential_plugins/centrify_vault.py +++ b/awx/main/credential_plugins/centrify_vault.py @@ -1,115 +1,115 @@ -from .plugin import CredentialPlugin, raise_for_status -from django.utils.translation import ugettext_lazy as _ -from urllib.parse import urljoin -import requests - -pas_inputs = { - 'fields': [ - { - 'id': 'url', - 'label': _('Centrify Tenant URL'), - 'type': 'string', - 'help_text': _('Centrify Tenant URL'), - 'format': 'url', - }, - { - 'id': 'client_id', - 'label': _('Centrify API User'), - 'type': 'string', - 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), - }, - { - 'id': 'client_password', - 'label': _('Centrify API Password'), - 'type': 'string', - 'help_text': _('Password of Centrify API User with necessary permissions'), - 'secret': True, - }, - { - 'id': 'oauth_application_id', - 'label': _('OAuth2 Application ID'), - 'type': 'string', - 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), - 'default': 'awx', - }, - { - 'id': 'oauth_scope', - 'label': _('OAuth2 Scope'), - 'type': 'string', - 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), - 'default': 'awx', - }, - ], - 'metadata': [ - { - 'id': 'account-name', - 'label': _('Account Name'), - 'type': 'string', - 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. 
(root or DOMAIN/Administrator)'), - }, - { - 'id': 'system-name', - 'label': _('System Name'), - 'type': 'string', - 'help_text': _('Machine Name enrolled with in Centrify Portal'), - }, - ], - 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], -} - - -# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret -def handle_auth(**kwargs): - post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} - response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()['access_token'] - except KeyError: - raise RuntimeError('OAuth request to tenant was unsuccessful') - - -# fetch the ID of system with RedRock query, Input : System Name, Account Name -def get_ID(**kwargs): - endpoint = urljoin(kwargs['url'], '/Redrock/query') - name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) - query = 'Select ID from VaultAccount where {0}'.format(name) - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - result_str = response.json()["Result"]["Results"] - return result_str[0]["Row"]["ID"] - except (IndexError, KeyError): - raise RuntimeError("Error Detected!! 
Check the Inputs") - - -# CheckOut Password from Centrify Vault, Input : ID -def get_passwd(**kwargs): - endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()["Result"]["Password"] - except KeyError: - raise RuntimeError("Password Not Found") - - -def centrify_backend(**kwargs): - url = kwargs.get('url') - acc_name = kwargs.get('account-name') - system_name = kwargs.get('system-name') - client_id = kwargs.get('client_id') - client_password = kwargs.get('client_password') - app_id = kwargs.get('oauth_application_id', 'awx') - endpoint = urljoin(url, f'/oauth2/token/{app_id}') - endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} - token = handle_auth(**endpoint) - get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} - acc_id = get_ID(**get_id_args) - get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} - return get_passwd(**get_pwd_args) - - -centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) +from .plugin import CredentialPlugin, raise_for_status +from django.utils.translation import gettext_lazy as _ +from urllib.parse import urljoin +import requests + +pas_inputs = { + 'fields': [ + { + 'id': 'url', + 'label': _('Centrify Tenant URL'), + 'type': 'string', + 'help_text': _('Centrify Tenant URL'), + 'format': 'url', + }, + { + 'id': 'client_id', + 'label': _('Centrify API User'), + 'type': 'string', + 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), + }, + { + 'id': 'client_password', + 'label': 
_('Centrify API Password'), + 'type': 'string', + 'help_text': _('Password of Centrify API User with necessary permissions'), + 'secret': True, + }, + { + 'id': 'oauth_application_id', + 'label': _('OAuth2 Application ID'), + 'type': 'string', + 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + { + 'id': 'oauth_scope', + 'label': _('OAuth2 Scope'), + 'type': 'string', + 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + ], + 'metadata': [ + { + 'id': 'account-name', + 'label': _('Account Name'), + 'type': 'string', + 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'), + }, + { + 'id': 'system-name', + 'label': _('System Name'), + 'type': 'string', + 'help_text': _('Machine Name enrolled with in Centrify Portal'), + }, + ], + 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], +} + + +# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret +def handle_auth(**kwargs): + post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} + response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()['access_token'] + except KeyError: + raise RuntimeError('OAuth request to tenant was unsuccessful') + + +# fetch the ID of system with RedRock query, Input : System Name, Account Name +def get_ID(**kwargs): + endpoint = urljoin(kwargs['url'], '/Redrock/query') + name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) + query = 'Select ID from VaultAccount where {0}'.format(name) + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, 
json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + result_str = response.json()["Result"]["Results"] + return result_str[0]["Row"]["ID"] + except (IndexError, KeyError): + raise RuntimeError("Error Detected!! Check the Inputs") + + +# CheckOut Password from Centrify Vault, Input : ID +def get_passwd(**kwargs): + endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()["Result"]["Password"] + except KeyError: + raise RuntimeError("Password Not Found") + + +def centrify_backend(**kwargs): + url = kwargs.get('url') + acc_name = kwargs.get('account-name') + system_name = kwargs.get('system-name') + client_id = kwargs.get('client_id') + client_password = kwargs.get('client_password') + app_id = kwargs.get('oauth_application_id', 'awx') + endpoint = urljoin(url, f'/oauth2/token/{app_id}') + endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} + token = handle_auth(**endpoint) + get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} + acc_id = get_ID(**get_id_args) + get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} + return get_passwd(**get_pwd_args) + + +centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py index b9606d48bc..5ae6be27f3 100644 --- a/awx/main/credential_plugins/conjur.py +++ b/awx/main/credential_plugins/conjur.py @@ -3,7 +3,7 @@ from .plugin import CredentialPlugin, CertFiles, 
raise_for_status import base64 from urllib.parse import urljoin, quote -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import requests diff --git a/awx/main/credential_plugins/dsv.py b/awx/main/credential_plugins/dsv.py index d256b27647..9c89199710 100644 --- a/awx/main/credential_plugins/dsv.py +++ b/awx/main/credential_plugins/dsv.py @@ -1,7 +1,7 @@ from .plugin import CredentialPlugin from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.vault import SecretsVault diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py index 6a22efc21e..1a636bdbf9 100644 --- a/awx/main/credential_plugins/hashivault.py +++ b/awx/main/credential_plugins/hashivault.py @@ -6,7 +6,7 @@ from urllib.parse import urljoin from .plugin import CredentialPlugin, CertFiles, raise_for_status import requests -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ base_inputs = { 'fields': [ diff --git a/awx/main/credential_plugins/tss.py b/awx/main/credential_plugins/tss.py index bf83693860..172a8aef00 100644 --- a/awx/main/credential_plugins/tss.py +++ b/awx/main/credential_plugins/tss.py @@ -1,5 +1,5 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret diff --git a/awx/main/dispatch/control.py b/awx/main/dispatch/control.py index e5952f02bf..b1eb2281c9 100644 --- a/awx/main/dispatch/control.py +++ b/awx/main/dispatch/control.py @@ -42,7 +42,7 @@ class Control(object): return f"reply_to_{str(uuid.uuid4()).replace('-','_')}" def control_with_reply(self, command, timeout=5): - logger.warn('checking {} {} for {}'.format(self.service, 
command, self.queuename)) + logger.warning('checking {} {} for {}'.format(self.service, command, self.queuename)) reply_queue = Control.generate_reply_queue_name() self.result = None diff --git a/awx/main/dispatch/periodic.py b/awx/main/dispatch/periodic.py index 9ff6dd2570..e3e7da5db9 100644 --- a/awx/main/dispatch/periodic.py +++ b/awx/main/dispatch/periodic.py @@ -6,7 +6,8 @@ from multiprocessing import Process from django.conf import settings from django.db import connections from schedule import Scheduler -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid +from django_guid.utils import generate_guid from awx.main.dispatch.worker import TaskWorker @@ -19,20 +20,20 @@ class Scheduler(Scheduler): def run(): ppid = os.getppid() - logger.warn('periodic beat started') + logger.warning('periodic beat started') while True: if os.getppid() != ppid: # if the parent PID changes, this process has been orphaned # via e.g., segfault or sigkill, we should exit too pid = os.getpid() - logger.warn(f'periodic beat exiting gracefully pid:{pid}') + logger.warning(f'periodic beat exiting gracefully pid:{pid}') raise SystemExit() try: for conn in connections.all(): # If the database connection has a hiccup, re-establish a new # connection conn.close_if_unusable_or_obsolete() - GuidMiddleware.set_guid(GuidMiddleware._generate_guid()) + set_guid(generate_guid()) self.run_pending() except Exception: logger.exception('encountered an error while scheduling periodic tasks') diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py index 3d08ca3fd7..576f6bf799 100644 --- a/awx/main/dispatch/pool.py +++ b/awx/main/dispatch/pool.py @@ -16,13 +16,13 @@ from queue import Full as QueueFull, Empty as QueueEmpty from django.conf import settings from django.db import connection as django_connection, connections from django.core.cache import cache as django_cache -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from 
jinja2 import Template import psutil from awx.main.models import UnifiedJob from awx.main.dispatch import reaper -from awx.main.utils.common import convert_mem_str_to_bytes +from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity if 'run_callback_receiver' in sys.argv: logger = logging.getLogger('awx.main.commands.run_callback_receiver') @@ -142,7 +142,7 @@ class PoolWorker(object): # when this occurs, it's _fine_ to ignore this KeyError because # the purpose of self.managed_tasks is to just track internal # state of which events are *currently* being processed. - logger.warn('Event UUID {} appears to be have been duplicated.'.format(uuid)) + logger.warning('Event UUID {} appears to be have been duplicated.'.format(uuid)) @property def current_task(self): @@ -291,8 +291,8 @@ class WorkerPool(object): pass except Exception: tb = traceback.format_exc() - logger.warn("could not write to queue %s" % preferred_queue) - logger.warn("detail: {}".format(tb)) + logger.warning("could not write to queue %s" % preferred_queue) + logger.warning("detail: {}".format(tb)) write_attempt_order.append(preferred_queue) logger.error("could not write payload to any queue, attempted order: {}".format(write_attempt_order)) return None @@ -324,8 +324,9 @@ class AutoscalePool(WorkerPool): total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30 else: total_memory_gb = (psutil.virtual_memory().total >> 30) + 1 # noqa: round up - # 5 workers per GB of total memory - self.max_workers = total_memory_gb * 5 + + # Get same number as max forks based on memory, this function takes memory as bytes + self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30) # max workers can't be less than min_workers self.max_workers = max(self.min_workers, self.max_workers) @@ -435,7 +436,7 @@ class AutoscalePool(WorkerPool): def write(self, preferred_queue, body): if 'guid' in body: - GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) try: # when 
the cluster heartbeat occurs, clean up internally if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']: diff --git a/awx/main/dispatch/publish.py b/awx/main/dispatch/publish.py index 63b2890e1e..e873465155 100644 --- a/awx/main/dispatch/publish.py +++ b/awx/main/dispatch/publish.py @@ -5,7 +5,7 @@ import json from uuid import uuid4 from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid from . import pg_bus_conn @@ -76,7 +76,7 @@ class task: logger.error(msg) raise ValueError(msg) obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name} - guid = GuidMiddleware.get_guid() + guid = get_guid() if guid: obj['guid'] = guid obj.update(**kw) diff --git a/awx/main/dispatch/worker/base.py b/awx/main/dispatch/worker/base.py index 193fb778bb..6965416c94 100644 --- a/awx/main/dispatch/worker/base.py +++ b/awx/main/dispatch/worker/base.py @@ -60,7 +60,7 @@ class AWXConsumerBase(object): return f'listening on {self.queues}' def control(self, body): - logger.warn(f'Received control signal:\n{body}') + logger.warning(f'Received control signal:\n{body}') control = body.get('control') if control in ('status', 'running'): reply_queue = body['reply_to'] @@ -118,7 +118,7 @@ class AWXConsumerBase(object): def stop(self, signum, frame): self.should_stop = True - logger.warn('received {}, stopping'.format(signame(signum))) + logger.warning('received {}, stopping'.format(signame(signum))) self.worker.on_stop() raise SystemExit() @@ -153,7 +153,7 @@ class AWXConsumerPG(AWXConsumerBase): if self.should_stop: return except psycopg2.InterfaceError: - logger.warn("Stale Postgres message bus connection, reconnecting") + logger.warning("Stale Postgres message bus connection, reconnecting") continue diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index a88286364a..5026e72c06 100644 --- a/awx/main/dispatch/worker/callback.py +++ 
b/awx/main/dispatch/worker/callback.py @@ -9,7 +9,7 @@ from django.conf import settings from django.utils.timezone import now as tz_now from django.db import DatabaseError, OperationalError, connection as django_connection from django.db.utils import InterfaceError, InternalError -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid import psutil @@ -184,7 +184,7 @@ class CallbackBrokerWorker(BaseWorker): if body.get('event') == 'EOF': try: if 'guid' in body: - GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) final_counter = body.get('final_counter', 0) logger.info('Event processing is finished for Job {}, sending notifications'.format(job_identifier)) # EOF events are sent when stdout for the running task is @@ -208,7 +208,7 @@ class CallbackBrokerWorker(BaseWorker): logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier)) finally: self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1) - GuidMiddleware.set_guid('') + set_guid('') return skip_websocket_message = body.pop('skip_websocket_message', False) diff --git a/awx/main/dispatch/worker/task.py b/awx/main/dispatch/worker/task.py index 91ce7f47b4..e1fe196ddb 100644 --- a/awx/main/dispatch/worker/task.py +++ b/awx/main/dispatch/worker/task.py @@ -7,7 +7,7 @@ import traceback from kubernetes.config import kube_config from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from awx.main.tasks.system import dispatch_startup, inform_cluster_of_shutdown @@ -54,7 +54,7 @@ class TaskWorker(BaseWorker): args = body.get('args', []) kwargs = body.get('kwargs', {}) if 'guid' in body: - GuidMiddleware.set_guid(body.pop('guid')) + set_guid(body.pop('guid')) _call = TaskWorker.resolve_callable(task) if inspect.isclass(_call): # the callable is a class, e.g., RunJob; instantiate and diff --git a/awx/main/fields.py b/awx/main/fields.py index 95ebfbca73..83ab57f37d 100644 --- 
a/awx/main/fields.py +++ b/awx/main/fields.py @@ -11,7 +11,6 @@ from jinja2 import sandbox, StrictUndefined from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError # Django -from django.contrib.postgres.fields import JSONField as upstream_JSONBField from django.core import exceptions as django_exceptions from django.core.serializers.json import DjangoJSONEncoder from django.db.models.signals import ( @@ -28,17 +27,15 @@ from django.db.models.fields.related_descriptors import ( ReverseManyToOneDescriptor, create_forward_many_to_many_manager, ) -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str +from django.db.models import JSONField from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # jsonschema from jsonschema import Draft4Validator, FormatChecker import jsonschema.exceptions -# Django-JSONField -from jsonfield import JSONField as upstream_JSONField - # DRF from rest_framework import serializers @@ -52,9 +49,9 @@ from awx.main import utils __all__ = [ + 'JSONBlob', 'AutoOneToOneField', 'ImplicitRoleField', - 'JSONField', 'SmartFilterField', 'OrderedManyToManyField', 'update_role_parentage_for_instance', @@ -71,34 +68,9 @@ def __enum_validate__(validator, enums, instance, schema): Draft4Validator.VALIDATORS['enum'] = __enum_validate__ -class JSONField(upstream_JSONField): - def db_type(self, connection): - return 'text' - - def from_db_value(self, value, expression, connection): - if value in {'', None} and not self.null: - return {} - return super(JSONField, self).from_db_value(value, expression, connection) - - -class JSONBField(upstream_JSONBField): - def get_prep_lookup(self, lookup_type, value): - if isinstance(value, str) and value == "null": - return 'null' - return super(JSONBField, self).get_prep_lookup(lookup_type, value) - - def get_db_prep_value(self, value, connection, 
prepared=False): - if connection.vendor == 'sqlite': - # sqlite (which we use for tests) does not support jsonb; - return json.dumps(value, cls=DjangoJSONEncoder) - return super(JSONBField, self).get_db_prep_value(value, connection, prepared) - - def from_db_value(self, value, expression, connection): - # Work around a bug in django-jsonfield - # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos - if isinstance(value, str): - return json.loads(value) - return value +class JSONBlob(JSONField): + def get_internal_type(self): + return "TextField" # Based on AutoOneToOneField from django-annoying: @@ -140,7 +112,7 @@ def resolve_role_field(obj, field): # use extremely generous duck typing to accomidate all possible forms # of the model that may be used during various migrations if obj._meta.model_name != 'role' or obj._meta.app_label != 'main': - raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj)))) + raise Exception(smart_str('{} refers to a {}, not a Role'.format(field, type(obj)))) ret.append(obj.id) else: if type(obj) is ManyToManyDescriptor: @@ -385,7 +357,7 @@ class SmartFilterField(models.TextField): return super(SmartFilterField, self).get_prep_value(value) -class JSONSchemaField(JSONBField): +class JSONSchemaField(models.JSONField): """ A JSONB field that self-validates against a defined JSON schema (http://json-schema.org). 
This base class is intended to be overwritten by @@ -398,8 +370,13 @@ class JSONSchemaField(JSONBField): # validation empty_values = (None, '') + def __init__(self, encoder=None, decoder=None, **options): + if encoder is None: + encoder = DjangoJSONEncoder + super().__init__(encoder=encoder, decoder=decoder, **options) + def get_default(self): - return copy.deepcopy(super(JSONBField, self).get_default()) + return copy.deepcopy(super(models.JSONField, self).get_default()) def schema(self, model_instance): raise NotImplementedError() diff --git a/awx/main/management/commands/cleanup_jobs.py b/awx/main/management/commands/cleanup_jobs.py index c9c508c6e8..dec5ca6e50 100644 --- a/awx/main/management/commands/cleanup_jobs.py +++ b/awx/main/management/commands/cleanup_jobs.py @@ -11,13 +11,12 @@ import re # Django from django.core.management.base import BaseCommand, CommandError from django.db import transaction, connection +from django.db.models import Min, Max +from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed from django.utils.timezone import now # AWX from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification -from awx.main.signals import disable_activity_stream, disable_computed_fields - -from awx.main.utils.deletion import AWXCollector, pre_delete def unified_job_class_to_event_table_name(job_class): @@ -80,7 +79,6 @@ class DeleteMeta: ).count() def identify_excluded_partitions(self): - part_drop = {} for pk, status, created in self.jobs_qs: @@ -94,7 +92,7 @@ class DeleteMeta: # Note that parts_no_drop _may_ contain the names of partitions that don't exist # This can happen when the cleanup of _unpartitioned_* logic leaves behind jobs with status pending, waiting, running. The find_jobs_to_delete() will # pick these jobs up. 
- self.parts_no_drop = set([k for k, v in part_drop.items() if v is False]) + self.parts_no_drop = {k for k, v in part_drop.items() if v is False} def delete_jobs(self): if not self.dry_run: @@ -116,7 +114,7 @@ class DeleteMeta: partitions_dt = [p for p in partitions_dt if not None] # convert datetime partition back to string partition - partitions_maybe_drop = set([dt_to_partition_name(tbl_name, dt) for dt in partitions_dt]) + partitions_maybe_drop = {dt_to_partition_name(tbl_name, dt) for dt in partitions_dt} # Do not drop partition if there is a job that will not be deleted pointing at it self.parts_to_drop = partitions_maybe_drop - self.parts_no_drop @@ -164,6 +162,15 @@ class Command(BaseCommand): parser.add_argument('--notifications', dest='only_notifications', action='store_true', default=False, help='Remove notifications') parser.add_argument('--workflow-jobs', default=False, action='store_true', dest='only_workflow_jobs', help='Remove workflow jobs') + def init_logging(self): + log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) + self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') + self.logger.setLevel(log_levels.get(self.verbosity, 0)) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter('%(message)s')) + self.logger.addHandler(handler) + self.logger.propagate = False + def cleanup(self, job_class): delete_meta = DeleteMeta(self.logger, job_class, self.cutoff, self.dry_run) skipped, deleted = delete_meta.delete() @@ -193,7 +200,7 @@ class Command(BaseCommand): return (delete_meta.jobs_no_delete_count, delete_meta.jobs_to_delete_count) def _cascade_delete_job_events(self, model, pk_list): - if len(pk_list) > 0: + if pk_list: with connection.cursor() as cursor: tblname = unified_job_class_to_event_table_name(model) @@ -202,37 +209,30 @@ class Command(BaseCommand): cursor.execute(f"DELETE FROM _unpartitioned_{tblname} WHERE {rel_name} IN ({pk_list_csv})") def cleanup_jobs(self): - skipped, 
deleted = 0, 0 + batch_size = 100000 - batch_size = 1000000 + # Hack to avoid doing N+1 queries as each item in the Job query set does + # an individual query to get the underlying UnifiedJob. + Job.polymorphic_super_sub_accessors_replaced = True - while True: - # get queryset for available jobs to remove - qs = Job.objects.filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) - # get pk list for the first N (batch_size) objects - pk_list = qs[0:batch_size].values_list('pk', flat=True) - # You cannot delete queries with sql LIMIT set, so we must - # create a new query from this pk_list - qs_batch = Job.objects.filter(pk__in=pk_list) - just_deleted = 0 - if not self.dry_run: + skipped = (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() + + qs = Job.objects.select_related('unifiedjob_ptr').filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) + if self.dry_run: + deleted = qs.count() + return skipped, deleted + + deleted = 0 + info = qs.aggregate(min=Min('id'), max=Max('id')) + if info['min'] is not None: + for start in range(info['min'], info['max'] + 1, batch_size): + qs_batch = qs.filter(id__gte=start, id__lte=start + batch_size) + pk_list = qs_batch.values_list('id', flat=True) + + _, results = qs_batch.delete() + deleted += results['main.Job'] self._cascade_delete_job_events(Job, pk_list) - del_query = pre_delete(qs_batch) - collector = AWXCollector(del_query.db) - collector.collect(del_query) - _, models_deleted = collector.delete() - if models_deleted: - just_deleted = models_deleted['main.Job'] - deleted += just_deleted - else: - just_deleted = 0 # break from loop, this is dry run - deleted = qs.count() - - if just_deleted == 0: - break - - skipped += (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() return skipped, deleted def 
cleanup_ad_hoc_commands(self): @@ -339,15 +339,6 @@ class Command(BaseCommand): skipped += SystemJob.objects.filter(created__gte=self.cutoff).count() return skipped, deleted - def init_logging(self): - log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) - self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') - self.logger.setLevel(log_levels.get(self.verbosity, 0)) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(message)s')) - self.logger.addHandler(handler) - self.logger.propagate = False - def cleanup_workflow_jobs(self): skipped, deleted = 0, 0 workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff) @@ -398,6 +389,7 @@ class Command(BaseCommand): self.cutoff = now() - datetime.timedelta(days=self.days) except OverflowError: raise CommandError('--days specified is too large. Try something less than 99999 (about 270 years).') + model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs', 'notifications') models_to_cleanup = set() for m in model_names: @@ -405,18 +397,28 @@ class Command(BaseCommand): models_to_cleanup.add(m) if not models_to_cleanup: models_to_cleanup.update(model_names) - with disable_activity_stream(), disable_computed_fields(): - for m in model_names: - if m in models_to_cleanup: - skipped, deleted = getattr(self, 'cleanup_%s' % m)() - func = getattr(self, 'cleanup_%s_partition' % m, None) - if func: - skipped_partition, deleted_partition = func() - skipped += skipped_partition - deleted += deleted_partition + # Completely disconnect all signal handlers. This is very aggressive, + # but it will be ok since this command is run in its own process. The + # core of the logic is borrowed from Signal.disconnect(). 
+ for s in (pre_save, post_save, pre_delete, post_delete, m2m_changed): + with s.lock: + del s.receivers[:] + s.sender_receivers_cache.clear() - if self.dry_run: - self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) - else: - self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) + for m in model_names: + if m not in models_to_cleanup: + continue + + skipped, deleted = getattr(self, 'cleanup_%s' % m)() + + func = getattr(self, 'cleanup_%s_partition' % m, None) + if func: + skipped_partition, deleted_partition = func() + skipped += skipped_partition + deleted += deleted_partition + + if self.dry_run: + self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) + else: + self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 0854784f10..78acec423d 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -16,7 +16,7 @@ from collections import OrderedDict from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db import connection, transaction -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # DRF error class to distinguish license exceptions from rest_framework.exceptions import PermissionDenied @@ -79,13 +79,13 @@ class AnsibleInventoryLoader(object): ee = get_default_execution_environment() if settings.IS_K8S: - logger.warn('This command is not able to run on kubernetes-based deployment. This action should be done using the API.') + logger.warning('This command is not able to run on kubernetes-based deployment. 
This action should be done using the API.') sys.exit(1) if ee.credential: process = subprocess.run(['podman', 'image', 'exists', ee.image], capture_output=True) if process.returncode != 0: - logger.warn( + logger.warning( f'The default execution environment (id={ee.id}, name={ee.name}, image={ee.image}) is not available on this node. ' 'The image needs to be available locally before using this command, due to registry authentication. ' 'To pull this image, either run a job on this node or manually pull the image.' @@ -109,8 +109,8 @@ class AnsibleInventoryLoader(object): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() - stdout = smart_text(stdout) - stderr = smart_text(stderr) + stdout = smart_str(stdout) + stderr = smart_str(stderr) if proc.returncode != 0: raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % ('ansible-inventory', proc.returncode, stdout, stderr)) @@ -224,7 +224,7 @@ class Command(BaseCommand): from_dict = instance_id if instance_id: break - return smart_text(instance_id) + return smart_str(instance_id) def _get_enabled(self, from_dict, default=None): """ diff --git a/awx/main/managers.py b/awx/main/managers.py index 2614193fe1..4702ad6a9e 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -1,12 +1,11 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-import sys import logging import os from django.db import models from django.conf import settings - +from django.db.models.functions import Lower from awx.main.utils.filters import SmartFilter from awx.main.utils.pglock import advisory_lock from awx.main.utils.common import get_capacity_type @@ -35,7 +34,7 @@ class HostManager(models.Manager): - Only consider results that are unique - Return the count of this query """ - return self.order_by().exclude(inventory_sources__source='controller').values('name').distinct().count() + return self.order_by().exclude(inventory_sources__source='controller').values(name_lower=Lower('name')).distinct().count() def org_active_count(self, org_id): """Return count of active, unique hosts used by an organization. @@ -104,10 +103,6 @@ class InstanceManager(models.Manager): def me(self): """Return the currently active instance.""" - # If we are running unit tests, return a stub record. - if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'): - return self.model(id=1, hostname=settings.CLUSTER_HOST_ID, uuid=UUID_DEFAULT) - node = self.filter(hostname=settings.CLUSTER_HOST_ID) if node.exists(): return node[0] @@ -247,7 +242,7 @@ class InstanceGroupManager(models.Manager): if t.controller_node: control_groups = instance_ig_mapping.get(t.controller_node, []) if not control_groups: - logger.warn(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") + logger.warning(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") if t.status == 'waiting' or (not t.execution_node and not t.is_container_group_task): # Subtract capacity from any peer groups that share instances diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 39caf4a7e7..90739aebbe 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -14,7 +14,7 @@ from django.db import connection from django.shortcuts import redirect from django.apps import apps from 
django.utils.deprecation import MiddlewareMixin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.urls import reverse, resolve from awx.main.utils.named_url_graph import generate_graph, GraphNode @@ -103,7 +103,7 @@ def _customize_graph(): class URLModificationMiddleware(MiddlewareMixin): - def __init__(self, get_response=None): + def __init__(self, get_response): models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')] generate_graph(models) _customize_graph() diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index 7ce9911546..c3dcbe36b7 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -7,7 +7,6 @@ from __future__ import unicode_literals from django.db import migrations, models import django.utils.timezone -import jsonfield.fields import django.db.models.deletion from django.conf import settings import taggit.managers @@ -70,7 +69,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('counter', models.PositiveIntegerField(default=0)), @@ -433,7 +432,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('host_name', models.CharField(default='', max_length=1024, editable=False)), @@ -623,7 +622,7 @@ class Migration(migrations.Migration): ('dtend', models.DateTimeField(default=None, null=True, editable=False)), ('rrule', 
models.CharField(max_length=255)), ('next_run', models.DateTimeField(default=None, null=True, editable=False)), - ('extra_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('extra_data', models.JSONField(default=dict, null=True, blank=True)), ( 'created_by', models.ForeignKey( @@ -751,7 +750,7 @@ class Migration(migrations.Migration): ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)), ('job_args', models.TextField(default='', editable=False, blank=True)), ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)), - ('job_env', jsonfield.fields.JSONField(default=dict, editable=False, blank=True)), + ('job_env', models.JSONField(default=dict, editable=False, null=True, blank=True)), ('job_explanation', models.TextField(default='', editable=False, blank=True)), ('start_args', models.TextField(default='', editable=False, blank=True)), ('result_stdout_text', models.TextField(default='', editable=False, blank=True)), @@ -1035,7 +1034,7 @@ class Migration(migrations.Migration): ('host_config_key', models.CharField(default='', max_length=1024, blank=True)), ('ask_variables_on_launch', models.BooleanField(default=False)), ('survey_enabled', models.BooleanField(default=False)), - ('survey_spec', jsonfield.fields.JSONField(default=dict, blank=True)), + ('survey_spec', models.JSONField(default=dict, blank=True)), ], options={ 'ordering': ('name',), diff --git a/awx/main/migrations/0002_squashed_v300_release.py b/awx/main/migrations/0002_squashed_v300_release.py index 2afdef1845..5f23ed566f 100644 --- a/awx/main/migrations/0002_squashed_v300_release.py +++ b/awx/main/migrations/0002_squashed_v300_release.py @@ -12,7 +12,6 @@ import django.db.models.deletion from django.conf import settings from django.utils.timezone import now -import jsonfield.fields import taggit.managers @@ -199,7 +198,7 @@ class Migration(migrations.Migration): ), ('recipients', models.TextField(default='', editable=False, 
blank=True)), ('subject', models.TextField(default='', editable=False, blank=True)), - ('body', jsonfield.fields.JSONField(default=dict, blank=True)), + ('body', models.JSONField(default=dict, null=True, blank=True)), ], options={ 'ordering': ('pk',), @@ -230,7 +229,7 @@ class Migration(migrations.Migration): ], ), ), - ('notification_configuration', jsonfield.fields.JSONField(default=dict)), + ('notification_configuration', models.JSONField(default=dict)), ( 'created_by', models.ForeignKey( @@ -324,9 +323,7 @@ class Migration(migrations.Migration): ('module', models.CharField(max_length=128)), ( 'facts', - awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), ( 'host', diff --git a/awx/main/migrations/0004_squashed_v310_release.py b/awx/main/migrations/0004_squashed_v310_release.py index 06fd3aeed3..c0ac0d4a04 100644 --- a/awx/main/migrations/0004_squashed_v310_release.py +++ b/awx/main/migrations/0004_squashed_v310_release.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models import awx.main.models.notifications -import jsonfield.fields import django.db.models.deletion import awx.main.models.workflow import awx.main.fields @@ -221,7 +220,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -260,7 +259,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, 
blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', @@ -308,12 +307,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='job', name='artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='ancestor_artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Job timeout settings migrations.AddField( @@ -381,9 +380,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='playbook_files', - field=jsonfield.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), # Job events to stdout migrations.AddField( @@ -539,7 +536,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplate', @@ -549,85 +546,83 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplate', name='survey_spec', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), # JSON field changes migrations.AlterField( model_name='adhoccommandevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='job', name='artifacts', 
- field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='job', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='jobevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='jobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='notification', name='body', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='notificationtemplate', name='notification_configuration', - field=awx.main.fields.JSONField(default=dict), + field=models.JSONField(default=dict), ), migrations.AlterField( model_name='project', name='playbook_files', - field=awx.main.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), migrations.AlterField( model_name='schedule', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='unifiedjob', name='job_env', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjob', 
name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='ancestor_artifacts', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='workflowjobtemplatenode', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), # Job Project Update migrations.AddField( diff --git a/awx/main/migrations/0006_v320_release.py b/awx/main/migrations/0006_v320_release.py index 1f755f94ce..c05bee3eec 100644 --- a/awx/main/migrations/0006_v320_release.py +++ b/awx/main/migrations/0006_v320_release.py @@ -108,14 +108,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='fact', name='facts', - field=awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + field=models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), migrations.AddField( model_name='host', name='ansible_facts', - field=awx.main.fields.JSONBField(default=dict, help_text='Arbitrary JSON structure of most recent ansible_facts, per-host.', blank=True), + field=models.JSONField(default=dict, help_text='Arbitrary JSON 
structure of most recent ansible_facts, per-host.', blank=True), ), migrations.AddField( model_name='host', @@ -177,8 +175,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='inventory_files', - field=awx.main.fields.JSONField( - default=[], + field=models.JSONField( + default=list, help_text='Suggested list of content that could be Ansible inventory in the project', verbose_name='Inventory Files', editable=False, diff --git a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py index 3d69de2b33..56c86b19a8 100644 --- a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py +++ b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): @@ -15,6 +14,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='setting', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), ] diff --git a/awx/main/migrations/0014_v330_saved_launchtime_configs.py b/awx/main/migrations/0014_v330_saved_launchtime_configs.py index d120166218..38c5d2b2f6 100644 --- a/awx/main/migrations/0014_v330_saved_launchtime_configs.py +++ b/awx/main/migrations/0014_v330_saved_launchtime_configs.py @@ -20,7 +20,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='schedule', @@ -37,7 +37,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='survey_passwords', - 
field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -47,12 +47,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', @@ -62,12 +62,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Run data migration before removing the old credential field migrations.RunPython(migration_utils.set_current_apps_for_migrations, migrations.RunPython.noop), @@ -85,9 +85,9 @@ class Migration(migrations.Migration): name='JobLaunchConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('extra_data', awx.main.fields.JSONField(blank=True, default=dict)), - ('survey_passwords', awx.main.fields.JSONField(blank=True, default=dict, editable=False)), - ('char_prompts', awx.main.fields.JSONField(blank=True, default=dict)), + ('extra_data', models.JSONField(blank=True, null=True, default=dict)), + ('survey_passwords', models.JSONField(blank=True, null=True, 
default=dict, editable=False)), + ('char_prompts', models.JSONField(blank=True, null=True, default=dict)), ('credentials', models.ManyToManyField(related_name='joblaunchconfigs', to='main.Credential')), ( 'inventory', diff --git a/awx/main/migrations/0018_v330_add_additional_stdout_events.py b/awx/main/migrations/0018_v330_add_additional_stdout_events.py index c9b026eeb5..ad399e72bb 100644 --- a/awx/main/migrations/0018_v330_add_additional_stdout_events.py +++ b/awx/main/migrations/0018_v330_add_additional_stdout_events.py @@ -2,10 +2,11 @@ # Generated by Django 1.11.7 on 2017-12-14 15:13 from __future__ import unicode_literals -import awx.main.fields from django.db import migrations, models import django.db.models.deletion +import awx.main.fields + class Migration(migrations.Migration): @@ -20,7 +21,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), @@ -84,7 +85,7 @@ class Migration(migrations.Migration): max_length=100, ), ), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), @@ -114,7 +115,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), diff --git a/awx/main/migrations/0020_v330_instancegroup_policies.py b/awx/main/migrations/0020_v330_instancegroup_policies.py index e2dc677b44..0577f14ee9 100644 --- a/awx/main/migrations/0020_v330_instancegroup_policies.py +++ b/awx/main/migrations/0020_v330_instancegroup_policies.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models from decimal import Decimal -import awx.main.fields class Migration(migrations.Migration): @@ -16,8 +15,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='instancegroup', name='policy_instance_list', - field=awx.main.fields.JSONField( - default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True + field=models.JSONField( + default=list, help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True ), ), migrations.AddField( diff --git a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py index cc1d1bfeba..e26571f1b9 100644 --- a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py +++ b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py @@ -29,7 +29,7 @@ class Migration(migrations.Migration): ('client_id', models.CharField(db_index=True, default=oauth2_provider.generators.generate_client_id, max_length=100, unique=True)), ( 'redirect_uris', - 
models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris]), + models.TextField(blank=True, help_text='Allowed URIs list, space separated'), ), ('client_type', models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], max_length=32)), ( diff --git a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py index 2f856e23f5..504fa14eb3 100644 --- a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py +++ b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py @@ -2,9 +2,7 @@ # Generated by Django 1.11.11 on 2018-05-21 19:51 from __future__ import unicode_literals -import awx.main.fields -import awx.main.models.activity_stream -from django.db import migrations +from django.db import models, migrations class Migration(migrations.Migration): @@ -17,6 +15,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='deleted_actor', - field=awx.main.fields.JSONField(null=True), + field=models.JSONField(null=True), ), ] diff --git a/awx/main/migrations/0053_v340_workflow_inventory.py b/awx/main/migrations/0053_v340_workflow_inventory.py index 23bede35f7..e3dd56a3b2 100644 --- a/awx/main/migrations/0053_v340_workflow_inventory.py +++ b/awx/main/migrations/0053_v340_workflow_inventory.py @@ -17,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AddField( model_name='workflowjob', diff --git a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py index 690989276b..c2c69bb440 100644 --- a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py +++ 
b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py @@ -4,7 +4,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import awx.main.fields import awx.main.models.notifications @@ -18,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='notificationtemplate', name='messages', - field=awx.main.fields.JSONField( + field=models.JSONField( default=awx.main.models.notifications.NotificationTemplate.default_messages, help_text='Optional custom messages for notification template.', null=True, diff --git a/awx/main/migrations/0090_v360_WFJT_prompts.py b/awx/main/migrations/0090_v360_WFJT_prompts.py index 46fb497202..fdc3b85fcc 100644 --- a/awx/main/migrations/0090_v360_WFJT_prompts.py +++ b/awx/main/migrations/0090_v360_WFJT_prompts.py @@ -24,7 +24,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplate', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AlterField( model_name='joblaunchconfig', diff --git a/awx/main/migrations/0129_unifiedjob_installed_collections.py b/awx/main/migrations/0129_unifiedjob_installed_collections.py index d20c9068d0..644bff4132 100644 --- a/awx/main/migrations/0129_unifiedjob_installed_collections.py +++ b/awx/main/migrations/0129_unifiedjob_installed_collections.py @@ -1,7 +1,6 @@ # Generated by Django 2.2.16 on 2021-02-16 20:27 -import awx.main.fields -from django.db import migrations +from django.db import migrations, models class Migration(migrations.Migration): @@ -14,7 +13,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='unifiedjob', name='installed_collections', - field=awx.main.fields.JSONBField( + field=models.JSONField( blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.' 
), ), diff --git a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py index 11c4b1b3f9..596d1f81f2 100644 --- a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py +++ b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py @@ -15,10 +15,10 @@ def forwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory updates to controller') + logger.warning(f'Renamed {r} tower inventory updates to controller') InventorySource.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory sources to controller') + logger.warning(f'Renamed {r} tower inventory sources to controller') CredentialType = apps.get_model('main', 'CredentialType') @@ -32,7 +32,7 @@ def forwards(apps, schema_editor): registry_type = ManagedCredentialType.registry.get('controller') if not registry_type: raise RuntimeError('Excpected to find controller credential, this may need to be edited in the future!') - logger.warn('Renaming the Ansible Tower credential type for existing install') + logger.warning('Renaming the Ansible Tower credential type for existing install') tower_type.name = registry_type.name # sensitive to translations tower_type.namespace = 'controller' # if not done, will error setup_tower_managed_defaults tower_type.save(update_fields=['name', 'namespace']) @@ -46,10 +46,10 @@ def backwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory updates to tower') + logger.warning(f'Renamed {r} controller inventory updates to tower') r = InventorySource.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory sources to tower') + logger.warning(f'Renamed {r} controller inventory sources to tower') CredentialType 
= apps.get_model('main', 'CredentialType') diff --git a/awx/main/migrations/_hg_removal.py b/awx/main/migrations/_hg_removal.py index e384ea5413..76828ef474 100644 --- a/awx/main/migrations/_hg_removal.py +++ b/awx/main/migrations/_hg_removal.py @@ -14,4 +14,4 @@ def delete_hg_scm(apps, schema_editor): update_ct = Project.objects.filter(scm_type='hg').update(scm_type='') if update_ct: - logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) + logger.warning('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py index e6a65a82d4..023a7ee072 100644 --- a/awx/main/migrations/_inventory_source.py +++ b/awx/main/migrations/_inventory_source.py @@ -1,6 +1,6 @@ import logging -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.main.utils.common import set_current_apps from awx.main.utils.common import parse_yaml_or_json @@ -19,7 +19,7 @@ def _get_instance_id(from_dict, new_id, default=''): break instance_id = from_dict.get(key, default) from_dict = instance_id - return smart_text(instance_id) + return smart_str(instance_id) def _get_instance_id_for_upgrade(host, new_id): @@ -35,7 +35,7 @@ def _get_instance_id_for_upgrade(host, new_id): return None if len(new_id) > 255: # this should never happen - logger.warn('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) + logger.warning('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) return None return new_id_value @@ -47,7 +47,7 @@ def set_new_instance_id(apps, source, new_id): id_from_settings = getattr(settings, '{}_INSTANCE_ID_VAR'.format(source.upper())) if id_from_settings != new_id: # User applied an instance ID themselves, so nope on out of there - logger.warn('You have an instance ID set for {}, not 
migrating'.format(source)) + logger.warning('You have an instance ID set for {}, not migrating'.format(source)) return logger.debug('Migrating inventory instance_id for {} to {}'.format(source, new_id)) Host = apps.get_model('main', 'Host') diff --git a/awx/main/migrations/_inventory_source_vars.py b/awx/main/migrations/_inventory_source_vars.py index 71c96403a6..12bad4e4b8 100644 --- a/awx/main/migrations/_inventory_source_vars.py +++ b/awx/main/migrations/_inventory_source_vars.py @@ -2,7 +2,7 @@ import json import re import logging -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.encoding import iri_to_uri diff --git a/awx/main/migrations/_squashed_30.py b/awx/main/migrations/_squashed_30.py index c604b95c37..90c2dd061b 100644 --- a/awx/main/migrations/_squashed_30.py +++ b/awx/main/migrations/_squashed_30.py @@ -2,7 +2,6 @@ from django.db import ( migrations, models, ) -import jsonfield.fields import awx.main.fields from awx.main.migrations import _save_password_keys @@ -30,7 +29,7 @@ SQUASHED_30 = { migrations.AddField( model_name='job', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), ], '0031_v302_migrate_survey_passwords': [ diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index ed49b98083..107c7a9418 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -3,6 +3,7 @@ # Django from django.conf import settings # noqa +from django.db import connection from django.db.models.signals import pre_delete # noqa # AWX @@ -97,6 +98,93 @@ User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('accessible_objects', user_accessible_objects) +def convert_jsonfields_to_jsonb(): + if connection.vendor != 'postgresql': + return + + # fmt: off + fields = [ # Table name, expensive 
or not, tuple of column names + ('conf_setting', False, ( + 'value', + )), + ('main_instancegroup', False, ( + 'policy_instance_list', + )), + ('main_jobtemplate', False, ( + 'survey_spec', + )), + ('main_notificationtemplate', False, ( + 'notification_configuration', + 'messages', + )), + ('main_project', False, ( + 'playbook_files', + 'inventory_files', + )), + ('main_schedule', False, ( + 'extra_data', + 'char_prompts', + 'survey_passwords', + )), + ('main_workflowjobtemplate', False, ( + 'survey_spec', + 'char_prompts', + )), + ('main_workflowjobtemplatenode', False, ( + 'char_prompts', + 'extra_data', + 'survey_passwords', + )), + ('main_activitystream', True, ( + 'setting', # NN = NOT NULL + 'deleted_actor', + )), + ('main_job', True, ( + 'survey_passwords', # NN + 'artifacts', # NN + )), + ('main_joblaunchconfig', True, ( + 'extra_data', # NN + 'survey_passwords', # NN + 'char_prompts', # NN + )), + ('main_notification', True, ( + 'body', # NN + )), + ('main_unifiedjob', True, ( + 'job_env', # NN + )), + ('main_workflowjob', True, ( + 'survey_passwords', # NN + 'char_prompts', # NN + )), + ('main_workflowjobnode', True, ( + 'char_prompts', # NN + 'ancestor_artifacts', # NN + 'extra_data', # NN + 'survey_passwords', # NN + )), + ] + # fmt: on + + with connection.cursor() as cursor: + for table, expensive, columns in fields: + cursor.execute( + """ + select count(1) from information_schema.columns + where + table_name = %s and + column_name in %s and + data_type != 'jsonb'; + """, + (table, columns), + ) + if cursor.fetchone()[0]: + from awx.main.tasks.system import migrate_json_fields + + migrate_json_fields.apply_async([table, expensive, columns]) + + def cleanup_created_modified_by(sender, **kwargs): # work around a bug in django-polymorphic that doesn't properly # handle cascades for reverse foreign keys on the polymorphic base model diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 6215e17a5a..aa0ab9d9d6 100644 --- 
a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -3,14 +3,13 @@ # AWX from awx.api.versioning import reverse -from awx.main.fields import JSONField from awx.main.models.base import accepts_json # Django from django.db import models from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = ['ActivityStream'] @@ -36,7 +35,7 @@ class ActivityStream(models.Model): operation = models.CharField(max_length=13, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = accepts_json(models.TextField(blank=True)) - deleted_actor = JSONField(null=True) + deleted_actor = models.JSONField(null=True) action_node = models.CharField( blank=True, default='', @@ -84,7 +83,7 @@ class ActivityStream(models.Model): o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True) o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True) - setting = JSONField(blank=True) + setting = models.JSONField(default=dict, null=True, blank=True) def __str__(self): operation = self.operation if 'operation' in self.__dict__ else '_delayed_' diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index f4065e473d..d0608bd652 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -9,7 +9,7 @@ from urllib.parse import urljoin from django.conf import settings from django.db import models from django.utils.text import Truncator -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # AWX diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 8cdd557a84..da12f603cb 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -4,7 +4,7 @@ # Django from django.db import models 
from django.core.exceptions import ValidationError, ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now # Django-Taggit diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 88c0eedadd..53f6ffaa1f 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -15,9 +15,9 @@ from jinja2 import sandbox # Django from django.db import models -from django.utils.translation import ugettext_lazy as _, ugettext_noop +from django.utils.translation import gettext_lazy as _, gettext_noop from django.core.exceptions import ValidationError -from django.utils.encoding import force_text +from django.utils.encoding import force_str from django.utils.functional import cached_property from django.utils.timezone import now @@ -230,7 +230,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): def display_inputs(self): field_val = self.inputs.copy() for k, v in field_val.items(): - if force_text(v).startswith('$encrypted$'): + if force_str(v).startswith('$encrypted$'): field_val[k] = '$encrypted$' return field_val @@ -579,34 +579,34 @@ class ManagedCredentialType(SimpleNamespace): ManagedCredentialType( namespace='ssh', kind='ssh', - name=ugettext_noop('Machine'), + name=gettext_noop('Machine'), inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 
'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_public_key_data', - 'label': ugettext_noop('Signed SSH Certificate'), + 'label': gettext_noop('Signed SSH Certificate'), 'type': 'string', 'multiline': True, 'secret': True, }, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 'id': 'become_method', - 'label': ugettext_noop('Privilege Escalation Method'), + 'label': gettext_noop('Privilege Escalation Method'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.' ), }, { 'id': 'become_username', - 'label': ugettext_noop('Privilege Escalation Username'), + 'label': gettext_noop('Privilege Escalation Username'), 'type': 'string', }, - {'id': 'become_password', 'label': ugettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, ], }, ) @@ -614,14 +614,14 @@ ManagedCredentialType( ManagedCredentialType( namespace='scm', kind='scm', - name=ugettext_noop('Source Control'), + name=gettext_noop('Source Control'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key 
Passphrase'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True}, ], }, ) @@ -629,17 +629,17 @@ ManagedCredentialType( ManagedCredentialType( namespace='vault', kind='vault', - name=ugettext_noop('Vault'), + name=gettext_noop('Vault'), managed=True, inputs={ 'fields': [ - {'id': 'vault_password', 'label': ugettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 'id': 'vault_id', - 'label': ugettext_noop('Vault Identifier'), + 'label': gettext_noop('Vault Identifier'), 'type': 'string', 'format': 'vault_id', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify an (optional) Vault ID. 
This is ' 'equivalent to specifying the --vault-id ' 'Ansible parameter for providing multiple Vault ' @@ -655,32 +655,32 @@ ManagedCredentialType( ManagedCredentialType( namespace='net', kind='net', - name=ugettext_noop('Network'), + name=gettext_noop('Network'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_key_unlock', - 'label': ugettext_noop('Private Key Passphrase'), + 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, }, { 'id': 'authorize', - 'label': ugettext_noop('Authorize'), + 'label': gettext_noop('Authorize'), 'type': 'boolean', }, { 'id': 'authorize_password', - 'label': ugettext_noop('Authorize Password'), + 'label': gettext_noop('Authorize Password'), 'type': 'string', 'secret': True, }, @@ -695,23 +695,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='aws', kind='cloud', - name=ugettext_noop('Amazon Web Services'), + name=gettext_noop('Amazon Web Services'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Access Key'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Secret Key'), + 'label': gettext_noop('Secret Key'), 'type': 'string', 'secret': True, }, { 'id': 'security_token', - 'label': ugettext_noop('STS Token'), + 'label': gettext_noop('STS Token'), 'type': 'string', 'secret': True, - 'help_text': 
ugettext_noop( + 'help_text': gettext_noop( 'Security Token Service (STS) is a web service ' 'that enables you to request temporary, ' 'limited-privilege credentials for AWS Identity ' @@ -726,38 +726,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='openstack', kind='cloud', - name=ugettext_noop('OpenStack'), + name=gettext_noop('OpenStack'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password (API Key)'), + 'label': gettext_noop('Password (API Key)'), 'type': 'string', 'secret': True, }, { 'id': 'host', - 'label': ugettext_noop('Host (Authentication URL)'), + 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', - 'help_text': ugettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), + 'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), }, { 'id': 'project', - 'label': ugettext_noop('Project (Tenant Name)'), + 'label': gettext_noop('Project (Tenant Name)'), 'type': 'string', }, { 'id': 'project_domain_name', - 'label': ugettext_noop('Project (Domain Name)'), + 'label': gettext_noop('Project (Domain Name)'), 'type': 'string', }, { 'id': 'domain', - 'label': ugettext_noop('Domain Name'), + 'label': gettext_noop('Domain Name'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'OpenStack domains define administrative boundaries. ' 'It is only needed for Keystone v3 authentication ' 'URLs. 
Refer to the documentation for ' @@ -766,13 +766,13 @@ ManagedCredentialType( }, { 'id': 'region', - 'label': ugettext_noop('Region Name'), + 'label': gettext_noop('Region Name'), 'type': 'string', - 'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'), + 'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -784,20 +784,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='vmware', kind='cloud', - name=ugettext_noop('VMware vCenter'), + name=gettext_noop('VMware vCenter'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('VCenter Host'), + 'label': gettext_noop('VCenter Host'), 'type': 'string', - 'help_text': ugettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), + 'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -809,20 +809,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='satellite6', kind='cloud', - name=ugettext_noop('Red Hat Satellite 6'), + name=gettext_noop('Red Hat Satellite 6'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Satellite 6 URL'), + 'label': gettext_noop('Satellite 6 URL'), 'type': 'string', - 'help_text': ugettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'), + 'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. 
For example, https://satellite.example.org'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -834,21 +834,21 @@ ManagedCredentialType( ManagedCredentialType( namespace='gce', kind='cloud', - name=ugettext_noop('Google Compute Engine'), + name=gettext_noop('Google Compute Engine'), managed=True, inputs={ 'fields': [ { 'id': 'username', - 'label': ugettext_noop('Service Account Email Address'), + 'label': gettext_noop('Service Account Email Address'), 'type': 'string', - 'help_text': ugettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), + 'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), }, { 'id': 'project', 'label': 'Project', 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'The Project ID is the GCE assigned identification. ' 'It is often constructed as three words or two words ' 'followed by a three-digit number. 
Examples: project-id-000 ' @@ -857,12 +857,12 @@ ManagedCredentialType( }, { 'id': 'ssh_key_data', - 'label': ugettext_noop('RSA Private Key'), + 'label': gettext_noop('RSA Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True, - 'help_text': ugettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), + 'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), }, ], 'required': ['username', 'ssh_key_data'], @@ -872,36 +872,36 @@ ManagedCredentialType( ManagedCredentialType( namespace='azure_rm', kind='cloud', - name=ugettext_noop('Microsoft Azure Resource Manager'), + name=gettext_noop('Microsoft Azure Resource Manager'), managed=True, inputs={ 'fields': [ { 'id': 'subscription', - 'label': ugettext_noop('Subscription ID'), + 'label': gettext_noop('Subscription ID'), 'type': 'string', - 'help_text': ugettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), + 'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'client', 'label': ugettext_noop('Client ID'), 'type': 'string'}, + {'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'}, { 'id': 'secret', - 'label': ugettext_noop('Client Secret'), + 'label': gettext_noop('Client Secret'), 'type': 'string', 'secret': True, }, - {'id': 'tenant', 'label': ugettext_noop('Tenant ID'), 'type': 'string'}, + {'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'}, { 'id': 'cloud_environment', - 'label': ugettext_noop('Azure Cloud Environment'), + 'label': gettext_noop('Azure Cloud Environment'), 
'type': 'string', - 'help_text': ugettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), + 'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), }, ], 'required': ['subscription'], @@ -911,16 +911,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='github_token', kind='token', - name=ugettext_noop('GitHub Personal Access Token'), + name=gettext_noop('GitHub Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'), } ], 'required': ['token'], @@ -930,16 +930,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='gitlab_token', kind='token', - name=ugettext_noop('GitLab Personal Access Token'), + name=gettext_noop('GitLab Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'), } ], 'required': ['token'], @@ -949,12 +949,12 @@ ManagedCredentialType( ManagedCredentialType( namespace='insights', kind='insights', - name=ugettext_noop('Insights'), + name=gettext_noop('Insights'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 
'secret': True}, ], 'required': ['username', 'password'], }, @@ -973,23 +973,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='rhv', kind='cloud', - name=ugettext_noop('Red Hat Virtualization'), + name=gettext_noop('Red Hat Virtualization'), managed=True, inputs={ 'fields': [ - {'id': 'host', 'label': ugettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': ugettext_noop('The host to authenticate with.')}, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'ca_file', - 'label': ugettext_noop('CA File'), + 'label': gettext_noop('CA File'), 'type': 'string', - 'help_text': ugettext_noop('Absolute file path to the CA file to use (optional)'), + 'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'), }, ], 'required': ['host', 'username', 'password'], @@ -1017,38 +1017,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='controller', kind='cloud', - name=ugettext_noop('Red Hat Ansible Automation Platform'), + name=gettext_noop('Red Hat Ansible Automation Platform'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Red Hat Ansible Automation Platform'), + 'label': gettext_noop('Red Hat Ansible Automation Platform'), 'type': 'string', - 'help_text': ugettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), + 'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': 
gettext_noop( 'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.' ), }, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'oauth_token', - 'label': ugettext_noop('OAuth Token'), + 'label': gettext_noop('OAuth Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), + 'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), }, - {'id': 'verify_ssl', 'label': ugettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, + {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, ], 'required': ['host'], }, @@ -1071,30 +1071,30 @@ ManagedCredentialType( ManagedCredentialType( namespace='kubernetes_bearer_token', kind='kubernetes', - name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'), + name=gettext_noop('OpenShift or Kubernetes API Bearer Token'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'), + 'label': gettext_noop('OpenShift or Kubernetes API Endpoint'), 'type': 'string', - 'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), + 'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), }, { 'id': 'bearer_token', - 'label': ugettext_noop('API authentication bearer token'), + 'label': gettext_noop('API authentication bearer token'), 'type': 'string', 'secret': True, }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, { 'id': 'ssl_ca_cert', - 'label': ugettext_noop('Certificate Authority data'), + 'label': gettext_noop('Certificate 
Authority data'), 'type': 'string', 'secret': True, 'multiline': True, @@ -1107,31 +1107,31 @@ ManagedCredentialType( ManagedCredentialType( namespace='registry', kind='registry', - name=ugettext_noop('Container Registry'), + name=gettext_noop('Container Registry'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Authentication URL'), + 'label': gettext_noop('Authentication URL'), 'type': 'string', - 'help_text': ugettext_noop('Authentication endpoint for the container registry.'), + 'help_text': gettext_noop('Authentication endpoint for the container registry.'), 'default': 'quay.io', }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', }, { 'id': 'password', - 'label': ugettext_noop('Password or Token'), + 'label': gettext_noop('Password or Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A password or token used to authenticate with'), + 'help_text': gettext_noop('A password or token used to authenticate with'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -1144,27 +1144,27 @@ ManagedCredentialType( ManagedCredentialType( namespace='galaxy_api_token', kind='galaxy', - name=ugettext_noop('Ansible Galaxy/Automation Hub API Token'), + name=gettext_noop('Ansible Galaxy/Automation Hub API Token'), inputs={ 'fields': [ { 'id': 'url', - 'label': ugettext_noop('Galaxy Server URL'), + 'label': gettext_noop('Galaxy Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of the Galaxy instance to connect to.'), + 'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'), }, { 'id': 'auth_url', - 'label': ugettext_noop('Auth Server URL'), + 'label': gettext_noop('Auth Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'), + 'help_text': gettext_noop('The URL of a 
Keycloak server token_endpoint, if using ' 'SSO auth.'), }, { 'id': 'token', - 'label': ugettext_noop('API Token'), + 'label': gettext_noop('API Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A token to use for authentication against the Galaxy instance.'), + 'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'), }, ], 'required': ['url'], diff --git a/awx/main/models/events.py b/awx/main/models/events.py index f953e7ca61..f80c23d58b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -10,13 +10,13 @@ from django.db import models, DatabaseError, connection from django.utils.dateparse import parse_datetime from django.utils.text import Truncator from django.utils.timezone import utc, now -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from awx.api.versioning import reverse from awx.main import consumers +from awx.main.fields import JSONBlob from awx.main.managers import DeferJobCreatedManager -from awx.main.fields import JSONField from awx.main.constants import MINIMAL_EVENTS from awx.main.models.base import CreatedModifiedModel from awx.main.utils import ignore_inventory_computed_fields, camelcase_to_underscore @@ -209,10 +209,7 @@ class BasePlaybookEvent(CreatedModifiedModel): max_length=100, choices=EVENT_CHOICES, ) - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) failed = models.BooleanField( default=False, editable=False, @@ -396,7 +393,7 @@ class BasePlaybookEvent(CreatedModifiedModel): connection.on_commit(_send_notifications) for field in ('playbook', 'play', 'task', 'role'): - value = force_text(event_data.get(field, '')).strip() + value = force_str(event_data.get(field, '')).strip() if value != getattr(self, field): setattr(self, field, value) if 
settings.LOG_AGGREGATOR_ENABLED: @@ -648,10 +645,7 @@ class BaseCommandEvent(CreatedModifiedModel): class Meta: abstract = True - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) uuid = models.CharField( max_length=1024, default='', diff --git a/awx/main/models/execution_environments.py b/awx/main/models/execution_environments.py index b0b3dd7579..55ce69098b 100644 --- a/awx/main/models/execution_environments.py +++ b/awx/main/models/execution_environments.py @@ -1,5 +1,5 @@ from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.versioning import reverse from awx.main.models.base import CommonModel diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index add2564015..a9dc9b887d 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -9,7 +9,7 @@ from django.core.validators import MinValueValidator from django.db import models, connection from django.db.models.signals import post_save, post_delete from django.dispatch import receiver -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.conf import settings from django.utils.timezone import now, timedelta @@ -19,7 +19,6 @@ from solo.models import SingletonModel from awx import __version__ as awx_application_version from awx.api.versioning import reverse from awx.main.managers import InstanceManager, InstanceGroupManager, UUID_DEFAULT -from awx.main.fields import JSONField from awx.main.constants import JOB_FOLDER_PREFIX from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search from awx.main.models.unified_jobs import UnifiedJob @@ -233,13 +232,19 @@ class Instance(HasPolicyEditsMixin, BaseModel): def refresh_capacity_fields(self): """Update derived capacity fields from cpu and memory (no save)""" - self.cpu_capacity = get_cpu_effective_capacity(self.cpu) - 
self.mem_capacity = get_mem_effective_capacity(self.memory) + if self.node_type == 'hop': + self.cpu_capacity = 0 + self.mem_capacity = 0 # formula has a non-zero offset, so we make sure it is 0 for hop nodes + else: + self.cpu_capacity = get_cpu_effective_capacity(self.cpu) + self.mem_capacity = get_mem_effective_capacity(self.memory) self.set_capacity_value() - def save_health_data(self, version, cpu, memory, uuid=None, update_last_seen=False, errors=''): - self.last_health_check = now() - update_fields = ['last_health_check'] + def save_health_data(self, version=None, cpu=0, memory=0, uuid=None, update_last_seen=False, errors=''): + update_fields = ['errors'] + if self.node_type != 'hop': + self.last_health_check = now() + update_fields.append('last_health_check') if update_last_seen: self.last_seen = self.last_health_check @@ -247,11 +252,11 @@ class Instance(HasPolicyEditsMixin, BaseModel): if uuid is not None and self.uuid != uuid: if self.uuid is not None: - logger.warn(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') + logger.warning(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') self.uuid = uuid update_fields.append('uuid') - if self.version != version: + if version is not None and self.version != version: self.version = version update_fields.append('version') @@ -270,7 +275,7 @@ class Instance(HasPolicyEditsMixin, BaseModel): self.errors = '' else: self.mark_offline(perform_save=False, errors=errors) - update_fields.extend(['cpu_capacity', 'mem_capacity', 'capacity', 'errors']) + update_fields.extend(['cpu_capacity', 'mem_capacity', 'capacity']) # disabling activity stream will avoid extra queries, which is important for heatbeat actions from awx.main.signals import disable_activity_stream @@ -322,8 +327,8 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): ) policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically 
assign to this group")) policy_instance_minimum = models.IntegerField(default=0, help_text=_("Static minimum number of Instances to automatically assign to this group")) - policy_instance_list = JSONField( - default=[], blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") + policy_instance_list = models.JSONField( + default=list, blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") ) POLICY_FIELDS = frozenset(('policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage')) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 0cac6602e0..3b7945c965 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -14,7 +14,7 @@ import yaml # Django from django.conf import settings from django.db import models, connection -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db import transaction from django.core.exceptions import ValidationError from django.utils.timezone import now @@ -29,7 +29,6 @@ from awx.main.constants import CLOUD_PROVIDERS from awx.main.consumers import emit_channel_notification from awx.main.fields import ( ImplicitRoleField, - JSONBField, SmartFilterField, OrderedManyToManyField, ) @@ -488,7 +487,7 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin): editable=False, help_text=_('Inventory source(s) that created or modified this host.'), ) - ansible_facts = JSONBField( + ansible_facts = models.JSONField( blank=True, default=dict, help_text=_('Arbitrary JSON structure of most recent ansible_facts, per-host.'), diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index e405c98596..3b22ecd02c 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -19,7 +19,7 @@ from django.db import models # from django.core.cache import cache from django.utils.encoding import 
smart_str from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import FieldDoesNotExist # REST Framework @@ -44,7 +44,7 @@ from awx.main.models.notifications import ( JobNotificationMixin, ) from awx.main.utils import parse_yaml_or_json, getattr_dne, NullablePromptPseudoField -from awx.main.fields import ImplicitRoleField, JSONField, AskForField +from awx.main.fields import ImplicitRoleField, AskForField from awx.main.models.mixins import ( ResourceMixin, SurveyJobTemplateMixin, @@ -546,9 +546,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana editable=False, through='JobHostSummary', ) - artifacts = JSONField( - blank=True, + artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) scm_revision = models.CharField( @@ -885,7 +886,7 @@ class LaunchTimeConfigBase(BaseModel): ) # All standard fields are stored in this dictionary field # This is a solution to the nullable CharField problem, specific to prompting - char_prompts = JSONField(blank=True, default=dict) + char_prompts = models.JSONField(default=dict, null=True, blank=True) def prompts_dict(self, display=False): data = {} @@ -938,12 +939,13 @@ class LaunchTimeConfig(LaunchTimeConfigBase): abstract = True # Special case prompting fields, even more special than the other ones - extra_data = JSONField(blank=True, default=dict) + extra_data = models.JSONField(default=dict, null=True, blank=True) survey_passwords = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) # Credentials needed for non-unified job / unified JT models diff --git a/awx/main/models/label.py b/awx/main/models/label.py index 18bdb2b025..7ca92d4ff2 100644 --- a/awx/main/models/label.py +++ b/awx/main/models/label.py @@ -3,7 +3,7 @@ # Django from django.db import models -from 
django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index 45a3cae885..94e737859b 100644 --- a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -15,7 +15,7 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models.query import QuerySet from django.utils.crypto import get_random_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.main.models.base import prevent_search @@ -24,7 +24,7 @@ from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_lice from awx.main.utils.execution_environments import get_default_execution_environment from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted from awx.main.utils.polymorphic import build_polymorphic_ctypes_map -from awx.main.fields import JSONField, AskForField +from awx.main.fields import AskForField from awx.main.constants import ACTIVE_STATES @@ -103,12 +103,7 @@ class SurveyJobTemplateMixin(models.Model): survey_enabled = models.BooleanField( default=False, ) - survey_spec = prevent_search( - JSONField( - blank=True, - default=dict, - ) - ) + survey_spec = prevent_search(models.JSONField(default=dict, blank=True)) ask_variables_on_launch = AskForField(blank=True, default=False, allows_field='extra_vars') def survey_password_variables(self): @@ -370,10 +365,11 @@ class SurveyJobMixin(models.Model): abstract = True survey_passwords = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 860e591e2c..9bfd1bc6b5 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -10,8 +10,8 @@ from 
django.db import models from django.conf import settings from django.core.mail.message import EmailMessage from django.db import connection -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str, force_str from jinja2 import sandbox, ChainableUndefined from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError @@ -28,7 +28,6 @@ from awx.main.notifications.mattermost_backend import MattermostBackend from awx.main.notifications.grafana_backend import GrafanaBackend from awx.main.notifications.rocketchat_backend import RocketChatBackend from awx.main.notifications.irc_backend import IrcBackend -from awx.main.fields import JSONField logger = logging.getLogger('awx.main.models.notifications') @@ -70,12 +69,12 @@ class NotificationTemplate(CommonModelNameNotUnique): choices=NOTIFICATION_TYPE_CHOICES, ) - notification_configuration = prevent_search(JSONField(blank=False)) + notification_configuration = prevent_search(models.JSONField(default=dict)) def default_messages(): return {'started': None, 'success': None, 'error': None, 'workflow_approval': None} - messages = JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) + messages = models.JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) def has_message(self, condition): potential_template = self.messages.get(condition, {}) @@ -187,7 +186,7 @@ class NotificationTemplate(CommonModelNameNotUnique): def display_notification_configuration(self): field_val = self.notification_configuration.copy() for field in self.notification_class.init_parameters: - if field in field_val and force_text(field_val[field]).startswith('$encrypted$'): + if field in field_val and 
force_str(field_val[field]).startswith('$encrypted$'): field_val[field] = '$encrypted$' return field_val @@ -237,7 +236,7 @@ class Notification(CreatedModifiedModel): default='', editable=False, ) - body = JSONField(blank=True) + body = models.JSONField(default=dict, null=True, blank=True) def get_absolute_url(self, request=None): return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request) @@ -515,7 +514,7 @@ class JobNotificationMixin(object): try: notification_templates = self.get_notification_templates() except Exception: - logger.warn("No notification template defined for emitting notification") + logger.warning("No notification template defined for emitting notification") return if not notification_templates: diff --git a/awx/main/models/oauth.py b/awx/main/models/oauth.py index b9b4b8c217..c9927f78bd 100644 --- a/awx/main/models/oauth.py +++ b/awx/main/models/oauth.py @@ -6,7 +6,7 @@ import re from django.core.validators import RegexValidator from django.db import models, connection from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.conf import settings # Django OAuth Toolkit diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 277b33315e..30a393d72b 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -8,7 +8,7 @@ from django.db import models from django.contrib.auth.models import User from django.contrib.sessions.models import Session from django.utils.timezone import now as tz_now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index a2de97e34f..385674d7ab 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -9,8 +9,8 @@ import urllib.parse as urlparse # Django from django.conf import settings 
from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from django.utils.text import slugify from django.core.exceptions import ValidationError from django.utils.timezone import now, make_aware, get_default_timezone @@ -38,7 +38,6 @@ from awx.main.models.rbac import ( ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR, ) -from awx.main.fields import JSONField __all__ = ['Project', 'ProjectUpdate'] @@ -214,7 +213,7 @@ class ProjectOptions(models.Model): for filename in filenames: playbook = could_be_playbook(project_path, dirpath, filename) if playbook is not None: - results.append(smart_text(playbook)) + results.append(smart_str(playbook)) return sorted(results, key=lambda x: smart_str(x).lower()) @property @@ -230,7 +229,7 @@ class ProjectOptions(models.Model): for filename in filenames: inv_path = could_be_inventory(project_path, dirpath, filename) if inv_path is not None: - results.append(smart_text(inv_path)) + results.append(smart_str(inv_path)) if len(results) > max_inventory_listing: break if len(results) > max_inventory_listing: @@ -294,17 +293,17 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn help_text=_('The last revision fetched by a project update'), ) - playbook_files = JSONField( + playbook_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Playbook Files'), help_text=_('List of playbooks found in the project'), ) - inventory_files = JSONField( + inventory_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Inventory Files'), help_text=_('Suggested list of content that could be Ansible inventory in the project'), diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 485f70bd0d..8f54cc3e43 100644 --- 
a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -11,7 +11,7 @@ import re from django.db import models, transaction, connection from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index dca50d9232..c3fae526f1 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -14,7 +14,7 @@ from dateutil.zoneinfo import get_zonefile_instance from django.db import models from django.db.models.query import QuerySet from django.utils.timezone import now, make_aware -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse @@ -103,7 +103,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig): for zone in all_zones: if fname.endswith(zone): return zone - logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule)) + logger.warning('Could not detect valid zoneinfo for {}'.format(self.rrule)) return '' @property diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 489cba9799..65804c97b0 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -19,9 +19,9 @@ from collections import OrderedDict from django.conf import settings from django.db import models, connection from django.core.exceptions import NON_FIELD_ERRORS -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.contrib.contenttypes.models import ContentType # REST Framework @@ -54,7 +54,7 @@ from awx.main.utils import polymorphic 
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL from awx.main.redact import UriCleaner, REPLACE_STR from awx.main.consumers import emit_channel_notification -from awx.main.fields import JSONField, JSONBField, AskForField, OrderedManyToManyField +from awx.main.fields import AskForField, OrderedManyToManyField __all__ = ['UnifiedJobTemplate', 'UnifiedJob', 'StdoutMaxBytesExceeded'] @@ -357,7 +357,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn validated_kwargs = kwargs.copy() if unallowed_fields: if parent_field_name is None: - logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) + logger.warning('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) for f in unallowed_fields: validated_kwargs.pop(f) @@ -653,9 +653,10 @@ class UnifiedJob( editable=False, ) job_env = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) ) @@ -704,7 +705,7 @@ class UnifiedJob( 'Credential', related_name='%(class)ss', ) - installed_collections = JSONBField( + installed_collections = models.JSONField( blank=True, default=dict, editable=False, @@ -1090,7 +1091,7 @@ class UnifiedJob( # function assume a str-based fd will be returned; decode # .write() calls on the fly to maintain this interface _write = fd.write - fd.write = lambda s: _write(smart_text(s)) + fd.write = lambda s: _write(smart_str(s)) tbl = self._meta.db_table + 'event' created_by_cond = '' if self.has_unpartitioned_events: @@ -1205,7 +1206,7 @@ class UnifiedJob( try: extra_data_dict = parse_yaml_or_json(extra_data, silent_failure=False) except Exception as e: - logger.warn("Exception deserializing extra vars: " + str(e)) + logger.warning("Exception deserializing extra vars: " + str(e)) evars = self.extra_vars_dict evars.update(extra_data_dict) self.update_fields(extra_vars=json.dumps(evars)) @@ -1273,7 
+1274,7 @@ class UnifiedJob( id=self.id, name=self.name, url=self.get_ui_url(), - created_by=smart_text(self.created_by), + created_by=smart_str(self.created_by), started=self.started.isoformat() if self.started is not None else None, finished=self.finished.isoformat() if self.finished is not None else None, status=self.status, diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 684e25b967..197951ea05 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -11,7 +11,7 @@ from urllib.parse import urljoin # Django from django.db import connection, models from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # from django import settings as tower_settings @@ -40,7 +40,6 @@ from awx.main.models.mixins import ( from awx.main.models.jobs import LaunchTimeConfigBase, LaunchTimeConfig, JobTemplate from awx.main.models.credential import Credential from awx.main.redact import REPLACE_STR -from awx.main.fields import JSONField from awx.main.utils import schedule_task_manager @@ -232,9 +231,10 @@ class WorkflowJobNode(WorkflowNodeBase): default=None, on_delete=models.CASCADE, ) - ancestor_artifacts = JSONField( - blank=True, + ancestor_artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) do_not_run = models.BooleanField( diff --git a/awx/main/notifications/grafana_backend.py b/awx/main/notifications/grafana_backend.py index 4e9a7a6262..51a27a897e 100644 --- a/awx/main/notifications/grafana_backend.py +++ b/awx/main/notifications/grafana_backend.py @@ -7,8 +7,8 @@ import logging import requests import dateutil.parser as dp -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from 
awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -82,9 +82,9 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): if m.body.get('finished'): grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000) except ValueError: - logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + logger.error(smart_str(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) if not self.fail_silently: - raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + raise Exception(smart_str(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) grafana_data['isRegion'] = self.isRegion grafana_data['dashboardId'] = self.dashboardId grafana_data['panelId'] = self.panelId @@ -97,8 +97,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): "{}/api/annotations".format(m.recipients()[0]), json=grafana_data, headers=grafana_headers, verify=(not self.grafana_no_verify_ssl) ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification grafana: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification grafana: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index d020de824d..20a5523b52 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -7,8 +7,8 @@ import logging import 
irc.client -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -55,7 +55,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): connect_factory=connection_factory, ) except irc.client.ServerConnectionError as e: - logger.error(smart_text(_("Exception connecting to irc server: {}").format(e))) + logger.error(smart_str(_("Exception connecting to irc server: {}").format(e))) if not self.fail_silently: raise return True diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index b9cc513ba7..c96b3e9f54 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -4,8 +4,8 @@ import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -44,8 +44,8 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase): r = requests.post("{}".format(m.recipients()[0]), json=payload, verify=(not self.mattermost_no_verify_ssl)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification mattermost: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification mattermost: 
{}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index 8cde9e3cfd..cfc3073ed4 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -5,8 +5,8 @@ import json import logging import pygerduty -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -78,13 +78,13 @@ class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e))) + logger.error(smart_str(_("Exception connecting to PagerDuty: {}").format(e))) for m in messages: try: pager.trigger_incident(m.recipients()[0], description=m.subject, details=m.body, client=m.from_email) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py index 9092b90f17..67155233c7 100644 --- a/awx/main/notifications/rocketchat_backend.py +++ b/awx/main/notifications/rocketchat_backend.py @@ -5,8 +5,8 @@ import logging import requests import json -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import 
get_awx_http_client_headers @@ -44,8 +44,8 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase): ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 73364dc037..d1016526aa 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -5,8 +5,8 @@ import logging from slack_sdk import WebClient from slack_sdk.errors import SlackApiError -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -53,7 +53,7 @@ class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase): else: raise RuntimeError("Slack Notification unable to send {}: {} ({})".format(r, m.subject, response['error'])) except SlackApiError as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 0b730a56b2..1f54d603ac 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -5,8 +5,8 @@ import logging from twilio.rest import Client 
-from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -37,14 +37,14 @@ class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e))) + logger.error(smart_str(_("Exception connecting to Twilio: {}").format(e))) for m in messages: try: connection.messages.create(to=m.to, from_=m.from_email, body=m.subject) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index 342184ecf2..30518e0714 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -5,8 +5,8 @@ import json import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import get_awx_http_client_headers @@ -76,8 +76,8 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): verify=(not self.disable_ssl_verification), ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error 
sending notification webhook: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/registrar.py b/awx/main/registrar.py index 07e721a953..31133f936b 100644 --- a/awx/main/registrar.py +++ b/awx/main/registrar.py @@ -32,7 +32,7 @@ class ActivityStreamRegistrar(object): post_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_create") pre_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_update") pre_delete.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_delete") - self.models.pop(model) + self.models.remove(model) for m2mfield in model._meta.many_to_many: m2m_attr = getattr(model, m2mfield.name) diff --git a/awx/main/routing.py b/awx/main/routing.py index 6ba58e68c6..2818559428 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -1,8 +1,8 @@ import redis import logging -from django.conf.urls import url from django.conf import settings +from django.urls import re_path from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter @@ -21,14 +21,14 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter): logger.debug(f"cleaning up Redis key {k}") r.delete(k) except redis.exceptions.RedisError as e: - logger.warn("encountered an error communicating with redis.") + logger.warning("encountered an error communicating with redis.") raise e super().__init__(*args, **kwargs) websocket_urlpatterns = [ - url(r'websocket/$', consumers.EventConsumer), - url(r'websocket/broadcast/$', consumers.BroadcastConsumer), + re_path(r'websocket/$', consumers.EventConsumer), + re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer), ] application = AWXProtocolTypeRouter( diff --git a/awx/main/scheduler/dag_workflow.py b/awx/main/scheduler/dag_workflow.py index 39995f437c..c2afba68ad 100644 --- a/awx/main/scheduler/dag_workflow.py +++ 
b/awx/main/scheduler/dag_workflow.py @@ -1,5 +1,5 @@ -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str # Python from awx.main.models import ( @@ -171,7 +171,7 @@ class WorkflowDAG(SimpleDAG): parms['node_status'] = ",".join(["({},{})".format(id, status) for id, status in failed_path_nodes_id_status]) if len(failed_unified_job_template_node_ids) > 0: parms['no_ufjt'] = ",".join(failed_unified_job_template_node_ids) - return True, smart_text(s.format(**parms)) + return True, smart_str(s.format(**parms)) return False, None r''' diff --git a/awx/main/scheduler/kubernetes.py b/awx/main/scheduler/kubernetes.py index 6e36226df5..8566ca4864 100644 --- a/awx/main/scheduler/kubernetes.py +++ b/awx/main/scheduler/kubernetes.py @@ -7,7 +7,7 @@ from urllib import parse as urlparse from django.conf import settings from kubernetes import client, config from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.main.utils.common import parse_yaml_or_json, deepmerge from awx.main.utils.execution_environments import get_default_pod_spec diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 09ea5e23e8..fba17917cf 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -10,7 +10,7 @@ from types import SimpleNamespace # Django from django.db import transaction, connection -from django.utils.translation import ugettext_lazy as _, gettext_noop +from django.utils.translation import gettext_lazy as _, gettext_noop from django.utils.timezone import now as tz_now from django.conf import settings @@ -493,6 +493,8 @@ class TaskManager: control_instance.jobs_running += 1 self.dependency_graph.add_job(task) execution_instance = 
self.real_instances[control_instance.hostname] + task.log_lifecycle("controller_node_chosen") + task.log_lifecycle("execution_node_chosen") self.start_task(task, self.controlplane_ig, task.get_jobs_fail_chain(), execution_instance) found_acceptable_queue = True continue @@ -572,7 +574,7 @@ class TaskManager: timeout_message = _("The approval node {name} ({pk}) has expired after {timeout} seconds.").format( name=task.name, pk=task.pk, timeout=task.timeout ) - logger.warn(timeout_message) + logger.warning(timeout_message) task.timed_out = True task.status = 'failed' task.send_approval_notification('timed_out') diff --git a/awx/main/tasks/__init__.py b/awx/main/tasks/__init__.py index e69de29bb2..517df4a285 100644 --- a/awx/main/tasks/__init__.py +++ b/awx/main/tasks/__init__.py @@ -0,0 +1 @@ +from . import jobs, receptor, system # noqa diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index ccd9c39815..b1a4c450e5 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -8,7 +8,7 @@ import stat # Django from django.utils.timezone import now from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid # AWX from awx.main.redact import UriCleaner @@ -25,7 +25,7 @@ class RunnerCallback: def __init__(self, model=None): self.parent_workflow_job_id = None self.host_map = {} - self.guid = GuidMiddleware.get_guid() + self.guid = get_guid() self.job_created = None self.recent_event_timings = deque(maxlen=settings.MAX_WEBSOCKET_EVENT_RATE) self.dispatcher = CallbackQueueDispatcher() @@ -154,7 +154,7 @@ class RunnerCallback: if self.instance.cancel_flag or self.instance.status == 'canceled': cancel_wait = (now() - self.instance.modified).seconds if self.instance.modified else 0 if cancel_wait > 5: - logger.warn('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) + logger.warning('Request to cancel {} took {} seconds to 
complete.'.format(self.instance.log_format, cancel_wait)) return True return False diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index f31eb7084f..6fb1613f0f 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -40,6 +40,7 @@ from awx.main.constants import ( STANDARD_INVENTORY_UPDATE_ENV, JOB_FOLDER_PREFIX, MAX_ISOLATED_PATH_COLON_DELIMITER, + CONTAINER_VOLUMES_MOUNT_TYPES, ) from awx.main.models import ( Instance, @@ -80,7 +81,7 @@ from awx.main.utils.handlers import SpecialInventoryHandler from awx.main.tasks.system import handle_success_and_failure_notifications, update_smart_memberships_for_inventory, update_inventory_computed_fields from awx.main.utils.update_model import update_model from rest_framework.exceptions import PermissionDenied -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ logger = logging.getLogger('awx.main.tasks.jobs') @@ -163,8 +164,14 @@ class BaseTask(object): # Using z allows the dir to be mounted by multiple containers # Uppercase Z restricts access (in weird ways) to 1 container at a time if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: - src, dest, scontext = this_path.split(':') - params['container_volume_mounts'].append(f'{src}:{dest}:{scontext}') + src, dest, mount_option = this_path.split(':') + + # mount_option validation via performed via API, but since this can be overriden via settings.py + if mount_option not in CONTAINER_VOLUMES_MOUNT_TYPES: + mount_option = 'z' + logger.warning(f'The path {this_path} has volume mount type {mount_option} which is not supported. 
Using "z" instead.') + + params['container_volume_mounts'].append(f'{src}:{dest}:{mount_option}') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: src, dest = this_path.split(':') params['container_volume_mounts'].append(f'{src}:{dest}:z') @@ -816,11 +823,12 @@ class RunJob(BaseTask): return job.playbook def build_extra_vars_file(self, job, private_data_dir): - # Define special extra_vars for AWX, combine with job.extra_vars. - extra_vars = job.awx_meta_vars() - + extra_vars = dict() + # load in JT extra vars if job.extra_vars_dict: extra_vars.update(json.loads(job.decrypted_extra_vars())) + # load in meta vars, overriding any variable set in JT extra vars + extra_vars.update(job.awx_meta_vars()) # By default, all extra vars disallow Jinja2 template usage for # security reasons; top level key-values defined in JT.extra_vars, however, @@ -854,24 +862,6 @@ class RunJob(BaseTask): d[r'Vault password \({}\):\s*?$'.format(vault_id)] = k return d - def build_execution_environment_params(self, instance, private_data_dir): - if settings.IS_K8S: - return {} - - params = super(RunJob, self).build_execution_environment_params(instance, private_data_dir) - # If this has an insights agent and it is not already mounted then show it - insights_dir = os.path.dirname(settings.INSIGHTS_SYSTEM_ID_FILE) - if instance.use_fact_cache and os.path.exists(insights_dir): - logger.info('not parent of others') - params.setdefault('container_volume_mounts', []) - params['container_volume_mounts'].extend( - [ - f"{insights_dir}:{insights_dir}:Z", - ] - ) - - return params - def pre_run_hook(self, job, private_data_dir): super(RunJob, self).pre_run_hook(job, private_data_dir) if job.inventory is None: @@ -1896,14 +1886,6 @@ class RunAdHocCommand(BaseTask): if ad_hoc_command.verbosity: args.append('-%s' % ('v' * min(5, ad_hoc_command.verbosity))) - extra_vars = ad_hoc_command.awx_meta_vars() - - if ad_hoc_command.extra_vars_dict: - redacted_extra_vars, removed_vars = 
extract_ansible_vars(ad_hoc_command.extra_vars_dict) - if removed_vars: - raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) - extra_vars.update(ad_hoc_command.extra_vars_dict) - if ad_hoc_command.limit: args.append(ad_hoc_command.limit) else: @@ -1912,13 +1894,13 @@ class RunAdHocCommand(BaseTask): return args def build_extra_vars_file(self, ad_hoc_command, private_data_dir): - extra_vars = ad_hoc_command.awx_meta_vars() - + extra_vars = dict() if ad_hoc_command.extra_vars_dict: redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict) if removed_vars: raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) extra_vars.update(ad_hoc_command.extra_vars_dict) + extra_vars.update(ad_hoc_command.awx_meta_vars()) self._write_extra_vars_file(private_data_dir, extra_vars) def build_module_name(self, ad_hoc_command): diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 4cb0a543a2..544311afa3 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -7,8 +7,6 @@ import logging import os import shutil import socket -import sys -import threading import time import yaml @@ -26,6 +24,8 @@ from awx.main.utils.common import ( parse_yaml_or_json, cleanup_new_process, ) +from awx.main.constants import MAX_ISOLATED_PATH_COLON_DELIMITER + # Receptorctl from receptorctl.socket_interface import ReceptorControl @@ -164,7 +164,7 @@ def run_until_complete(node, timing_data=None, **kwargs): if settings.RECEPTOR_RELEASE_WORK: res = receptor_ctl.simple_command(f"work release {unit_id}") if res != {'released': unit_id}: - logger.warn(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') + logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') receptor_ctl.close() @@ -247,16 +247,6 @@ def worker_cleanup(node_name, vargs, timeout=300.0): return 
stdout -class TransmitterThread(threading.Thread): - def run(self): - self.exc = None - - try: - super().run() - except Exception: - self.exc = sys.exc_info() - - class AWXReceptorJob: def __init__(self, task, runner_params=None): self.task = task @@ -296,46 +286,47 @@ class AWXReceptorJob: # reading. sockin, sockout = socket.socketpair() - transmitter_thread = TransmitterThread(target=self.transmit, args=[sockin]) - transmitter_thread.start() - - # submit our work, passing - # in the right side of our socketpair for reading. - _kw = {} + # Prepare the submit_work kwargs before creating threads, because references to settings are not thread-safe + work_submit_kw = dict(worktype=self.work_type, params=self.receptor_params, signwork=self.sign_work) if self.work_type == 'ansible-runner': - _kw['node'] = self.task.instance.execution_node - use_stream_tls = get_conn_type(_kw['node'], receptor_ctl).name == "STREAMTLS" - _kw['tlsclient'] = get_tls_client(use_stream_tls) - result = receptor_ctl.submit_work(worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params, signwork=self.sign_work, **_kw) - self.unit_id = result['unitid'] - # Update the job with the work unit in-memory so that the log_lifecycle - # will print out the work unit that is to be associated with the job in the database - # via the update_model() call. - # We want to log the work_unit_id as early as possible. A failure can happen in between - # when we start the job in receptor and when we associate the job <-> work_unit_id. - # In that case, there will be work running in receptor and Controller will not know - # which Job it is associated with. - # We do not programatically handle this case. Ideally, we would handle this with a reaper case. - # The two distinct job lifecycle log events below allow for us to at least detect when this - # edge case occurs. 
If the lifecycle event work_unit_id_received occurs without the - # work_unit_id_assigned event then this case may have occured. - self.task.instance.work_unit_id = result['unitid'] # Set work_unit_id in-memory only - self.task.instance.log_lifecycle("work_unit_id_received") - self.task.update_model(self.task.instance.pk, work_unit_id=result['unitid']) - self.task.instance.log_lifecycle("work_unit_id_assigned") + work_submit_kw['node'] = self.task.instance.execution_node + use_stream_tls = get_conn_type(work_submit_kw['node'], receptor_ctl).name == "STREAMTLS" + work_submit_kw['tlsclient'] = get_tls_client(use_stream_tls) - sockin.close() - sockout.close() + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + transmitter_future = executor.submit(self.transmit, sockin) - if transmitter_thread.exc: - raise transmitter_thread.exc[1].with_traceback(transmitter_thread.exc[2]) + # submit our work, passing in the right side of our socketpair for reading. + result = receptor_ctl.submit_work(payload=sockout.makefile('rb'), **work_submit_kw) - transmitter_thread.join() + sockin.close() + sockout.close() + + self.unit_id = result['unitid'] + # Update the job with the work unit in-memory so that the log_lifecycle + # will print out the work unit that is to be associated with the job in the database + # via the update_model() call. + # We want to log the work_unit_id as early as possible. A failure can happen in between + # when we start the job in receptor and when we associate the job <-> work_unit_id. + # In that case, there will be work running in receptor and Controller will not know + # which Job it is associated with. + # We do not programatically handle this case. Ideally, we would handle this with a reaper case. + # The two distinct job lifecycle log events below allow for us to at least detect when this + # edge case occurs. 
If the lifecycle event work_unit_id_received occurs without the + # work_unit_id_assigned event then this case may have occured. + self.task.instance.work_unit_id = result['unitid'] # Set work_unit_id in-memory only + self.task.instance.log_lifecycle("work_unit_id_received") + self.task.update_model(self.task.instance.pk, work_unit_id=result['unitid']) + self.task.instance.log_lifecycle("work_unit_id_assigned") + + # Throws an exception if the transmit failed. + # Will be caught by the try/except in BaseTask#run. + transmitter_future.result() # Artifacts are an output, but sometimes they are an input as well # this is the case with fact cache, where clearing facts deletes a file, and this must be captured artifact_dir = os.path.join(self.runner_params['private_data_dir'], 'artifacts') - if os.path.exists(artifact_dir): + if self.work_type != 'local' and os.path.exists(artifact_dir): shutil.rmtree(artifact_dir) resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, return_socket=True, return_sockfile=True) @@ -367,9 +358,9 @@ class AWXReceptorJob: logger.exception(f'An error was encountered while getting status for work unit {self.unit_id}') if 'exceeded quota' in detail: - logger.warn(detail) + logger.warning(detail) log_name = self.task.instance.log_format - logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.") + logger.warning(f"Could not launch pod for {log_name}. 
Exceeded quota.") self.task.update_model(self.task.instance.pk, status='pending') return # If ansible-runner ran, but an error occured at runtime, the traceback information @@ -389,7 +380,7 @@ class AWXReceptorJob: self.task.instance.result_traceback = detail self.task.instance.save(update_fields=['result_traceback']) else: - logger.warn(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') + logger.warning(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') except Exception: raise RuntimeError(detail) @@ -488,6 +479,48 @@ class AWXReceptorJob: if self.task.instance.execution_environment.pull: pod_spec['spec']['containers'][0]['imagePullPolicy'] = pull_options[self.task.instance.execution_environment.pull] + # This allows the user to also expose the isolated path list + # to EEs running in k8s/ocp environments, i.e. container groups. + # This assumes the node and SA supports hostPath volumes + # type is not passed due to backward compatibility, + # which means that no checks will be performed before mounting the hostPath volume. 
+ if settings.AWX_MOUNT_ISOLATED_PATHS_ON_K8S and settings.AWX_ISOLATION_SHOW_PATHS: + spec_volume_mounts = [] + spec_volumes = [] + + for idx, this_path in enumerate(settings.AWX_ISOLATION_SHOW_PATHS): + mount_option = None + if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: + src, dest, mount_option = this_path.split(':') + elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: + src, dest = this_path.split(':') + else: + src = dest = this_path + + # Enforce read-only volume if 'ro' has been explicitly passed + # We do this so we can use the same configuration for regular scenarios and k8s + # Since flags like ':O', ':z' or ':Z' are not valid in the k8s realm + # Example: /data:/data:ro + read_only = bool('ro' == mount_option) + + # Since type is not being passed, k8s by default will not perform any checks if the + # hostPath volume exists on the k8s node itself. + spec_volumes.append({'name': f'volume-{idx}', 'hostPath': {'path': src}}) + + spec_volume_mounts.append({'name': f'volume-{idx}', 'mountPath': f'{dest}', 'readOnly': read_only}) + + # merge any volumes definition already present in the pod_spec + if 'volumes' in pod_spec['spec']: + pod_spec['spec']['volumes'] += spec_volumes + else: + pod_spec['spec']['volumes'] = spec_volumes + + # merge any volumesMounts definition already present in the pod_spec + if 'volumeMounts' in pod_spec['spec']['containers'][0]: + pod_spec['spec']['containers'][0]['volumeMounts'] += spec_volume_mounts + else: + pod_spec['spec']['containers'][0]['volumeMounts'] = spec_volume_mounts + if self.task and self.task.instance.is_container_group_task: # If EE credential is passed, create an imagePullSecret if self.task.instance.execution_environment and self.task.instance.execution_environment.credential: diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index 43ac6c2b26..008c3bcb2f 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -1,5 +1,6 @@ # Python from collections 
import namedtuple +import itertools import functools import importlib import json @@ -13,15 +14,16 @@ from distutils.version import LooseVersion as Version # Django from django.conf import settings -from django.db import transaction, DatabaseError, IntegrityError +from django.db import connection, transaction, DatabaseError, IntegrityError from django.db.models.fields.related import ForeignKey from django.utils.timezone import now from django.utils.encoding import smart_str from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_noop from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist +from django.contrib.contenttypes.models import ContentType # Django-CRUM from crum import impersonate @@ -46,6 +48,7 @@ from awx.main.models import ( Inventory, SmartInventoryMembership, Job, + convert_jsonfields_to_jsonb, ) from awx.main.constants import ACTIVE_STATES from awx.main.dispatch.publish import task @@ -78,6 +81,9 @@ Try upgrading OpenSSH or providing your private key in an different format. 
\ def dispatch_startup(): startup_logger = logging.getLogger('awx.main.tasks') + + convert_jsonfields_to_jsonb() + startup_logger.debug("Syncing Schedules") for sch in Schedule.objects.all(): try: @@ -121,6 +127,123 @@ def inform_cluster_of_shutdown(): logger.exception('Encountered problem with normal shutdown signal.') +def migrate_json_fields_expensive(table, columns): + batchsize = 50000 + + ct = ContentType.objects.get_by_natural_key(*table.split('_', 1)) + model = ct.model_class() + + # Phase 1: add the new columns, making them nullable to avoid populating them + with connection.schema_editor() as schema_editor: + # See: https://docs.djangoproject.com/en/3.1/ref/schema-editor/ + + for colname in columns: + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.add_field(model, new_f) + + # Create a trigger to make sure new data automatically gets put in both fields. 
+ with connection.cursor() as cursor: + # It's a little annoying, I think this trigger will re-do + # the same work as the update query in Phase 2 + cursor.execute( + f""" + create or replace function update_{table}_{colname}() + returns trigger as $body$ + begin + new._{colname} = new.{colname}::jsonb + return new; + end + $body$ language plpgsql; + """ + ) + cursor.execute( + f""" + create trigger {table}_{colname}_trigger + before insert or update + on {table} + for each row + execute procedure update_{table}_{colname}; + """ + ) + + # Phase 2: copy over the data + with connection.cursor() as cursor: + rows = 0 + for i in itertools.count(0, batchsize): + cursor.execute(f"select count(1) from {table} where id >= %s;", (i,)) + if not cursor.fetchone()[0]: + break + + column_expr = ', '.join(f"_{colname} = {colname}::jsonb" for colname in columns) + cursor.execute( + f""" + update {table} + set {column_expr} + where id >= %s and id < %s; + """, + (i, i + batchsize), + ) + rows += cursor.rowcount + logger.debug(f"Batch {i} to {i + batchsize} copied on {table}.") + + logger.warning(f"Data copied for {rows} rows on {table}.") + + # Phase 3: drop the old column and rename the new one + with connection.schema_editor() as schema_editor: + + # FIXME: Grab a lock explicitly here? 
+ for colname in columns: + with connection.cursor() as cursor: + cursor.execute(f"drop trigger {table}_{colname}_trigger;") + cursor.execute(f"drop function update_{table}_{colname};") + + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.remove_field(model, f) + + _, _, args, kwargs = new_f.deconstruct() + f = new_f.__class__(*args, **kwargs) + f.set_attributes_from_name(colname) + + schema_editor.alter_field(model, new_f, f) + + +@task(queue=get_local_queuename) +def migrate_json_fields(table, expensive, columns): + logger.warning(f"Migrating json fields: {table} {columns}") + + with advisory_lock(f'json_migration_{table}', wait=False) as acquired: + if not acquired: + return + + from django.db.migrations.executor import MigrationExecutor + + # If Django is currently running migrations, wait until it is done. + while True: + executor = MigrationExecutor(connection) + if not executor.migration_plan(executor.loader.graph.leaf_nodes()): + break + time.sleep(60) + + if expensive: + migrate_json_fields_expensive(table, columns) + else: + with connection.cursor() as cursor: + column_expr = " ".join(f"ALTER {colname} TYPE jsonb" for colname in columns) + cursor.execute(f"ALTER TABLE {table} {column_expr};") + + logger.warning(f"Migration of {table} to jsonb is finished") + + @task(queue=get_local_queuename) def apply_cluster_membership_policies(): from awx.main.signals import disable_activity_stream @@ -374,15 +497,15 @@ def cluster_node_health_check(node): Used for the health check endpoint, refreshes the status of the instance, but must be ran on target node """ if node == '': - logger.warn('Local health check incorrectly called with blank string') + logger.warning('Local health check incorrectly called with blank string') return elif node != settings.CLUSTER_HOST_ID: - logger.warn(f'Local health check for {node} 
incorrectly sent to {settings.CLUSTER_HOST_ID}') + logger.warning(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}') return try: this_inst = Instance.objects.me() except Instance.DoesNotExist: - logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return this_inst.local_health_check() @@ -390,12 +513,12 @@ def cluster_node_health_check(node): @task(queue=get_local_queuename) def execution_node_health_check(node): if node == '': - logger.warn('Remote health check incorrectly called with blank string') + logger.warning('Remote health check incorrectly called with blank string') return try: instance = Instance.objects.get(hostname=node) except Instance.DoesNotExist: - logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return if instance.node_type != 'execution': @@ -416,7 +539,7 @@ def execution_node_health_check(node): if data['errors']: formatted_error = "\n".join(data["errors"]) if prior_capacity: - logger.warn(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') + logger.warning(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') else: logger.info(f'Failed to find capacity of new or lost execution node {node}, errors:\n{formatted_error}') else: @@ -436,12 +559,11 @@ def inspect_execution_nodes(instance_list): workers = mesh_status['Advertisements'] for ad in workers: hostname = ad['NodeID'] - changed = False if hostname in node_lookup: instance = node_lookup[hostname] else: - logger.warn(f"Unrecognized node advertising on mesh: {hostname}") + logger.warning(f"Unrecognized node advertising on mesh: {hostname}") continue # Control-plane nodes are dealt with via local_health_check instead. 
@@ -458,15 +580,16 @@ def inspect_execution_nodes(instance_list): # Only execution nodes should be dealt with by execution_node_health_check if instance.node_type == 'hop': + if was_lost and (not instance.is_lost(ref_time=nowtime)): + logger.warning(f'Hop node {hostname}, has rejoined the receptor mesh') + instance.save_health_data(errors='') continue - if changed: - execution_node_health_check.apply_async([hostname]) - elif was_lost: + if was_lost: # if the instance *was* lost, but has appeared again, # attempt to re-establish the initial capacity and version # check - logger.warn(f'Execution node attempting to rejoin as instance {hostname}.') + logger.warning(f'Execution node attempting to rejoin as instance {hostname}.') execution_node_health_check.apply_async([hostname]) elif instance.capacity == 0 and instance.enabled: # nodes with proven connection but need remediation run health checks are reduced frequency @@ -534,20 +657,14 @@ def cluster_node_heartbeat(): except Exception: logger.exception('failed to reap jobs for {}'.format(other_inst.hostname)) try: - # Capacity could already be 0 because: - # * It's a new node and it never had a heartbeat - # * It was set to 0 by another tower node running this method - # * It was set to 0 by this node, but auto deprovisioning is off - # - # If auto deprovisioning is on, don't bother setting the capacity to 0 - # since we will delete the node anyway. 
- if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES: - other_inst.mark_offline(errors=_('Another cluster node has determined this instance to be unresponsive')) - logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen)) - elif settings.AWX_AUTO_DEPROVISION_INSTANCES: + if settings.AWX_AUTO_DEPROVISION_INSTANCES: deprovision_hostname = other_inst.hostname other_inst.delete() logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname)) + elif other_inst.capacity != 0 or (not other_inst.errors): + other_inst.mark_offline(errors=_('Another cluster node has determined this instance to be unresponsive')) + logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen)) + except DatabaseError as e: if 'did not affect any rows' in str(e): logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname)) @@ -640,7 +757,7 @@ def awx_periodic_scheduler(): template = schedule.unified_job_template schedule.update_computed_fields() # To update next_run timestamp. 
if template.cache_timeout_blocked: - logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) + logger.warning("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) continue try: job_kwargs = schedule.get_job_kwargs() @@ -694,7 +811,7 @@ def handle_work_error(task_id, *args, **kwargs): instance = UnifiedJob.get_instance_by_type(each_task['type'], each_task['id']) if not instance: # Unknown task type - logger.warn("Unknown task type: {}".format(each_task['type'])) + logger.warning("Unknown task type: {}".format(each_task['type'])) continue except ObjectDoesNotExist: logger.warning('Missing {} `{}` in error callback.'.format(each_task['type'], each_task['id'])) @@ -741,7 +858,7 @@ def handle_success_and_failure_notifications(job_id): time.sleep(1) uj = UnifiedJob.objects.get(pk=job_id) - logger.warn(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + logger.warning(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") @task(queue=get_local_queuename) diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py index 0400f025d2..28565901b0 100644 --- a/awx/main/tests/conftest.py +++ b/awx/main/tests/conftest.py @@ -3,7 +3,7 @@ import pytest from unittest import mock from contextlib import contextmanager -from awx.main.models import Credential, UnifiedJob +from awx.main.models import Credential, UnifiedJob, Instance from awx.main.tests.factories import ( create_organization, create_job_template, @@ -212,3 +212,10 @@ def mock_get_event_queryset_no_job_created(): with mock.patch.object(UnifiedJob, 'get_event_queryset', lambda self: event_qs(self)) as _fixture: yield _fixture + + +@pytest.fixture +def mock_me(): + me_mock = mock.MagicMock(return_value=Instance(id=1, hostname=settings.CLUSTER_HOST_ID, uuid='00000000-0000-0000-0000-000000000000')) + with mock.patch.object(Instance.objects, 
'me', me_mock): + yield diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py index e1257cf889..658d8ad2d4 100644 --- a/awx/main/tests/docs/test_swagger_generation.py +++ b/awx/main/tests/docs/test_swagger_generation.py @@ -5,7 +5,7 @@ import re from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder from django.utils.functional import Promise -from django.utils.encoding import force_text +from django.utils.encoding import force_str from openapi_codec.encode import generate_swagger_object import pytest @@ -16,9 +16,9 @@ from awx.api.versioning import drf_reverse class i18nEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Promise): - return force_text(obj) + return force_str(obj) if type(obj) == bytes: - return force_text(obj) + return force_str(obj) return super(i18nEncoder, self).default(obj) diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 574916a84f..200fa0f195 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -180,8 +180,8 @@ def mk_job_template( jt.project = project - jt.survey_spec = spec - if jt.survey_spec is not None: + if spec is not None: + jt.survey_spec = spec jt.survey_enabled = True if persisted: @@ -212,8 +212,8 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization, webhook_service=webhook_service) - wfjt.survey_spec = spec - if wfjt.survey_spec: + if spec: + wfjt.survey_spec = spec wfjt.survey_enabled = True if persisted: diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py index acfc7a0459..dad55c5ba0 100644 --- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py +++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py @@ -3,11 +3,12 @@ 
import base64 import json import re -from datetime import datetime +from unittest import mock from django.conf import settings from django.utils.encoding import smart_str -from unittest import mock +from django.utils.timezone import now as tz_now + import pytest from awx.api.versioning import reverse @@ -146,7 +147,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge @pytest.mark.django_db def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() for i in range(3): @@ -158,7 +159,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): @pytest.mark.django_db def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -185,7 +186,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): @pytest.mark.parametrize('fmt', ['txt', 'ansi']) @mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin): - created = datetime.utcnow() + created = tz_now() job = Parent(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -267,7 +268,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v @pytest.mark.django_db def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = Job(created=created) job.save() for i in range(3): diff --git a/awx/main/tests/functional/api/test_user.py b/awx/main/tests/functional/api/test_user.py index a201d4e1cc..c19192c90c 100644 --- a/awx/main/tests/functional/api/test_user.py +++ 
b/awx/main/tests/functional/api/test_user.py @@ -1,4 +1,5 @@ from datetime import date +from unittest import mock import pytest @@ -17,7 +18,7 @@ EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", @pytest.mark.django_db def test_user_create(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 assert not response.data['is_superuser'] assert not response.data['is_system_auditor'] @@ -25,22 +26,22 @@ def test_user_create(post, admin): @pytest.mark.django_db def test_fail_double_create_user(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 400 @pytest.mark.django_db def test_create_delete_create_user(post, delete, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware()) + response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 204 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = 
post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) print(response.data) assert response.status_code == 201 @@ -48,7 +49,7 @@ def test_create_delete_create_user(post, delete, admin): @pytest.mark.django_db def test_user_cannot_update_last_login(patch, admin): assert admin.last_login is None - patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware()) + patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware(mock.Mock())) assert User.objects.get(pk=admin.pk).last_login is None diff --git a/awx/main/tests/functional/commands/test_cleanup_jobs.py b/awx/main/tests/functional/commands/test_cleanup_jobs.py deleted file mode 100644 index 612895559a..0000000000 --- a/awx/main/tests/functional/commands/test_cleanup_jobs.py +++ /dev/null @@ -1,178 +0,0 @@ -import pytest -from datetime import datetime, timedelta -from pytz import timezone -from collections import OrderedDict -from unittest import mock - -from django.db.models.deletion import Collector, SET_NULL, CASCADE -from django.core.management import call_command - -from awx.main.management.commands import cleanup_jobs -from awx.main.utils.deletion import AWXCollector -from awx.main.models import JobTemplate, User, Job, Notification, WorkflowJobNode, JobHostSummary - - -@pytest.fixture -def setup_environment(inventory, project, machine_credential, host, notification_template, label): - """ - Create old jobs and new jobs, with various other objects to hit the - related fields of Jobs. This makes sure on_delete() effects are tested - properly. 
- """ - old_jobs = [] - new_jobs = [] - days = 10 - days_str = str(days) - - jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project) - jt.credentials.add(machine_credential) - jt_user = User.objects.create(username='jobtemplateuser') - jt.execute_role.members.add(jt_user) - - notification = Notification() - notification.notification_template = notification_template - notification.save() - - for i in range(3): - # create jobs with current time - job1 = jt.create_job() - job1.created = datetime.now(tz=timezone('UTC')) - job1.save() - # sqlite does not support partitioning so we cannot test partition-based jobevent cleanup - # JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - new_jobs.append(job1) - - # create jobs 10 days ago - job2 = jt.create_job() - job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days) - job2.save() - job2.dependent_jobs.add(job1) - # JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - old_jobs.append(job2) - - jt.last_job = job2 - jt.current_job = job2 - jt.save() - host.last_job = job2 - host.save() - notification.unifiedjob_notifications.add(job2) - label.unifiedjob_labels.add(job2) - jn = WorkflowJobNode.objects.create(job=job2) - jn.save() - jh = JobHostSummary.objects.create(job=job2) - jh.save() - - return (old_jobs, new_jobs, days_str) - - -# sqlite does not support table partitioning so we mock out the methods responsible for pruning -# job event partitions during the job cleanup task -# https://github.com/ansible/awx/issues/9039 -@pytest.mark.django_db -@mock.patch.object(cleanup_jobs.DeleteMeta, 'identify_excluded_partitions', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'find_partitions_to_drop', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'drop_partitions', mock.MagicMock()) -def test_cleanup_jobs(setup_environment): - (old_jobs, 
new_jobs, days_str) = setup_environment - - # related_fields - related = [f for f in Job._meta.get_fields(include_hidden=True) if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many)] - - job = old_jobs[-1] # last job - - # gather related objects for job - related_should_be_removed = {} - related_should_be_null = {} - for r in related: - qs = r.related_model._base_manager.using('default').filter(**{"%s__in" % r.field.name: [job.pk]}) - if qs.exists(): - if r.field.remote_field.on_delete == CASCADE: - related_should_be_removed[qs.model] = set(qs.values_list('pk', flat=True)) - if r.field.remote_field.on_delete == SET_NULL: - related_should_be_null[(qs.model, r.field.name)] = set(qs.values_list('pk', flat=True)) - - assert related_should_be_removed - assert related_should_be_null - - call_command('cleanup_jobs', '--days', days_str) - # make sure old jobs are removed - assert not Job.objects.filter(pk__in=[obj.pk for obj in old_jobs]).exists() - - # make sure new jobs are untouched - assert len(new_jobs) == Job.objects.filter(pk__in=[obj.pk for obj in new_jobs]).count() - - # make sure related objects are destroyed or set to NULL (none) - for model, values in related_should_be_removed.items(): - assert not model.objects.filter(pk__in=values).exists() - - for (model, fieldname), values in related_should_be_null.items(): - for v in values: - assert not getattr(model.objects.get(pk=v), fieldname) - - -@pytest.mark.django_db -def test_awxcollector(setup_environment): - """ - Efforts to improve the performance of cleanup_jobs involved - sub-classing the django Collector class. This unit test will - check for parity between the django Collector and the modified - AWXCollector class. AWXCollector is used in cleanup_jobs to - bulk-delete old jobs from the database. 
- - Specifically, Collector has four dictionaries to check: - .dependencies, .data, .fast_deletes, and .field_updates - - These tests will convert each dictionary from AWXCollector - (after running .collect on jobs), from querysets to sets of - objects. The final result should be a dictionary that is - equivalent to django's Collector. - """ - - (old_jobs, new_jobs, days_str) = setup_environment - collector = Collector('default') - collector.collect(old_jobs) - - awx_col = AWXCollector('default') - # awx_col accepts a queryset as input - awx_col.collect(Job.objects.filter(pk__in=[obj.pk for obj in old_jobs])) - - # check that dependencies are the same - assert awx_col.dependencies == collector.dependencies - - # check that objects to delete are the same - awx_del_dict = OrderedDict() - for model, instances in awx_col.data.items(): - awx_del_dict.setdefault(model, set()) - for inst in instances: - # .update() will put each object in a queryset into the set - awx_del_dict[model].update(inst) - assert awx_del_dict == collector.data - - # check that field updates are the same - awx_del_dict = OrderedDict() - for model, instances_for_fieldvalues in awx_col.field_updates.items(): - awx_del_dict.setdefault(model, {}) - for (field, value), instances in instances_for_fieldvalues.items(): - awx_del_dict[model].setdefault((field, value), set()) - for inst in instances: - awx_del_dict[model][(field, value)].update(inst) - - # collector field updates don't use the base (polymorphic parent) model, e.g. - # it will use JobTemplate instead of UnifiedJobTemplate. 
Therefore, - # we need to rebuild the dictionary and grab the model from the field - collector_del_dict = OrderedDict() - for model, instances_for_fieldvalues in collector.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - collector_del_dict.setdefault(field.model, {}) - collector_del_dict[field.model][(field, value)] = collector.field_updates[model][(field, value)] - assert awx_del_dict == collector_del_dict - - # check that fast deletes are the same - collector_fast_deletes = set() - for q in collector.fast_deletes: - collector_fast_deletes.update(q) - - awx_col_fast_deletes = set() - for q in awx_col.fast_deletes: - awx_col_fast_deletes.update(q) - assert collector_fast_deletes == awx_col_fast_deletes diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 7e2178ca4d..ea18b491e6 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -15,7 +15,6 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db.backends.sqlite3.base import SQLiteCursorWrapper # AWX -from awx.main.fields import JSONBField from awx.main.models.projects import Project from awx.main.models.ha import Instance @@ -755,11 +754,6 @@ def get_db_prep_save(self, value, connection, **kwargs): return value -@pytest.fixture -def monkeypatch_jsonbfield_get_db_prep_save(mocker): - JSONBField.get_db_prep_save = get_db_prep_save - - @pytest.fixture def oauth_application(admin): return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password') diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index 40620fd0a3..6c418e5b16 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -110,6 +110,16 @@ class TestActiveCount: source.hosts.create(name='remotely-managed-host', 
inventory=inventory) assert Host.objects.active_count() == 1 + def test_host_case_insensitivity(self, organization): + inv1 = Inventory.objects.create(name='inv1', organization=organization) + inv2 = Inventory.objects.create(name='inv2', organization=organization) + assert Host.objects.active_count() == 0 + inv1.hosts.create(name='host1') + inv2.hosts.create(name='Host1') + assert Host.objects.active_count() == 1 + inv1.hosts.create(name='host2') + assert Host.objects.active_count() == 2 + @pytest.mark.django_db class TestSCMUpdateFeatures: diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py index 81771a7253..39afa7dd32 100644 --- a/awx/main/tests/functional/test_instances.py +++ b/awx/main/tests/functional/test_instances.py @@ -363,6 +363,23 @@ def test_health_check_oh_no(): assert instance.errors == 'This it not a real instance!' +@pytest.mark.django_db +def test_errors_field_alone(): + instance = Instance.objects.create(hostname='foo-1', enabled=True, node_type='hop') + + instance.save_health_data(errors='Node went missing!') + assert instance.errors == 'Node went missing!' 
+ assert instance.capacity == 0 + assert instance.memory == instance.mem_capacity == 0 + assert instance.cpu == instance.cpu_capacity == 0 + + instance.save_health_data(errors='') + assert not instance.errors + assert instance.capacity == 0 + assert instance.memory == instance.mem_capacity == 0 + assert instance.cpu == instance.cpu_capacity == 0 + + @pytest.mark.django_db class TestInstanceGroupOrdering: def test_ad_hoc_instance_groups(self, instance_group_factory, inventory, default_instance_group): diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index 0d4247feb3..01b7c3e2b0 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -181,7 +181,7 @@ def create_reference_data(source_dir, env, content): @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) -def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory): +def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) diff --git a/awx/main/tests/functional/test_named_url.py b/awx/main/tests/functional/test_named_url.py index 7df38aa4e1..884ecd7dc0 100644 --- a/awx/main/tests/functional/test_named_url.py +++ b/awx/main/tests/functional/test_named_url.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from unittest import mock + import pytest from django.core.exceptions import ImproperlyConfigured @@ -31,7 +33,7 @@ def setup_module(module): # in unit test environment. So it is wrapped by try-except block to mute any # unwanted exceptions. 
try: - URLModificationMiddleware() + URLModificationMiddleware(mock.Mock()) except ImproperlyConfigured: pass diff --git a/awx/main/tests/functional/test_session.py b/awx/main/tests/functional/test_session.py index f9eb4c42a4..157000d1ab 100644 --- a/awx/main/tests/functional/test_session.py +++ b/awx/main/tests/functional/test_session.py @@ -1,16 +1,12 @@ from importlib import import_module import pytest -import re from django.conf import settings from django.test.utils import override_settings -from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sessions.models import Session from django.contrib.auth import SESSION_KEY from unittest import mock -from awx.api.versioning import reverse - class AlwaysPassBackend(object): @@ -30,26 +26,6 @@ def test_login_json_not_allowed(get, accept, status): get('/api/login/', HTTP_ACCEPT=accept, expect=status) -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_session_create_delete(admin, post, get): - AlwaysPassBackend.user = admin - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': admin.username, 'password': admin.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - assert 'session_id' in response.cookies - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - session = Session.objects.get(session_key=session_key) - assert int(session.get_decoded()[SESSION_KEY]) == admin.pk - response = get('/api/logout/', middleware=SessionMiddleware(), cookies={'session_id': session_key}, expect=302) - assert not Session.objects.filter(session_key=session_key).exists() - - @pytest.mark.django_db @mock.patch('awx.main.consumers.emit_channel_notification') def test_sessions_unlimited(emit, admin): @@ -81,21 +57,3 @@ def 
test_session_overlimit(emit, admin, alice): store = import_module(settings.SESSION_ENGINE).SessionStore() store.create_model_instance({SESSION_KEY: alice.pk}).save() assert Session.objects.count() == 4 - - -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_password_update_clears_sessions(admin, alice, post, patch): - AlwaysPassBackend.user = alice - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': alice.username, 'password': alice.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - assert Session.objects.filter(session_key=session_key).exists() - patch(reverse('api:user_detail', kwargs={'pk': alice.pk}), admin, data={'password': 'new_password'}, expect=200) - assert not Session.objects.filter(session_key=session_key).exists() diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py index 951767d08e..14c48fa5ff 100644 --- a/awx/main/tests/functional/test_tasks.py +++ b/awx/main/tests/functional/test_tasks.py @@ -27,7 +27,7 @@ def test_no_worker_info_on_AWX_nodes(node_type): @pytest.mark.django_db class TestDependentInventoryUpdate: - def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file): + def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = scm_inventory_source.source_project.create_project_update() @@ -36,7 +36,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') inv_update_mck.assert_called_once_with(proj_update, mock.ANY) - def test_no_unwanted_dependent_inventory_updates(self, project, 
scm_revision_file): + def test_no_unwanted_dependent_inventory_updates(self, project, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = project.create_project_update() @@ -45,7 +45,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') assert not inv_update_mck.called - def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group): + def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group, mock_me): task = RunProjectUpdate() scm_inventory_source.scm_last_revision = '' proj_update = ProjectUpdate.objects.create(project=scm_inventory_source.source_project) @@ -57,7 +57,7 @@ class TestDependentInventoryUpdate: iu_run_mock.assert_called_once_with(inv_update.id) assert inv_update.source_project_update_id == proj_update.pk - def test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group): + def test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group, mock_me): """ Test that dependent inventory updates exhibit good behavior on cancel of the source project update diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py index c523cd2650..21e651e22b 100644 --- a/awx/main/tests/unit/api/test_filters.py +++ b/awx/main/tests/unit/api/test_filters.py @@ -2,7 +2,11 @@ import pytest +# Django +from django.core.exceptions import FieldDoesNotExist + from rest_framework.exceptions import PermissionDenied, ParseError + from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path from awx.main.models import ( AdHocCommand, @@ -22,9 +26,6 @@ from awx.main.models import ( from awx.main.models.oauth import OAuth2Application from awx.main.models.jobs import JobOptions -# Django -from django.db.models.fields import FieldDoesNotExist - def test_related(): field_lookup = FieldLookupBackend() diff --git 
a/awx/main/tests/unit/models/test_credential.py b/awx/main/tests/unit/models/test_credential.py index 082d7df7eb..0dc8daff33 100644 --- a/awx/main/tests/unit/models/test_credential.py +++ b/awx/main/tests/unit/models/test_credential.py @@ -1,12 +1,15 @@ # -*- coding: utf-8 -*- +import pytest + from awx.main.models import Credential, CredentialType +@pytest.mark.django_db def test_unique_hash_with_unicode(): - ct = CredentialType(name=u'Väult', kind='vault') - cred = Credential(id=4, name=u'Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={u'vault_id': u'🐉🐉🐉'}, credential_type_id=42) - assert cred.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)' + ct = CredentialType.objects.create(name='Väult', kind='vault') + cred = Credential.objects.create(name='Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={'vault_id': '🐉🐉🐉'}) + assert cred.unique_hash(display=True) == 'Väult (id=🐉🐉🐉)' def test_custom_cred_with_empty_encrypted_field(): diff --git a/awx/main/tests/unit/models/test_survey_models.py b/awx/main/tests/unit/models/test_survey_models.py index c3c9a8723f..9ec5673cd8 100644 --- a/awx/main/tests/unit/models/test_survey_models.py +++ b/awx/main/tests/unit/models/test_survey_models.py @@ -59,6 +59,38 @@ class SurveyVariableValidation: assert accepted == {} assert str(errors[0]) == "Value 5 for 'a' expected to be a string." 
+ def test_job_template_survey_default_variable_validation(self, job_template_factory): + objects = job_template_factory( + "survey_variable_validation", + organization="org1", + inventory="inventory1", + credential="cred1", + persisted=False, + ) + obj = objects.job_template + obj.survey_spec = { + "description": "", + "spec": [ + { + "required": True, + "min": 0, + "default": "2", + "max": 1024, + "question_description": "", + "choices": "", + "variable": "a", + "question_name": "float_number", + "type": "float", + } + ], + "name": "", + } + + obj.survey_enabled = True + accepted, _, errors = obj.accept_or_ignore_variables({"a": 2}) + assert accepted == {"a": 2.0} + assert not errors + @pytest.fixture def job(mocker): diff --git a/awx/main/tests/unit/scheduler/test_dag_workflow.py b/awx/main/tests/unit/scheduler/test_dag_workflow.py index 18c3d193f7..a3225b76a3 100644 --- a/awx/main/tests/unit/scheduler/test_dag_workflow.py +++ b/awx/main/tests/unit/scheduler/test_dag_workflow.py @@ -2,8 +2,8 @@ import pytest import uuid import os -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from awx.main.scheduler.dag_workflow import WorkflowDAG @@ -468,7 +468,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -484,7 +484,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]."
).format(nodes[2].id) @@ -500,7 +500,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]." ).format(nodes[0].id) @@ -512,7 +512,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -525,7 +525,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py index 8c00a95194..da669ae47d 100644 --- a/awx/main/tests/unit/test_fields.py +++ b/awx/main/tests/unit/test_fields.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from unittest import mock import pytest from django.core.exceptions import ValidationError @@ -8,7 +9,7 @@ from django.db.models.fields.related_descriptors import ReverseManyToOneDescript from rest_framework.serializers import ValidationError as DRFValidationError -from awx.main.models import Credential, CredentialType, BaseModel +from awx.main.models import Credential, CredentialType from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDescriptor @@ -16,7 +17,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc 'schema, given, message', [ ( - { # immitates what the CredentialType injectors field is + { # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", "properties": {"extra_vars": {"additionalProperties": False, "type": "object"}}, @@ -25,7 +26,7 @@ from awx.main.fields import 
JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc "list provided in relative path ['extra_vars'], expected dict", ), ( - { # immitates what the CredentialType injectors field is + { # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", }, @@ -35,7 +36,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc ], ) def test_custom_error_messages(schema, given, message): - instance = BaseModel() + instance = mock.Mock() class MockFieldSubclass(JSONSchemaField): def schema(self, model_instance): diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index f2d617abb8..69a7f03c33 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -166,7 +166,7 @@ def test_safe_env_returns_new_copy(): @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -206,7 +206,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -248,7 +248,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def 
test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -292,7 +292,7 @@ def test_openstack_client_config_generation_with_region(mocker, source, expected @pytest.mark.parametrize("source,expected", [(False, False), (True, True)]) -def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -352,7 +352,7 @@ class TestExtraVarSanitation(TestJobExecution): UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}' - def test_vars_unsafe_by_default(self, job, private_data_dir): + def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me): job.created_by = User(pk=123, username='angry-spud') job.inventory = Inventory(pk=123, name='example-inv') @@ -390,7 +390,7 @@ class TestExtraVarSanitation(TestJobExecution): ]: assert not hasattr(extra_vars[safe], '__UNSAFE__') - def test_launchtime_vars_unsafe(self, job, private_data_dir): + def test_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -401,7 +401,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_launchtime_vars_unsafe(self, job, private_data_dir): + def test_nested_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': [self.UNSAFE]}}) task = jobs.RunJob() @@ -412,7 +412,7 @@ class TestExtraVarSanitation(TestJobExecution): assert 
extra_vars['msg'] == {'a': [self.UNSAFE]} assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__') - def test_allowed_jt_extra_vars(self, job, private_data_dir): + def test_allowed_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -423,7 +423,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert not hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_allowed_vars(self, job, private_data_dir): + def test_nested_allowed_vars(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}}) job.job_template.extra_vars = job.extra_vars task = jobs.RunJob() @@ -435,7 +435,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == {'a': {'b': [self.UNSAFE]}} assert not hasattr(extra_vars['msg']['a']['b'][0], '__UNSAFE__') - def test_sensitive_values_dont_leak(self, job, private_data_dir): + def test_sensitive_values_dont_leak(self, job, private_data_dir, mock_me): # JT defines `msg=SENSITIVE`, the job *should not* be able to do # `other_var=SENSITIVE` job.job_template.extra_vars = json.dumps({'msg': self.UNSAFE}) @@ -452,7 +452,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['other_var'] == self.UNSAFE assert hasattr(extra_vars['other_var'], '__UNSAFE__') - def test_overwritten_jt_extra_vars(self, job, private_data_dir): + def test_overwritten_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = json.dumps({'msg': 'SAFE'}) job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -466,7 +466,7 @@ class TestExtraVarSanitation(TestJobExecution): class TestGenericRun: - def test_generic_failure(self, patch_Job, execution_environment): + def test_generic_failure(self, patch_Job, execution_environment, mock_me): job = Job(status='running', inventory=Inventory(), 
project=Project(local_path='/projects/_23_foo')) job.websocket_emit_status = mock.Mock() job.execution_environment = execution_environment @@ -486,7 +486,7 @@ class TestGenericRun: assert update_model_call['status'] == 'error' assert update_model_call['emitted_events'] == 0 - def test_cancel_flag(self, job, update_model_wrapper, execution_environment): + def test_cancel_flag(self, job, update_model_wrapper, execution_environment, mock_me): job.status = 'running' job.cancel_flag = True job.websocket_emit_status = mock.Mock() @@ -506,7 +506,7 @@ class TestGenericRun: for c in [mock.call(1, status='running', start_args=''), mock.call(1, status='canceled')]: assert c in task.update_model.call_args_list - def test_event_count(self): + def test_event_count(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() task.runner_callback.instance = Job() @@ -516,7 +516,7 @@ class TestGenericRun: [task.runner_callback.event_handler(event_data) for i in range(20)] assert 20 == task.runner_callback.event_ct - def test_finished_callback_eof(self): + def test_finished_callback_eof(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() task.runner_callback.instance = Job(pk=1, id=1) @@ -524,7 +524,7 @@ class TestGenericRun: task.runner_callback.finished_callback(None) task.runner_callback.dispatcher.dispatch.assert_called_with({'event': 'EOF', 'final_counter': 17, 'job_id': 1, 'guid': None}) - def test_save_job_metadata(self, job, update_model_wrapper): + def test_save_job_metadata(self, job, update_model_wrapper, mock_me): class MockMe: pass @@ -542,7 +542,7 @@ class TestGenericRun: 1, job_args=json.dumps({'foo': 'bar'}), job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'} ) - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): job = Job(created_by=User(pk=123, username='angry-spud')) task = jobs.RunJob() @@ -557,7 +557,7 @@ class 
TestGenericRun: assert extra_vars['awx_user_id'] == 123 assert extra_vars['awx_user_name'] == "angry-spud" - def test_survey_extra_vars(self): + def test_survey_extra_vars(self, mock_me): job = Job() job.extra_vars = json.dumps({'super_secret': encrypt_value('CLASSIFIED', pk=None)}) job.survey_passwords = {'super_secret': '$encrypted$'} @@ -571,7 +571,7 @@ class TestGenericRun: private_data_dir, extra_vars, safe_dict = call_args assert extra_vars['super_secret'] == "CLASSIFIED" - def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment): + def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment, mock_me): job = Job(project=Project(), inventory=Inventory()) job.execution_environment = execution_environment @@ -586,7 +586,7 @@ class TestGenericRun: @pytest.mark.django_db class TestAdhocRun(TestJobExecution): - def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) @@ -611,7 +611,7 @@ class TestAdhocRun(TestJobExecution): be wrapped in unsafe ''' ''' - def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): adhoc_job.module_args = 'ls' adhoc_job.extra_vars = json.dumps({ 'foo': '{{ bar }}' @@ -630,7 +630,7 @@ class TestAdhocRun(TestJobExecution): assert extra_vars['foo'] == '{{ bar }}' ''' - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): adhoc_job = AdHocCommand(created_by=User(pk=123, username='angry-spud')) task = jobs.RunAdHocCommand() @@ -691,7 +691,7 @@ class TestJobCredentials(TestJobExecution): ] } - def test_username_jinja_usage(self, job, private_data_dir): + def test_username_jinja_usage(self, job, private_data_dir, 
mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': '{{ ansible_ssh_pass }}'}) @@ -702,7 +702,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) @pytest.mark.parametrize("flag", ['become_username', 'become_method']) - def test_become_jinja_usage(self, job, private_data_dir, flag): + def test_become_jinja_usage(self, job, private_data_dir, flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'joe', flag: '{{ ansible_ssh_pass }}'}) @@ -713,7 +713,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) - def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag): + def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', field: 'secret'}) @@ -730,7 +730,7 @@ class TestJobCredentials(TestJobExecution): if expected_flag: assert expected_flag in ' '.join(args) - def test_net_ssh_key_unlock(self, job): + def test_net_ssh_key_unlock(self, job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'secret'}) @@ -743,7 +743,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret' in expect_passwords.values() - def test_net_first_ssh_key_unlock_wins(self, job): + def test_net_first_ssh_key_unlock_wins(self, job, mock_me): task = jobs.RunJob() for i in range(3): net = CredentialType.defaults['net']() @@ -757,7 +757,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret0' in expect_passwords.values() - def test_prefer_ssh_over_net_ssh_key_unlock(self, job): + def 
test_prefer_ssh_over_net_ssh_key_unlock(self, job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() net_credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'net_secret'}) @@ -776,7 +776,7 @@ class TestJobCredentials(TestJobExecution): assert 'ssh_secret' in expect_passwords.values() - def test_vault_password(self, private_data_dir, job): + def test_vault_password(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'vault-me'}) @@ -788,10 +788,10 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'vault-me' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_vault_password_ask(self, private_data_dir, job): + def test_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'ASK'}) @@ -803,10 +803,10 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'provided-at-launch' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_multi_vault_password(self, private_data_dir, job): + def test_multi_vault_password(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod', 'dotted.name']): @@ -820,16 +820,16 @@ class 
TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'pass@prod' # noqa - assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'pass@dev' # noqa - assert vault_passwords['Vault password \(dotted.name\):\\s*?$'] == 'pass@dotted.name' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'pass@prod' # noqa + assert vault_passwords[r'Vault password \(dev\):\s*?$'] == 'pass@dev' # noqa + assert vault_passwords[r'Vault password \(dotted.name\):\s*?$'] == 'pass@dotted.name' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' # noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) assert '--vault-id dotted.name@prompt' in ' '.join(args) - def test_multi_vault_id_conflict(self, job): + def test_multi_vault_id_conflict(self, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i in range(2): @@ -842,7 +842,7 @@ class TestJobCredentials(TestJobExecution): assert 'multiple vault credentials were specified with --vault-id' in str(e.value) - def test_multi_vault_password_ask(self, private_data_dir, job): + def test_multi_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod']): @@ -855,15 +855,15 @@ class TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'provided-at-launch@prod' # noqa - assert vault_passwords['Vault 
password \(dev\):\\s*?$'] == 'provided-at-launch@dev' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'provided-at-launch@prod' # noqa + assert vault_passwords[r'Vault password \(dev\):\s*?$'] == 'provided-at-launch@dev' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' # noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) @pytest.mark.parametrize("verify", (True, False)) - def test_k8s_credential(self, job, private_data_dir, verify): + def test_k8s_credential(self, job, private_data_dir, verify, mock_me): k8s = CredentialType.defaults['kubernetes_bearer_token']() inputs = { 'host': 'https://example.org/', @@ -898,7 +898,7 @@ class TestJobCredentials(TestJobExecution): assert safe_env['K8S_AUTH_API_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential(self, job, private_data_dir): + def test_aws_cloud_credential(self, job, private_data_dir, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -913,7 +913,7 @@ class TestJobCredentials(TestJobExecution): assert 'AWS_SECURITY_TOKEN' not in env assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job): + def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret', 'security_token': 'token'}) for key in ('password', 'security_token'): @@ -929,7 +929,7 @@ class TestJobCredentials(TestJobExecution): assert env['AWS_SECURITY_TOKEN'] == 'token' assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def 
test_gce_credentials(self, private_data_dir, job): + def test_gce_credentials(self, private_data_dir, job, mock_me): gce = CredentialType.defaults['gce']() credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) credential.inputs['ssh_key_data'] = encrypt_field(credential, 'ssh_key_data') @@ -946,7 +946,7 @@ class TestJobCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_azure_rm_with_tenant(self, private_data_dir, job): + def test_azure_rm_with_tenant(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'client': 'some-client', 'secret': 'some-secret', 'tenant': 'some-tenant', 'subscription': 'some-subscription'} @@ -964,7 +964,7 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription' assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_with_password(self, private_data_dir, job): + def test_azure_rm_with_password(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret', 'cloud_environment': 'foobar'} @@ -982,7 +982,7 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar' assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_vmware_credentials(self, private_data_dir, job): + def test_vmware_credentials(self, private_data_dir, job, mock_me): vmware = CredentialType.defaults['vmware']() credential = Credential(pk=1, credential_type=vmware, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -997,7 +997,7 @@ class 
TestJobCredentials(TestJobExecution): assert env['VMWARE_HOST'] == 'https://example.org' assert safe_env['VMWARE_PASSWORD'] == HIDDEN_PASSWORD - def test_openstack_credentials(self, private_data_dir, job): + def test_openstack_credentials(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job openstack = CredentialType.defaults['openstack']() @@ -1028,7 +1028,7 @@ class TestJobCredentials(TestJobExecution): ) @pytest.mark.parametrize("ca_file", [None, '/path/to/some/file']) - def test_rhv_credentials(self, private_data_dir, job, ca_file): + def test_rhv_credentials(self, private_data_dir, job, ca_file, mock_me): rhv = CredentialType.defaults['rhv']() inputs = { 'host': 'some-ovirt-host.example.org', @@ -1065,7 +1065,7 @@ class TestJobCredentials(TestJobExecution): [None, '0'], ], ) - def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir): + def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir, mock_me): task = jobs.RunJob() task.instance = job net = CredentialType.defaults['net']() @@ -1090,7 +1090,7 @@ class TestJobCredentials(TestJobExecution): assert open(env['ANSIBLE_NET_SSH_KEYFILE'], 'r').read() == self.EXAMPLE_PRIVATE_KEY assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir): + def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1103,7 +1103,7 @@ class TestJobCredentials(TestJobExecution): with pytest.raises(jinja2.exceptions.UndefinedError): credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir) - def test_custom_environment_injectors(self, private_data_dir): + def test_custom_environment_injectors(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1118,7 +1118,7 @@ class 
TestJobCredentials(TestJobExecution): assert env['MY_CLOUD_API_TOKEN'] == 'ABC123' - def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir): + def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1133,7 +1133,7 @@ class TestJobCredentials(TestJobExecution): assert env['TURBO_BUTTON'] == str(True) - def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job): + def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job some_cloud = CredentialType( @@ -1150,7 +1150,7 @@ class TestJobCredentials(TestJobExecution): assert env['JOB_ID'] == str(job.pk) - def test_custom_environment_injectors_with_secret_field(self, private_data_dir): + def test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1169,7 +1169,7 @@ class TestJobCredentials(TestJobExecution): assert 'SUPER-SECRET-123' not in safe_env.values() assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job): + def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1188,7 +1188,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["api_token"] == "ABC123" assert hasattr(extra_vars["api_token"], '__UNSAFE__') - def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1207,7 +1207,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "True" return 
['successful', 0] - def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir): + def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1225,7 +1225,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "FAST!" - def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me): """ extra_vars that contain secret field values should be censored in the DB """ @@ -1247,7 +1247,7 @@ class TestJobCredentials(TestJobExecution): extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["password"] == "SUPER-SECRET-123" - def test_custom_environment_injectors_with_file(self, private_data_dir): + def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1263,7 +1263,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == '[mycloud]\nABC123' - def test_custom_environment_injectors_with_unicode_content(self, private_data_dir): + def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me): value = 'Iñtërnâtiônàlizætiøn' some_cloud = CredentialType( kind='cloud', @@ -1283,7 +1283,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == value - def test_custom_environment_injectors_with_files(self, private_data_dir): + def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1304,7 +1304,7 @@ class TestJobCredentials(TestJobExecution): assert open(cert_path, 
'r').read() == '[mycert]\nCERT123' assert open(key_path, 'r').read() == '[mykey]\nKEY123' - def test_multi_cloud(self, private_data_dir): + def test_multi_cloud(self, private_data_dir, mock_me): gce = CredentialType.defaults['gce']() gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) gce_credential.inputs['ssh_key_data'] = encrypt_field(gce_credential, 'ssh_key_data') @@ -1332,7 +1332,7 @@ class TestJobCredentials(TestJobExecution): assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_awx_task_env(self, settings, private_data_dir, job): + def test_awx_task_env(self, settings, private_data_dir, job, mock_me): settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunJob() task.instance = job @@ -1359,7 +1359,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): ], } - def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore): + def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore, mock_me): settings.GALAXY_IGNORE_CERTS = ignore task = jobs.RunProjectUpdate() task.instance = project_update @@ -1369,7 +1369,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): else: assert 'ANSIBLE_GALAXY_IGNORE' not in env - def test_galaxy_credentials_empty(self, private_data_dir, project_update): + def test_galaxy_credentials_empty(self, private_data_dir, project_update, mock_me): class RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1388,7 +1388,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): for k in env: assert not k.startswith('ANSIBLE_GALAXY_SERVER') - def test_single_public_galaxy(self, private_data_dir, project_update): + def test_single_public_galaxy(self, private_data_dir, project_update, mock_me): class RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1418,7 +1418,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): 
('ANSIBLE_GALAXY_SERVER_SERVER0_URL', 'https://galaxy.ansible.com/'), ] - def test_multiple_galaxy_endpoints(self, private_data_dir, project_update): + def test_multiple_galaxy_endpoints(self, private_data_dir, project_update, mock_me): credential_type = CredentialType.defaults['galaxy_api_token']() public_galaxy = Credential( pk=1, @@ -1479,7 +1479,7 @@ class TestProjectUpdateCredentials(TestJobExecution): ], } - def test_username_and_password_auth(self, project_update, scm_type): + def test_username_and_password_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1493,7 +1493,7 @@ class TestProjectUpdateCredentials(TestJobExecution): assert 'bob' in expect_passwords.values() assert 'secret' in expect_passwords.values() - def test_ssh_key_auth(self, project_update, scm_type): + def test_ssh_key_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1505,7 +1505,7 @@ class TestProjectUpdateCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) assert 'bob' in expect_passwords.values() - def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment): + def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment, mock_me): project_update.execution_environment = execution_environment settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunProjectUpdate() @@ -1522,7 +1522,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def inventory_update(self, execution_environment): return InventoryUpdate(pk=1, execution_environment=execution_environment, inventory_source=InventorySource(pk=1, inventory=Inventory(pk=1))) - def test_source_without_credential(self, mocker, inventory_update, private_data_dir): + def 
test_source_without_credential(self, mocker, inventory_update, private_data_dir, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update inventory_update.source = 'ec2' @@ -1535,7 +1535,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert 'AWS_ACCESS_KEY_ID' not in env assert 'AWS_SECRET_ACCESS_KEY' not in env - def test_ec2_source(self, private_data_dir, inventory_update, mocker): + def test_ec2_source(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update aws = CredentialType.defaults['aws']() @@ -1559,7 +1559,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_vmware_source(self, inventory_update, private_data_dir, mocker): + def test_vmware_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update vmware = CredentialType.defaults['vmware']() @@ -1587,7 +1587,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): env["VMWARE_HOST"] == "https://example.org", env["VMWARE_VALIDATE_CERTS"] == "False", - def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1623,7 +1623,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1652,7 +1652,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): 
assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_gce_source(self, inventory_update, private_data_dir, mocker): + def test_gce_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1682,7 +1682,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_openstack_source(self, inventory_update, private_data_dir, mocker): + def test_openstack_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update openstack = CredentialType.defaults['openstack']() @@ -1722,7 +1722,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): in shade_config ) - def test_satellite6_source(self, inventory_update, private_data_dir, mocker): + def test_satellite6_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update satellite6 = CredentialType.defaults['satellite6']() @@ -1745,7 +1745,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env["FOREMAN_PASSWORD"] == "secret" assert safe_env["FOREMAN_PASSWORD"] == HIDDEN_PASSWORD - def test_insights_source(self, inventory_update, private_data_dir, mocker): + def test_insights_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update insights = CredentialType.defaults['insights']() @@ -1774,7 +1774,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['INSIGHTS_PASSWORD'] == HIDDEN_PASSWORD @pytest.mark.parametrize('verify', [True, False]) - def test_tower_source(self, verify, inventory_update, private_data_dir, mocker): + def test_tower_source(self, verify, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() 
task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1802,7 +1802,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['CONTROLLER_VERIFY_SSL'] == 'False' assert safe_env['CONTROLLER_PASSWORD'] == HIDDEN_PASSWORD - def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker): + def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1830,7 +1830,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['TOWER_VERIFY_SSL'] == 'False' - def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker): + def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1869,7 +1869,7 @@ def test_fcntl_ioerror(): @mock.patch('os.open') @mock.patch('logging.getLogger') -def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): +def test_acquire_lock_open_fail_logged(logging_getLogger, os_open, mock_me): err = OSError() err.errno = 3 err.strerror = 'dummy message' @@ -1893,7 +1893,7 @@ def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): @mock.patch('os.close') @mock.patch('logging.getLogger') @mock.patch('fcntl.lockf') -def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open): +def test_acquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open, mock_me): err = IOError() err.errno = 3 err.strerror = 'dummy message' @@ -1913,7 +1913,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_ with pytest.raises(IOError): ProjectUpdate.acquire_lock(instance) os_close.assert_called_with(3) - assert logger.err.called_with("I/O error({0}) while trying to aquire 
lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) + assert logger.err.called_with("I/O error({0}) while trying to acquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) @pytest.mark.parametrize('injector_cls', [cls for cls in ManagedCredentialType.registry.values() if cls.injectors]) @@ -1947,7 +1947,7 @@ def test_notification_job_not_finished(logging_getLogger, mocker): with mocker.patch('awx.main.models.UnifiedJob.objects.get', uj): system.handle_success_and_failure_notifications(1) - assert logger.warn.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + assert logger.warning.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") def test_notification_job_finished(mocker): @@ -1958,7 +1958,7 @@ def test_notification_job_finished(mocker): uj.send_notification_templates.assert_called() -def test_job_run_no_ee(): +def test_job_run_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) job = Job(project=proj, organization=org, inventory=Inventory(pk=1)) @@ -1977,7 +1977,7 @@ def test_job_run_no_ee(): assert 'Job could not start because no Execution Environment could be found' in str(e.value) -def test_project_update_no_ee(): +def test_project_update_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) project_update = ProjectUpdate(pk=1, project=proj, scm_type='git') diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py index 52e37ab893..ef0abb80d3 100644 --- a/awx/main/tests/unit/utils/test_filters.py +++ b/awx/main/tests/unit/utils/test_filters.py @@ -4,7 +4,6 @@ from unittest import mock # AWX from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled -from awx.main.models import Host # Django from django.db.models import Q @@ -219,39 +218,6 @@ class 
TestSmartFilterQueryFromString: assert str(q) == str(q_expected) -class TestSmartFilterQueryFromStringNoDB: - @pytest.mark.parametrize( - "filter_string,q_expected", - [ - ( - 'ansible_facts__a="true" and ansible_facts__b="true" and ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - & Q(**{u"ansible_facts__contains": {u"b": u"true"}}) - & Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ( - 'ansible_facts__a="true" or ansible_facts__b="true" or ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"b": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ('search=foo', Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"}))), - ( - 'search=foo and ansible_facts__a="null"', - Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"})) & Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}}), - ), - ('name=foo or name=bar and name=foobar', Q(name="foo") | Q(name="bar") & Q(name="foobar")), - ], - ) - def test_does_not_invoke_db(self, filter_string, q_expected): - q = SmartFilter.query_from_string(filter_string) - assert str(q.query) == str(Host.objects.filter(q_expected).query) - - ''' #('"facts__quoted_val"="f\"oo"', 1), #('facts__facts__arr[]="foo"', 1), diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 49885d70c7..a88113d2e4 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -19,7 +19,7 @@ from functools import reduce, wraps # Django from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist from django.utils.dateparse import parse_datetime -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.functional import cached_property from django.db import connection from django.db.models.fields.related import ForeignObjectRel, ManyToManyField diff --git 
a/awx/main/utils/deletion.py b/awx/main/utils/deletion.py deleted file mode 100644 index d17bc0b710..0000000000 --- a/awx/main/utils/deletion.py +++ /dev/null @@ -1,173 +0,0 @@ -from django.contrib.contenttypes.models import ContentType -from django.db.models.deletion import ( - DO_NOTHING, - Collector, - get_candidate_relations_to_delete, -) -from collections import Counter, OrderedDict -from django.db import transaction -from django.db.models import sql - - -def bulk_related_objects(field, objs, using): - # This overrides the method in django.contrib.contenttypes.fields.py - """ - Return all objects related to ``objs`` via this ``GenericRelation``. - """ - return field.remote_field.model._base_manager.db_manager(using).filter( - **{ - "%s__pk" - % field.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(field.model, for_concrete_model=field.for_concrete_model).pk, - "%s__in" % field.object_id_field_name: list(objs.values_list('pk', flat=True)), - } - ) - - -def pre_delete(qs): - # taken from .delete method in django.db.models.query.py - assert qs.query.can_filter(), "Cannot use 'limit' or 'offset' with delete." - - if qs._fields is not None: - raise TypeError("Cannot call delete() after .values() or .values_list()") - - del_query = qs._chain() - - # The delete is actually 2 queries - one to find related objects, - # and one to delete. Make sure that the discovery of related - # objects is performed on the same database as the deletion. - del_query._for_write = True - - # Disable non-supported fields. - del_query.query.select_for_update = False - del_query.query.select_related = False - del_query.query.clear_ordering(force_empty=True) - return del_query - - -class AWXCollector(Collector): - def add(self, objs, source=None, nullable=False, reverse_dependency=False): - """ - Add 'objs' to the collection of objects to be deleted. 
If the call is - the result of a cascade, 'source' should be the model that caused it, - and 'nullable' should be set to True if the relation can be null. - - Return a list of all objects that were not already collected. - """ - if not objs.exists(): - return objs - model = objs.model - self.data.setdefault(model, []) - self.data[model].append(objs) - # Nullable relationships can be ignored -- they are nulled out before - # deleting, and therefore do not affect the order in which objects have - # to be deleted. - if source is not None and not nullable: - if reverse_dependency: - source, model = model, source - self.dependencies.setdefault(source._meta.concrete_model, set()).add(model._meta.concrete_model) - return objs - - def add_field_update(self, field, value, objs): - """ - Schedule a field update. 'objs' must be a homogeneous iterable - collection of model instances (e.g. a QuerySet). - """ - if not objs.exists(): - return - model = objs.model - self.field_updates.setdefault(model, {}) - self.field_updates[model].setdefault((field, value), []) - self.field_updates[model][(field, value)].append(objs) - - def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False): - """ - Add 'objs' to the collection of objects to be deleted as well as all - parent instances. 'objs' must be a homogeneous iterable collection of - model instances (e.g. a QuerySet). If 'collect_related' is True, - related objects will be handled by their respective on_delete handler. - - If the call is the result of a cascade, 'source' should be the model - that caused it and 'nullable' should be set to True, if the relation - can be null. - - If 'reverse_dependency' is True, 'source' will be deleted before the - current model, rather than after. (Needed for cascading to parent - models, the one case in which the cascade follows the forwards - direction of an FK rather than the reverse direction.) 
- - If 'keep_parents' is True, data of parent model's will be not deleted. - """ - - if hasattr(objs, 'polymorphic_disabled'): - objs.polymorphic_disabled = True - - if self.can_fast_delete(objs): - self.fast_deletes.append(objs) - return - new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency) - if not new_objs.exists(): - return - - model = new_objs.model - - if not keep_parents: - # Recursively collect concrete model's parent models, but not their - # related objects. These will be found by meta.get_fields() - concrete_model = model._meta.concrete_model - for ptr in concrete_model._meta.parents.keys(): - if ptr: - parent_objs = ptr.objects.filter(pk__in=new_objs.values_list('pk', flat=True)) - self.collect(parent_objs, source=model, collect_related=False, reverse_dependency=True) - if collect_related: - parents = model._meta.parents - for related in get_candidate_relations_to_delete(model._meta): - # Preserve parent reverse relationships if keep_parents=True. - if keep_parents and related.model in parents: - continue - field = related.field - if field.remote_field.on_delete == DO_NOTHING: - continue - related_qs = self.related_objects(related, new_objs) - if self.can_fast_delete(related_qs, from_field=field): - self.fast_deletes.append(related_qs) - elif related_qs: - field.remote_field.on_delete(self, field, related_qs, self.using) - for field in model._meta.private_fields: - if hasattr(field, 'bulk_related_objects'): - # It's something like generic foreign key. 
- sub_objs = bulk_related_objects(field, new_objs, self.using) - self.collect(sub_objs, source=model, nullable=True) - - def delete(self): - self.sort() - - # collect pk_list before deletion (once things start to delete - # queries might not be able to retreive pk list) - del_dict = OrderedDict() - for model, instances in self.data.items(): - del_dict.setdefault(model, []) - for inst in instances: - del_dict[model] += list(inst.values_list('pk', flat=True)) - - deleted_counter = Counter() - - with transaction.atomic(using=self.using, savepoint=False): - - # update fields - for model, instances_for_fieldvalues in self.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - for inst in instances: - query = sql.UpdateQuery(model) - query.update_batch(inst.values_list('pk', flat=True), {field.name: value}, self.using) - # fast deletes - for qs in self.fast_deletes: - count = qs._raw_delete(using=self.using) - deleted_counter[qs.model._meta.label] += count - - # delete instances - for model, pk_list in del_dict.items(): - query = sql.DeleteQuery(model) - count = query.delete_batch(pk_list, self.using) - deleted_counter[model._meta.label] += count - - return sum(deleted_counter.values()), dict(deleted_counter) diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py index 002ab957bd..f0d29c0d10 100644 --- a/awx/main/utils/filters.py +++ b/awx/main/utils/filters.py @@ -15,8 +15,8 @@ from django.apps import apps from django.db import models from django.conf import settings +from django_guid import get_guid from django_guid.log_filters import CorrelationId -from django_guid.middleware import GuidMiddleware from awx import MODE from awx.main.constants import LOGGER_BLOCKLIST @@ -188,13 +188,11 @@ class SmartFilter(object): ''' def _json_path_to_contains(self, k, v): - from awx.main.fields import JSONBField # avoid a circular import - if not k.startswith(SmartFilter.SEARCHABLE_RELATIONSHIP): v = 
self.strip_quotes_traditional_logic(v) return (k, v) - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): match = '__{}'.format(match) if k.endswith(match): if match == '__exact': @@ -368,7 +366,7 @@ class SmartFilter(object): class DefaultCorrelationId(CorrelationId): def filter(self, record): - guid = GuidMiddleware.get_guid() or '-' + guid = get_guid() or '-' if MODE == 'development': guid = guid[:8] record.guid = guid diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index ef761159ed..c6a2b3b596 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -10,6 +10,7 @@ from datetime import datetime # Django from django.conf import settings from django.utils.timezone import now +from django.utils.encoding import force_str # AWX from awx.main.exceptions import PostRunError @@ -42,7 +43,7 @@ class RSysLogHandler(logging.handlers.SysLogHandler): msg += exc.splitlines()[-1] except Exception: msg += exc - msg = '\n'.join([msg, record.msg, '']) + msg = '\n'.join([msg, force_str(record.msg), '']) # force_str used in case of translated strings sys.stderr.write(msg) def emit(self, msg): diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py index eeae581655..bec953f822 100644 --- a/awx/main/utils/licensing.py +++ b/awx/main/utils/licensing.py @@ -33,7 +33,7 @@ from cryptography import x509 # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ MAX_INSTANCES = 9999999 diff --git a/awx/main/utils/reload.py b/awx/main/utils/reload.py index 6651fcf44d..a7c2a1ed99 100644 --- a/awx/main/utils/reload.py +++ b/awx/main/utils/reload.py @@ -40,5 +40,5 @@ def supervisor_service_command(command, service='*', communicate=True): def stop_local_services(communicate=True): - logger.warn('Stopping services on this node in response to user action') + logger.warning('Stopping services on this 
node in response to user action') supervisor_service_command(command='stop', communicate=communicate) diff --git a/awx/main/validators.py b/awx/main/validators.py index 872eabafdc..751d38060b 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -6,7 +6,7 @@ import base64 import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # REST framework @@ -198,7 +198,7 @@ def vars_validate_or_raise(vars_str): def validate_container_image_name(value): - """ + r""" from https://github.com/distribution/distribution/blob/af8ac809336c2316c81b08605d92d94f8670ad15/reference/reference.go#L4 Grammar diff --git a/awx/main/views.py b/awx/main/views.py index bb6c43b6bf..8ff612e8ba 100644 --- a/awx/main/views.py +++ b/awx/main/views.py @@ -7,7 +7,7 @@ import json from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.html import format_html -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt # Django REST Framework diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index ec2fae5e89..47006adc9d 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -4,6 +4,7 @@ import asyncio import aiohttp from aiohttp import client_exceptions +from asgiref.sync import sync_to_async from channels.layers import get_channel_layer @@ -30,6 +31,7 @@ def unwrap_broadcast_msg(payload: dict): return (payload['group'], payload['message']) +@sync_to_async def get_broadcast_hosts(): Instance = apps.get_model('main', 'Instance') instances = ( @@ -90,7 +92,7 @@ class WebsocketTask: if attempt > 0: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS) except asyncio.CancelledError: - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled") 
+ logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled") raise uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/" @@ -107,18 +109,18 @@ class WebsocketTask: except asyncio.CancelledError: # TODO: Check if connected and disconnect # Possibly use run_until_complete() if disconnect is async - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled.") + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled.") self.stats.record_connection_lost() raise except client_exceptions.ClientConnectorError as e: - logger.warn(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") except asyncio.TimeoutError: - logger.warn(f"Connection from {self.name} to {self.remote_host} timed out.") + logger.warning(f"Connection from {self.name} to {self.remote_host} timed out.") except Exception as e: # Early on, this is our canary. I'm not sure what exceptions we can really encounter. 
- logger.warn(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") else: - logger.warn(f"Connection from {self.name} to {self.remote_host} list.") + logger.warning(f"Connection from {self.name} to {self.remote_host} list.") self.stats.record_connection_lost() self.start(attempt=attempt + 1) @@ -144,7 +146,7 @@ class BroadcastWebsocketTask(WebsocketTask): logmsg = "Failed to decode broadcast message" if logger.isEnabledFor(logging.DEBUG): logmsg = "{} {}".format(logmsg, payload) - logger.warn(logmsg) + logger.warning(logmsg) continue (group, message) = unwrap_broadcast_msg(payload) if group == "metrics": @@ -170,7 +172,7 @@ class BroadcastWebsocketManager(object): async def run_per_host_websocket(self): while True: - known_hosts = get_broadcast_hosts() + known_hosts = await get_broadcast_hosts() future_remote_hosts = known_hosts.keys() current_remote_hosts = self.broadcast_tasks.keys() deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) @@ -183,9 +185,9 @@ class BroadcastWebsocketManager(object): new_remote_hosts.add(hostname) if deleted_remote_hosts: - logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list") + logger.warning(f"Removing {deleted_remote_hosts} from websocket broadcast list") if new_remote_hosts: - logger.warn(f"Adding {new_remote_hosts} to websocket broadcast list") + logger.warning(f"Adding {new_remote_hosts} to websocket broadcast list") for h in deleted_remote_hosts: self.broadcast_tasks[h].cancel() diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 5ddefb66bd..b11710495e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -7,14 +7,6 @@ import re # noqa import sys from datetime import timedelta -# global settings -from django.conf import global_settings - -# Update this module's local settings from the global settings module. 
-this_module = sys.modules[__name__] -for setting in dir(global_settings): - if setting == setting.upper(): - setattr(this_module, setting, getattr(global_settings, setting)) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(__file__)) @@ -49,6 +41,11 @@ else: DEBUG = True SQL_DEBUG = DEBUG +# FIXME: it would be nice to cycle back around and allow this to be +# BigAutoField going forward, but we'd have to be explicit about our +# existing models. +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', @@ -152,7 +149,8 @@ SITE_ID = 1 # Make this unique, and don't share it with anybody. if os.path.exists('/etc/tower/SECRET_KEY'): - SECRET_KEY = open('/etc/tower/SECRET_KEY', 'rb').read().strip() + with open('/etc/tower/SECRET_KEY', 'rb') as f: + SECRET_KEY = f.read().strip() else: SECRET_KEY = base64.encodebytes(os.urandom(32)).decode().rstrip() @@ -252,6 +250,10 @@ SESSION_COOKIE_SECURE = True # Note: This setting may be overridden by database settings. SESSION_COOKIE_AGE = 1800 +# Name of the cookie that contains the session information. +# Note: Changing this value may require changes to any clients. +SESSION_COOKIE_NAME = 'awx_sessionid' + # Maximum number of per-user valid, concurrent sessions. # -1 is unlimited # Note: This setting may be overridden by database settings. 
@@ -269,8 +271,8 @@ TEMPLATES = [ { 'NAME': 'default', 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'APP_DIRS': True, 'OPTIONS': { - 'debug': DEBUG, 'context_processors': [ # NOQA 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', @@ -285,13 +287,10 @@ TEMPLATES = [ 'social_django.context_processors.backends', 'social_django.context_processors.login_redirect', ], - 'loaders': [ - ('django.template.loaders.cached.Loader', ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader')) - ], 'builtins': ['awx.main.templatetags.swagger'], }, 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'ui', 'build'), os.path.join(BASE_DIR, 'ui', 'public')], - } + }, ] ROOT_URLCONF = 'awx.urls' @@ -449,7 +448,7 @@ CACHES = {'default': {'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': 'u # Social Auth configuration. SOCIAL_AUTH_STRATEGY = 'social_django.strategy.DjangoStrategy' SOCIAL_AUTH_STORAGE = 'social_django.models.DjangoStorage' -SOCIAL_AUTH_USER_MODEL = AUTH_USER_MODEL # noqa +SOCIAL_AUTH_USER_MODEL = 'auth.User' _SOCIAL_AUTH_PIPELINE_BASE = ( 'social_core.pipeline.social_auth.social_details', @@ -941,7 +940,7 @@ AWX_CLEANUP_PATHS = True RECEPTOR_RELEASE_WORK = True MIDDLEWARE = [ - 'django_guid.middleware.GuidMiddleware', + 'django_guid.middleware.guid_middleware', 'awx.main.middleware.TimingMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'awx.main.middleware.MigrationRanCheckMiddleware', @@ -996,4 +995,7 @@ DEFAULT_CONTROL_PLANE_QUEUE_NAME = 'controlplane' # Extend container runtime attributes. 
# For example, to disable SELinux in containers for podman # DEFAULT_CONTAINER_RUN_OPTIONS = ['--security-opt', 'label=disable'] -DEFAULT_CONTAINER_RUN_OPTIONS = [] +DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true'] + +# Mount exposed paths as hostPath resource in k8s/ocp +AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False diff --git a/awx/settings/development.py b/awx/settings/development.py index 70b64643dd..be1c115606 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -45,10 +45,6 @@ SESSION_COOKIE_SECURE = False # Disallow sending csrf cookies over insecure connections CSRF_COOKIE_SECURE = False -# Override django.template.loaders.cached.Loader in defaults.py -template = next((tpl_backend for tpl_backend in TEMPLATES if tpl_backend['NAME'] == 'default'), None) # noqa -template['OPTIONS']['loaders'] = ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader') - # Disable Pendo on the UI for development/test. # Note: This setting may be overridden by database settings. PENDO_TRACKING_STATE = "off" diff --git a/awx/settings/production.py b/awx/settings/production.py index 75b70f7bfc..d4a40ef5d4 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -39,8 +39,20 @@ BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv") # Base virtualenv paths and enablement AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx") +# Very important that this is editable (not read_only) in the API +AWX_ISOLATION_SHOW_PATHS = [ + '/etc/pki/ca-trust:/etc/pki/ca-trust:O', + '/usr/share/pki:/usr/share/pki:O', +] + # Store a snapshot of default settings at this point before loading any # customizable config files. 
+# +############################################################################################### +# +# Any settings defined after this point will be marked as as a read_only database setting +# +################################################################################################ DEFAULTS_SNAPSHOT = {} this_module = sys.modules[__name__] for setting in dir(this_module): diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py index bb4e958844..e484e62be1 100644 --- a/awx/sso/__init__.py +++ b/awx/sso/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. - -default_app_config = 'awx.sso.apps.SSOConfig' diff --git a/awx/sso/apps.py b/awx/sso/apps.py index 45c00e871b..4d09b7acf6 100644 --- a/awx/sso/apps.py +++ b/awx/sso/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class SSOConfig(AppConfig): diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 727cacab20..e54a124560 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -13,7 +13,7 @@ from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings as django_settings from django.core.signals import setting_changed -from django.utils.encoding import force_text +from django.utils.encoding import force_str # django-auth-ldap from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings @@ -179,7 +179,7 @@ def _get_or_set_enterprise_user(username, password, provider): created = True if created or user.is_in_enterprise_category(provider): return user - logger.warn("Enterprise user %s already defined in Tower." % username) + logger.warning("Enterprise user %s already defined in Tower." 
% username) class RADIUSBackend(BaseRADIUSBackend): @@ -199,8 +199,8 @@ class RADIUSBackend(BaseRADIUSBackend): if not user.has_usable_password(): return user - def get_django_user(self, username, password=None): - return _get_or_set_enterprise_user(force_text(username), force_text(password), 'radius') + def get_django_user(self, username, password=None, groups=[], is_staff=False, is_superuser=False): + return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius') class TACACSPlusBackend(object): @@ -257,7 +257,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider): if isinstance(value, (list, tuple)): value = value[0] if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None: - logger.warn( + logger.warning( "Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.", conf_key[5:], key, @@ -370,7 +370,7 @@ def on_populate_user(sender, **kwargs): if field_len > max_len: setattr(user, field, getattr(user, field)[:max_len]) force_user_update = True - logger.warn('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) + logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) # Update organization membership based on group memberships. 
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {}) diff --git a/awx/sso/conf.py b/awx/sso/conf.py index 2faf342934..29d7f401d3 100644 --- a/awx/sso/conf.py +++ b/awx/sso/conf.py @@ -5,7 +5,7 @@ import urllib.parse as urlparse # Django from django.conf import settings from django.urls import reverse -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/sso/fields.py b/awx/sso/fields.py index e2d46d9362..9ad016f594 100644 --- a/awx/sso/fields.py +++ b/awx/sso/fields.py @@ -4,13 +4,14 @@ import inspect import json import re +import six + # Python LDAP import ldap import awx # Django -from django.utils import six -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django Auth LDAP import django_auth_ldap.config @@ -456,7 +457,7 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin): params = self.get_depends_on() or {} params_sanitized = dict() - cls_args = inspect.getargspec(cls.__init__).args[1:] + cls_args = inspect.getfullargspec(cls.__init__).args[1:] if cls_args: if not isinstance(params, dict): @@ -487,7 +488,7 @@ class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin): # Fail safe return {} - invalid_keys = set(value.keys()) - set(inspect.getargspec(group_type_cls.__init__).args[1:]) + invalid_keys = set(value.keys()) - set(inspect.getfullargspec(group_type_cls.__init__).args[1:]) if invalid_keys: invalid_keys = sorted(list(invalid_keys)) keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']') @@ -582,11 +583,11 @@ class SocialMapField(fields.ListField): def to_representation(self, value): if isinstance(value, (list, tuple)): return super(SocialMapField, self).to_representation(value) - elif value in fields.NullBooleanField.TRUE_VALUES: + elif value in fields.BooleanField.TRUE_VALUES: return True - elif value in 
fields.NullBooleanField.FALSE_VALUES: + elif value in fields.BooleanField.FALSE_VALUES: return False - elif value in fields.NullBooleanField.NULL_VALUES: + elif value in fields.BooleanField.NULL_VALUES: return None elif isinstance(value, (str, type(re.compile('')))): return self.child.to_representation(value) @@ -596,11 +597,11 @@ class SocialMapField(fields.ListField): def to_internal_value(self, data): if isinstance(data, (list, tuple)): return super(SocialMapField, self).to_internal_value(data) - elif data in fields.NullBooleanField.TRUE_VALUES: + elif data in fields.BooleanField.TRUE_VALUES: return True - elif data in fields.NullBooleanField.FALSE_VALUES: + elif data in fields.BooleanField.FALSE_VALUES: return False - elif data in fields.NullBooleanField.NULL_VALUES: + elif data in fields.BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/sso/models.py b/awx/sso/models.py index 95da1b82c7..28eb23857f 100644 --- a/awx/sso/models.py +++ b/awx/sso/models.py @@ -4,7 +4,7 @@ # Django from django.db import models from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UserEnterpriseAuth(models.Model): diff --git a/awx/sso/pipeline.py b/awx/sso/pipeline.py index 13549861bb..85bfd499fd 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/pipeline.py @@ -11,7 +11,7 @@ from social_core.exceptions import AuthException # Django from django.core.exceptions import ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db.models import Q @@ -263,9 +263,14 @@ def _check_flag(user, flag, attributes, user_flags_settings): if user_flags_settings.get(is_value_key, None): # If so, check and see if the value of the attr matches the required value attribute_value = attributes.get(attr_setting, None) + attribute_matches = False 
if isinstance(attribute_value, (list, tuple)): - attribute_value = attribute_value[0] - if attribute_value == user_flags_settings.get(is_value_key): + if user_flags_settings.get(is_value_key) in attribute_value: + attribute_matches = True + elif attribute_value == user_flags_settings.get(is_value_key): + attribute_matches = True + + if attribute_matches: logger.debug("Giving %s %s from attribute %s with matching value" % (user.username, flag, attr_setting)) new_flag = True # if they don't match make sure that new_flag is false diff --git a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py index 0d48c54d87..3f37b41df3 100644 --- a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py +++ b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py @@ -11,7 +11,7 @@ def test_fetch_user_if_exist(existing_tacacsplus_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("foo", "password", "tacacs+") mocked_logger.debug.assert_not_called() - mocked_logger.warn.assert_not_called() + mocked_logger.warning.assert_not_called() assert new_user == existing_tacacsplus_user @@ -33,5 +33,5 @@ def test_created_user_has_no_usable_password(): def test_non_enterprise_user_does_not_get_pass(existing_normal_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("alice", "password", "tacacs+") - mocked_logger.warn.assert_called_once_with(u'Enterprise user alice already defined in Tower.') + mocked_logger.warning.assert_called_once_with(u'Enterprise user alice already defined in Tower.') assert new_user is None diff --git a/awx/sso/tests/functional/test_pipeline.py b/awx/sso/tests/functional/test_pipeline.py index 6ed084a9d7..7954ac11f3 100644 --- a/awx/sso/tests/functional/test_pipeline.py +++ b/awx/sso/tests/functional/test_pipeline.py @@ -447,6 +447,16 @@ class TestSAMLUserFlags: {'is_superuser_role': 
'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, (True, True), ), + # In this test case we will validate that a single attribute (instead of a list) still works + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'test_id'}, + (True, True), + ), + # This will be a negative test for a single atrribute + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk'}, + (False, False), + ), ], ) def test__check_flag(self, user_flags_settings, expected): @@ -457,10 +467,10 @@ class TestSAMLUserFlags: attributes = { 'email': ['noone@nowhere.com'], 'last_name': ['Westcott'], - 'is_superuser': ['true'], + 'is_superuser': ['something', 'else', 'true'], 'username': ['test_id'], 'first_name': ['John'], - 'Role': ['test-role-1'], + 'Role': ['test-role-1', 'something', 'different'], 'name_id': 'test_id', } diff --git a/awx/sso/urls.py b/awx/sso/urls.py index a32b11d6d6..93da0996c9 100644 --- a/awx/sso/urls.py +++ b/awx/sso/urls.py @@ -1,14 +1,15 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path + from awx.sso.views import sso_complete, sso_error, sso_inactive, saml_metadata app_name = 'sso' urlpatterns = [ - url(r'^complete/$', sso_complete, name='sso_complete'), - url(r'^error/$', sso_error, name='sso_error'), - url(r'^inactive/$', sso_inactive, name='sso_inactive'), - url(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), + re_path(r'^complete/$', sso_complete, name='sso_complete'), + re_path(r'^error/$', sso_error, name='sso_error'), + re_path(r'^inactive/$', sso_inactive, name='sso_inactive'), + re_path(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), ] diff --git a/awx/sso/validators.py b/awx/sso/validators.py index 821abc3b15..478b86b36f 100644 --- a/awx/sso/validators.py +++ b/awx/sso/validators.py @@ -6,7 +6,7 @@ import ldap # Django from django.core.exceptions import ValidationError -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'validate_ldap_dn', diff --git a/awx/sso/views.py b/awx/sso/views.py index 35f81b26a4..67921b2fa4 100644 --- a/awx/sso/views.py +++ b/awx/sso/views.py @@ -10,7 +10,7 @@ from django.urls import reverse from django.http import HttpResponse from django.views.generic import View from django.views.generic.base import RedirectView -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.api.serializers import UserSerializer from rest_framework.renderers import JSONRenderer from django.conf import settings @@ -40,12 +40,13 @@ class CompleteView(BaseRedirectView): def dispatch(self, request, *args, **kwargs): response = super(CompleteView, self).dispatch(request, *args, **kwargs) if self.request.user and self.request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in".format(self.request.user.username))) + logger.info(smart_str(u"User {} logged in".format(self.request.user.username))) 
response.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) + current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') response.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) + response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) return response diff --git a/awx/ui/__init__.py b/awx/ui/__init__.py index ac6a554356..e484e62be1 100644 --- a/awx/ui/__init__.py +++ b/awx/ui/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. - -default_app_config = 'awx.ui.apps.UIConfig' diff --git a/awx/ui/apps.py b/awx/ui/apps.py index 40943c6f53..d567e64b80 100644 --- a/awx/ui/apps.py +++ b/awx/ui/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UIConfig(AppConfig): diff --git a/awx/ui/conf.py b/awx/ui/conf.py index 34208f2339..9f1cef04fc 100644 --- a/awx/ui/conf.py +++ b/awx/ui/conf.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import register, fields diff --git a/awx/ui/fields.py b/awx/ui/fields.py index d9b46890ff..37089c0265 100644 --- a/awx/ui/fields.py +++ b/awx/ui/fields.py @@ -7,7 +7,7 @@ import binascii import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields @@ -16,7 +16,7 @@ from awx.conf import fields class PendoTrackingStateField(fields.ChoiceField): def to_internal_value(self, data): # Any false/null values get converted to 'off'. 
- if data in fields.NullBooleanField.FALSE_VALUES or data in fields.NullBooleanField.NULL_VALUES: + if data in fields.BooleanField.FALSE_VALUES or data in fields.BooleanField.NULL_VALUES: return 'off' return super(PendoTrackingStateField, self).to_internal_value(data) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index b7796452cd..1eada88b0e 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -66,7 +66,7 @@ "react-scripts": "5.0.0" }, "engines": { - "node": "14.x" + "node": ">=16.14.0" } }, "node_modules/@babel/code-frame": { @@ -20507,9 +20507,9 @@ } }, "node_modules/url-parse": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", - "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", + "version": "1.5.9", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", + "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", "dev": true, "dependencies": { "querystringify": "^2.1.1", @@ -37195,9 +37195,9 @@ } }, "url-parse": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", - "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", + "version": "1.5.9", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", + "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", "dev": true, "requires": { "querystringify": "^2.1.1", diff --git a/awx/ui/package.json b/awx/ui/package.json index 040d6c14d1..adaacbb5e8 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -75,6 +75,8 @@ "start-instrumented": "ESLINT_NO_DEV_ERRORS=true DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start", "build": 
"INLINE_RUNTIME_CHUNK=false react-scripts build", "test": "TZ='UTC' react-scripts test --watchAll=false", + "test-screens": "TZ='UTC' react-scripts test screens --watchAll=false", + "test-general": "TZ='UTC' react-scripts test --testPathIgnorePatterns='/src/screens/' --watchAll=false", "test-watch": "TZ='UTC' react-scripts test", "eject": "react-scripts eject", "lint": "eslint --ext .js --ext .jsx .", diff --git a/awx/ui/public/index.html b/awx/ui/public/index.html index 301f9bf209..c3200940bf 100644 --- a/awx/ui/public/index.html +++ b/awx/ui/public/index.html @@ -24,7 +24,7 @@ <% } else { %> diff --git a/awx/ui/src/App.js b/awx/ui/src/App.js index 674dec8b07..bf1701eec9 100644 --- a/awx/ui/src/App.js +++ b/awx/ui/src/App.js @@ -27,7 +27,7 @@ import { isAuthenticated } from 'util/auth'; import { getLanguageWithoutRegionCode } from 'util/language'; import Metrics from 'screens/Metrics'; import SubscriptionEdit from 'screens/Setting/Subscription/SubscriptionEdit'; -import { RootAPI } from 'api'; +import useTitle from 'hooks/useTitle'; import { dynamicActivate, locales } from './i18nLoader'; import getRouteConfig from './routeConfig'; import { SESSION_REDIRECT_URL } from './constants'; @@ -150,16 +150,7 @@ function App() { dynamicActivate(language); }, [language]); - useEffect(() => { - async function fetchBrandName() { - const { - data: { BRAND_NAME }, - } = await RootAPI.readAssetVariables(); - - document.title = BRAND_NAME; - } - fetchBrandName(); - }, []); + useTitle(); const redirectURL = window.sessionStorage.getItem(SESSION_REDIRECT_URL); if (redirectURL) { diff --git a/awx/ui/src/App.test.js b/awx/ui/src/App.test.js index de080062fd..e1f2fb3bc3 100644 --- a/awx/ui/src/App.test.js +++ b/awx/ui/src/App.test.js @@ -7,6 +7,7 @@ import { mountWithContexts } from '../testUtils/enzymeHelpers'; import App, { ProtectedRoute } from './App'; jest.mock('./api'); +jest.mock('util/webWorker', () => jest.fn()); describe('', () => { beforeEach(() => { diff --git 
a/awx/ui/src/api/index.js b/awx/ui/src/api/index.js index a098f28781..5281ad861d 100644 --- a/awx/ui/src/api/index.js +++ b/awx/ui/src/api/index.js @@ -21,6 +21,7 @@ import Jobs from './models/Jobs'; import JobEvents from './models/JobEvents'; import Labels from './models/Labels'; import Me from './models/Me'; +import Mesh from './models/Mesh'; import Metrics from './models/Metrics'; import NotificationTemplates from './models/NotificationTemplates'; import Notifications from './models/Notifications'; @@ -67,6 +68,7 @@ const JobsAPI = new Jobs(); const JobEventsAPI = new JobEvents(); const LabelsAPI = new Labels(); const MeAPI = new Me(); +const MeshAPI = new Mesh(); const MetricsAPI = new Metrics(); const NotificationTemplatesAPI = new NotificationTemplates(); const NotificationsAPI = new Notifications(); @@ -114,6 +116,7 @@ export { JobEventsAPI, LabelsAPI, MeAPI, + MeshAPI, MetricsAPI, NotificationTemplatesAPI, NotificationsAPI, diff --git a/awx/ui/src/api/models/Instances.js b/awx/ui/src/api/models/Instances.js index 78ea59d1dd..07ee085c14 100644 --- a/awx/ui/src/api/models/Instances.js +++ b/awx/ui/src/api/models/Instances.js @@ -7,6 +7,7 @@ class Instances extends Base { this.readHealthCheckDetail = this.readHealthCheckDetail.bind(this); this.healthCheck = this.healthCheck.bind(this); + this.readInstanceGroup = this.readInstanceGroup.bind(this); } healthCheck(instanceId) { @@ -16,6 +17,10 @@ class Instances extends Base { readHealthCheckDetail(instanceId) { return this.http.get(`${this.baseUrl}${instanceId}/health_check/`); } + + readInstanceGroup(instanceId) { + return this.http.get(`${this.baseUrl}${instanceId}/instance_groups/`); + } } export default Instances; diff --git a/awx/ui/src/api/models/Mesh.js b/awx/ui/src/api/models/Mesh.js new file mode 100644 index 0000000000..d7ad08067c --- /dev/null +++ b/awx/ui/src/api/models/Mesh.js @@ -0,0 +1,9 @@ +import Base from '../Base'; + +class Mesh extends Base { + constructor(http) { + super(http); + this.baseUrl 
= '/api/v2/mesh_visualizer/'; + } +} +export default Mesh; diff --git a/awx/ui/src/components/AdHocCommands/AdHocCommands.js b/awx/ui/src/components/AdHocCommands/AdHocCommands.js index 7ddee926bf..5dd69d91a3 100644 --- a/awx/ui/src/components/AdHocCommands/AdHocCommands.js +++ b/awx/ui/src/components/AdHocCommands/AdHocCommands.js @@ -59,6 +59,7 @@ function AdHocCommands({ useEffect(() => { fetchData(); }, [fetchData]); + const { isLoading: isLaunchLoading, error: launchError, @@ -172,6 +173,8 @@ function AdHocCommands({ AdHocCommands.propTypes = { adHocItems: PropTypes.arrayOf(PropTypes.object).isRequired, hasListItems: PropTypes.bool.isRequired, + onLaunchLoading: PropTypes.func.isRequired, + moduleOptions: PropTypes.arrayOf(PropTypes.array).isRequired, }; export default AdHocCommands; diff --git a/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js b/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js index 25eb74ffbc..6e51fb3522 100644 --- a/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js +++ b/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js @@ -73,6 +73,10 @@ describe('', () => { adHocItems={adHocItems} hasListItems onLaunchLoading={() => jest.fn()} + moduleOptions={[ + ['command', 'command'], + ['shell', 'shell'], + ]} /> ); }); diff --git a/awx/ui/src/components/ContentEmpty/ContentEmpty.js b/awx/ui/src/components/ContentEmpty/ContentEmpty.js index a1d88be425..d74ee46dc6 100644 --- a/awx/ui/src/components/ContentEmpty/ContentEmpty.js +++ b/awx/ui/src/components/ContentEmpty/ContentEmpty.js @@ -1,6 +1,5 @@ import React from 'react'; import { t } from '@lingui/macro'; - import { Title, EmptyState, @@ -9,9 +8,14 @@ import { } from '@patternfly/react-core'; import { CubesIcon } from '@patternfly/react-icons'; -const ContentEmpty = ({ title = '', message = '' }) => ( - - +const ContentEmpty = ({ + title = '', + message = '', + icon = CubesIcon, + className = '', +}) => ( + + {title || t`No items found.`} diff --git 
a/awx/ui/src/components/DetailList/LaunchedByDetail.js b/awx/ui/src/components/DetailList/LaunchedByDetail.js index 099d2385a7..6f542231c5 100644 --- a/awx/ui/src/components/DetailList/LaunchedByDetail.js +++ b/awx/ui/src/components/DetailList/LaunchedByDetail.js @@ -1,45 +1,16 @@ import React from 'react'; import { Link } from 'react-router-dom'; import { t } from '@lingui/macro'; +import getScheduleUrl from 'util/getScheduleUrl'; import Detail from './Detail'; -function getScheduleURL(template, scheduleId, inventoryId = null) { - let scheduleUrl; - - switch (template.unified_job_type) { - case 'inventory_update': - scheduleUrl = - inventoryId && - `/inventories/inventory/${inventoryId}/sources/${template.id}/schedules/${scheduleId}/details`; - break; - case 'job': - scheduleUrl = `/templates/job_template/${template.id}/schedules/${scheduleId}/details`; - break; - case 'project_update': - scheduleUrl = `/projects/${template.id}/schedules/${scheduleId}/details`; - break; - case 'system_job': - scheduleUrl = `/management_jobs/${template.id}/schedules/${scheduleId}/details`; - break; - case 'workflow_job': - scheduleUrl = `/templates/workflow_job_template/${template.id}/schedules/${scheduleId}/details`; - break; - default: - break; - } - - return scheduleUrl; -} - -const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { +const getLaunchedByDetails = (job) => { const { created_by: createdBy, job_template: jobTemplate, - unified_job_template: unifiedJT, workflow_job_template: workflowJT, - inventory, schedule, - } = summary_fields; + } = job.summary_fields; if (!createdBy && !schedule) { return {}; @@ -48,7 +19,7 @@ const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { let link; let value; - switch (launch_type) { + switch (job.launch_type) { case 'webhook': value = t`Webhook`; link = @@ -58,7 +29,7 @@ const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { break; case 'scheduled': value = schedule.name; - link = 
getScheduleURL(unifiedJT, schedule.id, inventory?.id); + link = getScheduleUrl(job); break; case 'manual': link = `/users/${createdBy.id}/details`; diff --git a/awx/ui/src/components/ErrorDetail/ErrorDetail.js b/awx/ui/src/components/ErrorDetail/ErrorDetail.js index 81a7176998..1ecceea468 100644 --- a/awx/ui/src/components/ErrorDetail/ErrorDetail.js +++ b/awx/ui/src/components/ErrorDetail/ErrorDetail.js @@ -1,3 +1,4 @@ +import 'styled-components/macro'; import React, { useState } from 'react'; import PropTypes from 'prop-types'; import styled from 'styled-components'; @@ -68,7 +69,11 @@ function ErrorDetail({ error }) { ); }; - const renderStack = () => {error.stack}; + const renderStack = () => ( + + {error.stack} + + ); return ( )} + {job.launch_type === 'scheduled' && + (schedule ? ( + {schedule.name} + } + /> + ) : ( + + ))} {job_template && ( ', () => { test('initially renders successfully', () => { expect(wrapper.find('JobListItem').length).toBe(1); + }); + + test('should display expected details', () => { assertDetail('Job Slice', '1/3'); + assertDetail('Schedule', 'mock schedule'); }); test('launch button shown to users with launch capabilities', () => { @@ -129,6 +142,25 @@ describe('', () => { expect(wrapper.find('Td[dataLabel="Type"]').length).toBe(1); }); + test('should not show schedule detail in expanded view', () => { + wrapper = mountWithContexts( + + + {}} + /> + +
+ ); + expect(wrapper.find('Detail[label="Schedule"] dt').length).toBe(1); + }); + test('should not display EE for canceled jobs', () => { wrapper = mountWithContexts( diff --git a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesList.js b/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js similarity index 66% rename from awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesList.js rename to awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js index eb8d690d96..98f890ed12 100644 --- a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesList.js +++ b/awx/ui/src/components/RelatedTemplateList/RelatedTemplateList.js @@ -1,7 +1,7 @@ import React, { useCallback, useEffect } from 'react'; -import { useLocation, useParams } from 'react-router-dom'; +import { useLocation } from 'react-router-dom'; -import { t } from '@lingui/macro'; +import { t, Plural } from '@lingui/macro'; import { Card } from '@patternfly/react-core'; import { JobTemplatesAPI } from 'api'; import AlertModal from 'components/AlertModal'; @@ -14,10 +14,14 @@ import PaginatedTable, { ToolbarDeleteButton, getSearchableKeys, } from 'components/PaginatedTable'; -import { getQSConfig, parseQueryString } from 'util/qs'; +import { getQSConfig, parseQueryString, mergeParams } from 'util/qs'; +import useWsTemplates from 'hooks/useWsTemplates'; import useSelected from 'hooks/useSelected'; +import useExpanded from 'hooks/useExpanded'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; -import ProjectTemplatesListItem from './ProjectJobTemplatesListItem'; +import { TemplateListItem } from 'components/TemplateList'; +import useToast, { AlertVariant } from 'hooks/useToast'; +import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails'; const QS_CONFIG = getQSConfig('template', { page: 1, @@ -25,13 +29,13 @@ const QS_CONFIG = getQSConfig('template', { order_by: 'name', }); -function 
ProjectJobTemplatesList() { - const { id: projectId } = useParams(); +function RelatedTemplateList({ searchParams }) { const location = useLocation(); + const { addToast, Toast, toastProps } = useToast(); const { result: { - jobTemplates, + results, itemCount, actions, relatedSearchableKeys, @@ -43,13 +47,12 @@ function ProjectJobTemplatesList() { } = useRequest( useCallback(async () => { const params = parseQueryString(QS_CONFIG, location.search); - params.project = projectId; const [response, actionsResponse] = await Promise.all([ - JobTemplatesAPI.read(params), + JobTemplatesAPI.read(mergeParams(params, searchParams)), JobTemplatesAPI.readOptions(), ]); return { - jobTemplates: response.data.results, + results: response.data.results, itemCount: response.data.count, actions: actionsResponse.data.actions, relatedSearchableKeys: ( @@ -57,9 +60,9 @@ function ProjectJobTemplatesList() { ).map((val) => val.slice(0, -8)), searchableKeys: getSearchableKeys(actionsResponse.data.actions?.GET), }; - }, [location, projectId]), + }, [location]), // eslint-disable-line react-hooks/exhaustive-deps { - jobTemplates: [], + results: [], itemCount: 0, actions: {}, relatedSearchableKeys: [], @@ -71,9 +74,14 @@ function ProjectJobTemplatesList() { fetchTemplates(); }, [fetchTemplates]); + const jobTemplates = useWsTemplates(results); + const { selected, isAllSelected, handleSelect, clearSelected, selectAll } = useSelected(jobTemplates); + const { expanded, isAllExpanded, handleExpand, expandAll } = + useExpanded(jobTemplates); + const { isLoading: isDeleteLoading, deleteItems: deleteTemplates, @@ -94,6 +102,18 @@ function ProjectJobTemplatesList() { } ); + const handleCopy = useCallback( + (newTemplateId) => { + addToast({ + id: newTemplateId, + title: t`Template copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleTemplateDelete = async () => { await deleteTemplates(); clearSelected(); @@ -106,6 +126,10 @@ function 
ProjectJobTemplatesList() { ); + const deleteDetailsRequests = relatedResourceDeleteRequests.template( + selected[0] + ); + return ( <> @@ -131,14 +155,32 @@ function ProjectJobTemplatesList() { name: t`Modified By (Username)`, key: 'modified_by__username__icontains', }, + { + name: t`Playbook name`, + key: 'job_template__playbook__icontains', + }, + { + name: t`Label`, + key: 'labels__name__icontains', + }, ]} toolbarSearchableKeys={searchableKeys} toolbarRelatedSearchableKeys={relatedSearchableKeys} + headerRow={ + + {t`Name`} + {t`Type`} + {t`Recent jobs`} + {t`Actions`} + + } renderToolbar={(props) => ( + } />, ]} /> )} - headerRow={ - - {t`Name`} - {t`Type`} - {t`Recent jobs`} - {t`Actions`} - - } renderRow={(template, index) => ( - handleSelect(template)} + isExpanded={expanded.some((row) => row.id === template.id)} + onExpand={() => handleExpand(template)} + onCopy={handleCopy} isSelected={selected.some((row) => row.id === template.id)} + fetchTemplates={fetchTemplates} rowIndex={index} /> )} emptyStateControls={canAddJT && addButton} /> + ', () => { + let debug; + beforeEach(() => { + JobTemplatesAPI.read.mockResolvedValue({ + data: { + count: mockTemplates.length, + results: mockTemplates, + }, + }); + + JobTemplatesAPI.readOptions.mockResolvedValue({ + data: { + actions: [], + }, + }); + debug = global.console.debug; // eslint-disable-line prefer-destructuring + global.console.debug = () => {}; + }); + + afterEach(() => { + jest.clearAllMocks(); + global.console.debug = debug; + }); + + test('Templates are retrieved from the api and the components finishes loading', async () => { + let wrapper; + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + expect(JobTemplatesAPI.read).toBeCalledWith({ + credentials__id: 1, + order_by: 'name', + page: 1, + page_size: 20, + }); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + expect(wrapper.find('TemplateListItem').length).toEqual( + 
mockTemplates.length + ); + }); + + test('handleSelect is called when a template list item is selected', async () => { + const wrapper = mountWithContexts( + + ); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + const checkBox = wrapper.find('TemplateListItem').at(1).find('input'); + + checkBox.simulate('change', { + target: { + id: 2, + name: 'Job Template 2', + url: '/templates/job_template/2', + type: 'job_template', + summary_fields: { user_capabilities: { delete: true } }, + }, + }); + + expect(wrapper.find('TemplateListItem').at(1).prop('isSelected')).toBe( + true + ); + }); + + test('handleSelectAll is called when a template list item is selected', async () => { + const wrapper = mountWithContexts( + + ); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + expect(wrapper.find('Checkbox#select-all').prop('isChecked')).toBe(false); + + const toolBarCheckBox = wrapper.find('Checkbox#select-all'); + act(() => { + toolBarCheckBox.prop('onChange')(true); + }); + wrapper.update(); + expect(wrapper.find('Checkbox#select-all').prop('isChecked')).toBe(true); + }); + + test('delete button is disabled if user does not have delete capabilities on a selected template', async () => { + const wrapper = mountWithContexts( + + ); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + const deleteAbleItem = wrapper.find('TemplateListItem').at(0).find('input'); + const nonDeleteAbleItem = wrapper + .find('TemplateListItem') + .at(2) + .find('input'); + + deleteAbleItem.simulate('change', { + id: 1, + name: 'Job Template 1', + url: '/templates/job_template/1', + type: 'job_template', + summary_fields: { + user_capabilities: { + delete: true, + }, + }, + }); + + expect(wrapper.find('Button[aria-label="Delete"]').prop('isDisabled')).toBe( + false + ); + deleteAbleItem.simulate('change', { + id: 1, + name: 'Job 
Template 1', + url: '/templates/job_template/1', + type: 'job_template', + summary_fields: { + user_capabilities: { + delete: true, + }, + }, + }); + expect(wrapper.find('Button[aria-label="Delete"]').prop('isDisabled')).toBe( + true + ); + nonDeleteAbleItem.simulate('change', { + id: 5, + name: 'Workflow Job Template 2', + url: '/templates/workflow_job_template/5', + type: 'workflow_job_template', + summary_fields: { + user_capabilities: { + delete: false, + }, + }, + }); + expect(wrapper.find('Button[aria-label="Delete"]').prop('isDisabled')).toBe( + true + ); + }); + + test('api is called to delete templates for each selected template.', async () => { + const wrapper = mountWithContexts( + + ); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + const jobTemplate = wrapper.find('TemplateListItem').at(1).find('input'); + + jobTemplate.simulate('change', { + target: { + id: 2, + name: 'Job Template 2', + url: '/templates/job_template/2', + type: 'job_template', + summary_fields: { user_capabilities: { delete: true } }, + }, + }); + + await act(async () => { + wrapper.find('button[aria-label="Delete"]').prop('onClick')(); + }); + wrapper.update(); + await act(async () => { + await wrapper + .find('button[aria-label="confirm delete"]') + .prop('onClick')(); + }); + expect(JobTemplatesAPI.destroy).toBeCalledWith(2); + }); + + test('error is shown when template not successfully deleted from api', async () => { + JobTemplatesAPI.destroy.mockRejectedValue( + new Error({ + response: { + config: { + method: 'delete', + url: '/api/v2/job_templates/1', + }, + data: 'An error occurred', + }, + }) + ); + let wrapper; + + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + wrapper.update(); + expect(JobTemplatesAPI.read).toHaveBeenCalledTimes(1); + + await act(async () => { + wrapper.find('TemplateListItem').at(0).invoke('onSelect')(); + }); + wrapper.update(); + + await act(async () => { + 
wrapper.find('ToolbarDeleteButton').invoke('onDelete')(); + }); + wrapper.update(); + + const modal = wrapper.find('Modal'); + expect(modal).toHaveLength(1); + expect(modal.prop('title')).toEqual('Error!'); + }); + + test('should properly copy template', async () => { + JobTemplatesAPI.copy.mockResolvedValue({}); + const wrapper = mountWithContexts( + + ); + await act(async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + }); + await act(async () => + wrapper.find('Button[aria-label="Copy"]').prop('onClick')() + ); + expect(JobTemplatesAPI.copy).toHaveBeenCalled(); + }); +}); diff --git a/awx/ui/src/components/RelatedTemplateList/index.js b/awx/ui/src/components/RelatedTemplateList/index.js new file mode 100644 index 0000000000..af65c11b6f --- /dev/null +++ b/awx/ui/src/components/RelatedTemplateList/index.js @@ -0,0 +1 @@ +export { default } from './RelatedTemplateList'; diff --git a/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js b/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js index e4892e0371..0817928fbd 100644 --- a/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js +++ b/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js @@ -56,31 +56,29 @@ function ResourceAccessList({ apiModel, resource }) { let orgRoles; if (location.pathname.includes('/organizations')) { - const { - data: { results: roles }, - } = await RolesAPI.read({ content_type__isnull: true }); - const sysAdmin = roles.filter( - (role) => role.name === 'System Administrator' - ); - const sysAud = roles.filter((role) => { - let auditor; - if (role.name === 'System Auditor') { - auditor = role.id; - } - return auditor; - }); + const [ + { + data: { results: systemAdmin }, + }, + { + data: { results: systemAuditor }, + }, + ] = await Promise.all([ + RolesAPI.read({ singleton_name: 'system_administrator' }), + RolesAPI.read({ singleton_name: 'system_auditor' }), + ]); - orgRoles = 
Object.values(resource.summary_fields.object_roles).map( - (opt) => { - let item; - if (opt.name === 'Admin') { - item = [`${opt.id}, ${sysAdmin[0].id}`, opt.name]; - } else if (sysAud[0].id && opt.name === 'Auditor') { - item = [`${sysAud[0].id}, ${opt.id}`, opt.name]; - } else { - item = [`${opt.id}`, opt.name]; + orgRoles = Object.entries(resource.summary_fields.object_roles).map( + ([key, value]) => { + if (key === 'admin_role') { + return [`${value.id}, ${systemAdmin[0].id}`, value.name]; } - return item; + + if (key === 'auditor_role') { + return [`${value.id}, ${systemAuditor[0].id}`, value.name]; + } + + return [`${value.id}`, value.name]; } ); } diff --git a/awx/ui/src/components/ScreenHeader/ScreenHeader.js b/awx/ui/src/components/ScreenHeader/ScreenHeader.js index 670f8a4f51..eeb2b5f990 100644 --- a/awx/ui/src/components/ScreenHeader/ScreenHeader.js +++ b/awx/ui/src/components/ScreenHeader/ScreenHeader.js @@ -1,5 +1,6 @@ import React from 'react'; import PropTypes from 'prop-types'; +import useTitle from 'hooks/useTitle'; import { t } from '@lingui/macro'; import { @@ -12,7 +13,7 @@ import { Tooltip, } from '@patternfly/react-core'; import { HistoryIcon } from '@patternfly/react-icons'; -import { Link, Route, useRouteMatch } from 'react-router-dom'; +import { Link, Route, useRouteMatch, useLocation } from 'react-router-dom'; const ScreenHeader = ({ breadcrumbConfig, streamType }) => { const { light } = PageSectionVariants; @@ -20,6 +21,16 @@ const ScreenHeader = ({ breadcrumbConfig, streamType }) => { path: Object.keys(breadcrumbConfig)[0], strict: true, }); + + const location = useLocation(); + const parts = location.pathname.split('/'); + if (parts.length > 2) { + parts.pop(); + } + + const pathTitle = breadcrumbConfig[parts.join('/')]; + useTitle(pathTitle); + const isOnlyOneCrumb = oneCrumbMatch && oneCrumbMatch.isExact; return ( diff --git a/awx/ui/src/components/TemplateList/TemplateList.js b/awx/ui/src/components/TemplateList/TemplateList.js index 
01fef24065..9e9997dd63 100644 --- a/awx/ui/src/components/TemplateList/TemplateList.js +++ b/awx/ui/src/components/TemplateList/TemplateList.js @@ -12,6 +12,7 @@ import useSelected from 'hooks/useSelected'; import useExpanded from 'hooks/useExpanded'; import { getQSConfig, parseQueryString } from 'util/qs'; import useWsTemplates from 'hooks/useWsTemplates'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails'; import AlertModal from '../AlertModal'; import DatalistToolbar from '../DataListToolbar'; @@ -41,6 +42,8 @@ function TemplateList({ defaultParams }) { ); const location = useLocation(); + const { addToast, Toast, toastProps } = useToast(); + const { result: { results, @@ -123,6 +126,18 @@ function TemplateList({ defaultParams }) { } ); + const handleCopy = useCallback( + (newTemplateId) => { + addToast({ + id: newTemplateId, + title: t`Template copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleTemplateDelete = async () => { await deleteTemplates(); clearSelected(); @@ -266,6 +281,7 @@ function TemplateList({ defaultParams }) { onSelect={() => handleSelect(template)} isExpanded={expanded.some((row) => row.id === template.id)} onExpand={() => handleExpand(template)} + onCopy={handleCopy} isSelected={selected.some((row) => row.id === template.id)} fetchTemplates={fetchTemplates} rowIndex={index} @@ -274,6 +290,7 @@ function TemplateList({ defaultParams }) { emptyStateControls={(canAddJT || canAddWFJT) && addButton} /> + { + let response; if (template.type === 'job_template') { - await JobTemplatesAPI.copy(template.id, { + response = await JobTemplatesAPI.copy(template.id, { name: `${template.name} @ ${timeOfDay()}`, }); } else { - await WorkflowJobTemplatesAPI.copy(template.id, { + response = await WorkflowJobTemplatesAPI.copy(template.id, { name: `${template.name} @ ${timeOfDay()}`, }); } + if 
(response.status === 201) { + onCopy(response.data.id); + } await fetchTemplates(); - }, [fetchTemplates, template.id, template.name, template.type]); + }, [fetchTemplates, template.id, template.name, template.type, onCopy]); const handleCopyStart = useCallback(() => { setIsDisabled(true); diff --git a/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js b/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js index 7396daeff4..28d71d7424 100644 --- a/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js +++ b/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js @@ -1,6 +1,6 @@ import React, { useState, useCallback } from 'react'; import { t } from '@lingui/macro'; -import { useParams } from 'react-router-dom'; +import { useParams, useRouteMatch } from 'react-router-dom'; import styled from 'styled-components'; import useRequest from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; @@ -27,6 +27,11 @@ function UserAndTeamAccessAdd({ const [selectedResourceType, setSelectedResourceType] = useState(null); const [stepIdReached, setStepIdReached] = useState(1); const { id: userId } = useParams(); + const teamsRouteMatch = useRouteMatch({ + path: '/teams/:id/roles', + exact: true, + }); + const { selected: resourcesSelected, handleSelect: handleResourceSelect } = useSelected([]); @@ -54,6 +59,19 @@ function UserAndTeamAccessAdd({ {} ); + // Object roles can be user only, so we remove them when + // showing role choices for team access + const selectableRoles = { + ...resourcesSelected[0]?.summary_fields?.object_roles, + }; + if (teamsRouteMatch && resourcesSelected[0]?.type === 'organization') { + Object.keys(selectableRoles).forEach((key) => { + if (selectableRoles[key].user_only) { + delete selectableRoles[key]; + } + }); + } + const steps = [ { id: 1, @@ -101,7 +119,7 @@ function UserAndTeamAccessAdd({ component: resourcesSelected?.length > 0 && ( { + const prevTitle = document.title; + if (title) { 
+ document.title = `${brandName} | ${title}`; + } else { + document.title = brandName; + } + + return () => { + document.title = prevTitle; + }; + }, [title, brandName]); +} diff --git a/awx/ui/src/hooks/useToast.js b/awx/ui/src/hooks/useToast.js new file mode 100644 index 0000000000..0f5ec1da61 --- /dev/null +++ b/awx/ui/src/hooks/useToast.js @@ -0,0 +1,64 @@ +import React, { useState, useCallback } from 'react'; +import { + AlertGroup, + Alert, + AlertActionCloseButton, + AlertVariant, +} from '@patternfly/react-core'; +import { arrayOf, func } from 'prop-types'; +import { Toast as ToastType } from 'types'; + +export default function useToast() { + const [toasts, setToasts] = useState([]); + + const addToast = useCallback((newToast) => { + setToasts((oldToasts) => [...oldToasts, newToast]); + }, []); + + const removeToast = useCallback((toastId) => { + setToasts((oldToasts) => oldToasts.filter((t) => t.id !== toastId)); + }, []); + + return { + addToast, + removeToast, + Toast, + toastProps: { + toasts, + removeToast, + }, + }; +} + +export function Toast({ toasts, removeToast }) { + if (!toasts.length) { + return null; + } + + return ( + + {toasts.map((toast) => ( + removeToast(toast.id)} /> + } + onTimeout={() => removeToast(toast.id)} + timeout={toast.hasTimeout} + title={toast.title} + variant={toast.variant} + key={`toast-message-${toast.id}`} + ouiaId={`toast-message-${toast.id}`} + > + {toast.message} + + ))} + + ); +} + +Toast.propTypes = { + toasts: arrayOf(ToastType).isRequired, + removeToast: func.isRequired, +}; + +export { AlertVariant }; diff --git a/awx/ui/src/hooks/useToast.test.js b/awx/ui/src/hooks/useToast.test.js new file mode 100644 index 0000000000..23b6ca845f --- /dev/null +++ b/awx/ui/src/hooks/useToast.test.js @@ -0,0 +1,124 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { shallow, mount } from 'enzyme'; +import useToast, { Toast, AlertVariant } from './useToast'; + +describe('useToast', () => { + 
const Child = () =>
; + const Test = () => { + const toastVals = useToast(); + return ; + }; + + test('should provide Toast component', () => { + const wrapper = mount(); + + expect(wrapper.find('Child').prop('Toast')).toEqual(Toast); + }); + + test('should add toast', () => { + const wrapper = mount(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toEqual([]); + act(() => { + wrapper.find('Child').prop('addToast')({ + message: 'one', + id: 1, + variant: 'success', + }); + }); + wrapper.update(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toEqual([ + { + message: 'one', + id: 1, + variant: 'success', + }, + ]); + }); + + test('should remove toast', () => { + const wrapper = mount(); + + act(() => { + wrapper.find('Child').prop('addToast')({ + message: 'one', + id: 1, + variant: 'success', + }); + }); + wrapper.update(); + expect(wrapper.find('Child').prop('toastProps').toasts).toHaveLength(1); + act(() => { + wrapper.find('Child').prop('removeToast')(1); + }); + wrapper.update(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toHaveLength(0); + }); +}); + +describe('Toast', () => { + test('should render nothing with no toasts', () => { + const wrapper = shallow( {}} />); + expect(wrapper).toEqual({}); + }); + + test('should render toast alert', () => { + const toast = { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + message: 'the message', + }; + const wrapper = shallow( {}} />); + + const alert = wrapper.find('Alert'); + expect(alert.prop('title')).toEqual('Inventory saved'); + expect(alert.prop('variant')).toEqual('success'); + expect(alert.prop('ouiaId')).toEqual('toast-message-1'); + expect(alert.prop('children')).toEqual('the message'); + }); + + test('should call removeToast', () => { + const removeToast = jest.fn(); + const toast = { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + }; + const wrapper = shallow( + + ); + + const alert = wrapper.find('Alert'); + 
alert.prop('actionClose').props.onClose(1); + expect(removeToast).toHaveBeenCalledTimes(1); + }); + + test('should render multiple alerts', () => { + const toasts = [ + { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + message: 'the message', + }, + { + title: 'error saving', + variant: AlertVariant.danger, + id: 2, + }, + ]; + const wrapper = shallow( {}} />); + + const alert = wrapper.find('Alert'); + expect(alert).toHaveLength(2); + + expect(alert.at(0).prop('title')).toEqual('Inventory saved'); + expect(alert.at(0).prop('variant')).toEqual('success'); + expect(alert.at(1).prop('title')).toEqual('error saving'); + expect(alert.at(1).prop('variant')).toEqual('danger'); + }); +}); diff --git a/awx/ui/src/index.test.js b/awx/ui/src/index.test.js index 49ae9e2317..a0419c9933 100644 --- a/awx/ui/src/index.test.js +++ b/awx/ui/src/index.test.js @@ -3,6 +3,7 @@ import ReactDOM from 'react-dom'; import App from './App'; jest.mock('react-dom', () => ({ render: jest.fn() })); +jest.mock('util/webWorker', () => jest.fn()); describe('index.jsx', () => { it('renders ok', () => { diff --git a/awx/ui/src/routeConfig.js b/awx/ui/src/routeConfig.js index 339e52a228..76bb2e39a5 100644 --- a/awx/ui/src/routeConfig.js +++ b/awx/ui/src/routeConfig.js @@ -19,6 +19,7 @@ import Schedules from 'screens/Schedule'; import Settings from 'screens/Setting'; import Teams from 'screens/Team'; import Templates from 'screens/Template'; +import TopologyView from 'screens/TopologyView'; import Users from 'screens/User'; import WorkflowApprovals from 'screens/WorkflowApproval'; import { Jobs } from 'screens/Job'; @@ -147,6 +148,11 @@ function getRouteConfig(userProfile = {}) { path: '/execution_environments', screen: ExecutionEnvironments, }, + { + title: Topology View, + path: '/topology_view', + screen: TopologyView, + }, ], }, { @@ -179,6 +185,7 @@ function getRouteConfig(userProfile = {}) { deleteRoute('management_jobs'); if (userProfile?.isOrgAdmin) return routeConfig; 
deleteRoute('instance_groups'); + deleteRoute('topology_view'); if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates'); return routeConfig; diff --git a/awx/ui/src/routeConfig.test.js b/awx/ui/src/routeConfig.test.js index da0dc7e536..5a7def4348 100644 --- a/awx/ui/src/routeConfig.test.js +++ b/awx/ui/src/routeConfig.test.js @@ -1,4 +1,5 @@ import getRouteConfig from './routeConfig'; +jest.mock('util/webWorker', () => jest.fn()); const userProfile = { isSuperUser: false, @@ -43,6 +44,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', '/settings', ]); }); @@ -71,6 +73,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', '/settings', ]); }); @@ -98,6 +101,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); @@ -233,6 +237,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); @@ -263,6 +268,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); }); diff --git a/awx/ui/src/screens/ActivityStream/ActivityStream.js b/awx/ui/src/screens/ActivityStream/ActivityStream.js index c4e196ae42..505608120e 100644 --- a/awx/ui/src/screens/ActivityStream/ActivityStream.js +++ b/awx/ui/src/screens/ActivityStream/ActivityStream.js @@ -20,6 +20,7 @@ import PaginatedTable, { getSearchableKeys, } from 'components/PaginatedTable'; import useRequest from 'hooks/useRequest'; +import useTitle from 'hooks/useTitle'; import { getQSConfig, parseQueryString, updateQueryString } from 'util/qs'; import { ActivityStreamAPI } from 'api'; @@ -31,6 +32,7 @@ function ActivityStream() { const [isTypeDropdownOpen, setIsTypeDropdownOpen] = useState(false); const location = useLocation(); const history = useHistory(); + useTitle(t`Activity 
Stream`); const urlParams = new URLSearchParams(location.search); const activityStreamType = urlParams.get('type') || 'all'; diff --git a/awx/ui/src/screens/Credential/Credential.js b/awx/ui/src/screens/Credential/Credential.js index 2304737dca..ba580fd76e 100644 --- a/awx/ui/src/screens/Credential/Credential.js +++ b/awx/ui/src/screens/Credential/Credential.js @@ -17,6 +17,7 @@ import { ResourceAccessList } from 'components/ResourceAccessList'; import ContentError from 'components/ContentError'; import ContentLoading from 'components/ContentLoading'; import RoutedTabs from 'components/RoutedTabs'; +import RelatedTemplateList from 'components/RelatedTemplateList'; import { CredentialsAPI } from 'api'; import CredentialDetail from './CredentialDetail'; import CredentialEdit from './CredentialEdit'; @@ -73,6 +74,11 @@ function Credential({ setBreadcrumb }) { link: `/credentials/${id}/access`, id: 1, }, + { + name: t`Job Templates`, + link: `/credentials/${id}/job_templates`, + id: 2, + }, ]; let showCardHeader = true; @@ -123,6 +129,11 @@ function Credential({ setBreadcrumb }) { apiModel={CredentialsAPI} /> , + + + , {!hasContentLoading && ( diff --git a/awx/ui/src/screens/Credential/Credential.test.js b/awx/ui/src/screens/Credential/Credential.test.js index a4e9cf6b68..b66619c877 100644 --- a/awx/ui/src/screens/Credential/Credential.test.js +++ b/awx/ui/src/screens/Credential/Credential.test.js @@ -7,7 +7,6 @@ import { waitForElement, } from '../../../testUtils/enzymeHelpers'; import mockCredential from './shared/data.scmCredential.json'; -import mockOrgCredential from './shared/data.orgCredential.json'; import Credential from './Credential'; jest.mock('../../api'); @@ -32,21 +31,24 @@ describe('', () => { await act(async () => { wrapper = mountWithContexts( {}} />); }); - await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); - await waitForElement(wrapper, '.pf-c-tabs__item', (el) => el.length === 3); + wrapper.update(); + 
expect(wrapper.find('Credential').length).toBe(1); + expect(wrapper.find('RoutedTabs li').length).toBe(4); }); - test('initially renders org-based credential successfully', async () => { - CredentialsAPI.readDetail.mockResolvedValueOnce({ - data: mockOrgCredential, - }); - + test('should render expected tabs', async () => { + const expectedTabs = [ + 'Back to Credentials', + 'Details', + 'Access', + 'Job Templates', + ]; await act(async () => { wrapper = mountWithContexts( {}} />); }); - await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); - // org-based credential detail needs access tab - await waitForElement(wrapper, '.pf-c-tabs__item', (el) => el.length === 3); + wrapper.find('RoutedTabs li').forEach((tab, index) => { + expect(tab.text()).toEqual(expectedTabs[index]); + }); }); test('should show content error when user attempts to navigate to erroneous route', async () => { diff --git a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js index f4940f7d9b..2525e74826 100644 --- a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js +++ b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js @@ -12,6 +12,20 @@ import { import useRequest from 'hooks/useRequest'; import CredentialForm from '../shared/CredentialForm'; +const fetchCredentialTypes = async (pageNo = 1, credentialTypes = []) => { + const { data } = await CredentialTypesAPI.read({ + page_size: 200, + page: pageNo, + }); + if (data.next) { + return fetchCredentialTypes( + pageNo + 1, + credentialTypes.concat(data.results) + ); + } + return credentialTypes.concat(data.results); +}; + function CredentialAdd({ me }) { const history = useHistory(); @@ -76,6 +90,7 @@ function CredentialAdd({ me }) { history.push(`/credentials/${credentialId}/details`); } }, [credentialId, history]); + const { isLoading, error, @@ -83,18 +98,7 @@ function CredentialAdd({ me }) { result, } = useRequest( useCallback(async 
() => { - const { data } = await CredentialTypesAPI.read({ page_size: 200 }); - const credTypes = data.results; - if (data.next && data.next.includes('page=2')) { - const { - data: { results }, - } = await CredentialTypesAPI.read({ - page_size: 200, - page: 2, - }); - credTypes.concat(results); - } - + const credTypes = await fetchCredentialTypes(); const creds = credTypes.reduce((credentialTypesMap, credentialType) => { credentialTypesMap[credentialType.id] = credentialType; return credentialTypesMap; diff --git a/awx/ui/src/screens/Credential/CredentialList/CredentialList.js b/awx/ui/src/screens/Credential/CredentialList/CredentialList.js index ab8fa76004..c02b8d7047 100644 --- a/awx/ui/src/screens/Credential/CredentialList/CredentialList.js +++ b/awx/ui/src/screens/Credential/CredentialList/CredentialList.js @@ -4,6 +4,7 @@ import { t, Plural } from '@lingui/macro'; import { Card, PageSection } from '@patternfly/react-core'; import { CredentialsAPI } from 'api'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import AlertModal from 'components/AlertModal'; import ErrorDetail from 'components/ErrorDetail'; import DataListToolbar from 'components/DataListToolbar'; @@ -27,6 +28,8 @@ const QS_CONFIG = getQSConfig('credential', { function CredentialList() { const location = useLocation(); + const { addToast, Toast, toastProps } = useToast(); + const { result: { credentials, @@ -104,100 +107,116 @@ function CredentialList() { setSelected([]); }; + const handleCopy = useCallback( + (newCredentialId) => { + addToast({ + id: newCredentialId, + title: t`Credential copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const canAdd = actions && Object.prototype.hasOwnProperty.call(actions, 'POST'); const deleteDetailsRequests = relatedResourceDeleteRequests.credential( selected[0] ); return ( - - - - {t`Name`} - {t`Type`} - {t`Actions`} - - } - renderRow={(item, index) 
=> ( - row.id === item.id)} - onSelect={() => handleSelect(item)} - rowIndex={index} - /> - )} - renderToolbar={(props) => ( - ] - : []), - - } - />, - ]} - /> - )} - /> - - - {t`Failed to delete one or more credentials.`} - - - + <> + + + + {t`Name`} + {t`Type`} + {t`Actions`} + + } + renderRow={(item, index) => ( + row.id === item.id)} + onSelect={() => handleSelect(item)} + onCopy={handleCopy} + rowIndex={index} + /> + )} + renderToolbar={(props) => ( + ] + : []), + + } + />, + ]} + /> + )} + /> + + + {t`Failed to delete one or more credentials.`} + + + + + ); } diff --git a/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js b/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js index ad54179832..83470149ae 100644 --- a/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js +++ b/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js @@ -18,7 +18,7 @@ function CredentialListItem({ detailUrl, isSelected, onSelect, - + onCopy, fetchCredentials, rowIndex, }) { @@ -28,11 +28,14 @@ function CredentialListItem({ const canEdit = credential.summary_fields.user_capabilities.edit; const copyCredential = useCallback(async () => { - await CredentialsAPI.copy(credential.id, { + const response = await CredentialsAPI.copy(credential.id, { name: `${credential.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchCredentials(); - }, [credential.id, credential.name, fetchCredentials]); + }, [credential.id, credential.name, fetchCredentials, onCopy]); const handleCopyStart = useCallback(() => { setIsDisabled(true); diff --git a/awx/ui/src/screens/Credential/Credentials.js b/awx/ui/src/screens/Credential/Credentials.js index e95ce0c853..cb213b5534 100644 --- a/awx/ui/src/screens/Credential/Credentials.js +++ b/awx/ui/src/screens/Credential/Credentials.js @@ -26,6 +26,7 @@ function Credentials() { [`/credentials/${credential.id}/edit`]: t`Edit Details`, 
[`/credentials/${credential.id}/details`]: t`Details`, [`/credentials/${credential.id}/access`]: t`Access`, + [`/credentials/${credential.id}/job_templates`]: t`Job Templates`, }); }, []); diff --git a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js index 547dd28507..6153f3217c 100644 --- a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js +++ b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js @@ -7,6 +7,7 @@ import { ExecutionEnvironmentsAPI } from 'api'; import { getQSConfig, parseQueryString } from 'util/qs'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import PaginatedTable, { HeaderRow, HeaderCell, @@ -29,6 +30,7 @@ const QS_CONFIG = getQSConfig('execution_environments', { function ExecutionEnvironmentList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { error: contentError, @@ -94,6 +96,18 @@ function ExecutionEnvironmentList() { } ); + const handleCopy = useCallback( + (newId) => { + addToast({ + id: newId, + title: t`Execution environment copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleDelete = async () => { await deleteExecutionEnvironments(); clearSelected(); @@ -194,6 +208,7 @@ function ExecutionEnvironmentList() { executionEnvironment={executionEnvironment} detailUrl={`${match.url}/${executionEnvironment.id}/details`} onSelect={() => handleSelect(executionEnvironment)} + onCopy={handleCopy} isSelected={selected.some( (row) => row.id === executionEnvironment.id )} @@ -218,6 +233,7 @@ function ExecutionEnvironmentList() { {t`Failed to delete one or more execution 
environments`} + ); } diff --git a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js index 35280d0204..8281c55a68 100644 --- a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js +++ b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js @@ -18,20 +18,28 @@ function ExecutionEnvironmentListItem({ detailUrl, isSelected, onSelect, + onCopy, rowIndex, fetchExecutionEnvironments, }) { const [isDisabled, setIsDisabled] = useState(false); const copyExecutionEnvironment = useCallback(async () => { - await ExecutionEnvironmentsAPI.copy(executionEnvironment.id, { - name: `${executionEnvironment.name} @ ${timeOfDay()}`, - }); + const response = await ExecutionEnvironmentsAPI.copy( + executionEnvironment.id, + { + name: `${executionEnvironment.name} @ ${timeOfDay()}`, + } + ); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchExecutionEnvironments(); }, [ executionEnvironment.id, executionEnvironment.name, fetchExecutionEnvironments, + onCopy, ]); const handleCopyStart = useCallback(() => { @@ -114,6 +122,7 @@ ExecutionEnvironmentListItem.prototype = { detailUrl: string.isRequired, isSelected: bool.isRequired, onSelect: func.isRequired, + onCopy: func.isRequired, }; export default ExecutionEnvironmentListItem; diff --git a/awx/ui/src/screens/InstanceGroup/ContainerGroup.js b/awx/ui/src/screens/InstanceGroup/ContainerGroup.js index 1e1be0b88d..3533b1c97a 100644 --- a/awx/ui/src/screens/InstanceGroup/ContainerGroup.js +++ b/awx/ui/src/screens/InstanceGroup/ContainerGroup.js @@ -13,7 +13,7 @@ import { CaretLeftIcon } from '@patternfly/react-icons'; import { Card, PageSection } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; -import { InstanceGroupsAPI, SettingsAPI } from 'api'; +import { 
InstanceGroupsAPI } from 'api'; import RoutedTabs from 'components/RoutedTabs'; import ContentError from 'components/ContentError'; import ContentLoading from 'components/ContentLoading'; @@ -30,28 +30,15 @@ function ContainerGroup({ setBreadcrumb }) { isLoading, error: contentError, request: fetchInstanceGroups, - result: { instanceGroup, defaultControlPlane, defaultExecution }, + result: { instanceGroup }, } = useRequest( useCallback(async () => { - const [ - { data }, - { - data: { - DEFAULT_EXECUTION_QUEUE_NAME, - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - }, - }, - ] = await Promise.all([ - InstanceGroupsAPI.readDetail(id), - SettingsAPI.readAll(), - ]); + const { data } = await InstanceGroupsAPI.readDetail(id); return { instanceGroup: data, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, [id]), - { instanceGroup: null, defaultExecution: '' } + { instanceGroup: null } ); useEffect(() => { @@ -125,17 +112,10 @@ function ContainerGroup({ setBreadcrumb }) { {instanceGroup && ( <> - + - + )} - {name !== defaultExecution && - instanceGroup.summary_fields.user_capabilities && + {instanceGroup.summary_fields.user_capabilities && instanceGroup.summary_fields.user_capabilities.delete && ( { - const [ - { data }, - { - data: { - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, - }, - ] = await Promise.all([ - InstanceGroupsAPI.readDetail(id), - SettingsAPI.readAll(), - ]); + const { data } = await InstanceGroupsAPI.readDetail(id); return { instanceGroup: data, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, [id]), - { instanceGroup: null, defaultControlPlane: '', defaultExecution: '' } + { instanceGroup: null } ); useEffect(() => { @@ -133,18 +120,10 @@ function InstanceGroup({ setBreadcrumb }) { {instanceGroup && ( <> - + - + @@ -115,8 +109,7 @@ function InstanceGroupDetails({ {t`Edit`} )} - {!isDefaultInstanceGroup && - 
instanceGroup.summary_fields.user_capabilities && + {instanceGroup.summary_fields.user_capabilities && instanceGroup.summary_fields.user_capabilities.delete && ( ', () => { history = createMemoryHistory(); await act(async () => { wrapper = mountWithContexts( - , + , { context: { router: { history } }, } @@ -70,27 +67,6 @@ describe('', () => { jest.clearAllMocks(); }); - test('controlplane instance group name can not be updated', async () => { - let towerWrapper; - await act(async () => { - towerWrapper = mountWithContexts( - , - { - context: { router: { history } }, - } - ); - }); - expect( - towerWrapper.find('input#instance-group-name').prop('disabled') - ).toBeTruthy(); - expect( - towerWrapper.find('input#instance-group-name').prop('value') - ).toEqual('controlplane'); - }); - test('handleSubmit should call the api and redirect to details page', async () => { await act(async () => { wrapper.find('InstanceGroupForm').invoke('onSubmit')( diff --git a/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js b/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js index 8f6e39edb6..dfce293dbd 100644 --- a/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js +++ b/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js @@ -4,7 +4,7 @@ import { useLocation, useRouteMatch, Link } from 'react-router-dom'; import { t, Plural } from '@lingui/macro'; import { Card, PageSection, DropdownItem } from '@patternfly/react-core'; -import { InstanceGroupsAPI, SettingsAPI } from 'api'; +import { InstanceGroupsAPI } from 'api'; import { getQSConfig, parseQueryString } from 'util/qs'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; @@ -27,28 +27,6 @@ const QS_CONFIG = getQSConfig('instance-group', { page_size: 20, }); -function modifyInstanceGroups( - defaultControlPlane, - defaultExecution, - items = [] -) { - return items.map((item) => { - const clonedItem 
= { - ...item, - summary_fields: { - ...item.summary_fields, - user_capabilities: { - ...item.summary_fields.user_capabilities, - }, - }, - }; - if (clonedItem.name === (defaultControlPlane || defaultExecution)) { - clonedItem.summary_fields.user_capabilities.delete = false; - } - return clonedItem; - }); -} - function InstanceGroupList({ isKubernetes, isSettingsRequestLoading, @@ -56,30 +34,6 @@ function InstanceGroupList({ }) { const location = useLocation(); const match = useRouteMatch(); - const { - error: protectedItemsError, - isLoading: isLoadingProtectedItems, - request: fetchProtectedItems, - result: { defaultControlPlane, defaultExecution }, - } = useRequest( - useCallback(async () => { - const { - data: { - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, - } = await SettingsAPI.readAll(); - return { - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, - }; - }, []), - { defaultControlPlane: '', defaultExecution: '' } - ); - - useEffect(() => { - fetchProtectedItems(); - }, [fetchProtectedItems]); const { error: contentError, @@ -127,12 +81,6 @@ function InstanceGroupList({ const { selected, isAllSelected, handleSelect, clearSelected, selectAll } = useSelected(instanceGroups); - const modifiedSelected = modifyInstanceGroups( - defaultControlPlane, - defaultExecution, - selected - ); - const { isLoading: deleteLoading, deletionError, @@ -158,28 +106,10 @@ function InstanceGroupList({ const canAdd = actions && actions.POST; - const cannotDelete = (item) => - !item.summary_fields.user_capabilities.delete || - item.name === defaultExecution || - item.name === defaultControlPlane; + const cannotDelete = (item) => !item.summary_fields.user_capabilities.delete; const pluralizedItemName = t`Instance Groups`; - let errorMessageDelete = ''; - const notdeletedable = selected.filter( - (i) => i.name === defaultControlPlane || i.name === defaultExecution - ); - - if (notdeletedable.length) { 
- errorMessageDelete = ( - - ); - } - const addContainerGroup = t`Add container group`; const addInstanceGroup = t`Add instance group`; @@ -229,14 +159,9 @@ function InstanceGroupList({ { const { - data: { - IS_K8S, - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, + data: { IS_K8S }, } = await SettingsAPI.readCategory('all'); return { isKubernetes: IS_K8S, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, []), - { isLoading: true } + { isKubernetes: false } ); useEffect(() => { - settingsRequest(); - }, [settingsRequest]); + userCanReadSettings && settingsRequest(); + }, [settingsRequest, userCanReadSettings]); const [breadcrumbConfig, setBreadcrumbConfig] = useState({ '/instance_groups': t`Instance Groups`, @@ -91,20 +89,14 @@ function InstanceGroups() { ) : ( - + {!isKubernetes && ( - + )} diff --git a/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js b/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js index fdd53c6e8f..84f269cc0a 100644 --- a/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js +++ b/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js @@ -2,6 +2,7 @@ import React from 'react'; import { shallow } from 'enzyme'; import { InstanceGroupsAPI } from 'api'; import InstanceGroups from './InstanceGroups'; +import { useUserProfile } from 'contexts/Config'; const mockUseLocationValue = { pathname: '', @@ -11,6 +12,19 @@ jest.mock('react-router-dom', () => ({ ...jest.requireActual('react-router-dom'), useLocation: () => mockUseLocationValue, })); + +beforeEach(() => { + useUserProfile.mockImplementation(() => { + return { + isSuperUser: true, + isSystemAuditor: false, + isOrgAdmin: false, + isNotificationAdmin: false, + isExecEnvAdmin: false, + }; + }); +}); + describe('', () => { test('should set breadcrumbs', () => { mockUseLocationValue.pathname = '/instance_groups'; diff --git a/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js 
b/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js index 41d7d60ac5..e9feed4e87 100644 --- a/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js +++ b/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js @@ -11,7 +11,7 @@ import FormField, { CheckboxField, } from 'components/FormField'; import FormActionGroup from 'components/FormActionGroup'; -import { combine, required, protectedResourceName } from 'util/validators'; +import { required } from 'util/validators'; import { FormColumnLayout, FormFullWidthLayout, @@ -21,21 +21,11 @@ import { import CredentialLookup from 'components/Lookup/CredentialLookup'; import { VariablesField } from 'components/CodeEditor'; -function ContainerGroupFormFields({ - instanceGroup, - defaultControlPlane, - defaultExecution, -}) { +function ContainerGroupFormFields({ instanceGroup }) { const { setFieldValue, setFieldTouched } = useFormikContext(); const [credentialField, credentialMeta, credentialHelpers] = useField('credential'); - const [, { initialValue }] = useField('name'); - - const isProtected = - initialValue === `${defaultControlPlane}` || - initialValue === `${defaultExecution}`; - const [overrideField] = useField('override'); const handleCredentialUpdate = useCallback( @@ -50,21 +40,10 @@ function ContainerGroupFormFields({ <> ', () => { wrapper.find('button[aria-label="Cancel"]').invoke('onClick')(); expect(onCancel).toBeCalled(); }); - - test('Name field should be disabled, default', async () => { - let defaultInstanceGroupWrapper; - await act(async () => { - defaultInstanceGroupWrapper = mountWithContexts( - - ); - }); - expect( - defaultInstanceGroupWrapper - .find('TextInput[name="name"]') - .prop('isDisabled') - ).toBe(true); - }); - - test('Name field should be disabled, controlplane', async () => { - let defaultInstanceGroupWrapper; - await act(async () => { - defaultInstanceGroupWrapper = mountWithContexts( - - ); - }); - expect( - defaultInstanceGroupWrapper - 
.find('TextInput[name="name"]') - .prop('isDisabled') - ).toBe(true); - }); }); diff --git a/awx/ui/src/screens/Inventory/Inventories.js b/awx/ui/src/screens/Inventory/Inventories.js index 815f51cd34..cb4e51b712 100644 --- a/awx/ui/src/screens/Inventory/Inventories.js +++ b/awx/ui/src/screens/Inventory/Inventories.js @@ -58,6 +58,7 @@ function Inventories() { [`${inventoryPath}/access`]: t`Access`, [`${inventoryPath}/jobs`]: t`Jobs`, [`${inventoryPath}/details`]: t`Details`, + [`${inventoryPath}/job_templates`]: t`Job Templates`, [`${inventoryPath}/edit`]: t`Edit details`, [inventoryHostsPath]: t`Hosts`, diff --git a/awx/ui/src/screens/Inventory/Inventory.js b/awx/ui/src/screens/Inventory/Inventory.js index 2e18bf1811..c0bf58c39b 100644 --- a/awx/ui/src/screens/Inventory/Inventory.js +++ b/awx/ui/src/screens/Inventory/Inventory.js @@ -16,6 +16,7 @@ import ContentLoading from 'components/ContentLoading'; import JobList from 'components/JobList'; import RoutedTabs from 'components/RoutedTabs'; import { ResourceAccessList } from 'components/ResourceAccessList'; +import RelatedTemplateList from 'components/RelatedTemplateList'; import { InventoriesAPI } from 'api'; import InventoryDetail from './InventoryDetail'; import InventoryEdit from './InventoryEdit'; @@ -69,6 +70,7 @@ function Inventory({ setBreadcrumb }) { link: `${match.url}/jobs`, id: 5, }, + { name: t`Job Templates`, link: `${match.url}/job_templates`, id: 6 }, ]; if (hasContentLoading) { @@ -172,6 +174,14 @@ function Inventory({ setBreadcrumb }) { ]} /> , + + + , {match.params.id && ( diff --git a/awx/ui/src/screens/Inventory/Inventory.test.js b/awx/ui/src/screens/Inventory/Inventory.test.js index ee35ad6139..55a37b6c39 100644 --- a/awx/ui/src/screens/Inventory/Inventory.test.js +++ b/awx/ui/src/screens/Inventory/Inventory.test.js @@ -31,8 +31,27 @@ describe('', () => { await act(async () => { wrapper = mountWithContexts( {}} />); }); - await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 
0); - await waitForElement(wrapper, '.pf-c-tabs__item', (el) => el.length === 7); + wrapper.update(); + expect(wrapper.find('Inventory').length).toBe(1); + expect(wrapper.find('RoutedTabs li').length).toBe(8); + }); + + test('should render expected tabs', async () => { + const expectedTabs = [ + 'Back to Inventories', + 'Details', + 'Access', + 'Groups', + 'Hosts', + 'Jobs', + 'Job Templates', + ]; + await act(async () => { + wrapper = mountWithContexts( {}} />); + }); + wrapper.find('RoutedTabs li').forEach((tab, index) => { + expect(tab.text()).toEqual(expectedTabs[index]); + }); }); test('should show content error when user attempts to navigate to erroneous route', async () => { diff --git a/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js b/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js index b1c6bcd032..22108eb30d 100644 --- a/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js +++ b/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js @@ -5,6 +5,7 @@ import { Card, PageSection, DropdownItem } from '@patternfly/react-core'; import { InventoriesAPI } from 'api'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import AlertModal from 'components/AlertModal'; import DatalistToolbar from 'components/DataListToolbar'; import ErrorDetail from 'components/ErrorDetail'; @@ -29,6 +30,7 @@ const QS_CONFIG = getQSConfig('inventory', { function InventoryList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { result: { @@ -112,6 +114,18 @@ function InventoryList() { clearSelected(); }; + const handleCopy = useCallback( + (newInventoryId) => { + addToast({ + id: newInventoryId, + title: t`Inventory copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const hasContentLoading = isDeleteLoading 
|| isLoading; const canAdd = actions && actions.POST; @@ -149,130 +163,134 @@ function InventoryList() { ); return ( - - - - {t`Name`} - {t`Status`} - {t`Type`} - {t`Organization`} - {t`Actions`} - - } - renderToolbar={(props) => ( - - } - warningMessage={ - - } - />, - ]} - /> - )} - renderRow={(inventory, index) => ( - { - if (!inventory.pending_deletion) { - handleSelect(inventory); + <> + + + + {t`Name`} + {t`Status`} + {t`Type`} + {t`Organization`} + {t`Actions`} + + } + renderToolbar={(props) => ( + + } + warningMessage={ + + } + />, + ]} + /> + )} + renderRow={(inventory, index) => ( + row.id === inventory.id)} - /> - )} - emptyStateControls={canAdd && addButton} - /> - - - {t`Failed to delete one or more inventories.`} - - - + onSelect={() => { + if (!inventory.pending_deletion) { + handleSelect(inventory); + } + }} + onCopy={handleCopy} + isSelected={selected.some((row) => row.id === inventory.id)} + /> + )} + emptyStateControls={canAdd && addButton} + /> + + + {t`Failed to delete one or more inventories.`} + + + + + ); } diff --git a/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js b/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js index 49a0456e8d..c692c32f51 100644 --- a/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js +++ b/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js @@ -18,6 +18,7 @@ function InventoryListItem({ rowIndex, isSelected, onSelect, + onCopy, detailUrl, fetchInventories, }) { @@ -30,11 +31,14 @@ function InventoryListItem({ const [isCopying, setIsCopying] = useState(false); const copyInventory = useCallback(async () => { - await InventoriesAPI.copy(inventory.id, { + const response = await InventoriesAPI.copy(inventory.id, { name: `${inventory.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchInventories(); - }, [inventory.id, inventory.name, fetchInventories]); + }, [inventory.id, inventory.name, fetchInventories, 
onCopy]); const handleCopyStart = useCallback(() => { setIsCopying(true); diff --git a/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js b/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js index d958f82a9e..1c435095b3 100644 --- a/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js +++ b/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js @@ -13,6 +13,7 @@ import { ActionsTd, ActionItem, TdBreakWord } from 'components/PaginatedTable'; import StatusLabel from 'components/StatusLabel'; import JobCancelButton from 'components/JobCancelButton'; import { formatDateString } from 'util/dates'; +import { isJobRunning } from 'util/jobs'; import InventorySourceSyncButton from '../shared/InventorySourceSyncButton'; const ExclamationTriangleIcon = styled(PFExclamationTriangleIcon)` @@ -64,6 +65,7 @@ function InventorySourceListItem({ rowIndex, isSelected, onSelect, + disable: isJobRunning(source.status), }} /> diff --git a/awx/ui/src/screens/Inventory/SmartInventory.js b/awx/ui/src/screens/Inventory/SmartInventory.js index 146c0e3868..952cf5dc31 100644 --- a/awx/ui/src/screens/Inventory/SmartInventory.js +++ b/awx/ui/src/screens/Inventory/SmartInventory.js @@ -19,6 +19,7 @@ import ContentLoading from 'components/ContentLoading'; import JobList from 'components/JobList'; import { ResourceAccessList } from 'components/ResourceAccessList'; import RoutedTabs from 'components/RoutedTabs'; +import RelatedTemplateList from 'components/RelatedTemplateList'; import SmartInventoryDetail from './SmartInventoryDetail'; import SmartInventoryEdit from './SmartInventoryEdit'; import SmartInventoryHosts from './SmartInventoryHosts'; @@ -70,6 +71,7 @@ function SmartInventory({ setBreadcrumb }) { link: `${match.url}/jobs`, id: 3, }, + { name: t`Job Templates`, link: `${match.url}/job_templates`, id: 4 }, ]; if (hasContentLoading) { @@ -155,6 +157,14 @@ function SmartInventory({ setBreadcrumb }) { }} /> , + + 
+ , {!hasContentLoading && ( diff --git a/awx/ui/src/screens/Inventory/SmartInventory.test.js b/awx/ui/src/screens/Inventory/SmartInventory.test.js index 13bb6426c7..9d5ae971c4 100644 --- a/awx/ui/src/screens/Inventory/SmartInventory.test.js +++ b/awx/ui/src/screens/Inventory/SmartInventory.test.js @@ -32,8 +32,26 @@ describe('', () => { await act(async () => { wrapper = mountWithContexts( {}} />); }); - await waitForElement(wrapper, 'SmartInventory'); - await waitForElement(wrapper, '.pf-c-tabs__item', (el) => el.length === 5); + wrapper.update(); + expect(wrapper.find('SmartInventory').length).toBe(1); + expect(wrapper.find('RoutedTabs li').length).toBe(6); + }); + + test('should render expected tabs', async () => { + const expectedTabs = [ + 'Back to Inventories', + 'Details', + 'Access', + 'Hosts', + 'Jobs', + 'Job Templates', + ]; + await act(async () => { + wrapper = mountWithContexts( {}} />); + }); + wrapper.find('RoutedTabs li').forEach((tab, index) => { + expect(tab.text()).toEqual(expectedTabs[index]); + }); }); test('should show content error when api throws an error', async () => { diff --git a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js index 2a611891ae..747e7bd058 100644 --- a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js +++ b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js @@ -25,25 +25,33 @@ function SmartInventoryHostList({ inventory }) { const location = useLocation(); const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false); const { - result: { hosts, count }, + result: { hosts, count, moduleOptions }, error: contentError, isLoading, request: fetchHosts, } = useRequest( useCallback(async () => { const params = parseQueryString(QS_CONFIG, location.search); - const { - data: { results, count: hostCount }, - } = await InventoriesAPI.readHosts(inventory.id, params); + 
const [ + { + data: { results, count: hostCount }, + }, + adHocOptions, + ] = await Promise.all([ + InventoriesAPI.readHosts(inventory.id, params), + InventoriesAPI.readAdHocOptions(inventory.id), + ]); return { hosts: results, count: hostCount, + moduleOptions: adHocOptions.data.actions.GET.module_name.choices, }; }, [location.search, inventory.id]), { hosts: [], count: 0, + moduleOptions: [], } ); @@ -91,6 +99,7 @@ function SmartInventoryHostList({ inventory }) { adHocItems={selected} hasListItems={count > 0} onLaunchLoading={setIsAdHocLaunchLoading} + moduleOptions={moduleOptions} />, ] : [] diff --git a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js index 1639c80f50..0b87981836 100644 --- a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js +++ b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js @@ -27,6 +27,21 @@ describe('', () => { InventoriesAPI.readHosts.mockResolvedValue({ data: mockHosts, }); + InventoriesAPI.readAdHocOptions.mockResolvedValue({ + data: { + actions: { + GET: { + module_name: { + choices: [ + ['command', 'command'], + ['shell', 'shell'], + ], + }, + }, + POST: {}, + }, + }, + }); await act(async () => { wrapper = mountWithContexts( diff --git a/awx/ui/src/screens/Job/JobDetail/JobDetail.js b/awx/ui/src/screens/Job/JobDetail/JobDetail.js index 1c9d7cac90..76e8329bf4 100644 --- a/awx/ui/src/screens/Job/JobDetail/JobDetail.js +++ b/awx/ui/src/screens/Job/JobDetail/JobDetail.js @@ -9,6 +9,7 @@ import styled from 'styled-components'; import { useConfig } from 'contexts/Config'; import AlertModal from 'components/AlertModal'; import { + DeletedDetail, DetailList, Detail, UserDateDetail, @@ -158,7 +159,7 @@ function JobDetail({ job, inventorySourceLabels }) { value={jobTypes[job.type]} /> - {inventory && ( + {inventory ? 
( } /> + ) : ( + )} {inventory_source && ( <> @@ -215,7 +218,7 @@ function JobDetail({ job, inventorySourceLabels }) { } /> )} - {project && ( + {project ? ( <> + ) : ( + )} {scmBranch && ( ', () => { onUnmount); + + if (hasQueryParams) { + title = t`The search filter did not produce any results…`; + message = t`Please try another search using the filter above`; + icon = SearchIcon; + } else if (isJobRunning) { + title = t`Waiting for job output…`; + } else { + title = t`No output found for this job.`; + } + + return ( + + ); +} diff --git a/awx/ui/src/screens/Job/JobOutput/JobEvent.js b/awx/ui/src/screens/Job/JobOutput/JobEvent.js index 3516f24749..8f3dd81f8e 100644 --- a/awx/ui/src/screens/Job/JobOutput/JobEvent.js +++ b/awx/ui/src/screens/Job/JobOutput/JobEvent.js @@ -17,11 +17,15 @@ function JobEvent({ isCollapsed, onToggleCollapsed, hasChildren, + jobStatus, }) { const numOutputLines = lineTextHtml?.length || 0; useEffect(() => { - measure(); - }, [numOutputLines, isCollapsed, measure]); + const timeout = setTimeout(measure, 0); + return () => { + clearTimeout(timeout); + }; + }, [numOutputLines, isCollapsed, measure, jobStatus]); let toggleLineIndex = -1; if (hasChildren) { diff --git a/awx/ui/src/screens/Job/JobOutput/JobOutput.js b/awx/ui/src/screens/Job/JobOutput/JobOutput.js index 1173da6c80..c5eb8ca217 100644 --- a/awx/ui/src/screens/Job/JobOutput/JobOutput.js +++ b/awx/ui/src/screens/Job/JobOutput/JobOutput.js @@ -30,6 +30,7 @@ import JobEventSkeleton from './JobEventSkeleton'; import PageControls from './PageControls'; import HostEventModal from './HostEventModal'; import JobOutputSearch from './JobOutputSearch'; +import EmptyOutput from './EmptyOutput'; import { HostStatusBar, OutputToolbar } from './shared'; import getLineTextHtml from './getLineTextHtml'; import connectJobSocket, { closeWebSocket } from './connectJobSocket'; @@ -220,6 +221,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { 
...Object.values(siblingRequests.current || {}), ...Object.values(numEventsRequests.current || {}), ]; + setHasContentLoading(true); // prevents "no content found" screen from flashing Promise.all(pendingRequests).then(() => { setRemoteRowCount(0); clearLoadedEvents(); @@ -509,6 +511,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { onToggleCollapsed={() => { toggleNodeIsCollapsed(event.uuid, !node.isCollapsed); }} + jobStatus={jobStatus} /> ) : ( - {({ onRowsRendered, registerChild }) => ( - - {({ width, height }) => ( - <> - {hasContentLoading ? ( -
- -
- ) : ( - { - registerChild(ref); - listRef.current = ref; - }} - deferredMeasurementCache={cache} - height={height || 1} - onRowsRendered={onRowsRendered} - rowCount={totalNonCollapsedRows + wsEvents.length} - rowHeight={cache.rowHeight} - rowRenderer={rowRenderer} - scrollToAlignment="start" - width={width || 1} - overscanRowCount={20} - onScroll={handleScroll} - /> - )} - - )} -
- )} + {({ onRowsRendered, registerChild }) => { + if ( + !hasContentLoading && + remoteRowCount + wsEvents.length === 0 + ) { + return ( + 1} + isJobRunning={isJobRunning(jobStatus)} + onUnmount={() => { + if (listRef.current?.recomputeRowHeights) { + listRef.current.recomputeRowHeights(); + } + }} + /> + ); + } + return ( + + {({ width, height }) => ( + <> + {hasContentLoading ? ( +
+ +
+ ) : ( + { + registerChild(ref); + listRef.current = ref; + }} + deferredMeasurementCache={cache} + height={height || 1} + onRowsRendered={onRowsRendered} + rowCount={totalNonCollapsedRows + wsEvents.length} + rowHeight={cache.rowHeight} + rowRenderer={rowRenderer} + scrollToAlignment="start" + width={width || 1} + overscanRowCount={20} + onScroll={handleScroll} + /> + )} + + )} +
+ ); + }} diff --git a/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js b/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js index 81c3845e42..defa2ef920 100644 --- a/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js +++ b/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js @@ -1,14 +1,8 @@ -import React, { useCallback, useEffect, useState } from 'react'; +import React, { useCallback, useEffect } from 'react'; import { useLocation, useRouteMatch } from 'react-router-dom'; import { t } from '@lingui/macro'; -import { - Alert, - AlertActionCloseButton, - AlertGroup, - Card, - PageSection, -} from '@patternfly/react-core'; +import { Card, PageSection } from '@patternfly/react-core'; import { NotificationTemplatesAPI } from 'api'; import PaginatedTable, { HeaderRow, @@ -22,6 +16,7 @@ import ErrorDetail from 'components/ErrorDetail'; import DataListToolbar from 'components/DataListToolbar'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { getQSConfig, parseQueryString } from 'util/qs'; import NotificationTemplateListItem from './NotificationTemplateListItem'; @@ -34,7 +29,8 @@ const QS_CONFIG = getQSConfig('notification-templates', { function NotificationTemplatesList() { const location = useLocation(); const match = useRouteMatch(); - const [testToasts, setTestToasts] = useState([]); + // const [testToasts, setTestToasts] = useState([]); + const { addToast, Toast, toastProps } = useToast(); const addUrl = `${match.url}/add`; @@ -107,18 +103,7 @@ function NotificationTemplatesList() { clearSelected(); }; - const addTestToast = useCallback((notification) => { - setTestToasts((oldToasts) => [...oldToasts, notification]); - }, []); - - const removeTestToast = (notificationId) => { - 
setTestToasts((oldToasts) => - oldToasts.filter((toast) => toast.id !== notificationId) - ); - }; - const canAdd = actions && actions.POST; - const alertGroupDataCy = 'notification-template-alerts'; return ( <> @@ -198,7 +183,35 @@ function NotificationTemplatesList() { } renderRow={(template, index) => ( { + if (notification.status === 'pending') { + return; + } + + let message; + if (notification.status === 'successful') { + message = t`Notification sent successfully`; + } + if (notification.status === 'failed') { + if (notification?.error === 'timed out') { + message = t`Notification timed out`; + } else { + message = notification.error; + } + } + + addToast({ + id: notification.id, + title: + notification.summary_fields.notification_template.name, + variant: + notification.status === 'failed' + ? AlertVariant.danger + : AlertVariant.success, + hasTimeout: notification.status !== 'failed', + message, + }); + }} key={template.id} fetchTemplates={fetchTemplates} template={template} @@ -223,39 +236,7 @@ function NotificationTemplatesList() { {t`Failed to delete one or more notification template.`} - - {testToasts - .filter((notification) => notification.status !== 'pending') - .map((notification) => ( - removeTestToast(notification.id)} - /> - } - onTimeout={() => removeTestToast(notification.id)} - timeout={notification.status !== 'failed'} - title={notification.summary_fields.notification_template.name} - variant={notification.status === 'failed' ? 'danger' : 'success'} - key={`notification-template-alert-${notification.id}`} - ouiaId={`notification-template-alert-${notification.id}`} - > - <> - {notification.status === 'successful' && ( -

{t`Notification sent successfully`}

- )} - {notification.status === 'failed' && - notification?.error === 'timed out' && ( -

{t`Notification timed out`}

- )} - {notification.status === 'failed' && - notification?.error !== 'timed out' && ( -

{notification.error}

- )} - -
- ))} -
+ ); } diff --git a/awx/ui/src/screens/Project/Project.js b/awx/ui/src/screens/Project/Project.js index 8426ef59df..a3156ca151 100644 --- a/awx/ui/src/screens/Project/Project.js +++ b/awx/ui/src/screens/Project/Project.js @@ -19,10 +19,10 @@ import ContentLoading from 'components/ContentLoading'; import NotificationList from 'components/NotificationList'; import { ResourceAccessList } from 'components/ResourceAccessList'; import { Schedules } from 'components/Schedule'; +import RelatedTemplateList from 'components/RelatedTemplateList'; import { OrganizationsAPI, ProjectsAPI } from 'api'; import ProjectDetail from './ProjectDetail'; import ProjectEdit from './ProjectEdit'; -import ProjectJobTemplatesList from './ProjectJobTemplatesList'; function Project({ setBreadcrumb }) { const { me = {} } = useConfig(); @@ -102,6 +102,10 @@ function Project({ setBreadcrumb }) { }, { name: t`Details`, link: `/projects/${id}/details` }, { name: t`Access`, link: `/projects/${id}/access` }, + { + name: t`Job Templates`, + link: `/projects/${id}/job_templates`, + }, ]; if (canSeeNotificationsTab) { @@ -110,12 +114,6 @@ function Project({ setBreadcrumb }) { link: `/projects/${id}/notifications`, }); } - - tabsArray.push({ - name: t`Job Templates`, - link: `/projects/${id}/job_templates`, - }); - if (project?.scm_type) { tabsArray.push({ name: t`Schedules`, @@ -176,7 +174,7 @@ function Project({ setBreadcrumb }) {
)} - + {project?.scm_type && project.scm_type !== '' && ( diff --git a/awx/ui/src/screens/Project/Project.test.js b/awx/ui/src/screens/Project/Project.test.js index 4bd7689eba..48621a31ac 100644 --- a/awx/ui/src/screens/Project/Project.test.js +++ b/awx/ui/src/screens/Project/Project.test.js @@ -63,7 +63,7 @@ describe('', () => { '.pf-c-tabs__item-text', (el) => el.length === 6 ); - expect(tabs.at(3).text()).toEqual('Notifications'); + expect(tabs.at(4).text()).toEqual('Notifications'); }); test('notifications tab hidden with reduced permissions', async () => { diff --git a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.js b/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.js deleted file mode 100644 index 199a629c77..0000000000 --- a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.js +++ /dev/null @@ -1,121 +0,0 @@ -import 'styled-components/macro'; -import React from 'react'; -import { Link } from 'react-router-dom'; -import { Button, Tooltip } from '@patternfly/react-core'; -import { Tr, Td } from '@patternfly/react-table'; -import { - ExclamationTriangleIcon, - PencilAltIcon, - RocketIcon, -} from '@patternfly/react-icons'; -import { t } from '@lingui/macro'; -import styled from 'styled-components'; - -import { ActionsTd, ActionItem } from 'components/PaginatedTable'; -import { LaunchButton } from 'components/LaunchButton'; -import Sparkline from 'components/Sparkline'; -import { toTitleCase } from 'util/strings'; - -const ExclamationTriangleIconWarning = styled(ExclamationTriangleIcon)` - color: var(--pf-global--warning-color--100); - margin-left: 18px; -`; - -function ProjectJobTemplateListItem({ - template, - isSelected, - onSelect, - detailUrl, - rowIndex, -}) { - const canLaunch = template.summary_fields.user_capabilities.start; - - const missingResourceIcon = - template.type === 'job_template' && - (!template.summary_fields.project || - 
(!template.summary_fields.inventory && - !template.ask_inventory_on_launch)); - - const missingExecutionEnvironment = - template.type === 'job_template' && - template.custom_virtualenv && - !template.execution_environment; - - return ( -
- - - - - - - {({ handleLaunch, isLaunching }) => ( - - )} - - - - - - - - ); -} - -export { ProjectJobTemplateListItem as _ProjectJobTemplateListItem }; -export default ProjectJobTemplateListItem; diff --git a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.test.js b/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.test.js deleted file mode 100644 index d61ed8f720..0000000000 --- a/awx/ui/src/screens/Project/ProjectJobTemplatesList/ProjectJobTemplatesListItem.test.js +++ /dev/null @@ -1,262 +0,0 @@ -import React from 'react'; - -import { createMemoryHistory } from 'history'; -import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; -import ProjectJobTemplatesListItem from './ProjectJobTemplatesListItem'; - -describe('', () => { - test('launch button shown to users with start capabilities', () => { - const wrapper = mountWithContexts( -
- - - {template.name} - {missingResourceIcon && ( - - - - )} - {missingExecutionEnvironment && ( - - - - )} - - {toTitleCase(template.type)} - -
- - - -
- ); - expect(wrapper.find('LaunchButton').exists()).toBeTruthy(); - }); - - test('launch button hidden from users without start capabilities', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('LaunchButton').exists()).toBeFalsy(); - }); - - test('edit button shown to users with edit capabilities', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('PencilAltIcon').exists()).toBeTruthy(); - }); - - test('edit button hidden from users without edit capabilities', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('PencilAltIcon').exists()).toBeFalsy(); - }); - - test('missing resource icon is shown.', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('ExclamationTriangleIcon').exists()).toBe(true); - }); - - test('missing resource icon is not shown when there is a project and an inventory.', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('ExclamationTriangleIcon').exists()).toBe(false); - }); - - test('missing resource icon is not shown when inventory is prompt_on_launch, and a project', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('ExclamationTriangleIcon').exists()).toBe(false); - }); - test('missing resource icon is not shown type is workflow_job_template', () => { - const wrapper = mountWithContexts( - - - - -
- ); - expect(wrapper.find('ExclamationTriangleIcon').exists()).toBe(false); - }); - test('clicking on template from project templates list navigates properly', () => { - const history = createMemoryHistory({ - initialEntries: ['/projects/1/job_templates'], - }); - const wrapper = mountWithContexts( - - - - -
, - { context: { router: { history } } } - ); - wrapper.find('Link').simulate('click', { button: 0 }); - expect(history.location.pathname).toEqual( - '/templates/job_template/2/details' - ); - }); - - test('should render warning about missing execution environment', () => { - const wrapper = mountWithContexts( - - - - -
- ); - - expect( - wrapper.find('.missing-execution-environment').prop('content') - ).toEqual( - 'Custom virtual environment /var/lib/awx/env must be replaced by an execution environment.' - ); - }); -}); diff --git a/awx/ui/src/screens/Project/ProjectJobTemplatesList/index.js b/awx/ui/src/screens/Project/ProjectJobTemplatesList/index.js deleted file mode 100644 index d0be30040f..0000000000 --- a/awx/ui/src/screens/Project/ProjectJobTemplatesList/index.js +++ /dev/null @@ -1 +0,0 @@ -export { default } from './ProjectJobTemplatesList'; diff --git a/awx/ui/src/screens/Project/ProjectList/ProjectList.js b/awx/ui/src/screens/Project/ProjectList/ProjectList.js index e71571f2c6..6c3e829048 100644 --- a/awx/ui/src/screens/Project/ProjectList/ProjectList.js +++ b/awx/ui/src/screens/Project/ProjectList/ProjectList.js @@ -19,6 +19,7 @@ import PaginatedTable, { } from 'components/PaginatedTable'; import useSelected from 'hooks/useSelected'; import useExpanded from 'hooks/useExpanded'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails'; import { getQSConfig, parseQueryString } from 'util/qs'; import useWsProjects from './useWsProjects'; @@ -34,6 +35,7 @@ const QS_CONFIG = getQSConfig('project', { function ProjectList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { request: fetchUpdatedProject, @@ -123,6 +125,18 @@ function ProjectList() { } ); + const handleCopy = useCallback( + (newId) => { + addToast({ + id: newId, + title: t`Project copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleProjectDelete = async () => { await deleteProjects(); setSelected([]); @@ -255,6 +269,7 @@ function ProjectList() { detailUrl={`${match.url}/${project.id}`} isSelected={selected.some((row) => row.id === project.id)} onSelect={() => handleSelect(project)} + 
onCopy={handleCopy} rowIndex={index} onRefreshRow={(projectId) => fetchUpdatedProject(projectId)} /> @@ -267,6 +282,7 @@ function ProjectList() { /> + {deletionError && ( { - await ProjectsAPI.copy(project.id, { + const response = await ProjectsAPI.copy(project.id, { name: `${project.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchProjects(); - }, [project.id, project.name, fetchProjects]); + }, [project.id, project.name, fetchProjects, onCopy]); const generateLastJobTooltip = (job) => ( <> @@ -168,6 +172,7 @@ function ProjectListItem({ rowIndex, isSelected, onSelect, + disable: isJobRunning(job?.status), }} dataLabel={t`Selected`} /> diff --git a/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js b/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js index 9b0dda4233..97110e3169 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js @@ -69,6 +69,7 @@ describe('', () => { assertDetail(wrapper, 'Default Project Update Timeout', '0 seconds'); assertDetail(wrapper, 'Per-Host Ansible Fact Cache Timeout', '0 seconds'); assertDetail(wrapper, 'Maximum number of forks per job', '200'); + assertDetail(wrapper, 'Expose host paths for Container Groups', 'Off'); assertVariableDetail( wrapper, 'Ansible Modules Allowed for Ad Hoc Jobs', diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js index fec8d6cdb8..22066243b7 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js @@ -212,6 +212,10 @@ function JobsEdit() { name="AWX_ISOLATION_SHOW_PATHS" config={jobs.AWX_ISOLATION_SHOW_PATHS} /> + {submitError && } {revertError && } diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json b/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json index 
749249494a..9736f3794b 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json @@ -27,6 +27,7 @@ "AWX_ISOLATION_SHOW_PATHS": [], "AWX_ROLES_ENABLED": true, "AWX_SHOW_PLAYBOOK_LINKS": false, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false, "AWX_TASK_ENV": {}, "DEFAULT_INVENTORY_UPDATE_TIMEOUT": 0, "DEFAULT_JOB_TIMEOUT": 0, diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js index 69ef9dc029..99a9c993af 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js @@ -47,6 +47,15 @@ function SubscriptionModal({ subscriptionCreds.username, subscriptionCreds.password ); + + // Ensure unique ids for each subscription + // because it is possible to have multiple + // subscriptions with the same pool_id + let repeatId = 1; + data.forEach((i) => { + i.id = repeatId++; + }); + return data; }, []), // eslint-disable-line react-hooks/exhaustive-deps [] @@ -64,17 +73,9 @@ function SubscriptionModal({ fetchSubscriptions(); }, [fetchSubscriptions]); - const handleSelect = (item) => { - if (selected.some((s) => s.pool_id === item.pool_id)) { - setSelected(selected.filter((s) => s.pool_id !== item.pool_id)); - } else { - setSelected(selected.concat(item)); - } - }; - useEffect(() => { - if (selectedSubscription?.pool_id) { - handleSelect({ pool_id: selectedSubscription.pool_id }); + if (selectedSubscription?.id) { + setSelected([selectedSubscription]); } }, []); // eslint-disable-line react-hooks/exhaustive-deps @@ -150,19 +151,18 @@ function SubscriptionModal({ {subscriptions.map((subscription) => ( handleSelect(subscription), + onSelect: () => setSelected([subscription]), isSelected: selected.some( - (row) => row.pool_id === subscription.pool_id + (row) 
=> row.id === subscription.id ), variant: 'radio', - rowIndex: `row-${subscription.pool_id}`, + rowIndex: `row-${subscription.id}`, }} /> {subscription.subscription_name} diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js index 6667a776ff..4c8fa843f2 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js @@ -125,14 +125,14 @@ describe('', () => { password: '$encrypted', }} selectedSubscription={{ - pool_id: 8, + id: 2, }} /> ); await waitForElement(wrapper, 'table'); - expect(wrapper.find('tr[id=7] input').prop('checked')).toBe(false); - expect(wrapper.find('tr[id=8] input').prop('checked')).toBe(true); - expect(wrapper.find('tr[id=9] input').prop('checked')).toBe(false); + expect(wrapper.find('tr[id="row-1"] input').prop('checked')).toBe(false); + expect(wrapper.find('tr[id="row-2"] input').prop('checked')).toBe(true); + expect(wrapper.find('tr[id="row-3"] input').prop('checked')).toBe(false); }); }); diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js index 2d9aebc631..333a7939fd 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js @@ -227,7 +227,7 @@ function SubscriptionStep() { username: username.value, password: password.value, }} - selectedSubscripion={subscription?.value} + selectedSubscription={subscription?.value} onClose={closeModal} onConfirm={(value) => subscriptionHelpers.setValue(value)} /> diff --git a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json index 21cdae90c6..ab0bc3f8e1 100644 
--- a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json @@ -276,6 +276,15 @@ "category_slug": "jobs", "default": false }, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": { + "type": "boolean", + "required": false, + "label": "Expose host paths for Container Groups", + "help_text": "Expose paths via hostPath for the Pods created by a Container Group. HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. ", + "category": "Jobs", + "category_slug": "jobs", + "default": false + }, "GALAXY_IGNORE_CERTS": { "type": "boolean", "required": false, @@ -3973,6 +3982,14 @@ "category_slug": "jobs", "defined_in_file": false }, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": { + "type": "boolean", + "label": "Expose host paths for Container Groups", + "help_text": "Expose paths via hostPath for the Pods created by a Container Group. HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. 
", + "category": "Jobs", + "category_slug": "jobs", + "defined_in_file": false + }, "GALAXY_IGNORE_CERTS": { "type": "boolean", "label": "Ignore Ansible Galaxy SSL Certificate Verification", diff --git a/awx/ui/src/screens/Setting/shared/data.allSettings.json b/awx/ui/src/screens/Setting/shared/data.allSettings.json index 4715c4e03e..555713c239 100644 --- a/awx/ui/src/screens/Setting/shared/data.allSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettings.json @@ -297,5 +297,6 @@ "users":{"fields":["username"],"adj_list":[]}, "instances":{"fields":["hostname"],"adj_list":[]} }, - "DEFAULT_EXECUTION_ENVIRONMENT": 1 + "DEFAULT_EXECUTION_ENVIRONMENT": 1, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false } diff --git a/awx/ui/src/screens/Setting/shared/data.jobSettings.json b/awx/ui/src/screens/Setting/shared/data.jobSettings.json index 1815cc12b7..e24eedb36d 100644 --- a/awx/ui/src/screens/Setting/shared/data.jobSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.jobSettings.json @@ -21,5 +21,6 @@ "DEFAULT_INVENTORY_UPDATE_TIMEOUT": 0, "DEFAULT_PROJECT_UPDATE_TIMEOUT": 0, "ANSIBLE_FACT_CACHE_TIMEOUT": 0, - "MAX_FORKS": 200 + "MAX_FORKS": 200, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false } diff --git a/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js b/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js index e5bf1980c4..f3c84a6371 100644 --- a/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js +++ b/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js @@ -116,6 +116,13 @@ function SurveyReorderModal({ const defaultAnswer = (q) => { let component = null; switch (q.type) { + case 'password': + component = ( + + {t`encrypted`.toUpperCase()} + + ); + break; case 'textarea': component = (