diff --git a/.gitignore b/.gitignore index 7e0f07a83c..9e7a41e6cc 100644 --- a/.gitignore +++ b/.gitignore @@ -34,8 +34,6 @@ awx/ui_next/coverage/ awx/ui_next/build awx/ui_next/.env.local rsyslog.pid -/tower-license -/tower-license/** tools/prometheus/data tools/docker-compose/Dockerfile @@ -147,3 +145,4 @@ use_dev_supervisor.txt .idea/* *.unison.tmp *.# +/tools/docker-compose/overrides/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 9f04a26ae3..c50243b1e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,22 @@ This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/`. +## 15.0.1 (October 20, 2020) +- Added several optimizations to improve performance for a variety of high-load simultaneous job launch use cases https://github.com/ansible/awx/pull/8403 +- Added the ability to source roles and collections from requirements.yaml files (not just requirements.yml) - https://github.com/ansible/awx/issues/4540 +- awx.awx collection modules now provide a clearer error message for incompatible versions of awxkit - https://github.com/ansible/awx/issues/8127 +- Fixed a bug in notification messages that contain certain unicode characters - https://github.com/ansible/awx/issues/7400 +- Fixed a bug that prevents the deletion of Workflow Approval records - https://github.com/ansible/awx/issues/8305 +- Fixed a bug that broke the selection of webhook credentials - https://github.com/ansible/awx/issues/7892 +- Fixed a bug which can cause confusing behavior for social auth logins across distinct browser tabs - https://github.com/ansible/awx/issues/8154 +- Fixed several bugs in the output of Workflow Job Templates using the `awx export` tool - https://github.com/ansible/awx/issues/7798 https://github.com/ansible/awx/pull/7847 +- Fixed a race condition that can lead to missing hosts when running parallel inventory syncs - https://github.com/ansible/awx/issues/5571 +- Fixed an HTTP 500 error 
when certain LDAP group parameters aren't properly set - https://github.com/ansible/awx/issues/7622 +- Updated a few dependencies in response to several CVEs: + * CVE-2020-7720 + * CVE-2020-7743 + * CVE-2020-7676 + ## 15.0.0 (September 30, 2020) - Added improved support for fetching Ansible collections from private Galaxy content sources (such as https://github.com/ansible/galaxy_ng) - https://github.com/ansible/awx/issues/7813 **Note:** as part of this change, new Organizations created in the AWX API will _no longer_ automatically synchronize roles and collections from galaxy.ansible.com by default. More details on this change can be found at: https://github.com/ansible/awx/issues/8341#issuecomment-707310633 diff --git a/INSTALL.md b/INSTALL.md index 6bc3f869d0..dfbd0cbe7e 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -78,6 +78,8 @@ Before you can run a deployment, you'll need the following installed in your loc - [docker](https://pypi.org/project/docker/) Python module + This is incompatible with `docker-py`. If you have previously installed `docker-py`, please uninstall it. + We use this module instead of `docker-py` because it is what the `docker-compose` Python module requires. 
+- [community.general.docker_image collection](https://docs.ansible.com/ansible/latest/collections/community/general/docker_image_module.html) + + This is only required if you are using Ansible >= 2.10 - [GNU Make](https://www.gnu.org/software/make/) - [Git](https://git-scm.com/) Requires Version 1.8.4+ - Python 3.6+ diff --git a/Makefile b/Makefile index ca76648499..3f1b6876e7 100644 --- a/Makefile +++ b/Makefile @@ -214,7 +214,11 @@ requirements_awx_dev: requirements_collections: mkdir -p $(COLLECTION_BASE) - ansible-galaxy collection install -r requirements/collections_requirements.yml -p $(COLLECTION_BASE) + n=0; \ + until [ "$$n" -ge 5 ]; do \ + ansible-galaxy collection install -r requirements/collections_requirements.yml -p $(COLLECTION_BASE) && break; \ + n=$$((n+1)); \ + done requirements: requirements_ansible requirements_awx requirements_collections @@ -646,9 +650,11 @@ awx/projects: docker-compose-isolated: awx/projects CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml -f tools/docker-isolated-override.yml up +COMPOSE_UP_OPTS ?= + # Docker Compose Development environment docker-compose: docker-auth awx/projects - CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml up --no-recreate awx + CURRENT_UID=$(shell id -u) OS="$(shell docker info | grep 'Operating System')" TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose.yml $(COMPOSE_UP_OPTS) up --no-recreate awx docker-compose-cluster: docker-auth awx/projects CURRENT_UID=$(shell id -u) TAG=$(COMPOSE_TAG) DEV_DOCKER_TAG_BASE=$(DEV_DOCKER_TAG_BASE) docker-compose -f tools/docker-compose-cluster.yml up diff --git a/VERSION b/VERSION index 94188a7483..2bbd2b4b42 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -15.0.0 +15.0.1 diff --git 
a/awx/api/generics.py b/awx/api/generics.py index fce5bb9b49..ac9ab03907 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -47,8 +47,6 @@ from awx.main.utils import ( get_object_or_400, decrypt_field, get_awx_version, - get_licenser, - StubLicense ) from awx.main.utils.db import get_all_field_names from awx.main.views import ApiErrorView @@ -189,7 +187,8 @@ class APIView(views.APIView): ''' Log warning for 400 requests. Add header with elapsed time. ''' - + from awx.main.utils import get_licenser + from awx.main.utils.licensing import OpenLicense # # If the URL was rewritten, and we get a 404, we should entirely # replace the view in the request context with an ApiErrorView() @@ -225,7 +224,8 @@ class APIView(views.APIView): response = super(APIView, self).finalize_response(request, response, *args, **kwargs) time_started = getattr(self, 'time_started', None) response['X-API-Product-Version'] = get_awx_version() - response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), StubLicense) else 'Red Hat Ansible Tower' + response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower' + response['X-API-Node'] = settings.CLUSTER_HOST_ID if time_started: time_elapsed = time.time() - self.time_started diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 2e70d2c8c9..abd30a7fab 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -453,7 +453,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl if 'capability_map' not in self.context: if hasattr(self, 'polymorphic_base'): model = self.polymorphic_base.Meta.model - prefetch_list = self.polymorphic_base._capabilities_prefetch + prefetch_list = self.polymorphic_base.capabilities_prefetch else: model = self.Meta.model prefetch_list = self.capabilities_prefetch @@ -640,12 +640,9 @@ class EmptySerializer(serializers.Serializer): class UnifiedJobTemplateSerializer(BaseSerializer): - # As a base serializer, 
the capabilities prefetch is not used directly - _capabilities_prefetch = [ - 'admin', 'execute', - {'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use', - 'organization.workflow_admin']} - ] + # As a base serializer, the capabilities prefetch is not used directly, + # instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively. + capabilities_prefetch = [] class Meta: model = UnifiedJobTemplate @@ -695,7 +692,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer): serializer.polymorphic_base = self # capabilities prefetch is only valid for these models if isinstance(obj, (JobTemplate, WorkflowJobTemplate)): - serializer.capabilities_prefetch = self._capabilities_prefetch + serializer.capabilities_prefetch = serializer_class.capabilities_prefetch else: serializer.capabilities_prefetch = None return serializer.to_representation(obj) @@ -1333,6 +1330,8 @@ class ProjectOptionsSerializer(BaseSerializer): scm_type = attrs.get('scm_type', u'') or u'' if self.instance and not scm_type: valid_local_paths.append(self.instance.local_path) + if self.instance and scm_type and "local_path" in attrs and self.instance.local_path != attrs['local_path']: + errors['local_path'] = _(f'Cannot change local_path for {scm_type}-based projects') if scm_type: attrs.pop('local_path', None) if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths: diff --git a/awx/api/templates/api/dashboard_jobs_graph_view.md b/awx/api/templates/api/dashboard_jobs_graph_view.md index 2e510b2a56..baadd4d561 100644 --- a/awx/api/templates/api/dashboard_jobs_graph_view.md +++ b/awx/api/templates/api/dashboard_jobs_graph_view.md @@ -8,7 +8,7 @@ The `period` of the data can be adjusted with: ?period=month -Where `month` can be replaced with `week`, or `day`. `month` is the default. +Where `month` can be replaced with `week`, `two_weeks`, or `day`. `month` is the default. 
The type of job can be filtered with: diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 2745f87095..636e68e4bd 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -15,6 +15,7 @@ from awx.api.views import ( ApiV2PingView, ApiV2ConfigView, ApiV2SubscriptionView, + ApiV2AttachView, AuthView, UserMeList, DashboardView, @@ -94,6 +95,7 @@ v2_urls = [ url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), + url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), url(r'^auth/$', AuthView.as_view()), url(r'^me/$', UserMeList.as_view(), name='user_me_list'), url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 4f436c8f0e..87a12a7d51 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -153,6 +153,7 @@ from awx.api.views.root import ( # noqa ApiV2PingView, ApiV2ConfigView, ApiV2SubscriptionView, + ApiV2AttachView, ) from awx.api.views.webhooks import ( # noqa WebhookKeyView, @@ -316,6 +317,9 @@ class DashboardJobsGraphView(APIView): if period == 'month': end_date = start_date - dateutil.relativedelta.relativedelta(months=1) interval = 'days' + elif period == 'two_weeks': + end_date = start_date - dateutil.relativedelta.relativedelta(weeks=2) + interval = 'days' elif period == 'week': end_date = start_date - dateutil.relativedelta.relativedelta(weeks=1) interval = 'days' @@ -3043,7 +3047,7 @@ class WorkflowJobTemplateNodeCreateApproval(RetrieveAPIView): approval_template, context=self.get_serializer_context() ).data - return Response(data, status=status.HTTP_200_OK) + return Response(data, status=status.HTTP_201_CREATED) def check_permissions(self, request): obj = self.get_object().workflow_job_template @@ -4253,7 +4257,9 @@ class 
NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): obj = self.get_object() if not request.user.can_access(self.model, 'delete', obj): return Response(status=status.HTTP_404_NOT_FOUND) - if obj.notifications.filter(status='pending').exists(): + + hours_old = now() - dateutil.relativedelta.relativedelta(hours=8) + if obj.notifications.filter(status='pending', created__gt=hours_old).exists(): return Response({"error": _("Delete not allowed while there are pending notifications")}, status=status.HTTP_405_METHOD_NOT_ALLOWED) return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs) diff --git a/awx/api/views/root.py b/awx/api/views/root.py index aeda19cdeb..0f5e7e6cdd 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -1,9 +1,10 @@ # Copyright (c) 2018 Ansible, Inc. # All Rights Reserved. +import base64 +import json import logging import operator -import json from collections import OrderedDict from django.conf import settings @@ -29,8 +30,8 @@ from awx.main.utils import ( get_custom_venv_choices, to_python_boolean, ) +from awx.main.utils.licensing import validate_entitlement_manifest from awx.api.versioning import reverse, drf_reverse -from awx.conf.license import get_license from awx.main.constants import PRIVILEGE_ESCALATION_METHODS from awx.main.models import ( Project, @@ -178,7 +179,7 @@ class ApiV2PingView(APIView): class ApiV2SubscriptionView(APIView): permission_classes = (IsAuthenticated,) - name = _('Configuration') + name = _('Subscriptions') swagger_topic = 'System Configuration' def check_permissions(self, request): @@ -189,18 +190,18 @@ class ApiV2SubscriptionView(APIView): def post(self, request): from awx.main.utils.common import get_licenser data = request.data.copy() - if data.get('rh_password') == '$encrypted$': - data['rh_password'] = settings.REDHAT_PASSWORD + if data.get('subscriptions_password') == '$encrypted$': + data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD try: - user, pw = 
data.get('rh_username'), data.get('rh_password') + user, pw = data.get('subscriptions_username'), data.get('subscriptions_password') with set_environ(**settings.AWX_TASK_ENV): validated = get_licenser().validate_rh(user, pw) if user: - settings.REDHAT_USERNAME = data['rh_username'] + settings.SUBSCRIPTIONS_USERNAME = data['subscriptions_username'] if pw: - settings.REDHAT_PASSWORD = data['rh_password'] + settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password'] except Exception as exc: - msg = _("Invalid License") + msg = _("Invalid Subscription") if ( isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401 @@ -213,13 +214,63 @@ class ApiV2SubscriptionView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid license submitted."), + logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response(validated) +class ApiV2AttachView(APIView): + + permission_classes = (IsAuthenticated,) + name = _('Attach Subscription') + swagger_topic = 'System Configuration' + + def check_permissions(self, request): + super(ApiV2AttachView, self).check_permissions(request) + if not request.user.is_superuser and request.method.lower() not in {'options', 'head'}: + self.permission_denied(request) # Raises PermissionDenied exception. 
+ + def post(self, request): + data = request.data.copy() + pool_id = data.get('pool_id', None) + if not pool_id: + return Response({"error": _("No subscription pool ID provided.")}, status=status.HTTP_400_BAD_REQUEST) + user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None) + pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None) + if pool_id and user and pw: + from awx.main.utils.common import get_licenser + data = request.data.copy() + try: + with set_environ(**settings.AWX_TASK_ENV): + validated = get_licenser().validate_rh(user, pw) + except Exception as exc: + msg = _("Invalid Subscription") + if ( + isinstance(exc, requests.exceptions.HTTPError) and + getattr(getattr(exc, 'response', None), 'status_code', None) == 401 + ): + msg = _("The provided credentials are invalid (HTTP 401).") + elif isinstance(exc, requests.exceptions.ProxyError): + msg = _("Unable to connect to proxy server.") + elif isinstance(exc, requests.exceptions.ConnectionError): + msg = _("Could not connect to subscription service.") + elif isinstance(exc, (ValueError, OSError)) and exc.args: + msg = exc.args[0] + else: + logger.exception(smart_text(u"Invalid subscription submitted."), + extra=dict(actor=request.user.username)) + return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) + for sub in validated: + if sub['pool_id'] == pool_id: + sub['valid_key'] = True + settings.LICENSE = sub + return Response(sub) + + return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST) + + class ApiV2ConfigView(APIView): permission_classes = (IsAuthenticated,) @@ -234,15 +285,11 @@ class ApiV2ConfigView(APIView): def get(self, request, format=None): '''Return various sitewide configuration settings''' - if request.user.is_superuser or request.user.is_system_auditor: - license_data = get_license(show_key=True) - else: - license_data = get_license(show_key=False) + from awx.main.utils.common import get_licenser + license_data = 
get_licenser().validate() + if not license_data.get('valid_key', False): license_data = {} - if license_data and 'features' in license_data and 'activity_streams' in license_data['features']: - # FIXME: Make the final setting value dependent on the feature? - license_data['features']['activity_streams'] &= settings.ACTIVITY_STREAM_ENABLED pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off' @@ -281,9 +328,10 @@ class ApiV2ConfigView(APIView): return Response(data) + def post(self, request): if not isinstance(request.data, dict): - return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST) if "eula_accepted" not in request.data: return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST) try: @@ -300,25 +348,47 @@ class ApiV2ConfigView(APIView): logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) - try: - from awx.main.utils.common import get_licenser - license_data = json.loads(data_actual) - license_data_validated = get_licenser(**license_data).validate() - except Exception: - logger.warning(smart_text(u"Invalid license submitted."), - extra=dict(actor=request.user.username)) - return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) + + from awx.main.utils.common import get_licenser + license_data = json.loads(data_actual) + if 'license_key' in license_data: + return Response({"error": _('Legacy license submitted. 
A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST) + if 'manifest' in license_data: + try: + json_actual = json.loads(base64.b64decode(license_data['manifest'])) + if 'license_key' in json_actual: + return Response( + {"error": _('Legacy license submitted. A subscription manifest is now required.')}, + status=status.HTTP_400_BAD_REQUEST + ) + except Exception: + pass + try: + license_data = validate_entitlement_manifest(license_data['manifest']) + except ValueError as e: + return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST) + except Exception: + logger.exception('Invalid manifest submitted. {}') + return Response({"error": _('Invalid manifest submitted.')}, status=status.HTTP_400_BAD_REQUEST) + + try: + license_data_validated = get_licenser().license_from_manifest(license_data) + except Exception: + logger.warning(smart_text(u"Invalid subscription submitted."), + extra=dict(actor=request.user.username)) + return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) + else: + license_data_validated = get_licenser().validate() # If the license is valid, write it to the database. 
if license_data_validated['valid_key']: - settings.LICENSE = license_data if not settings_registry.is_setting_read_only('TOWER_URL_BASE'): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) - logger.warning(smart_text(u"Invalid license submitted."), + logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) - return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): try: diff --git a/awx/asgi.py b/awx/asgi.py index 698c5f7533..eb141aabdb 100644 --- a/awx/asgi.py +++ b/awx/asgi.py @@ -25,10 +25,12 @@ if MODE == 'production': try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: - raise Exception() - except Exception: + raise ValueError() + except FileNotFoundError: + pass + except ValueError as e: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") - raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") + raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") from e os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings") diff --git a/awx/conf/license.py b/awx/conf/license.py index 6ad1042f9a..3929c37921 100644 --- a/awx/conf/license.py +++ b/awx/conf/license.py @@ -1,18 +1,14 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. 
- __all__ = ['get_license'] def _get_validated_license_data(): - from awx.main.utils.common import get_licenser + from awx.main.utils import get_licenser return get_licenser().validate() -def get_license(show_key=False): +def get_license(): """Return a dictionary representing the active license on this Tower instance.""" - license_data = _get_validated_license_data() - if not show_key: - license_data.pop('license_key', None) - return license_data + return _get_validated_license_data() diff --git a/awx/conf/migrations/0008_subscriptions.py b/awx/conf/migrations/0008_subscriptions.py new file mode 100644 index 0000000000..dacd066b4d --- /dev/null +++ b/awx/conf/migrations/0008_subscriptions.py @@ -0,0 +1,26 @@ +# Generated by Django 2.2.11 on 2020-08-04 15:19 + +import logging + +from django.db import migrations + +from awx.conf.migrations._subscriptions import clear_old_license, prefill_rh_credentials + +logger = logging.getLogger('awx.conf.migrations') + + +def _noop(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + + dependencies = [ + ('conf', '0007_v380_rename_more_settings'), + ] + + + operations = [ + migrations.RunPython(clear_old_license, _noop), + migrations.RunPython(prefill_rh_credentials, _noop) + ] diff --git a/awx/conf/migrations/_subscriptions.py b/awx/conf/migrations/_subscriptions.py new file mode 100644 index 0000000000..2b979fb68e --- /dev/null +++ b/awx/conf/migrations/_subscriptions.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +import logging +from django.utils.timezone import now +from awx.main.utils.encryption import decrypt_field, encrypt_field + +logger = logging.getLogger('awx.conf.settings') + +__all__ = ['clear_old_license', 'prefill_rh_credentials'] + + +def clear_old_license(apps, schema_editor): + Setting = apps.get_model('conf', 'Setting') + Setting.objects.filter(key='LICENSE').delete() + + +def _migrate_setting(apps, old_key, new_key, encrypted=False): + Setting = apps.get_model('conf', 'Setting') + if not 
Setting.objects.filter(key=old_key).exists(): + return + new_setting = Setting.objects.create(key=new_key, + created=now(), + modified=now() + ) + if encrypted: + new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value') + new_setting.value = encrypt_field(new_setting, 'value') + else: + new_setting.value = getattr(Setting.objects.filter(key=old_key).first(), 'value') + new_setting.save() + + +def prefill_rh_credentials(apps, schema_editor): + _migrate_setting(apps, 'REDHAT_USERNAME', 'SUBSCRIPTIONS_USERNAME', encrypted=False) + _migrate_setting(apps, 'REDHAT_PASSWORD', 'SUBSCRIPTIONS_PASSWORD', encrypted=True) diff --git a/awx/conf/models.py b/awx/conf/models.py index 2859650f54..fe28fd89a8 100644 --- a/awx/conf/models.py +++ b/awx/conf/models.py @@ -78,14 +78,6 @@ class Setting(CreatedModifiedModel): def get_cache_id_key(self, key): return '{}_ID'.format(key) - def display_value(self): - if self.key == 'LICENSE' and 'license_key' in self.value: - # don't log the license key in activity stream - value = self.value.copy() - value['license_key'] = '********' - return value - return self.value - import awx.conf.signals # noqa diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index e1dc468d51..bcbc7b0118 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -33,9 +33,9 @@ data _since_ the last report date - i.e., new data in the last 24 hours) ''' -@register('config', '1.1', description=_('General platform configuration.')) +@register('config', '1.2', description=_('General platform configuration.')) def config(since, **kwargs): - license_info = get_license(show_key=False) + license_info = get_license() install_type = 'traditional' if os.environ.get('container') == 'oci': install_type = 'openshift' diff --git a/awx/main/analytics/core.py b/awx/main/analytics/core.py index fe48fb30bf..2c77444929 100644 --- a/awx/main/analytics/core.py +++ b/awx/main/analytics/core.py @@ -24,7 
+24,7 @@ logger = logging.getLogger('awx.main.analytics') def _valid_license(): try: - if get_license(show_key=False).get('license_type', 'UNLICENSED') == 'open': + if get_license().get('license_type', 'UNLICENSED') == 'open': return False access_registry[Job](None).check_license() except PermissionDenied: diff --git a/awx/main/analytics/metrics.py b/awx/main/analytics/metrics.py index 1dd85eb6a7..676ccdabf5 100644 --- a/awx/main/analytics/metrics.py +++ b/awx/main/analytics/metrics.py @@ -54,7 +54,7 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining def metrics(): - license_info = get_license(show_key=False) + license_info = get_license() SYSTEM_INFO.info({ 'install_uuid': settings.INSTALL_UUID, 'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE), diff --git a/awx/main/conf.py b/awx/main/conf.py index 3b41c3a19b..6bf86db214 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -1,7 +1,5 @@ # Python -import json import logging -import os # Django from django.utils.translation import ugettext_lazy as _ @@ -13,6 +11,7 @@ from rest_framework.fields import FloatField # Tower from awx.conf import fields, register, register_validate + logger = logging.getLogger('awx.main.conf') register( @@ -92,22 +91,10 @@ register( ) -def _load_default_license_from_file(): - try: - license_file = os.environ.get('AWX_LICENSE_FILE', '/etc/tower/license') - if os.path.exists(license_file): - license_data = json.load(open(license_file)) - logger.debug('Read license data from "%s".', license_file) - return license_data - except Exception: - logger.warning('Could not read license from "%s".', license_file, exc_info=True) - return {} - - register( 'LICENSE', field_class=fields.DictField, - default=_load_default_license_from_file, + default=lambda: {}, label=_('License'), help_text=_('The license controls which features and functionality are ' 'enabled. 
Use /api/v2/config/ to update or change ' @@ -124,7 +111,7 @@ register( encrypted=False, read_only=False, label=_('Red Hat customer username'), - help_text=_('This username is used to retrieve license information and to send Automation Analytics'), # noqa + help_text=_('This username is used to send data to Automation Analytics'), category=_('System'), category_slug='system', ) @@ -137,7 +124,33 @@ register( encrypted=True, read_only=False, label=_('Red Hat customer password'), - help_text=_('This password is used to retrieve license information and to send Automation Analytics'), # noqa + help_text=_('This password is used to send data to Automation Analytics'), + category=_('System'), + category_slug='system', +) + +register( + 'SUBSCRIPTIONS_USERNAME', + field_class=fields.CharField, + default='', + allow_blank=True, + encrypted=False, + read_only=False, + label=_('Red Hat or Satellite username'), + help_text=_('This username is used to retrieve subscription and content information'), # noqa + category=_('System'), + category_slug='system', +) + +register( + 'SUBSCRIPTIONS_PASSWORD', + field_class=fields.CharField, + default='', + allow_blank=True, + encrypted=True, + read_only=False, + label=_('Red Hat or Satellite password'), + help_text=_('This password is used to retrieve subscription and content information'), # noqa category=_('System'), category_slug='system', ) diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index 79033e329a..fd96a4f04e 100644 --- a/awx/main/dispatch/worker/callback.py +++ b/awx/main/dispatch/worker/callback.py @@ -1,10 +1,7 @@ -import cProfile import json import logging import os -import pstats import signal -import tempfile import time import traceback @@ -23,6 +20,7 @@ from awx.main.models import (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, Job) from awx.main.tasks import handle_success_and_failure_notifications from awx.main.models.events import emit_event_detail +from 
awx.main.utils.profiling import AWXProfiler from .base import BaseWorker @@ -48,6 +46,7 @@ class CallbackBrokerWorker(BaseWorker): self.buff = {} self.pid = os.getpid() self.redis = redis.Redis.from_url(settings.BROKER_URL) + self.prof = AWXProfiler("CallbackBrokerWorker") for key in self.redis.keys('awx_callback_receiver_statistics_*'): self.redis.delete(key) @@ -87,19 +86,12 @@ class CallbackBrokerWorker(BaseWorker): ) def toggle_profiling(self, *args): - if self.prof: - self.prof.disable() - filename = f'callback-{self.pid}.pstats' - filepath = os.path.join(tempfile.gettempdir(), filename) - with open(filepath, 'w') as f: - pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats() - pstats.Stats(self.prof).dump_stats(filepath + '.raw') - self.prof = False - logger.error(f'profiling is disabled, wrote {filepath}') - else: - self.prof = cProfile.Profile() - self.prof.enable() + if not self.prof.is_started(): + self.prof.start() logger.error('profiling is enabled') + else: + filepath = self.prof.stop() + logger.error(f'profiling is disabled, wrote {filepath}') def work_loop(self, *args, **kw): if settings.AWX_CALLBACK_PROFILE: diff --git a/awx/main/management/commands/check_license.py b/awx/main/management/commands/check_license.py index 8c0798cc53..356ab42249 100644 --- a/awx/main/management/commands/check_license.py +++ b/awx/main/management/commands/check_license.py @@ -18,7 +18,5 @@ class Command(BaseCommand): super(Command, self).__init__() license = get_licenser().validate() if options.get('data'): - if license.get('license_key', '') != 'UNLICENSED': - license['license_key'] = '********' return json.dumps(license) return license.get('license_type', 'none') diff --git a/awx/main/management/commands/check_migrations.py b/awx/main/management/commands/check_migrations.py index 50ea354960..6f9cfc7727 100644 --- a/awx/main/management/commands/check_migrations.py +++ b/awx/main/management/commands/check_migrations.py @@ -8,5 +8,7 @@ class 
Command(MakeMigrations): def execute(self, *args, **options): settings = connections['default'].settings_dict.copy() settings['ENGINE'] = 'sqlite3' + if 'application_name' in settings['OPTIONS']: + del settings['OPTIONS']['application_name'] connections['default'] = DatabaseWrapper(settings) return MakeMigrations().execute(*args, **options) diff --git a/awx/main/management/commands/graph_jobs.py b/awx/main/management/commands/graph_jobs.py new file mode 100644 index 0000000000..f1c8ad75e1 --- /dev/null +++ b/awx/main/management/commands/graph_jobs.py @@ -0,0 +1,117 @@ +# Python +import asciichartpy as chart +import collections +import time +import sys + +# Django +from django.db.models import Count +from django.core.management.base import BaseCommand + +# AWX +from awx.main.models import ( + Job, + Instance +) + + +DEFAULT_WIDTH = 100 +DEFAULT_HEIGHT = 30 + + +def chart_color_lookup(color_str): + return getattr(chart, color_str) + + +def clear_screen(): + print(chr(27) + "[2J") + + +class JobStatus(): + def __init__(self, status, color, width): + self.status = status + self.color = color + self.color_code = chart_color_lookup(color) + self.x = collections.deque(maxlen=width) + self.y = collections.deque(maxlen=width) + + def tick(self, x, y): + self.x.append(x) + self.y.append(y) + + +class JobStatusController: + RESET = chart_color_lookup('reset') + + def __init__(self, width): + self.plots = [ + JobStatus('pending', 'red', width), + JobStatus('waiting', 'blue', width), + JobStatus('running', 'green', width) + ] + self.ts_start = int(time.time()) + + def tick(self): + ts = int(time.time()) - self.ts_start + q = Job.objects.filter(status__in=['pending','waiting','running']).values_list('status').order_by().annotate(Count('status')) + status_count = dict(pending=0, waiting=0, running=0) + for status, count in q: + status_count[status] = count + + for p in self.plots: + p.tick(ts, status_count[p.status]) + + def series(self): + return [list(p.y) for p in self.plots] 
+ + def generate_status(self): + line = "" + lines = [] + for p in self.plots: + lines.append(f'{p.color_code}{p.status} {p.y[-1]}{self.RESET}') + + line += ", ".join(lines) + '\n' + + width = 5 + time_running = int(time.time()) - self.ts_start + instances = Instance.objects.all().order_by('hostname') + line += "Capacity: " + ", ".join([f"{instance.capacity:{width}}" for instance in instances]) + '\n' + line += "Remaining: " + ", ".join([f"{instance.remaining_capacity:{width}}" for instance in instances]) + '\n' + line += f"Seconds running: {time_running}" + '\n' + + return line + + +class Command(BaseCommand): + help = "Plot pending, waiting, running jobs over time on the terminal" + + def add_arguments(self, parser): + parser.add_argument('--refresh', dest='refresh', type=float, default=1.0, + help='Time between refreshes of the graph and data in seconds (defaults to 1.0)') + parser.add_argument('--width', dest='width', type=int, default=DEFAULT_WIDTH, + help=f'Width of the graph (defaults to {DEFAULT_WIDTH})') + parser.add_argument('--height', dest='height', type=int, default=DEFAULT_HEIGHT, + help=f'Height of the graph (defaults to {DEFAULT_HEIGHT})') + + def handle(self, *args, **options): + refresh_seconds = options['refresh'] + width = options['width'] + height = options['height'] + + jctl = JobStatusController(width) + + conf = { + 'colors': [chart_color_lookup(p.color) for p in jctl.plots], + 'height': height, + } + + while True: + jctl.tick() + + draw = chart.plot(jctl.series(), conf) + status_line = jctl.generate_status() + clear_screen() + print(draw) + sys.stdout.write(status_line) + time.sleep(refresh_seconds) + diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index f4431b2705..c92215560e 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -903,7 +903,7 @@ class Command(BaseCommand): def check_license(self): license_info 
= get_licenser().validate() local_license_type = license_info.get('license_type', 'UNLICENSED') - if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED': + if local_license_type == 'UNLICENSED': logger.error(LICENSE_NON_EXISTANT_MESSAGE) raise CommandError('No license found!') elif local_license_type == 'open': diff --git a/awx/main/management/commands/profile_sql.py b/awx/main/management/commands/profile_sql.py index 5bbc4c80ca..585fb3d706 100644 --- a/awx/main/management/commands/profile_sql.py +++ b/awx/main/management/commands/profile_sql.py @@ -19,7 +19,9 @@ class Command(BaseCommand): profile_sql.delay( threshold=options['threshold'], minutes=options['minutes'] ) - print(f"Logging initiated with a threshold of {options['threshold']} second(s) and a duration of" - f" {options['minutes']} minute(s), any queries that meet criteria can" - f" be found in /var/log/tower/profile/." - ) + if options['threshold'] > 0: + print(f"SQL profiling initiated with a threshold of {options['threshold']} second(s) and a" + f" duration of {options['minutes']} minute(s), any queries that meet criteria can" + f" be found in /var/log/tower/profile/.") + else: + print("SQL profiling disabled.") diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 781266e8dd..759c2daa98 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -1,13 +1,9 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-import uuid import logging import threading import time -import cProfile -import pstats -import os import urllib.parse from django.conf import settings @@ -22,6 +18,7 @@ from django.urls import reverse, resolve from awx.main.utils.named_url_graph import generate_graph, GraphNode from awx.conf import fields, register +from awx.main.utils.profiling import AWXProfiler logger = logging.getLogger('awx.main.middleware') @@ -32,11 +29,14 @@ class TimingMiddleware(threading.local, MiddlewareMixin): dest = '/var/log/tower/profile' + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.prof = AWXProfiler("TimingMiddleware") + def process_request(self, request): self.start_time = time.time() if settings.AWX_REQUEST_PROFILE: - self.prof = cProfile.Profile() - self.prof.enable() + self.prof.start() def process_response(self, request, response): if not hasattr(self, 'start_time'): # some tools may not invoke process_request @@ -44,33 +44,10 @@ class TimingMiddleware(threading.local, MiddlewareMixin): total_time = time.time() - self.start_time response['X-API-Total-Time'] = '%0.3fs' % total_time if settings.AWX_REQUEST_PROFILE: - self.prof.disable() - cprofile_file = self.save_profile_file(request) - response['cprofile_file'] = cprofile_file + response['X-API-Profile-File'] = self.prof.stop() perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response))) return response - def save_profile_file(self, request): - if not os.path.isdir(self.dest): - os.makedirs(self.dest) - filename = '%.3fs-%s.pstats' % (pstats.Stats(self.prof).total_tt, uuid.uuid4()) - filepath = os.path.join(self.dest, filename) - with open(filepath, 'w') as f: - f.write('%s %s\n' % (request.method, request.get_full_path())) - pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats() - - if settings.AWX_REQUEST_PROFILE_WITH_DOT: - from gprof2dot import main as generate_dot - raw = os.path.join(self.dest, filename) + '.raw' - 
pstats.Stats(self.prof).dump_stats(raw) - generate_dot([ - '-n', '2.5', '-f', 'pstats', '-o', - os.path.join( self.dest, filename).replace('.pstats', '.dot'), - raw - ]) - os.remove(raw) - return filepath - class SessionTimeoutMiddleware(MiddlewareMixin): """ diff --git a/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py b/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py index 8c33318755..9a94c6b02b 100644 --- a/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py +++ b/awx/main/migrations/0117_v400_remove_cloudforms_inventory.py @@ -1,11 +1,7 @@ # Generated by Django 2.2.11 on 2020-05-01 13:25 from django.db import migrations, models -from awx.main.migrations._inventory_source import create_scm_script_substitute - - -def convert_cloudforms_to_scm(apps, schema_editor): - create_scm_script_substitute(apps, 'cloudforms') +from awx.main.migrations._inventory_source import delete_cloudforms_inv_source class Migration(migrations.Migration): @@ -15,7 +11,7 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunPython(convert_cloudforms_to_scm), + migrations.RunPython(delete_cloudforms_inv_source), migrations.AlterField( model_name='inventorysource', name='source', diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py index ed79606587..c53a18f035 100644 --- a/awx/main/migrations/_inventory_source.py +++ b/awx/main/migrations/_inventory_source.py @@ -5,6 +5,7 @@ from uuid import uuid4 from django.utils.encoding import smart_text from django.utils.timezone import now +from awx.main.utils.common import set_current_apps from awx.main.utils.common import parse_yaml_or_json logger = logging.getLogger('awx.main.migrations') @@ -91,43 +92,14 @@ def back_out_new_instance_id(apps, source, new_id): )) -def create_scm_script_substitute(apps, source): - """Only applies for cloudforms in practice, but written generally. 
- Given a source type, this will replace all inventory sources of that type - with SCM inventory sources that source the script from Ansible core - """ - # the revision in the Ansible 2.9 stable branch this project will start out as - # it can still be updated manually later (but staying within 2.9 branch), if desired - ansible_rev = '6f83b9aff42331e15c55a171de0a8b001208c18c' +def delete_cloudforms_inv_source(apps, schema_editor): + set_current_apps(apps) InventorySource = apps.get_model('main', 'InventorySource') - ContentType = apps.get_model('contenttypes', 'ContentType') - Project = apps.get_model('main', 'Project') - if not InventorySource.objects.filter(source=source).exists(): - logger.debug('No sources of type {} to migrate'.format(source)) - return - proj_name = 'Replacement project for {} type sources - {}'.format(source, uuid4()) - right_now = now() - project = Project.objects.create( - name=proj_name, - created=right_now, - modified=right_now, - description='Created by migration', - polymorphic_ctype=ContentType.objects.get(model='project'), - # project-specific fields - scm_type='git', - scm_url='https://github.com/ansible/ansible.git', - scm_branch='stable-2.9', - scm_revision=ansible_rev - ) - ct = 0 - for inv_src in InventorySource.objects.filter(source=source).iterator(): - inv_src.source = 'scm' - inv_src.source_project = project - inv_src.source_path = 'contrib/inventory/{}.py'.format(source) - inv_src.scm_last_revision = ansible_rev - inv_src.save(update_fields=['source', 'source_project', 'source_path', 'scm_last_revision']) - logger.debug('Changed inventory source {} to scm type'.format(inv_src.pk)) - ct += 1 + InventoryUpdate = apps.get_model('main', 'InventoryUpdate') + CredentialType = apps.get_model('main', 'CredentialType') + InventoryUpdate.objects.filter(inventory_source__source='cloudforms').delete() + InventorySource.objects.filter(source='cloudforms').delete() + ct = CredentialType.objects.filter(namespace='cloudforms').first() if 
ct: - logger.info('Changed total of {} inventory sources from {} type to scm'.format(ct, source)) - + ct.credentials.all().delete() + ct.delete() diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index df12177aae..66db962430 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -881,33 +881,6 @@ ManagedCredentialType( } ) -ManagedCredentialType( - namespace='cloudforms', - kind='cloud', - name=ugettext_noop('Red Hat CloudForms'), - managed_by_tower=True, - inputs={ - 'fields': [{ - 'id': 'host', - 'label': ugettext_noop('CloudForms URL'), - 'type': 'string', - 'help_text': ugettext_noop('Enter the URL for the virtual machine that ' - 'corresponds to your CloudForms instance. ' - 'For example, https://cloudforms.example.org') - }, { - 'id': 'username', - 'label': ugettext_noop('Username'), - 'type': 'string' - }, { - 'id': 'password', - 'label': ugettext_noop('Password'), - 'type': 'string', - 'secret': True, - }], - 'required': ['host', 'username', 'password'], - } -) - ManagedCredentialType( namespace='gce', kind='cloud', diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index fc4e9c022e..5071786653 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -261,18 +261,20 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): app_label = 'main' - def fit_task_to_most_remaining_capacity_instance(self, task): + @staticmethod + def fit_task_to_most_remaining_capacity_instance(task, instances): instance_most_capacity = None - for i in self.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'): + for i in instances: if i.remaining_capacity >= task.task_impact and \ (instance_most_capacity is None or i.remaining_capacity > instance_most_capacity.remaining_capacity): instance_most_capacity = i return instance_most_capacity - def find_largest_idle_instance(self): + @staticmethod + def find_largest_idle_instance(instances): 
largest_instance = None - for i in self.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'): + for i in instances: if i.jobs_running == 0: if largest_instance is None: largest_instance = i diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 8f42b9d577..2c274e18f6 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -798,6 +798,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana if self.project: for name in ('awx', 'tower'): r['{}_project_revision'.format(name)] = self.project.scm_revision + r['{}_project_scm_branch'.format(name)] = self.project.scm_branch + if self.scm_branch: + for name in ('awx', 'tower'): + r['{}_job_scm_branch'.format(name)] = self.scm_branch if self.job_template: for name in ('awx', 'tower'): r['{}_job_template_id'.format(name)] = self.job_template.pk diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 1abbb29fcb..c50c8668d5 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -873,7 +873,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique # If status changed, update the parent instance. if self.status != status_before: - self._update_parent_instance() + # Update parent outside of the transaction for Job w/ allow_simultaneous=True + # This dodges lock contention at the expense of the foreign key not being + # completely correct. + if getattr(self, 'allow_simultaneous', False): + connection.on_commit(self._update_parent_instance) + else: + self._update_parent_instance() # Done. 
return result diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index a33cf026f8..d67fc11a35 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -57,6 +57,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): def send_messages(self, messages): sent_messages = 0 + self.headers['Content-Type'] = 'application/json' if 'User-Agent' not in self.headers: self.headers['User-Agent'] = "Tower {}".format(get_awx_version()) if self.http_method.lower() not in ['put','post']: @@ -68,7 +69,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): auth = (self.username, self.password) r = chosen_method("{}".format(m.recipients()[0]), auth=auth, - json=m.body, + data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'), headers=self.headers, verify=(not self.disable_ssl_verification)) if r.status_code >= 400: diff --git a/awx/main/scheduler/kubernetes.py b/awx/main/scheduler/kubernetes.py index 862cfd3f04..529a5e5442 100644 --- a/awx/main/scheduler/kubernetes.py +++ b/awx/main/scheduler/kubernetes.py @@ -12,6 +12,24 @@ from awx.main.utils.common import parse_yaml_or_json logger = logging.getLogger('awx.main.scheduler') +def deepmerge(a, b): + """ + Merge dict structures and return the result. 
+ + >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}} + >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}} + >>> import pprint; pprint.pprint(deepmerge(a, b)) + {'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}} + """ + if isinstance(a, dict) and isinstance(b, dict): + return dict([(k, deepmerge(a.get(k), b.get(k))) + for k in set(a.keys()).union(b.keys())]) + elif b is None: + return a + else: + return b + + class PodManager(object): def __init__(self, task=None): @@ -128,11 +146,13 @@ class PodManager(object): pod_spec = {**default_pod_spec, **pod_spec_override} if self.task: - pod_spec['metadata']['name'] = self.pod_name - pod_spec['metadata']['labels'] = { - 'ansible-awx': settings.INSTALL_UUID, - 'ansible-awx-job-id': str(self.task.id) - } + pod_spec['metadata'] = deepmerge( + pod_spec.get('metadata', {}), + dict(name=self.pod_name, + labels={ + 'ansible-awx': settings.INSTALL_UUID, + 'ansible-awx-job-id': str(self.task.id) + })) pod_spec['spec']['containers'][0]['name'] = self.pod_name return pod_spec diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 9f4818bd37..43d43fe64d 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -7,12 +7,14 @@ import logging import uuid import json import random +from types import SimpleNamespace # Django from django.db import transaction, connection from django.utils.translation import ugettext_lazy as _, gettext_noop from django.utils.timezone import now as tz_now from django.conf import settings +from django.db.models import Q # AWX from awx.main.dispatch.reaper import reap_job @@ -45,6 +47,15 @@ logger = logging.getLogger('awx.main.scheduler') class TaskManager(): def __init__(self): + ''' + Do NOT put database queries or other potentially expensive operations + in the task manager init. 
The task manager object is created every time a + job is created, transitions state, and every 30 seconds on each tower node. + More often than not, the object is destroyed quickly because the NOOP case is hit. + + The NOOP case is short-circuit logic. If the task manager realizes that another instance + of the task manager is already running, then it short-circuits and decides not to run. + ''' self.graph = dict() # start task limit indicates how many pending jobs can be started on this # .schedule() run. Starting jobs is expensive, and there is code in place to reap # 5 minutes to start pending jobs. If this limit is reached, pending jobs # will no longer be started and will be started on the next task manager cycle. self.start_task_limit = settings.START_TASK_LIMIT + + def after_lock_init(self): + ''' + Init AFTER we know this instance of the task manager will run because the lock is acquired. + ''' + instances = Instance.objects.filter(~Q(hostname=None), capacity__gt=0, enabled=True) + self.real_instances = {i.hostname: i for i in instances} + + instances_partial = [SimpleNamespace(obj=instance, + remaining_capacity=instance.remaining_capacity, + capacity=instance.capacity, + jobs_running=instance.jobs_running, + hostname=instance.hostname) for instance in instances] + + instances_by_hostname = {i.hostname: i for i in instances_partial} + for rampart_group in InstanceGroup.objects.prefetch_related('instances'): self.graph[rampart_group.name] = dict(graph=DependencyGraph(rampart_group.name), capacity_total=rampart_group.capacity, - consumed_capacity=0) + consumed_capacity=0, + instances=[]) + for instance in rampart_group.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'): + if instance.hostname in instances_by_hostname: + self.graph[rampart_group.name]['instances'].append(instances_by_hostname[instance.hostname]) def is_job_blocked(self, task): # TODO: I'm not happy with this, I think blocking behavior
should be decided outside of the dependency graph @@ -254,7 +285,7 @@ class TaskManager(): for group in InstanceGroup.objects.all(): if group.is_containerized or group.controller_id: continue - match = group.fit_task_to_most_remaining_capacity_instance(task) + match = group.fit_task_to_most_remaining_capacity_instance(task, group.instances.all()) if match: break task.instance_group = rampart_group @@ -466,7 +497,6 @@ class TaskManager(): continue preferred_instance_groups = task.preferred_instance_groups found_acceptable_queue = False - idle_instance_that_fits = None if isinstance(task, WorkflowJob): if task.unified_job_template_id in running_workflow_templates: if not task.allow_simultaneous: @@ -483,24 +513,24 @@ class TaskManager(): found_acceptable_queue = True break - if idle_instance_that_fits is None: - idle_instance_that_fits = rampart_group.find_largest_idle_instance() remaining_capacity = self.get_remaining_capacity(rampart_group.name) if not rampart_group.is_containerized and self.get_remaining_capacity(rampart_group.name) <= 0: logger.debug("Skipping group {}, remaining_capacity {} <= 0".format( rampart_group.name, remaining_capacity)) continue - execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task) - if execution_instance: - logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format( - task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity)) - elif not execution_instance and idle_instance_that_fits: + execution_instance = InstanceGroup.fit_task_to_most_remaining_capacity_instance(task, self.graph[rampart_group.name]['instances']) or \ + InstanceGroup.find_largest_idle_instance(self.graph[rampart_group.name]['instances']) + + if execution_instance or rampart_group.is_containerized: if not rampart_group.is_containerized: - execution_instance = idle_instance_that_fits + execution_instance.remaining_capacity = max(0, execution_instance.remaining_capacity - 
task.task_impact) + execution_instance.jobs_running += 1 logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format( task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity)) - if execution_instance or rampart_group.is_containerized: + + if execution_instance: + execution_instance = self.real_instances[execution_instance.hostname] self.graph[rampart_group.name]['graph'].add_job(task) self.start_task(task, rampart_group, task.get_jobs_fail_chain(), execution_instance) found_acceptable_queue = True @@ -572,6 +602,9 @@ class TaskManager(): def _schedule(self): finished_wfjs = [] all_sorted_tasks = self.get_tasks() + + self.after_lock_init() + if len(all_sorted_tasks) > 0: # TODO: Deal with # latest_project_updates = self.get_latest_project_update_tasks(all_sorted_tasks) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 0e03055055..2a1c85e23d 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -313,7 +313,7 @@ def delete_project_files(project_path): @task(queue='tower_broadcast_all') def profile_sql(threshold=1, minutes=1): - if threshold == 0: + if threshold <= 0: cache.delete('awx-profile-sql-threshold') logger.error('SQL PROFILING DISABLED') else: @@ -2160,7 +2160,7 @@ class RunProjectUpdate(BaseTask): 'local_path': os.path.basename(project_update.project.local_path), 'project_path': project_update.get_project_path(check_if_exists=False), # deprecated 'insights_url': settings.INSIGHTS_URL_BASE, - 'awx_license_type': get_license(show_key=False).get('license_type', 'UNLICENSED'), + 'awx_license_type': get_license().get('license_type', 'UNLICENSED'), 'awx_version': get_awx_version(), 'scm_url': scm_url, 'scm_branch': scm_branch, diff --git a/awx/main/tests/functional/api/test_credential.py b/awx/main/tests/functional/api/test_credential.py index 9a534a8897..e8e7b4b271 100644 --- a/awx/main/tests/functional/api/test_credential.py +++ b/awx/main/tests/functional/api/test_credential.py @@ -675,33 +675,6 @@ 
def test_net_create_ok(post, organization, admin): assert cred.inputs['authorize'] is True -# -# Cloudforms Credentials -# -@pytest.mark.django_db -def test_cloudforms_create_ok(post, organization, admin): - params = { - 'credential_type': 1, - 'name': 'Best credential ever', - 'inputs': { - 'host': 'some_host', - 'username': 'some_username', - 'password': 'some_password', - } - } - cloudforms = CredentialType.defaults['cloudforms']() - cloudforms.save() - params['organization'] = organization.id - response = post(reverse('api:credential_list'), params, admin) - assert response.status_code == 201 - - assert Credential.objects.count() == 1 - cred = Credential.objects.all()[:1].get() - assert cred.inputs['host'] == 'some_host' - assert cred.inputs['username'] == 'some_username' - assert decrypt_field(cred, 'password') == 'some_password' - - # # GCE Credentials # diff --git a/awx/main/tests/functional/api/test_project.py b/awx/main/tests/functional/api/test_project.py index 09fed17c67..a31eb0804a 100644 --- a/awx/main/tests/functional/api/test_project.py +++ b/awx/main/tests/functional/api/test_project.py @@ -99,3 +99,12 @@ def test_changing_overwrite_behavior_okay_if_not_used(post, patch, organization, expect=200 ) assert Project.objects.get(pk=r1.data['id']).allow_override is False + + +@pytest.mark.django_db +def test_scm_project_local_path_invalid(get, patch, project, admin): + url = reverse('api:project_detail', kwargs={'pk': project.id}) + resp = patch(url, {'local_path': '/foo/bar'}, user=admin, expect=400) + assert resp.data['local_path'] == [ + 'Cannot change local_path for git-based projects' + ] diff --git a/awx/main/tests/functional/api/test_rbac_displays.py b/awx/main/tests/functional/api/test_rbac_displays.py index 4180647d44..d0a0cb4f98 100644 --- a/awx/main/tests/functional/api/test_rbac_displays.py +++ b/awx/main/tests/functional/api/test_rbac_displays.py @@ -282,10 +282,6 @@ def test_prefetch_ujt_project_capabilities(alice, project, job_template, 
mocker) list_serializer.child.to_representation(project) assert 'capability_map' not in list_serializer.child.context - # Models for which the prefetch is valid for do - list_serializer.child.to_representation(job_template) - assert set(list_serializer.child.context['capability_map'][job_template.id].keys()) == set(('copy', 'edit', 'start')) - @pytest.mark.django_db def test_prefetch_group_capabilities(group, rando): diff --git a/awx/main/tests/functional/api/test_schedules.py b/awx/main/tests/functional/api/test_schedules.py index 7b93c2804b..bdaa6aa4a6 100644 --- a/awx/main/tests/functional/api/test_schedules.py +++ b/awx/main/tests/functional/api/test_schedules.py @@ -349,7 +349,7 @@ def test_months_with_31_days(post, admin_user): ('MINUTELY', 1, 60), ('MINUTELY', 15, 15 * 60), ('HOURLY', 1, 3600), - ('HOURLY', 4, 3600 * 4), + ('HOURLY', 2, 3600 * 2), )) def test_really_old_dtstart(post, admin_user, freq, delta, total_seconds): url = reverse('api:schedule_rrule') diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py index 67c9868649..8a1f50035f 100644 --- a/awx/main/tests/functional/api/test_settings.py +++ b/awx/main/tests/functional/api/test_settings.py @@ -4,7 +4,6 @@ # Python import pytest -import os from django.conf import settings @@ -19,15 +18,6 @@ TEST_PNG_LOGO = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACEAAAAjCAYAAAAaL TEST_JPEG_LOGO = 
'data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAASABIAAD/4QBkRXhpZgAATU0AKgAAAAgAAwEGAAMAAAABAAIAAAESAAMAAAABAAEAAIdpAAQAAAABAAAAMgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAIaADAAQAAAABAAAAIwAAAAD/4QkhaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLwA8P3hwYWNrZXQgYmVnaW49Iu+7vyIgaWQ9Ilc1TTBNcENlaGlIenJlU3pOVGN6a2M5ZCI/PiA8eDp4bXBtZXRhIHhtbG5zOng9ImFkb2JlOm5zOm1ldGEvIiB4OnhtcHRrPSJYTVAgQ29yZSA1LjQuMCI+IDxyZGY6UkRGIHhtbG5zOnJkZj0iaHR0cDovL3d3dy53My5vcmcvMTk5OS8wMi8yMi1yZGYtc3ludGF4LW5zIyI+IDxyZGY6RGVzY3JpcHRpb24gcmRmOmFib3V0PSIiLz4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg
ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA8P3hwYWNrZXQgZW5kPSJ3Ij8+AP/tADhQaG90b3Nob3AgMy4wADhCSU0EBAAAAAAAADhCSU0EJQAAAAAAENQdjNmPALIE6YAJmOz4Qn7/wAARCAAjACEDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9sAQwAGBgYGBgYK
BgYKDgoKCg4SDg4ODhIXEhISEhIXHBcXFxcXFxwcHBwcHBwcIiIiIiIiJycnJycsLCwsLCwsLCws/9sAQwEHBwcLCgsTCgoTLh8aHy4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u/90ABAAD/9oADAMBAAIRAxEAPwD6poormvFfivSvB2lHVtWLGMtsRE2hnYKzlVLsi52oxALDdjauWKqQCXQfFXh7xP8Aaf7AvYrz7HL5U3lk/K3YjIGVODtcZVsHBODXQV806bcT+E9L03XbCOS2udMsLQanbB4po72xYMfOQpKYyV2zPEwcNwVK7WAr6WriwWMWIUvdcZRdmnuu33rVFSjYKKKK7ST/0PqmuF8Vv4X8S+HNZ0+e/gIsYJvtEsL+bJZsI3UuyxNvBA3gpxvXchyCRXdV8ta3bW667DoloW1y10tLLTJxZWP2hoLSGYzNHclGZpJC0ESk8IAZcRB8is61T2cHK1/1DrY526h8YXHh691vxCz6dafY5Q0U7yGSeQxSxohNzJLcbUeQ4VnVNxBRCWL19b2eraVqE9xa2F3BcS2jbJ0ikV2ibJG1wpJU5UjBx0PpXzrrniy4k17TrrWrGex022ufMijvd9m11PGH8naXKqsUcgR3MhB5U7MA16x4L8F3vhq2sY9Ru4rg6day2tusEAhCrcOkknmEMRI2Y1AcLGT8xYMzZHjZFGu6cquKjaUnt2XS76vv/SN8RVjOdoKyXY9Cooor3TA//9H6pr4gfxRrMvxJ0/whLJE+maVrcVnZRtBCzwQQ3SIipMU80fKignflgPmJr7fr4A/5rf8A9zJ/7eUAdX8SfGviPwl8TtaPh6eK1eTyN0n2eCSUg28OV8ySNn2/KDtztzzjNfZVhY2umWMGm2KeXb2sSQxJknakYCqMkknAHUnNfBXxt/5Kdq//AG7/APpPFX3/AEAFFFFAH//Z' # NOQA -@pytest.fixture -def mock_no_license_file(mocker): - ''' - Ensures that tests don't pick up dev container license file - ''' - os.environ['AWX_LICENSE_FILE'] = '/does_not_exist' - return None - - @pytest.mark.django_db def test_url_base_defaults_to_request(options, admin): # If TOWER_URL_BASE is not set, default to the Tower request hostname diff --git a/awx/main/tests/functional/api/test_workflow_node.py b/awx/main/tests/functional/api/test_workflow_node.py index ec70716f94..6253548d60 100644 --- a/awx/main/tests/functional/api/test_workflow_node.py +++ b/awx/main/tests/functional/api/test_workflow_node.py @@ -89,7 +89,7 @@ class TestApprovalNodes(): url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'}) post(url, {'name': 'Test', 'description': 'Approval Node', 'timeout': 0}, - user=admin_user, expect=200) + user=admin_user, expect=201) approval_node = WorkflowJobTemplateNode.objects.get(pk=approval_node.pk) assert 
isinstance(approval_node.unified_job_template, WorkflowApprovalTemplate) @@ -108,9 +108,9 @@ class TestApprovalNodes(): assert {'name': ['This field may not be blank.']} == json.loads(r.content) @pytest.mark.parametrize("is_admin, is_org_admin, status", [ - [True, False, 200], # if they're a WFJT admin, they get a 200 + [True, False, 201], # if they're a WFJT admin, they get a 201 [False, False, 403], # if they're not a WFJT *nor* org admin, they get a 403 - [False, True, 200], # if they're an organization admin, they get a 200 + [False, True, 201], # if they're an organization admin, they get a 201 ]) def test_approval_node_creation_rbac(self, post, approval_node, alice, is_admin, is_org_admin, status): url = reverse('api:workflow_job_template_node_create_approval', @@ -165,7 +165,7 @@ class TestApprovalNodes(): url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': node.pk, 'version': 'v2'}) post(url, {'name': 'Approve Test', 'description': '', 'timeout': 0}, - user=admin_user, expect=200) + user=admin_user, expect=201) post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}), user=admin_user, expect=201) wf_job = WorkflowJob.objects.first() @@ -195,7 +195,7 @@ class TestApprovalNodes(): url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': node.pk, 'version': 'v2'}) post(url, {'name': 'Deny Test', 'description': '', 'timeout': 0}, - user=admin_user, expect=200) + user=admin_user, expect=201) post(reverse('api:workflow_job_template_launch', kwargs={'pk': wfjt.pk}), user=admin_user, expect=201) wf_job = WorkflowJob.objects.first() diff --git a/awx/main/tests/functional/core/test_licenses.py b/awx/main/tests/functional/core/test_licenses.py deleted file mode 100644 index f59318502c..0000000000 --- a/awx/main/tests/functional/core/test_licenses.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (c) 2015 Ansible, Inc. -# All Rights Reserved. 
- -from awx.main.utils.common import StubLicense - - -def test_stub_license(): - license_actual = StubLicense().validate() - assert license_actual['license_key'] == 'OPEN' - assert license_actual['valid_key'] - assert license_actual['compliant'] - assert license_actual['license_type'] == 'open' - diff --git a/awx/main/tests/functional/test_credential.py b/awx/main/tests/functional/test_credential.py index 684f9dd5a7..27f67b96f4 100644 --- a/awx/main/tests/functional/test_credential.py +++ b/awx/main/tests/functional/test_credential.py @@ -79,7 +79,6 @@ def test_default_cred_types(): 'aws', 'azure_kv', 'azure_rm', - 'cloudforms', 'conjur', 'galaxy_api_token', 'gce', diff --git a/awx/main/tests/functional/test_inventory_source_migration.py b/awx/main/tests/functional/test_inventory_source_migration.py index ecea2f0408..2b1e089392 100644 --- a/awx/main/tests/functional/test_inventory_source_migration.py +++ b/awx/main/tests/functional/test_inventory_source_migration.py @@ -5,7 +5,7 @@ from awx.main.migrations import _inventory_source as invsrc from django.apps import apps -from awx.main.models import InventorySource +from awx.main.models import InventorySource, InventoryUpdate, ManagedCredentialType, CredentialType, Credential @pytest.mark.parametrize('vars,id_var,result', [ @@ -42,16 +42,40 @@ def test_apply_new_instance_id(inventory_source): @pytest.mark.django_db -def test_replacement_scm_sources(inventory): - inv_source = InventorySource.objects.create( - name='test', - inventory=inventory, - organization=inventory.organization, - source='ec2' +def test_cloudforms_inventory_removal(inventory): + ManagedCredentialType( + name='Red Hat CloudForms', + namespace='cloudforms', + kind='cloud', + managed_by_tower=True, + inputs={}, ) - invsrc.create_scm_script_substitute(apps, 'ec2') - inv_source.refresh_from_db() - assert inv_source.source == 'scm' - assert inv_source.source_project - project = inv_source.source_project - assert 'Replacement project for' in project.name 
+ CredentialType.defaults['cloudforms']().save() + cloudforms = CredentialType.objects.get(namespace='cloudforms') + Credential.objects.create( + name='test', + credential_type=cloudforms, + ) + + for source in ('ec2', 'cloudforms'): + i = InventorySource.objects.create( + name='test', + inventory=inventory, + organization=inventory.organization, + source=source, + ) + InventoryUpdate.objects.create( + name='test update', + inventory_source=i, + source=source, + ) + assert Credential.objects.count() == 1 + assert InventorySource.objects.count() == 2 # ec2 + cf + assert InventoryUpdate.objects.count() == 2 # ec2 + cf + invsrc.delete_cloudforms_inv_source(apps, None) + assert InventorySource.objects.count() == 1 # ec2 + assert InventoryUpdate.objects.count() == 1 # ec2 + assert InventorySource.objects.first().source == 'ec2' + assert InventoryUpdate.objects.first().source == 'ec2' + assert Credential.objects.count() == 0 + assert CredentialType.objects.filter(namespace='cloudforms').exists() is False diff --git a/awx/main/tests/functional/test_licenses.py b/awx/main/tests/functional/test_licenses.py index 6c34321f8d..f3e623d281 100644 --- a/awx/main/tests/functional/test_licenses.py +++ b/awx/main/tests/functional/test_licenses.py @@ -30,8 +30,7 @@ def test_python_and_js_licenses(): # Check variations of '-' and '_' in filenames due to python for fname in [name, name.replace('-','_')]: if entry.startswith(fname) and entry.endswith('.tar.gz'): - entry = entry[:-7] - (n, v) = entry.rsplit('-',1) + v = entry.split(name + '-')[1].split('.tar.gz')[0] return v return None diff --git a/awx/main/tests/unit/models/test_ha.py b/awx/main/tests/unit/models/test_ha.py index 0e29caf8aa..2534acfd15 100644 --- a/awx/main/tests/unit/models/test_ha.py +++ b/awx/main/tests/unit/models/test_ha.py @@ -45,19 +45,14 @@ class TestInstanceGroup(object): (T(100), Is([50, 0, 20, 99, 11, 1, 5, 99]), None, "The task don't a fit, you must a quit!"), ]) def 
test_fit_task_to_most_remaining_capacity_instance(self, task, instances, instance_fit_index, reason): - with mock.patch.object(InstanceGroup, - 'instances', - Mock(spec_set=['filter'], - filter=lambda *args, **kargs: Mock(spec_set=['order_by'], - order_by=lambda x: instances))): - ig = InstanceGroup(id=10) + ig = InstanceGroup(id=10) - if instance_fit_index is None: - assert ig.fit_task_to_most_remaining_capacity_instance(task) is None, reason - else: - assert ig.fit_task_to_most_remaining_capacity_instance(task) == \ - instances[instance_fit_index], reason + instance_picked = ig.fit_task_to_most_remaining_capacity_instance(task, instances) + if instance_fit_index is None: + assert instance_picked is None, reason + else: + assert instance_picked == instances[instance_fit_index], reason @pytest.mark.parametrize('instances,instance_fit_index,reason', [ (Is([(0, 100)]), 0, "One idle instance, pick it"), @@ -70,16 +65,12 @@ class TestInstanceGroup(object): def filter_offline_instances(*args): return filter(lambda i: i.capacity > 0, instances) - with mock.patch.object(InstanceGroup, - 'instances', - Mock(spec_set=['filter'], - filter=lambda *args, **kargs: Mock(spec_set=['order_by'], - order_by=filter_offline_instances))): - ig = InstanceGroup(id=10) + ig = InstanceGroup(id=10) + instances_online_only = filter_offline_instances(instances) - if instance_fit_index is None: - assert ig.find_largest_idle_instance() is None, reason - else: - assert ig.find_largest_idle_instance() == \ - instances[instance_fit_index], reason + if instance_fit_index is None: + assert ig.find_largest_idle_instance(instances_online_only) is None, reason + else: + assert ig.find_largest_idle_instance(instances_online_only) == \ + instances[instance_fit_index], reason diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index b49af2efd0..93ef41a190 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -39,6 +39,8 @@ from awx.main 
import tasks from awx.main.utils import encrypt_field, encrypt_value from awx.main.utils.safe_yaml import SafeLoader +from awx.main.utils.licensing import Licenser + class TestJobExecution(object): EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-----' @@ -1830,7 +1832,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): task = RunProjectUpdate() env = task.build_env(project_update, private_data_dir) - task.build_extra_vars_file(project_update, private_data_dir) + + with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}): + task.build_extra_vars_file(project_update, private_data_dir) + assert task.__vars__['roles_enabled'] is False assert task.__vars__['collections_enabled'] is False for k in env: @@ -1850,7 +1855,10 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): project_update.project.organization.galaxy_credentials.add(public_galaxy) task = RunProjectUpdate() env = task.build_env(project_update, private_data_dir) - task.build_extra_vars_file(project_update, private_data_dir) + + with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}): + task.build_extra_vars_file(project_update, private_data_dir) + assert task.__vars__['roles_enabled'] is True assert task.__vars__['collections_enabled'] is True assert sorted([ @@ -1935,7 +1943,9 @@ class TestProjectUpdateCredentials(TestJobExecution): assert settings.PROJECTS_ROOT in process_isolation['process_isolation_show_paths'] task._write_extra_vars_file = mock.Mock() - task.build_extra_vars_file(project_update, private_data_dir) + + with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}): + task.build_extra_vars_file(project_update, private_data_dir) call_args, _ = task._write_extra_vars_file.call_args_list[0] _, extra_vars = call_args diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index a65120c8e8..00fc73c631 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -55,8 +55,7 @@ __all__ = [ 
'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest', 'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule', - 'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout', - 'StubLicense' + 'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout' ] @@ -190,7 +189,7 @@ def get_awx_version(): def get_awx_http_client_headers(): - license = get_license(show_key=False).get('license_type', 'UNLICENSED') + license = get_license().get('license_type', 'UNLICENSED') headers = { 'Content-Type': 'application/json', 'User-Agent': '{} {} ({})'.format( @@ -202,34 +201,15 @@ def get_awx_http_client_headers(): return headers -class StubLicense(object): - - features = { - 'activity_streams': True, - 'ha': True, - 'ldap': True, - 'multiple_organizations': True, - 'surveys': True, - 'system_tracking': True, - 'rebranding': True, - 'enterprise_auth': True, - 'workflows': True, - } - - def validate(self): - return dict(license_key='OPEN', - valid_key=True, - compliant=True, - features=self.features, - license_type='open') - - def get_licenser(*args, **kwargs): + from awx.main.utils.licensing import Licenser, OpenLicense try: - from tower_license import TowerLicense - return TowerLicense(*args, **kwargs) - except ImportError: - return StubLicense(*args, **kwargs) + if os.path.exists('/var/lib/awx/.tower_version'): + return Licenser(*args, **kwargs) + else: + return OpenLicense() + except Exception as e: + raise ValueError(_('Error importing Tower License: %s') % e) def update_scm_url(scm_type, url, username=True, password=True, diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py new file mode 100644 index 0000000000..d838e37e69 --- /dev/null +++ b/awx/main/utils/licensing.py @@ -0,0 +1,390 @@ +# Copyright (c) 2015 Ansible, Inc. +# All Rights Reserved. 
+ +''' +This is intended to be a lightweight license class for verifying subscriptions, and parsing subscription data +from entitlement certificates. + +The Licenser class can do the following: + - Parse an Entitlement cert to generate license +''' + +import base64 +import configparser +from datetime import datetime +import collections +import copy +import io +import json +import logging +import re +import requests +import time +import zipfile + +from dateutil.parser import parse as parse_date + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import padding +from cryptography import x509 + +# Django +from django.conf import settings +from django.utils.translation import ugettext_lazy as _ + +# AWX +from awx.main.models import Host + +MAX_INSTANCES = 9999999 + +logger = logging.getLogger(__name__) + + +def rhsm_config(): + path = '/etc/rhsm/rhsm.conf' + config = configparser.ConfigParser() + config.read(path) + return config + + +def validate_entitlement_manifest(data): + buff = io.BytesIO() + buff.write(base64.b64decode(data)) + try: + z = zipfile.ZipFile(buff) + except zipfile.BadZipFile as e: + raise ValueError(_("Invalid manifest: a subscription manifest zip file is required.")) from e + buff = io.BytesIO() + + files = z.namelist() + if 'consumer_export.zip' not in files or 'signature' not in files: + raise ValueError(_("Invalid manifest: missing required files.")) + export = z.open('consumer_export.zip').read() + sig = z.open('signature').read() + with open('/etc/tower/candlepin-redhat-ca.crt', 'rb') as f: + cert = x509.load_pem_x509_certificate(f.read(), backend=default_backend()) + key = cert.public_key() + try: + key.verify(sig, export, padding=padding.PKCS1v15(), algorithm=hashes.SHA256()) + except InvalidSignature as e: + raise ValueError(_("Invalid manifest: signature verification failed.")) 
from e + + buff.write(export) + z = zipfile.ZipFile(buff) + for f in z.filelist: + if f.filename.startswith('export/entitlements') and f.filename.endswith('.json'): + return json.loads(z.open(f).read()) + raise ValueError(_("Invalid manifest: manifest contains no subscriptions.")) + + +class OpenLicense(object): + def validate(self): + return dict( + license_type='open', + valid_key=True, + subscription_name='OPEN', + product_name="AWX", + ) + + +class Licenser(object): + # warn when there is a month (30 days) left on the subscription + SUBSCRIPTION_TIMEOUT = 60 * 60 * 24 * 30 + + UNLICENSED_DATA = dict( + subscription_name=None, + sku=None, + support_level=None, + instance_count=0, + license_date=0, + license_type="UNLICENSED", + product_name="Red Hat Ansible Automation Platform", + valid_key=False + ) + + def __init__(self, **kwargs): + self._attrs = dict( + instance_count=0, + license_date=0, + license_type='UNLICENSED', + ) + self.config = rhsm_config() + if not kwargs: + license_setting = getattr(settings, 'LICENSE', None) + if license_setting is not None: + kwargs = license_setting + + if 'company_name' in kwargs: + kwargs.pop('company_name') + self._attrs.update(kwargs) + if 'valid_key' in self._attrs: + if not self._attrs['valid_key']: + self._unset_attrs() + else: + self._unset_attrs() + + + def _unset_attrs(self): + self._attrs = self.UNLICENSED_DATA.copy() + + + def license_from_manifest(self, manifest): + # Parse output for subscription metadata to build config + license = dict() + license['sku'] = manifest['pool']['productId'] + license['instance_count'] = manifest['pool']['quantity'] + license['subscription_name'] = manifest['pool']['productName'] + license['pool_id'] = manifest['pool']['id'] + license['license_date'] = parse_date(manifest['endDate']).strftime('%s') + license['product_name'] = manifest['pool']['productName'] + license['valid_key'] = True + license['license_type'] = 'enterprise' + license['satellite'] = False + + 
self._attrs.update(license) + settings.LICENSE = self._attrs + return self._attrs + + + def update(self, **kwargs): + # Update attributes of the current license. + if 'instance_count' in kwargs: + kwargs['instance_count'] = int(kwargs['instance_count']) + if 'license_date' in kwargs: + kwargs['license_date'] = int(kwargs['license_date']) + self._attrs.update(kwargs) + + + def validate_rh(self, user, pw): + try: + host = 'https://' + str(self.config.get("server", "hostname")) + except Exception: + logger.exception('Cannot access rhsm.conf, make sure subscription manager is installed and configured.') + host = None + if not host: + host = getattr(settings, 'REDHAT_CANDLEPIN_HOST', None) + + if not user: + raise ValueError('subscriptions_username is required') + + if not pw: + raise ValueError('subscriptions_password is required') + + if host and user and pw: + if 'subscription.rhsm.redhat.com' in host: + json = self.get_rhsm_subs(host, user, pw) + else: + json = self.get_satellite_subs(host, user, pw) + return self.generate_license_options_from_entitlements(json) + return [] + + + def get_rhsm_subs(self, host, user, pw): + verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True) + json = [] + try: + subs = requests.get( + '/'.join([host, 'subscription/users/{}/owners'.format(user)]), + verify=verify, + auth=(user, pw) + ) + except requests.exceptions.ConnectionError as error: + raise error + except OSError as error: + raise OSError('Unable to open certificate bundle {}. 
Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error # noqa + subs.raise_for_status() + + for sub in subs.json(): + resp = requests.get( + '/'.join([ + host, + 'subscription/owners/{}/pools/?match=*tower*'.format(sub['key']) + ]), + verify=verify, + auth=(user, pw) + ) + resp.raise_for_status() + json.extend(resp.json()) + return json + + + def get_satellite_subs(self, host, user, pw): + try: + verify = str(self.config.get("rhsm", "repo_ca_cert")) + except Exception as e: + logger.exception('Unable to read rhsm config to get ca_cert location. {}'.format(str(e))) + verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True) + json = [] + try: + orgs = requests.get( + '/'.join([host, 'katello/api/organizations']), + verify=verify, + auth=(user, pw) + ) + except requests.exceptions.ConnectionError as error: + raise error + except OSError as error: + raise OSError('Unable to open certificate bundle {}. Check that Ansible Tower is running on Red Hat Enterprise Linux.'.format(verify)) from error # noqa + orgs.raise_for_status() + + for org in orgs.json()['results']: + resp = requests.get( + '/'.join([ + host, + '/katello/api/organizations/{}/subscriptions/?search=Red Hat Ansible Automation'.format(org['id']) + ]), + verify=verify, + auth=(user, pw) + ) + resp.raise_for_status() + results = resp.json()['results'] + if results != []: + for sub in results: + # Parse output for subscription metadata to build config + license = dict() + license['productId'] = sub['product_id'] + license['quantity'] = int(sub['quantity']) + license['support_level'] = sub['support_level'] + license['subscription_name'] = sub['name'] + license['id'] = sub['upstream_pool_id'] + license['endDate'] = sub['end_date'] + license['productName'] = "Red Hat Ansible Automation" + license['valid_key'] = True + license['license_type'] = 'enterprise' + license['satellite'] = True + json.append(license) + return json + + + def is_appropriate_sat_sub(self, sub): + if 
'Red Hat Ansible Automation' not in sub['subscription_name']: + return False + return True + + + def is_appropriate_sub(self, sub): + if sub['activeSubscription'] is False: + return False + # Products that contain Ansible Tower + products = sub.get('providedProducts', []) + if any(map(lambda product: product.get('productId', None) == "480", products)): + return True + return False + + + def generate_license_options_from_entitlements(self, json): + from dateutil.parser import parse + ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite') + valid_subs = [] + for sub in json: + satellite = sub.get('satellite') + if satellite: + is_valid = self.is_appropriate_sat_sub(sub) + else: + is_valid = self.is_appropriate_sub(sub) + if is_valid: + try: + end_date = parse(sub.get('endDate')) + except Exception: + continue + now = datetime.utcnow() + now = now.replace(tzinfo=end_date.tzinfo) + if end_date < now: + # If the sub has a past end date, skip it + continue + try: + quantity = int(sub['quantity']) + if quantity == -1: + # effectively, unlimited + quantity = MAX_INSTANCES + except Exception: + continue + + sku = sub['productId'] + trial = sku.startswith('S') # i.e.,, SER/SVC + support_level = '' + pool_id = sub['id'] + if satellite: + support_level = sub['support_level'] + else: + for attr in sub.get('productAttributes', []): + if attr.get('name') == 'support_level': + support_level = attr.get('value') + + valid_subs.append(ValidSub( + sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite + )) + + if valid_subs: + licenses = [] + for sub in valid_subs: + license = self.__class__(subscription_name='Red Hat Ansible Automation Platform') + license._attrs['instance_count'] = int(sub.quantity) + license._attrs['sku'] = sub.sku + license._attrs['support_level'] = sub.support_level + license._attrs['license_type'] = 'enterprise' + if sub.trial: + license._attrs['trial'] = True + 
license._attrs['license_type'] = 'trial' + license._attrs['instance_count'] = min( + MAX_INSTANCES, license._attrs['instance_count'] + ) + human_instances = license._attrs['instance_count'] + if human_instances == MAX_INSTANCES: + human_instances = 'Unlimited' + subscription_name = re.sub( + r' \([\d]+ Managed Nodes', + ' ({} Managed Nodes'.format(human_instances), + sub.name + ) + license._attrs['subscription_name'] = subscription_name + license._attrs['satellite'] = satellite + license._attrs['valid_key'] = True + license.update( + license_date=int(sub.end_date.strftime('%s')) + ) + license.update( + pool_id=sub.pool_id + ) + licenses.append(license._attrs.copy()) + return licenses + + raise ValueError( + 'No valid Red Hat Ansible Automation subscription could be found for this account.' # noqa + ) + + + def validate(self): + # Return license attributes with additional validation info. + attrs = copy.deepcopy(self._attrs) + type = attrs.get('license_type', 'none') + + if (type == 'UNLICENSED' or False): + attrs.update(dict(valid_key=False, compliant=False)) + return attrs + attrs['valid_key'] = True + + if Host: + current_instances = Host.objects.active_count() + else: + current_instances = 0 + available_instances = int(attrs.get('instance_count', None) or 0) + attrs['current_instances'] = current_instances + attrs['available_instances'] = available_instances + free_instances = (available_instances - current_instances) + attrs['free_instances'] = max(0, free_instances) + + license_date = int(attrs.get('license_date', 0) or 0) + current_date = int(time.time()) + time_remaining = license_date - current_date + attrs['time_remaining'] = time_remaining + if attrs.setdefault('trial', False): + attrs['grace_period_remaining'] = time_remaining + else: + attrs['grace_period_remaining'] = (license_date + 2592000) - current_date + attrs['compliant'] = bool(time_remaining > 0 and free_instances >= 0) + attrs['date_warning'] = bool(time_remaining < self.SUBSCRIPTION_TIMEOUT) 
+ attrs['date_expired'] = bool(time_remaining <= 0) + return attrs diff --git a/awx/main/utils/profiling.py b/awx/main/utils/profiling.py new file mode 100644 index 0000000000..c550175d7b --- /dev/null +++ b/awx/main/utils/profiling.py @@ -0,0 +1,151 @@ +import cProfile +import functools +import pstats +import os +import uuid +import datetime +import json +import sys + + +class AWXProfileBase: + def __init__(self, name, dest): + self.name = name + self.dest = dest + self.results = {} + + def generate_results(self): + raise RuntimeError("define me") + + def output_results(self, fname=None): + if not os.path.isdir(self.dest): + os.makedirs(self.dest) + + if fname: + fpath = os.path.join(self.dest, fname) + with open(fpath, 'w') as f: + f.write(json.dumps(self.results, indent=2)) + + +class AWXTiming(AWXProfileBase): + def __init__(self, name, dest='/var/log/tower/timing'): + super().__init__(name, dest) + + self.time_start = None + self.time_end = None + + def start(self): + self.time_start = datetime.datetime.now() + + def stop(self): + self.time_end = datetime.datetime.now() + + self.generate_results() + self.output_results() + + def generate_results(self): + diff = (self.time_end - self.time_start).total_seconds() + self.results = { + 'name': self.name, + 'diff': f'{diff}-seconds', + } + + def output_results(self): + fname = f"{self.results['diff']}-{self.name}-{uuid.uuid4()}.time" + super().output_results(fname) + + +def timing(name, *init_args, **init_kwargs): + def decorator_profile(func): + @functools.wraps(func) + def wrapper_profile(*args, **kwargs): + timing = AWXTiming(name, *init_args, **init_kwargs) + timing.start() + res = func(*args, **kwargs) + timing.stop() + return res + return wrapper_profile + return decorator_profile + + +class AWXProfiler(AWXProfileBase): + def __init__(self, name, dest='/var/log/tower/profile', dot_enabled=True): + ''' + Try to do as little as possible in init. Instead, do the init + only when the profiling is started. 
+ ''' + super().__init__(name, dest) + self.started = False + self.dot_enabled = dot_enabled + self.results = { + 'total_time_seconds': 0, + } + + def generate_results(self): + self.results['total_time_seconds'] = pstats.Stats(self.prof).total_tt + + def output_results(self): + super().output_results() + + filename_base = '%.3fs-%s-%s-%s' % (self.results['total_time_seconds'], self.name, self.pid, uuid.uuid4()) + pstats_filepath = os.path.join(self.dest, f"{filename_base}.pstats") + extra_data = "" + + if self.dot_enabled: + try: + from gprof2dot import main as generate_dot + except ImportError: + extra_data = 'Dot graph generation failed due to package "gprof2dot" being unavailable.' + else: + raw_filepath = os.path.join(self.dest, f"{filename_base}.raw") + dot_filepath = os.path.join(self.dest, f"{filename_base}.dot") + + pstats.Stats(self.prof).dump_stats(raw_filepath) + generate_dot([ + '-n', '2.5', '-f', 'pstats', '-o', + dot_filepath, + raw_filepath + ]) + os.remove(raw_filepath) + + with open(pstats_filepath, 'w') as f: + print(f"{self.name}, {extra_data}", file=f) + pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats() + return pstats_filepath + + + def start(self): + self.prof = cProfile.Profile() + self.pid = os.getpid() + + self.prof.enable() + self.started = True + + def is_started(self): + return self.started + + def stop(self): + if self.started: + self.prof.disable() + + self.generate_results() + res = self.output_results() + self.started = False + return res + else: + print("AWXProfiler::stop() called without calling start() first", file=sys.stderr) + return None + + +def profile(name, *init_args, **init_kwargs): + def decorator_profile(func): + @functools.wraps(func) + def wrapper_profile(*args, **kwargs): + prof = AWXProfiler(name, *init_args, **init_kwargs) + prof.start() + res = func(*args, **kwargs) + prof.stop() + return res + return wrapper_profile + return decorator_profile + diff --git a/awx/playbooks/project_update.yml 
b/awx/playbooks/project_update.yml index 169273d628..74e55e7ada 100644 --- a/awx/playbooks/project_update.yml +++ b/awx/playbooks/project_update.yml @@ -159,23 +159,29 @@ gather_facts: false connection: local name: Install content with ansible-galaxy command if necessary + vars: + yaml_exts: + - {ext: .yml} + - {ext: .yaml} tasks: - block: - - name: detect requirements.yml + - name: detect roles/requirements.(yml/yaml) stat: - path: '{{project_path|quote}}/roles/requirements.yml' + path: "{{project_path|quote}}/roles/requirements{{ item.ext }}" + with_items: "{{ yaml_exts }}" register: doesRequirementsExist - - name: fetch galaxy roles from requirements.yml + - name: fetch galaxy roles from requirements.(yml/yaml) command: > - ansible-galaxy role install -r roles/requirements.yml + ansible-galaxy role install -r {{ item.stat.path }} --roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }} args: chdir: "{{project_path|quote}}" register: galaxy_result - when: doesRequirementsExist.stat.exists + with_items: "{{ doesRequirementsExist.results }}" + when: item.stat.exists changed_when: "'was installed successfully' in galaxy_result.stdout" environment: ANSIBLE_FORCE_COLOR: false @@ -186,20 +192,22 @@ - install_roles - block: - - name: detect collections/requirements.yml + - name: detect collections/requirements.(yml/yaml) stat: - path: '{{project_path|quote}}/collections/requirements.yml' + path: "{{project_path|quote}}/collections/requirements{{ item.ext }}" + with_items: "{{ yaml_exts }}" register: doesCollectionRequirementsExist - - name: fetch galaxy collections from collections/requirements.yml + - name: fetch galaxy collections from collections/requirements.(yml/yaml) command: > - ansible-galaxy collection install -r collections/requirements.yml + ansible-galaxy collection install -r {{ item.stat.path }} --collections-path 
{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }} args: chdir: "{{project_path|quote}}" register: galaxy_collection_result - when: doesCollectionRequirementsExist.stat.exists + with_items: "{{ doesCollectionRequirementsExist.results }}" + when: item.stat.exists changed_when: "'Installing ' in galaxy_collection_result.stdout" environment: ANSIBLE_FORCE_COLOR: false diff --git a/awx/settings/development.py b/awx/settings/development.py index 3a4e008488..108767b98c 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -184,3 +184,6 @@ else: pass AWX_CALLBACK_PROFILE = True + +if 'sqlite3' not in DATABASES['default']['ENGINE']: # noqa + DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa diff --git a/awx/settings/production.py b/awx/settings/production.py index c2cde28c0f..fb24b7087f 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -102,6 +102,7 @@ except IOError: else: raise +# The below runs AFTER all of the custom settings are imported. 
CELERYBEAT_SCHEDULE.update({ # noqa 'isolated_heartbeat': { @@ -110,3 +111,5 @@ CELERYBEAT_SCHEDULE.update({ # noqa 'options': {'expires': AWX_ISOLATED_PERIODIC_CHECK * 2}, # noqa } }) + +DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]) # noqa diff --git a/awx/ui/client/features/credentials/add-edit-credentials.controller.js b/awx/ui/client/features/credentials/add-edit-credentials.controller.js index 57c8eed1dd..30ecb467e7 100644 --- a/awx/ui/client/features/credentials/add-edit-credentials.controller.js +++ b/awx/ui/client/features/credentials/add-edit-credentials.controller.js @@ -29,6 +29,7 @@ function AddEditCredentialsController ( const isExternal = credentialType.get('kind') === 'external'; const mode = $state.current.name.startsWith('credentials.add') ? 'add' : 'edit'; + vm.isEditable = credential.get('summary_fields.user_capabilities.edit'); vm.mode = mode; vm.strings = strings; @@ -52,6 +53,7 @@ function AddEditCredentialsController ( vm.form = credential.createFormSchema({ omit }); vm.form.disabled = !isEditable; } + vm.form.disabled = !vm.isEditable; vm.form._organization._disabled = !isOrgEditableByUser; // Only exists for permissions compatibility diff --git a/awx/ui/client/src/configuration/forms/settings-form.route.js b/awx/ui/client/src/configuration/forms/settings-form.route.js index 20ca06fd7f..b75ae5552e 100644 --- a/awx/ui/client/src/configuration/forms/settings-form.route.js +++ b/awx/ui/client/src/configuration/forms/settings-form.route.js @@ -54,20 +54,20 @@ export default { }); }], resolve: { - rhCreds: ['Rest', 'GetBasePath', function(Rest, GetBasePath) { + subscriptionCreds: ['Rest', 'GetBasePath', function(Rest, GetBasePath) { Rest.setUrl(`${GetBasePath('settings')}system/`); return Rest.get() .then(({data}) => { - const rhCreds = {}; - if (data.REDHAT_USERNAME && data.REDHAT_USERNAME !== "") { - rhCreds.REDHAT_USERNAME = data.REDHAT_USERNAME; 
+ const subscriptionCreds = {}; + if (data.SUBSCRIPTIONS_USERNAME && data.SUBSCRIPTIONS_USERNAME !== "") { + subscriptionCreds.SUBSCRIPTIONS_USERNAME = data.SUBSCRIPTIONS_USERNAME; } - if (data.REDHAT_PASSWORD && data.REDHAT_PASSWORD !== "") { - rhCreds.REDHAT_PASSWORD = data.REDHAT_PASSWORD; + if (data.SUBSCRIPTIONS_PASSWORD && data.SUBSCRIPTIONS_PASSWORD !== "") { + subscriptionCreds.SUBSCRIPTIONS_PASSWORD = data.SUBSCRIPTIONS_PASSWORD; } - return rhCreds; + return subscriptionCreds; }).catch(() => { return {}; }); diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js index 05bfe07718..382edb320b 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js @@ -139,7 +139,7 @@ export default ['$state', 'ConfigData', '$scope', 'SourcesFormDefinition', 'Pars else{ $scope.credentialBasePath = (source === 'ec2') ? GetBasePath('credentials') + '?credential_type__namespace=aws' : GetBasePath('credentials') + (source === '' ? 
'' : '?credential_type__namespace=' + (source)); } - if (source === 'ec2' || source === 'custom' || source === 'vmware' || source === 'openstack' || source === 'scm' || source === 'cloudforms' || source === "satellite6" || source === "azure_rm") { + if (true) { $scope.envParseType = 'yaml'; var varName; diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js index 6da60e6b3b..b8207fe5f2 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js @@ -68,11 +68,7 @@ export default ['$state', '$scope', 'ParseVariableString', 'ParseTypeChange', multiple: false }); - if (source === 'ec2' || source === 'custom' || - source === 'vmware' || source === 'openstack' || - source === 'scm' || source === 'cloudforms' || - source === 'satellite6' || source === 'azure_rm') { - + if (true) { var varName; if (source === 'scm') { varName = 'custom_variables'; diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js index 66b6693d58..01e322e24d 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js @@ -174,9 +174,11 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ parseTypeName: 'envParseType', dataTitle: i18n._("Source Variables"), dataPlacement: 'right', - awPopOver: "

" + i18n._("Override variables found in ec2.ini and used by the inventory update script. For a detailed description of these variables ") + - "" + - i18n._("view ec2.ini in the community.aws repo.") + "

" + + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "aws_ec2 " + + i18n._("plugin configuration guide.") + "

" + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + i18n._("JSON:") + "
\n" + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + @@ -198,9 +200,11 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ parseTypeName: 'envParseType', dataTitle: i18n._("Source Variables"), dataPlacement: 'right', - awPopOver: "

" + i18n._("Override variables found in vmware.ini and used by the inventory update script. For a detailed description of these variables ") + - "" + - i18n._("view vmware_inventory.ini in the vmware community repo.") + "

" + + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "vmware_vm_inventory " + + i18n._("plugin configuration guide.") + "

" + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + i18n._("JSON:") + "
\n" + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + @@ -222,9 +226,18 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ parseTypeName: 'envParseType', dataTitle: i18n._("Source Variables"), dataPlacement: 'right', - awPopOver: i18n._("Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration") + - '' + - i18n._("view openstack.yml in the Openstack github repo.") + "" + i18n._("Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax."), + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "openstack " + + i18n._("plugin configuration guide.") + "

" + + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + + i18n._("JSON:") + "
\n" + + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + + i18n._("YAML:") + "
\n" + + "
---
somevar: somevalue
password: magic
\n" + + "

" + i18n._("View JSON examples at ") + 'www.json.org

' + + "

" + i18n._("View YAML examples at ") + 'docs.ansible.com

', dataContainer: 'body', subForm: 'sourceSubForm' }, @@ -256,9 +269,18 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ parseTypeName: 'envParseType', dataTitle: i18n._("Source Variables"), dataPlacement: 'right', - awPopOver: i18n._("Override variables found in foreman.ini and used by the inventory update script. For an example variable configuration") + - '' + - i18n._("view foreman.ini in the Ansible Collections github repo.") + "" + i18n._("Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax."), + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "foreman " + + i18n._("plugin configuration guide.") + "

" + + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + + i18n._("JSON:") + "
\n" + + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + + i18n._("YAML:") + "
\n" + + "
---
somevar: somevalue
password: magic
\n" + + "

" + i18n._("View JSON examples at ") + 'www.json.org

' + + "

" + i18n._("View YAML examples at ") + 'docs.ansible.com

', dataContainer: 'body', subForm: 'sourceSubForm' }, @@ -273,9 +295,89 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ parseTypeName: 'envParseType', dataTitle: i18n._("Source Variables"), dataPlacement: 'right', - awPopOver: "

" + i18n._("Override variables found in azure_rm.ini and used by the inventory update script. For a detailed description of these variables ") + - "" + - i18n._("view azure_rm.ini in the Ansible community.general github repo.") + "

" + + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "azure_rm " + + i18n._("plugin configuration guide.") + "

" + + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + + i18n._("JSON:") + "
\n" + + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + + i18n._("YAML:") + "
\n" + + "
---
somevar: somevalue
password: magic
\n" + + "

" + i18n._("View JSON examples at ") + 'www.json.org

' + + "

" + i18n._("View YAML examples at ") + 'docs.ansible.com

', + dataContainer: 'body', + subForm: 'sourceSubForm' + }, + gce_variables: { + id: 'gce_variables', + label: i18n._('Source Variables'), //"{{vars_label}}" , + ngShow: "source && source.value == 'gce'", + type: 'textarea', + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + rows: 6, + 'default': '---', + parseTypeName: 'envParseType', + dataTitle: i18n._("Source Variables"), + dataPlacement: 'right', + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "gcp_compute " + + i18n._("plugin configuration guide.") + "

" + + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + + i18n._("JSON:") + "
\n" + + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + + i18n._("YAML:") + "
\n" + + "
---
somevar: somevalue
password: magic
\n" + + "

" + i18n._("View JSON examples at ") + 'www.json.org

' + + "

" + i18n._("View YAML examples at ") + 'docs.ansible.com

', + dataContainer: 'body', + subForm: 'sourceSubForm' + }, + tower_variables: { + id: 'tower_variables', + label: i18n._('Source Variables'), //"{{vars_label}}" , + ngShow: "source && source.value == 'tower'", + type: 'textarea', + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + rows: 6, + 'default': '---', + parseTypeName: 'envParseType', + dataTitle: i18n._("Source Variables"), + dataPlacement: 'right', + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "tower " + + i18n._("plugin configuration guide.") + "

" + + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + + i18n._("JSON:") + "
\n" + + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + + i18n._("YAML:") + "
\n" + + "
---
somevar: somevalue
password: magic
\n" + + "

" + i18n._("View JSON examples at ") + 'www.json.org

' + + "

" + i18n._("View YAML examples at ") + 'docs.ansible.com

', + dataContainer: 'body', + subForm: 'sourceSubForm' + }, + rhv_variables: { + id: 'rhv_variables', + label: i18n._('Source Variables'), //"{{vars_label}}" , + ngShow: "source && source.value == 'rhv'", + type: 'textarea', + class: 'Form-textAreaLabel Form-formGroup--fullWidth', + rows: 6, + 'default': '---', + parseTypeName: 'envParseType', + dataTitle: i18n._("Source Variables"), + dataPlacement: 'right', + awPopOver: "

" + i18n._("Enter variables to configure the inventory source. For a detailed description of how to configure this plugin, see ") + + "" + + i18n._("Inventory Plugins") + " " + i18n._("in the documentation and the ") + + "ovirt " + + i18n._("plugin configuration guide.") + "

" + "

" + i18n._("Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "

" + i18n._("JSON:") + "
\n" + "
{
 \"somevar\": \"somevalue\",
 \"password\": \"magic\"
}
\n" + diff --git a/awx/ui/client/src/license/checkLicense.factory.js b/awx/ui/client/src/license/checkLicense.factory.js index e76f1ea0b7..73fdc5e15e 100644 --- a/awx/ui/client/src/license/checkLicense.factory.js +++ b/awx/ui/client/src/license/checkLicense.factory.js @@ -15,14 +15,26 @@ export default return config.license_info; }, - post: function(payload, eula){ - var defaultUrl = GetBasePath('config'); + post: function(payload, eula, attach){ + var defaultUrl = GetBasePath('config') + (attach ? 'attach/' : ''); Rest.setUrl(defaultUrl); var data = payload; - data.eula_accepted = eula; + + if (!attach) { + data.eula_accepted = eula; + } return Rest.post(JSON.stringify(data)) .then((response) =>{ + if (attach) { + var configPayload = {}; + configPayload.eula_accepted = eula; + Rest.setUrl(GetBasePath('config')); + return Rest.post(configPayload) + .then((configResponse) => { + return configResponse.data; + }); + } return response.data; }) .catch(({data}) => { diff --git a/awx/ui/client/src/license/license.controller.js b/awx/ui/client/src/license/license.controller.js index 37c1c38364..2cae41e363 100644 --- a/awx/ui/client/src/license/license.controller.js +++ b/awx/ui/client/src/license/license.controller.js @@ -8,9 +8,9 @@ import {N_} from "../i18n"; export default ['Wait', '$state', '$scope', '$rootScope', 'ProcessErrors', 'CheckLicense', 'moment', '$timeout', 'Rest', 'LicenseStrings', - '$window', 'ConfigService', 'pendoService', 'insightsEnablementService', 'i18n', 'config', 'rhCreds', 'GetBasePath', + '$window', 'ConfigService', 'pendoService', 'insightsEnablementService', 'i18n', 'config', 'subscriptionCreds', 'GetBasePath', function(Wait, $state, $scope, $rootScope, ProcessErrors, CheckLicense, moment, $timeout, Rest, LicenseStrings, - $window, ConfigService, pendoService, insightsEnablementService, i18n, config, rhCreds, GetBasePath) { + $window, ConfigService, pendoService, insightsEnablementService, i18n, config, subscriptionCreds, GetBasePath) { 
$scope.strings = LicenseStrings; @@ -35,7 +35,7 @@ export default const reset = function() { $scope.newLicense.eula = undefined; - $scope.rhCreds = {}; + $scope.subscriptionCreds = {}; $scope.selectedLicense = {}; }; @@ -44,9 +44,9 @@ export default $scope.fileName = N_("No file selected."); if ($rootScope.licenseMissing) { - $scope.title = $rootScope.BRAND_NAME + i18n._(" License"); + $scope.title = $rootScope.BRAND_NAME + i18n._(" Subscription"); } else { - $scope.title = i18n._("License Management"); + $scope.title = i18n._("Subscription Management"); } $scope.license = config; @@ -62,30 +62,30 @@ export default insights: true }; - $scope.rhCreds = {}; + $scope.subscriptionCreds = {}; - if (rhCreds.REDHAT_USERNAME && rhCreds.REDHAT_USERNAME !== "") { - $scope.rhCreds.username = rhCreds.REDHAT_USERNAME; + if (subscriptionCreds.SUBSCRIPTIONS_USERNAME && subscriptionCreds.SUBSCRIPTIONS_USERNAME !== "") { + $scope.subscriptionCreds.username = subscriptionCreds.SUBSCRIPTIONS_USERNAME; } - if (rhCreds.REDHAT_PASSWORD && rhCreds.REDHAT_PASSWORD !== "") { - $scope.rhCreds.password = rhCreds.REDHAT_PASSWORD; + if (subscriptionCreds.SUBSCRIPTIONS_PASSWORD && subscriptionCreds.SUBSCRIPTIONS_PASSWORD !== "") { + $scope.subscriptionCreds.password = subscriptionCreds.SUBSCRIPTIONS_PASSWORD; $scope.showPlaceholderPassword = true; } }; - const updateRHCreds = (config) => { + const updateSubscriptionCreds = (config) => { Rest.setUrl(`${GetBasePath('settings')}system/`); Rest.get() .then(({data}) => { initVars(config); - if (data.REDHAT_USERNAME && data.REDHAT_USERNAME !== "") { - $scope.rhCreds.username = data.REDHAT_USERNAME; + if (data.SUBSCRIPTIONS_USERNAME && data.SUBSCRIPTIONS_USERNAME !== "") { + $scope.subscriptionCreds.username = data.SUBSCRIPTIONS_USERNAME; } - if (data.REDHAT_PASSWORD && data.REDHAT_PASSWORD !== "") { - $scope.rhCreds.password = data.REDHAT_PASSWORD; + if (data.SUBSCRIPTIONS_PASSWORD && data.SUBSCRIPTIONS_PASSWORD !== "") { + 
$scope.subscriptionCreds.password = data.SUBSCRIPTIONS_PASSWORD; $scope.showPlaceholderPassword = true; } }).catch(() => { @@ -100,28 +100,23 @@ export default $scope.fileName = event.target.files[0].name; // Grab the key from the raw license file const raw = new FileReader(); - // readAsFoo runs async + raw.onload = function() { - try { - $scope.newLicense.file = JSON.parse(raw.result); - } catch(err) { - ProcessErrors($rootScope, null, null, null, - {msg: i18n._('Invalid file format. Please upload valid JSON.')}); - } + $scope.newLicense.manifest = btoa(raw.result); }; try { - raw.readAsText(event.target.files[0]); + raw.readAsBinaryString(event.target.files[0]); } catch(err) { ProcessErrors($rootScope, null, null, null, - {msg: i18n._('Invalid file format. Please upload valid JSON.')}); + {msg: i18n._('Invalid file format. Please upload a valid Red Hat Subscription Manifest.')}); } }; // HTML5 spec doesn't provide a way to customize file input css // So we hide the default input, show our own, and simulate clicks to the hidden input $scope.fakeClick = function() { - if($scope.user_is_superuser && (!$scope.rhCreds.username || $scope.rhCreds.username === '') && (!$scope.rhCreds.password || $scope.rhCreds.password === '')) { + if($scope.user_is_superuser && (!$scope.subscriptionCreds.username || $scope.subscriptionCreds.username === '') && (!$scope.subscriptionCreds.password || $scope.subscriptionCreds.password === '')) { $('#License-file').click(); } }; @@ -131,9 +126,9 @@ export default }; $scope.replacePassword = () => { - if ($scope.user_is_superuser && !$scope.newLicense.file) { + if ($scope.user_is_superuser && !$scope.newLicense.manifest) { $scope.showPlaceholderPassword = false; - $scope.rhCreds.password = ""; + $scope.subscriptionCreds.password = ""; $timeout(() => { $('.tooltip').remove(); $('#rh-password').focus(); @@ -142,9 +137,9 @@ export default }; $scope.lookupLicenses = () => { - if ($scope.rhCreds.username && $scope.rhCreds.password) { + if 
($scope.subscriptionCreds.username && $scope.subscriptionCreds.password) { Wait('start'); - ConfigService.getSubscriptions($scope.rhCreds.username, $scope.rhCreds.password) + ConfigService.getSubscriptions($scope.subscriptionCreds.username, $scope.subscriptionCreds.password) .then(({data}) => { Wait('stop'); if (data && data.length > 0) { @@ -172,29 +167,30 @@ export default $scope.confirmLicenseSelection = () => { $scope.showLicenseModal = false; $scope.selectedLicense.fullLicense = $scope.rhLicenses.find((license) => { - return license.license_key === $scope.selectedLicense.modalKey; + return license.pool_id === $scope.selectedLicense.modalPoolId; }); - $scope.selectedLicense.modalKey = undefined; + $scope.selectedLicense.modalPoolId = undefined; }; $scope.cancelLicenseLookup = () => { $scope.showLicenseModal = false; - $scope.selectedLicense.modalKey = undefined; + $scope.selectedLicense.modalPoolId = undefined; }; $scope.submit = function() { Wait('start'); let payload = {}; - if ($scope.newLicense.file) { - payload = $scope.newLicense.file; + let attach = false; + if ($scope.newLicense.manifest) { + payload.manifest = $scope.newLicense.manifest; } else if ($scope.selectedLicense.fullLicense) { - payload = $scope.selectedLicense.fullLicense; + payload.pool_id = $scope.selectedLicense.fullLicense.pool_id; + attach = true; } - CheckLicense.post(payload, $scope.newLicense.eula) - .then((licenseInfo) => { + CheckLicense.post(payload, $scope.newLicense.eula, attach) + .finally((licenseInfo) => { reset(); - ConfigService.delete(); ConfigService.getConfig(licenseInfo) .then(function(config) { @@ -217,7 +213,7 @@ export default licenseMissing: false }); } else { - updateRHCreds(config); + updateSubscriptionCreds(config); $scope.success = true; $rootScope.licenseMissing = false; // for animation purposes diff --git a/awx/ui/client/src/license/license.partial.html b/awx/ui/client/src/license/license.partial.html index 9a4a9a80f7..037ac17256 100644 --- 
a/awx/ui/client/src/license/license.partial.html +++ b/awx/ui/client/src/license/license.partial.html @@ -5,10 +5,10 @@
Details
-
License
+
Subscription
- Valid License - Invalid License + Compliant + Out of Compliance
@@ -18,7 +18,7 @@
-
License Type
+
Subscription Type
{{license.license_info.license_type}}
@@ -29,12 +29,6 @@ {{license.license_info.subscription_name}}
-
-
License Key
-
- {{license.license_info.license_key}} -
-
Expires On
@@ -64,53 +58,66 @@ {{license.license_info.current_instances}}
-
+
+
Hosts Remaining
+
+ {{license.license_info.free_instances}} +
+
+
Hosts Remaining
{{license.license_info.free_instances}}
-
If you are ready to upgrade, please contact us by clicking the button below
- +
If you are ready to upgrade or renew, please contact us by clicking the button below.
+
{{title}}
-
Welcome to Ansible Tower! Please complete the steps below to acquire a license.
+
Welcome to Red Hat Ansible Automation Platform! Please complete the steps below to activate your subscription.
+
+ + 1 + + + If you do not have a subscription, you can visit Red Hat to obtain a trial subscription. + +
+ +
+
+ + 2 + + + Select your Ansible Automation Platform subscription to use. + +
+
-
- - 1 - - - Please click the button below to visit Ansible's website to get a Tower license key. - -
- - -
- - 2 - - Choose your license file, agree to the End User License Agreement, and click submit. + Upload a Red Hat Subscription Manifest containing your subscription. To generate your subscription manifest, go to subscription allocations on the Red Hat Customer Portal.
* - License + Red Hat Subscription Manifest + + +
-
Upload a license file
- Browse + Browse {{fileName|translate}}
@@ -125,12 +132,12 @@
- Provide your Red Hat customer credentials and you can choose from a list of your available licenses. The credentials you use will be stored for future use in retrieving renewal or expanded licenses. You can update or remove them in SETTINGS > SYSTEM. + Provide your Red Hat or Red Hat Satellite credentials below and you can choose from a list of your available subscriptions. The credentials you use will be stored for future use in retrieving renewal or expanded subscriptions.
- +
@@ -143,11 +150,11 @@
- +
- GET LICENSES + GET SUBSCRIPTIONS
@@ -158,6 +165,14 @@
+
+ + 3 + + + Agree to the End User License Agreement, and click submit. + +
* End User License Agreement @@ -200,7 +215,7 @@ Save successful!
- +
@@ -223,12 +238,12 @@