diff --git a/CHANGELOG.md b/CHANGELOG.md index 49fcf77ad3..27bfb8be32 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/`. +# 19.0.0 (TBD) + +* AWX now runs on Python 3.8 (https://github.com/ansible/awx/pull/8778/) +* Added support for Execution Environments to the Activity Stream (https://github.com/ansible/awx/issues/9308) +* Fixed a bug that improperly formats OpenSSH keys specified in custom Credential Types (https://github.com/ansible/awx/issues/9361) + # 18.0.0 (March 23, 2021) **IMPORTANT INSTALL AND UPGRADE NOTES** diff --git a/Makefile b/Makefile index fb6e25e657..e2cac573bd 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -PYTHON ?= python3 +PYTHON ?= python3.8 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())") SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") OFFICIAL ?= no @@ -23,7 +23,7 @@ VENV_BASE ?= /var/lib/awx/venv/ SCL_PREFIX ?= CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db -DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering +DEV_DOCKER_TAG_BASE ?= quay.io/awx DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) # Python packages to install only from source (not from binary wheels) @@ -125,7 +125,7 @@ virtualenv_awx: mkdir $(VENV_BASE); \ fi; \ if [ ! -d "$(VENV_BASE)/awx" ]; then \ - virtualenv -p $(PYTHON) $(VENV_BASE)/awx; \ + $(PYTHON) -m venv $(VENV_BASE)/awx; \ $(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \ fi; \ fi @@ -164,7 +164,7 @@ version_file: if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi; \ - python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \ + $(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \ # Do any one-time init tasks. 
comma := , @@ -272,12 +272,12 @@ reports: mkdir -p $@ black: reports - command -v black >/dev/null 2>&1 || { echo "could not find black on your PATH, you may need to \`pip install black\`, or set AWX_IGNORE_BLACK=1" && exit 1; } - (set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report) + @command -v black >/dev/null 2>&1 || { echo "could not find black on your PATH, you may need to \`pip install black\`, or set AWX_IGNORE_BLACK=1" && exit 1; } + @(set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report) .git/hooks/pre-commit: - echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit - chmod +x .git/hooks/pre-commit + @echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only --diff-filter=AM | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit + @chmod +x .git/hooks/pre-commit genschema: reports $(MAKE) swagger PYTEST_ARGS="--genschema --create-db " @@ -292,7 +292,7 @@ swagger: reports check: black awx-link: - [ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev + [ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/setup.py egg_info_dev cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests diff --git a/awx/api/renderers.py b/awx/api/renderers.py index 48cba6cf5c..d19d6ee318 100644 --- a/awx/api/renderers.py +++ b/awx/api/renderers.py @@ -129,6 +129,18 @@ class PrometheusJSONRenderer(renderers.JSONRenderer): parsed_metrics = text_string_to_metric_families(data) data = {} for family in parsed_metrics: + 
data[family.name] = {} + data[family.name]['help_text'] = family.documentation + data[family.name]['type'] = family.type + data[family.name]['samples'] = [] for sample in family.samples: - data[sample[0]] = {"labels": sample[1], "value": sample[2]} + sample_dict = {"labels": sample[1], "value": sample[2]} + if family.type == 'histogram': + if sample[0].endswith("_sum"): + sample_dict['sample_type'] = "sum" + elif sample[0].endswith("_count"): + sample_dict['sample_type'] = "count" + elif sample[0].endswith("_bucket"): + sample_dict['sample_type'] = "bucket" + data[family.name]['samples'].append(sample_dict) return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context) diff --git a/awx/api/templates/api/metrics_view.md b/awx/api/templates/api/metrics_view.md new file mode 100644 index 0000000000..dbc4d2e043 --- /dev/null +++ b/awx/api/templates/api/metrics_view.md @@ -0,0 +1 @@ +query params to filter response, e.g., ?subsystemonly=1&metric=callback_receiver_events_insert_db&node=awx-1 diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index fa175eff7b..cf64e4114c 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -3043,6 +3043,8 @@ class WorkflowJobTemplateNodeCreateApproval(RetrieveAPIView): return Response(data, status=status.HTTP_201_CREATED) def check_permissions(self, request): + if not request.user.is_authenticated: + raise PermissionDenied() obj = self.get_object().workflow_job_template if request.method == 'POST': if not request.user.can_access(models.WorkflowJobTemplate, 'change', obj, request.data): diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py index dd40f11900..212acf3890 100644 --- a/awx/api/views/metrics.py +++ b/awx/api/views/metrics.py @@ -14,6 +14,7 @@ from rest_framework.exceptions import PermissionDenied # AWX # from awx.main.analytics import collectors +import awx.main.analytics.subsystem_metrics as s_metrics from awx.main.analytics.metrics import metrics 
from awx.api import renderers @@ -33,5 +34,10 @@ class MetricsView(APIView): def get(self, request): ''' Show Metrics Details ''' if request.user.is_superuser or request.user.is_system_auditor: - return Response(metrics().decode('UTF-8')) + metrics_to_show = '' + if not request.query_params.get('subsystemonly', "0") == "1": + metrics_to_show += metrics().decode('UTF-8') + if not request.query_params.get('dbonly', "0") == "1": + metrics_to_show += s_metrics.metrics(request) + return Response(metrics_to_show) raise PermissionDenied() diff --git a/awx/api/views/root.py b/awx/api/views/root.py index 012d0c7c96..7a7ea649b1 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -24,7 +24,7 @@ from awx.api.generics import APIView from awx.conf.registry import settings_registry from awx.main.analytics import all_collectors from awx.main.ha import is_ha_environment -from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, to_python_boolean +from awx.main.utils import get_awx_version, get_custom_venv_choices, to_python_boolean from awx.main.utils.licensing import validate_entitlement_manifest from awx.api.versioning import reverse, drf_reverse from awx.main.constants import PRIVILEGE_ESCALATION_METHODS @@ -279,7 +279,6 @@ class ApiV2ConfigView(APIView): time_zone=settings.TIME_ZONE, license_info=license_data, version=get_awx_version(), - ansible_version=get_ansible_version(), eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '', analytics_status=pendo_state, analytics_collectors=all_collectors(), diff --git a/awx/conf/migrations/0009_rename_proot_settings.py b/awx/conf/migrations/0009_rename_proot_settings.py new file mode 100644 index 0000000000..2b0e2175aa --- /dev/null +++ b/awx/conf/migrations/0009_rename_proot_settings.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +from __future__ import unicode_literals +from django.db import migrations +from awx.conf.migrations import 
_rename_setting + + +def rename_proot_settings(apps, schema_editor): + _rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_BASE_PATH', new_key='AWX_ISOLATION_BASE_PATH') + _rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_SHOW_PATHS', new_key='AWX_ISOLATION_SHOW_PATHS') + + +class Migration(migrations.Migration): + + dependencies = [('conf', '0008_subscriptions')] + + operations = [migrations.RunPython(rename_proot_settings)] diff --git a/awx/main/analytics/analytics_tasks.py b/awx/main/analytics/analytics_tasks.py new file mode 100644 index 0000000000..990cacfafb --- /dev/null +++ b/awx/main/analytics/analytics_tasks.py @@ -0,0 +1,14 @@ +# Python +import logging + +# AWX +from awx.main.analytics.subsystem_metrics import Metrics +from awx.main.dispatch.publish import task +from awx.main.dispatch import get_local_queuename + +logger = logging.getLogger('awx.main.scheduler') + + +@task(queue=get_local_queuename) +def send_subsystem_metrics(): + Metrics().send_metrics() diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index abc10682a9..abdeb88b6c 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -2,6 +2,7 @@ import io import os import os.path import platform +import distro from django.db import connection from django.db.models import Count @@ -10,7 +11,7 @@ from django.utils.timezone import now from django.utils.translation import ugettext_lazy as _ from awx.conf.license import get_license -from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, camelcase_to_underscore +from awx.main.utils import get_awx_version, get_custom_venv_choices, camelcase_to_underscore from awx.main import models from django.contrib.sessions.models import Session from awx.main.analytics import register @@ -32,7 +33,7 @@ data _since_ the last report date - i.e., new data in the last 24 hours) ''' -@register('config', '1.2', description=_('General 
platform configuration.')) +@register('config', '1.3', description=_('General platform configuration.')) def config(since, **kwargs): license_info = get_license() install_type = 'traditional' @@ -43,7 +44,7 @@ def config(since, **kwargs): return { 'platform': { 'system': platform.system(), - 'dist': platform.dist(), + 'dist': distro.linux_distribution(), 'release': platform.release(), 'type': install_type, }, @@ -51,7 +52,6 @@ def config(since, **kwargs): 'instance_uuid': settings.SYSTEM_UUID, 'tower_url_base': settings.TOWER_URL_BASE, 'tower_version': get_awx_version(), - 'ansible_version': get_ansible_version(), 'license_type': license_info.get('license_type', 'UNLICENSED'), 'free_instances': license_info.get('free_instances', 0), 'total_licensed_instances': license_info.get('instance_count', 0), diff --git a/awx/main/analytics/metrics.py b/awx/main/analytics/metrics.py index e889719ded..81c6ab96ad 100644 --- a/awx/main/analytics/metrics.py +++ b/awx/main/analytics/metrics.py @@ -1,8 +1,8 @@ from django.conf import settings -from prometheus_client import REGISTRY, PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, Gauge, Info, generate_latest +from prometheus_client import PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, CollectorRegistry, Gauge, Info, generate_latest from awx.conf.license import get_license -from awx.main.utils import get_awx_version, get_ansible_version +from awx.main.utils import get_awx_version from awx.main.analytics.collectors import ( counts, instance_info, @@ -11,115 +11,123 @@ from awx.main.analytics.collectors import ( ) -REGISTRY.unregister(PROCESS_COLLECTOR) -REGISTRY.unregister(PLATFORM_COLLECTOR) -REGISTRY.unregister(GC_COLLECTOR) - -SYSTEM_INFO = Info('awx_system', 'AWX System Information') -ORG_COUNT = Gauge('awx_organizations_total', 'Number of organizations') -USER_COUNT = Gauge('awx_users_total', 'Number of users') -TEAM_COUNT = Gauge('awx_teams_total', 'Number of teams') -INV_COUNT = Gauge('awx_inventories_total', 
'Number of inventories') -PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects') -JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates') -WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates') -HOST_COUNT = Gauge( - 'awx_hosts_total', - 'Number of hosts', - [ - 'type', - ], -) -SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules') -INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of invetory scripts') -USER_SESSIONS = Gauge( - 'awx_sessions_total', - 'Number of sessions', - [ - 'type', - ], -) -CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs') -RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system') -PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system') -STATUS = Gauge( - 'awx_status_total', - 'Status of Job launched', - [ - 'status', - ], -) - -INSTANCE_CAPACITY = Gauge( - 'awx_instance_capacity', - 'Capacity of each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) -INSTANCE_CPU = Gauge( - 'awx_instance_cpu', - 'CPU cores on each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) -INSTANCE_MEMORY = Gauge( - 'awx_instance_memory', - 'RAM (Kb) on each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) -INSTANCE_INFO = Info( - 'awx_instance', - 'Info about each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) -INSTANCE_LAUNCH_TYPE = Gauge( - 'awx_instance_launch_type_total', - 'Type of Job launched', - [ - 'node', - 'launch_type', - ], -) -INSTANCE_STATUS = Gauge( - 'awx_instance_status_total', - 'Status of Job launched', - [ - 'node', - 'status', - ], -) -INSTANCE_CONSUMED_CAPACITY = Gauge( - 'awx_instance_consumed_capacity', - 'Consumed capacity of each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) -INSTANCE_REMAINING_CAPACITY = Gauge( - 'awx_instance_remaining_capacity', 
- 'Remaining capacity of each node in a Tower system', - [ - 'hostname', - 'instance_uuid', - ], -) - -LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license') -LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license') - - def metrics(): + REGISTRY = CollectorRegistry() + + SYSTEM_INFO = Info('awx_system', 'AWX System Information', registry=REGISTRY) + ORG_COUNT = Gauge('awx_organizations_total', 'Number of organizations', registry=REGISTRY) + USER_COUNT = Gauge('awx_users_total', 'Number of users', registry=REGISTRY) + TEAM_COUNT = Gauge('awx_teams_total', 'Number of teams', registry=REGISTRY) + INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories', registry=REGISTRY) + PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects', registry=REGISTRY) + JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates', registry=REGISTRY) + WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates', registry=REGISTRY) + HOST_COUNT = Gauge( + 'awx_hosts_total', + 'Number of hosts', + [ + 'type', + ], + registry=REGISTRY, + ) + SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules', registry=REGISTRY) + INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of invetory scripts', registry=REGISTRY) + USER_SESSIONS = Gauge( + 'awx_sessions_total', + 'Number of sessions', + [ + 'type', + ], + registry=REGISTRY, + ) + CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs', registry=REGISTRY) + RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system', registry=REGISTRY) + PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system', registry=REGISTRY) + STATUS = Gauge( + 'awx_status_total', + 'Status of Job launched', + [ + 'status', + ], + registry=REGISTRY, + ) + + INSTANCE_CAPACITY = Gauge( + 
'awx_instance_capacity', + 'Capacity of each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + INSTANCE_CPU = Gauge( + 'awx_instance_cpu', + 'CPU cores on each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + INSTANCE_MEMORY = Gauge( + 'awx_instance_memory', + 'RAM (Kb) on each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + INSTANCE_INFO = Info( + 'awx_instance', + 'Info about each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + INSTANCE_LAUNCH_TYPE = Gauge( + 'awx_instance_launch_type_total', + 'Type of Job launched', + [ + 'node', + 'launch_type', + ], + registry=REGISTRY, + ) + INSTANCE_STATUS = Gauge( + 'awx_instance_status_total', + 'Status of Job launched', + [ + 'node', + 'status', + ], + registry=REGISTRY, + ) + INSTANCE_CONSUMED_CAPACITY = Gauge( + 'awx_instance_consumed_capacity', + 'Consumed capacity of each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + INSTANCE_REMAINING_CAPACITY = Gauge( + 'awx_instance_remaining_capacity', + 'Remaining capacity of each node in a Tower system', + [ + 'hostname', + 'instance_uuid', + ], + registry=REGISTRY, + ) + + LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license', registry=REGISTRY) + LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license', registry=REGISTRY) + license_info = get_license() SYSTEM_INFO.info( { @@ -127,7 +135,6 @@ def metrics(): 'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE), 'tower_url_base': settings.TOWER_URL_BASE, 'tower_version': get_awx_version(), - 'ansible_version': get_ansible_version(), 'license_type': license_info.get('license_type', 'UNLICENSED'), 'license_expiry': str(license_info.get('time_remaining', 0)), 
'pendo_tracking': settings.PENDO_TRACKING_STATE, @@ -197,7 +204,7 @@ def metrics(): for status, value in statuses.items(): INSTANCE_STATUS.labels(node=node, status=status).set(value) - return generate_latest() + return generate_latest(registry=REGISTRY) __all__ = ['metrics'] diff --git a/awx/main/analytics/subsystem_metrics.py b/awx/main/analytics/subsystem_metrics.py new file mode 100644 index 0000000000..b5ecf39e90 --- /dev/null +++ b/awx/main/analytics/subsystem_metrics.py @@ -0,0 +1,304 @@ +import redis +import json +import time +import logging + +from django.conf import settings +from django.apps import apps +from awx.main.consumers import emit_channel_notification + +root_key = 'awx_metrics' +logger = logging.getLogger('awx.main.wsbroadcast') + + +class BaseM: + def __init__(self, field, help_text): + self.field = field + self.help_text = help_text + self.current_value = 0 + + def clear_value(self, conn): + conn.hset(root_key, self.field, 0) + self.current_value = 0 + + def inc(self, value): + self.current_value += value + + def set(self, value): + self.current_value = value + + def decode(self, conn): + value = conn.hget(root_key, self.field) + return self.decode_value(value) + + def to_prometheus(self, instance_data): + output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} gauge\n" + for instance in instance_data: + output_text += f'{self.field}{{node="{instance}"}} {instance_data[instance][self.field]}\n' + return output_text + + +class FloatM(BaseM): + def decode_value(self, value): + if value is not None: + return float(value) + else: + return 0.0 + + def store_value(self, conn): + conn.hincrbyfloat(root_key, self.field, self.current_value) + self.current_value = 0 + + +class IntM(BaseM): + def decode_value(self, value): + if value is not None: + return int(value) + else: + return 0 + + def store_value(self, conn): + conn.hincrby(root_key, self.field, self.current_value) + self.current_value = 0 + + +class SetIntM(BaseM): + def 
decode_value(self, value): + if value is not None: + return int(value) + else: + return 0 + + def store_value(self, conn): + # do not set value if it has not changed since last time this was called + if self.current_value is not None: + conn.hset(root_key, self.field, self.current_value) + self.current_value = None + + +class SetFloatM(SetIntM): + def decode_value(self, value): + if value is not None: + return float(value) + else: + return 0 + + +class HistogramM(BaseM): + def __init__(self, field, help_text, buckets): + self.buckets = buckets + self.buckets_to_keys = {} + for b in buckets: + self.buckets_to_keys[b] = IntM(field + '_' + str(b), '') + self.inf = IntM(field + '_inf', '') + self.sum = IntM(field + '_sum', '') + super(HistogramM, self).__init__(field, help_text) + + def clear_value(self, conn): + conn.hset(root_key, self.field, 0) + self.inf.clear_value(conn) + self.sum.clear_value(conn) + for b in self.buckets_to_keys.values(): + b.clear_value(conn) + super(HistogramM, self).clear_value(conn) + + def observe(self, value): + for b in self.buckets: + if value <= b: + self.buckets_to_keys[b].inc(1) + break + self.sum.inc(value) + self.inf.inc(1) + + def decode(self, conn): + values = {'counts': []} + for b in self.buckets_to_keys: + values['counts'].append(self.buckets_to_keys[b].decode(conn)) + values['sum'] = self.sum.decode(conn) + values['inf'] = self.inf.decode(conn) + return values + + def store_value(self, conn): + for b in self.buckets: + self.buckets_to_keys[b].store_value(conn) + self.sum.store_value(conn) + self.inf.store_value(conn) + + def to_prometheus(self, instance_data): + output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} histogram\n" + for instance in instance_data: + for i, b in enumerate(self.buckets): + output_text += f'{self.field}_bucket{{le="{b}",node="{instance}"}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n' + output_text += f'{self.field}_bucket{{le="+Inf",node="{instance}"}} 
{instance_data[instance][self.field]["inf"]}\n' + output_text += f'{self.field}_count{{node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n' + output_text += f'{self.field}_sum{{node="{instance}"}} {instance_data[instance][self.field]["sum"]}\n' + return output_text + + +class Metrics: + def __init__(self, auto_pipe_execute=True): + self.pipe = redis.Redis.from_url(settings.BROKER_URL).pipeline() + self.conn = redis.Redis.from_url(settings.BROKER_URL) + self.last_pipe_execute = time.time() + # track if metrics have been modified since last saved to redis + # start with True so that we get an initial save to redis + self.metrics_have_changed = True + self.pipe_execute_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SAVE_TO_REDIS + self.send_metrics_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SEND_METRICS + # auto pipe execute will commit transaction of metric data to redis + # at a regular interval (pipe_execute_interval). If set to False, + # the calling function should call .pipe_execute() explicitly + self.auto_pipe_execute = auto_pipe_execute + Instance = apps.get_model('main', 'Instance') + self.instance_name = Instance.objects.me().hostname + + # metric name, help_text + METRICSLIST = [ + SetIntM('callback_receiver_events_queue_size_redis', 'Current number of events in redis queue'), + IntM('callback_receiver_events_popped_redis', 'Number of events popped from redis'), + IntM('callback_receiver_events_in_memory', 'Current number of events in memory (in transfer from redis to db)'), + IntM('callback_receiver_batch_events_errors', 'Number of times batch insertion failed'), + FloatM('callback_receiver_events_insert_db_seconds', 'Time spent saving events to database'), + IntM('callback_receiver_events_insert_db', 'Number of events batch inserted into database'), + HistogramM( + 'callback_receiver_batch_events_insert_db', 'Number of events batch inserted into database', settings.SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS + ), + 
FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'), + IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'), + FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'), + ] + # turn metric list into dictionary with the metric name as a key + self.METRICS = {} + for m in METRICSLIST: + self.METRICS[m.field] = m + + # track last time metrics were sent to other nodes + self.previous_send_metrics = SetFloatM('send_metrics_time', 'Timestamp of previous send_metrics call') + + def clear_values(self): + for m in self.METRICS.values(): + m.clear_value(self.conn) + self.metrics_have_changed = True + self.conn.delete(root_key + "_lock") + + def inc(self, field, value): + if value != 0: + self.METRICS[field].inc(value) + self.metrics_have_changed = True + if self.auto_pipe_execute is True and self.should_pipe_execute() is True: + self.pipe_execute() + + def set(self, field, value): + self.METRICS[field].set(value) + self.metrics_have_changed = True + if self.auto_pipe_execute is True and self.should_pipe_execute() is True: + self.pipe_execute() + + def observe(self, field, value): + self.METRICS[field].observe(value) + self.metrics_have_changed = True + if self.auto_pipe_execute is True and self.should_pipe_execute() is True: + self.pipe_execute() + + def serialize_local_metrics(self): + data = self.load_local_metrics() + return json.dumps(data) + + def load_local_metrics(self): + # generate python dictionary of key values from metrics stored in redis + data = {} + for field in self.METRICS: + data[field] = self.METRICS[field].decode(self.conn) + return data + + def store_metrics(self, data_json): + # called when receiving metrics from other instances + data = json.loads(data_json) + if self.instance_name != data['instance']: + logger.debug(f"{self.instance_name} received subsystem metrics from {data['instance']}") + self.conn.set(root_key + "_instance_" + data['instance'], 
data['metrics']) + + def should_pipe_execute(self): + if self.metrics_have_changed is False: + return False + if time.time() - self.last_pipe_execute > self.pipe_execute_interval: + return True + else: + return False + + def pipe_execute(self): + if self.metrics_have_changed is True: + duration_to_save = time.perf_counter() + for m in self.METRICS: + self.METRICS[m].store_value(self.pipe) + self.pipe.execute() + self.last_pipe_execute = time.time() + self.metrics_have_changed = False + duration_to_save = time.perf_counter() - duration_to_save + self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_to_save) + self.METRICS['subsystem_metrics_pipe_execute_calls'].inc(1) + + duration_to_save = time.perf_counter() + self.send_metrics() + duration_to_save = time.perf_counter() - duration_to_save + self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_to_save) + + def send_metrics(self): + # more than one thread could be calling this at the same time, so should + # get acquire redis lock before sending metrics + lock = self.conn.lock(root_key + '_lock', thread_local=False) + if not lock.acquire(blocking=False): + return + try: + current_time = time.time() + if current_time - self.previous_send_metrics.decode(self.conn) > self.send_metrics_interval: + payload = { + 'instance': self.instance_name, + 'metrics': self.serialize_local_metrics(), + } + # store a local copy as well + self.store_metrics(json.dumps(payload)) + emit_channel_notification("metrics", payload) + self.previous_send_metrics.set(current_time) + self.previous_send_metrics.store_value(self.conn) + finally: + lock.release() + + def load_other_metrics(self, request): + # data received from other nodes are stored in their own keys + # e.g., awx_metrics_instance_awx-1, awx_metrics_instance_awx-2 + # this method looks for keys with "_instance_" in the name and loads the data + # also filters data based on request query params + # if additional filtering is added, update metrics_view.md 
+ instances_filter = request.query_params.getlist("node") + # get a sorted list of instance names + instance_names = [self.instance_name] + for m in self.conn.scan_iter(root_key + '_instance_*'): + instance_names.append(m.decode('UTF-8').split('_instance_')[1]) + instance_names.sort() + # load data, including data from the this local instance + instance_data = {} + for instance in instance_names: + if len(instances_filter) == 0 or instance in instances_filter: + instance_data_from_redis = self.conn.get(root_key + '_instance_' + instance) + # data from other instances may not be available. That is OK. + if instance_data_from_redis: + instance_data[instance] = json.loads(instance_data_from_redis.decode('UTF-8')) + return instance_data + + def generate_metrics(self, request): + # takes the api request, filters, and generates prometheus data + # if additional filtering is added, update metrics_view.md + instance_data = self.load_other_metrics(request) + metrics_filter = request.query_params.getlist("metric") + output_text = '' + if instance_data: + for field in self.METRICS: + if len(metrics_filter) == 0 or field in metrics_filter: + output_text += self.METRICS[field].to_prometheus(instance_data) + return output_text + + +def metrics(request): + m = Metrics() + return m.generate_metrics(request) diff --git a/awx/main/conf.py b/awx/main/conf.py index 2cfe06a25f..5cfd2977f7 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -186,7 +186,7 @@ register( default=None, queryset=ExecutionEnvironment.objects.all(), label=_('Global default execution environment'), - help_text=_('.'), + help_text=_('The Execution Environment to be used when one has not been configured for a job template.'), category=_('System'), category_slug='system', ) @@ -233,16 +233,7 @@ register( ) register( - 'AWX_PROOT_ENABLED', - field_class=fields.BooleanField, - label=_('Enable job isolation'), - help_text=_('Isolates an Ansible job from protected parts of the system to prevent exposing sensitive 
information.'), - category=_('Jobs'), - category_slug='jobs', -) - -register( - 'AWX_PROOT_BASE_PATH', + 'AWX_ISOLATION_BASE_PATH', field_class=fields.CharField, label=_('Job execution path'), help_text=_( @@ -255,17 +246,7 @@ register( ) register( - 'AWX_PROOT_HIDE_PATHS', - field_class=fields.StringListField, - required=False, - label=_('Paths to hide from isolated jobs'), - help_text=_('Additional paths to hide from isolated processes. Enter one path per line.'), - category=_('Jobs'), - category_slug='jobs', -) - -register( - 'AWX_PROOT_SHOW_PATHS', + 'AWX_ISOLATION_SHOW_PATHS', field_class=fields.StringListField, required=False, label=_('Paths to expose to isolated jobs'), diff --git a/awx/main/constants.py b/awx/main/constants.py index db2e9c44d7..6a44087c28 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -52,7 +52,6 @@ ENV_BLOCKLIST = frozenset( 'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', - 'PROOT_TMP_DIR', 'JOB_ID', 'INVENTORY_ID', 'INVENTORY_SOURCE_ID', diff --git a/awx/main/consumers.py b/awx/main/consumers.py index a2425ec337..21ebe9d771 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -13,7 +13,6 @@ from channels.generic.websocket import AsyncJsonWebsocketConsumer from channels.layers import get_channel_layer from channels.db import database_sync_to_async - logger = logging.getLogger('awx.main.consumers') XRF_KEY = '_auth_user_xrf' diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index 68b8d5fd4f..acfb0bce02 100644 --- a/awx/main/dispatch/worker/callback.py +++ b/awx/main/dispatch/worker/callback.py @@ -20,7 +20,7 @@ from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, Inv from awx.main.tasks import handle_success_and_failure_notifications from awx.main.models.events import emit_event_detail from awx.main.utils.profiling import AWXProfiler - +import awx.main.analytics.subsystem_metrics as s_metrics from .base import BaseWorker logger = 
logging.getLogger('awx.main.commands.run_callback_receiver') @@ -46,16 +46,22 @@ class CallbackBrokerWorker(BaseWorker): self.buff = {} self.pid = os.getpid() self.redis = redis.Redis.from_url(settings.BROKER_URL) + self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False) + self.queue_pop = 0 + self.queue_name = settings.CALLBACK_QUEUE self.prof = AWXProfiler("CallbackBrokerWorker") for key in self.redis.keys('awx_callback_receiver_statistics_*'): self.redis.delete(key) def read(self, queue): try: - res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=1) + res = self.redis.blpop(self.queue_name, timeout=1) if res is None: return {'event': 'FLUSH'} self.total += 1 + self.queue_pop += 1 + self.subsystem_metrics.inc('callback_receiver_events_popped_redis', 1) + self.subsystem_metrics.inc('callback_receiver_events_in_memory', 1) return json.loads(res[1]) except redis.exceptions.RedisError: logger.exception("encountered an error communicating with redis") @@ -64,8 +70,19 @@ class CallbackBrokerWorker(BaseWorker): logger.exception("failed to decode JSON message from redis") finally: self.record_statistics() + self.record_read_metrics() + return {'event': 'FLUSH'} + def record_read_metrics(self): + if self.queue_pop == 0: + return + if self.subsystem_metrics.should_pipe_execute() is True: + queue_size = self.redis.llen(self.queue_name) + self.subsystem_metrics.set('callback_receiver_events_queue_size_redis', queue_size) + self.subsystem_metrics.pipe_execute() + self.queue_pop = 0 + def record_statistics(self): # buffer stat recording to once per (by default) 5s if time.time() - self.last_stats > settings.JOB_EVENT_STATISTICS_INTERVAL: @@ -99,27 +116,44 @@ class CallbackBrokerWorker(BaseWorker): def flush(self, force=False): now = tz_now() if force or (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or any([len(events) >= 1000 for events in self.buff.values()]): + bulk_events_saved = 0 + singular_events_saved = 0 + 
metrics_events_batch_save_errors = 0 for cls, events in self.buff.items(): logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})') for e in events: if not e.created: e.created = now e.modified = now + duration_to_save = time.perf_counter() try: cls.objects.bulk_create(events) + bulk_events_saved += len(events) except Exception: # if an exception occurs, we should re-attempt to save the # events one-by-one, because something in the list is # broken/stale + metrics_events_batch_save_errors += 1 for e in events: try: e.save() + singular_events_saved += 1 except Exception: logger.exception('Database Error Saving Job Event') + duration_to_save = time.perf_counter() - duration_to_save for e in events: emit_event_detail(e) self.buff = {} self.last_flush = time.time() + # only update metrics if we saved events + if (bulk_events_saved + singular_events_saved) > 0: + self.subsystem_metrics.inc('callback_receiver_batch_events_errors', metrics_events_batch_save_errors) + self.subsystem_metrics.inc('callback_receiver_events_insert_db_seconds', duration_to_save) + self.subsystem_metrics.inc('callback_receiver_events_insert_db', bulk_events_saved + singular_events_saved) + self.subsystem_metrics.observe('callback_receiver_batch_events_insert_db', bulk_events_saved) + self.subsystem_metrics.inc('callback_receiver_events_in_memory', -(bulk_events_saved + singular_events_saved)) + if self.subsystem_metrics.should_pipe_execute() is True: + self.subsystem_metrics.pipe_execute() def perform_work(self, body): try: @@ -169,6 +203,7 @@ class CallbackBrokerWorker(BaseWorker): except Exception: logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier)) finally: + self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1) GuidMiddleware.set_guid('') return diff --git a/awx/main/isolated/manager.py b/awx/main/isolated/manager.py index 79dac4445f..3fbda06ab8 100644 --- a/awx/main/isolated/manager.py +++ b/awx/main/isolated/manager.py @@ -135,7 
+135,7 @@ class IsolatedManager(object): extravars = { 'src': self.private_data_dir, - 'dest': settings.AWX_PROOT_BASE_PATH, + 'dest': settings.AWX_ISOLATION_BASE_PATH, 'ident': self.ident, 'job_id': self.instance.id, } @@ -304,7 +304,7 @@ class IsolatedManager(object): if not len(instance_qs): return try: - private_data_dir = tempfile.mkdtemp(prefix='awx_iso_heartbeat_', dir=settings.AWX_PROOT_BASE_PATH) + private_data_dir = tempfile.mkdtemp(prefix='awx_iso_heartbeat_', dir=settings.AWX_ISOLATION_BASE_PATH) self.runner_params = self.build_runner_params([instance.hostname for instance in instance_qs]) self.runner_params['private_data_dir'] = private_data_dir self.runner_params['forks'] = len(instance_qs) diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 0a2a19937d..9cdd2f3017 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -69,8 +69,6 @@ class AnsibleInventoryLoader(object): def __init__(self, source, venv_path=None, verbosity=0): self.source = source self.verbosity = verbosity - # TODO: remove once proot has been removed - self.tmp_private_dir = None if venv_path: self.venv_path = venv_path else: diff --git a/awx/main/management/commands/test_isolated_connection.py b/awx/main/management/commands/test_isolated_connection.py index 3983967251..c89b71a892 100644 --- a/awx/main/management/commands/test_isolated_connection.py +++ b/awx/main/management/commands/test_isolated_connection.py @@ -25,7 +25,7 @@ class Command(BaseCommand): raise CommandError("--hostname is a required argument") try: - path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH) + path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_ISOLATION_BASE_PATH) ssh_key = None if all([getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True, getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)]): ssh_key = 
settings.AWX_ISOLATED_PRIVATE_KEY diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 1d29fbb5e7..f37bfee884 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -466,10 +466,14 @@ class CredentialType(CommonModelNameNotUnique): if len(value): namespace[field_name] = value - # default missing boolean fields to False for field in self.inputs.get('fields', []): + # default missing boolean fields to False if field['type'] == 'boolean' and field['id'] not in credential.inputs.keys(): namespace[field['id']] = safe_namespace[field['id']] = False + # make sure private keys end with a \n + if field.get('format') == 'ssh_private_key': + if field['id'] in namespace and not namespace[field['id']].endswith('\n'): + namespace[field['id']] += '\n' file_tmpls = self.injectors.get('file', {}) # If any file templates are provided, render the files and update the diff --git a/awx/main/queue.py b/awx/main/queue.py index 88fc2c8288..ebac0622e4 100644 --- a/awx/main/queue.py +++ b/awx/main/queue.py @@ -8,7 +8,7 @@ import redis # Django from django.conf import settings - +import awx.main.analytics.subsystem_metrics as s_metrics __all__ = ['CallbackQueueDispatcher'] @@ -28,6 +28,7 @@ class CallbackQueueDispatcher(object): self.queue = getattr(settings, 'CALLBACK_QUEUE', '') self.logger = logging.getLogger('awx.main.queue.CallbackQueueDispatcher') self.connection = redis.Redis.from_url(settings.BROKER_URL) + self.subsystem_metrics = s_metrics.Metrics() def dispatch(self, obj): self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder)) diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 36c0350259..b5cb7ec1e8 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -107,6 +107,7 @@ from awx.main.consumers import emit_channel_notification from awx.main import analytics from awx.conf import settings_registry from awx.conf.license import get_license +from 
awx.main.analytics.subsystem_metrics import Metrics from rest_framework.exceptions import PermissionDenied @@ -170,6 +171,7 @@ def dispatch_startup(): cluster_node_heartbeat() if Instance.objects.me().is_controller(): awx_isolated_heartbeat() + Metrics().clear_values() # Update Tower's rsyslog.conf file based on loggins settings in the db reconfigure_rsyslog() @@ -841,7 +843,6 @@ class BaseTask(object): model = None event_model = None abstract = True - proot_show_paths = [] def __init__(self): self.cleanup_paths = [] @@ -908,9 +909,9 @@ class BaseTask(object): if pull: params['container_options'].append(f'--pull={pull}') - if settings.AWX_PROOT_SHOW_PATHS: + if settings.AWX_ISOLATION_SHOW_PATHS: params['container_volume_mounts'] = [] - for this_path in settings.AWX_PROOT_SHOW_PATHS: + for this_path in settings.AWX_ISOLATION_SHOW_PATHS: params['container_volume_mounts'].append(f'{this_path}:{this_path}:Z') return params @@ -924,7 +925,7 @@ class BaseTask(object): """ Create a temporary directory for job-related files. """ - pdd_wrapper_path = tempfile.mkdtemp(prefix=f'pdd_wrapper_{instance.pk}_', dir=settings.AWX_PROOT_BASE_PATH) + pdd_wrapper_path = tempfile.mkdtemp(prefix=f'pdd_wrapper_{instance.pk}_', dir=settings.AWX_ISOLATION_BASE_PATH) os.chmod(pdd_wrapper_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) if settings.AWX_CLEANUP_PATHS: self.cleanup_paths.append(pdd_wrapper_path) @@ -1090,12 +1091,6 @@ class BaseTask(object): """ return False - def should_use_proot(self, instance): - """ - Return whether this task should use proot. 
- """ - return False - def build_inventory(self, instance, private_data_dir): script_params = dict(hostvars=True, towervars=True) if hasattr(instance, 'job_slice_number'): @@ -1371,8 +1366,8 @@ class BaseTask(object): status = self.instance.status raise RuntimeError('not starting %s task' % self.instance.status) - if not os.path.exists(settings.AWX_PROOT_BASE_PATH): - raise RuntimeError('AWX_PROOT_BASE_PATH=%s does not exist' % settings.AWX_PROOT_BASE_PATH) + if not os.path.exists(settings.AWX_ISOLATION_BASE_PATH): + raise RuntimeError('AWX_ISOLATION_BASE_PATH=%s does not exist' % settings.AWX_ISOLATION_BASE_PATH) # store a record of the venv used at runtime if hasattr(self.instance, 'custom_virtualenv'): @@ -1598,8 +1593,7 @@ class RunJob(BaseTask): env['ANSIBLE_CALLBACK_PLUGINS'] = ':'.join(settings.AWX_ANSIBLE_CALLBACK_PLUGINS) env['AWX_HOST'] = settings.TOWER_URL_BASE - # Create a directory for ControlPath sockets that is unique to each - # job and visible inside the proot environment (when enabled). + # Create a directory for ControlPath sockets that is unique to each job cp_dir = os.path.join(private_data_dir, 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0o700) @@ -1768,14 +1762,6 @@ class RunJob(BaseTask): """ return settings.AWX_RESOURCE_PROFILING_ENABLED - def should_use_proot(self, job): - """ - Return whether this task should use proot. 
- """ - if job.is_container_group_task: - return False - return getattr(settings, 'AWX_PROOT_ENABLED', False) - def build_execution_environment_params(self, instance): if settings.IS_K8S: return {} @@ -1929,10 +1915,6 @@ class RunProjectUpdate(BaseTask): event_model = ProjectUpdateEvent event_data_key = 'project_update_id' - @property - def proot_show_paths(self): - return [settings.PROJECTS_ROOT] - def __init__(self, *args, job_private_data_dir=None, **kwargs): super(RunProjectUpdate, self).__init__(*args, **kwargs) self.playbook_new_revision = None @@ -1990,7 +1972,7 @@ class RunProjectUpdate(BaseTask): env['DISPLAY'] = '' # Prevent stupid password popup when running tests. # give ansible a hint about the intended tmpdir to work around issues # like https://github.com/ansible/ansible/issues/30064 - env['TMP'] = settings.AWX_PROOT_BASE_PATH + env['TMP'] = settings.AWX_ISOLATION_BASE_PATH env['PROJECT_UPDATE_ID'] = str(project_update.pk) if settings.GALAXY_IGNORE_CERTS: env['ANSIBLE_GALAXY_IGNORE'] = True @@ -2124,7 +2106,7 @@ class RunProjectUpdate(BaseTask): d = super(RunProjectUpdate, self).get_password_prompts(passwords) d[r'Username for.*:\s*?$'] = 'scm_username' d[r'Password for.*:\s*?$'] = 'scm_password' - d['Password:\s*?$'] = 'scm_password' # noqa + d[r'Password:\s*?$'] = 'scm_password' d[r'\S+?@\S+?\'s\s+?password:\s*?$'] = 'scm_password' d[r'Enter passphrase for .*:\s*?$'] = 'scm_key_unlock' d[r'Bad passphrase, try again for .*:\s*?$'] = '' @@ -2394,12 +2376,6 @@ class RunProjectUpdate(BaseTask): if status == 'successful' and instance.launch_type != 'sync': self._update_dependent_inventories(instance, dependent_inventory_sources) - def should_use_proot(self, project_update): - """ - Return whether this task should use proot. 
- """ - return getattr(settings, 'AWX_PROOT_ENABLED', False) - def build_execution_environment_params(self, instance): if settings.IS_K8S: return {} @@ -2790,7 +2766,7 @@ class RunAdHocCommand(BaseTask): env['ANSIBLE_SFTP_BATCH_MODE'] = 'False' # Create a directory for ControlPath sockets that is unique to each - # ad hoc command and visible inside the proot environment (when enabled). + # ad hoc command cp_dir = os.path.join(private_data_dir, 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0o700) @@ -2894,14 +2870,6 @@ class RunAdHocCommand(BaseTask): d[r'Password:\s*?$'] = 'ssh_password' return d - def should_use_proot(self, ad_hoc_command): - """ - Return whether this task should use proot. - """ - if ad_hoc_command.is_container_group_task: - return False - return getattr(settings, 'AWX_PROOT_ENABLED', False) - def final_run_hook(self, adhoc_job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None): super(RunAdHocCommand, self).final_run_hook(adhoc_job, status, private_data_dir, fact_modification_times) if isolated_manager_instance: diff --git a/awx/main/tests/functional/analytics/test_metrics.py b/awx/main/tests/functional/analytics/test_metrics.py index 94076d1362..442c83699c 100644 --- a/awx/main/tests/functional/analytics/test_metrics.py +++ b/awx/main/tests/functional/analytics/test_metrics.py @@ -56,24 +56,28 @@ def test_metrics_counts(organization_factory, job_template_factory, workflow_job assert EXPECTED_VALUES[name] == value +def get_metrics_view_db_only(): + return reverse('api:metrics_view') + '?dbonly=1' + + @pytest.mark.django_db def test_metrics_permissions(get, admin, org_admin, alice, bob, organization): - assert get(reverse('api:metrics_view'), user=admin).status_code == 200 - assert get(reverse('api:metrics_view'), user=org_admin).status_code == 403 - assert get(reverse('api:metrics_view'), user=alice).status_code == 403 - assert get(reverse('api:metrics_view'), user=bob).status_code == 403 + assert 
get(get_metrics_view_db_only(), user=admin).status_code == 200 + assert get(get_metrics_view_db_only(), user=org_admin).status_code == 403 + assert get(get_metrics_view_db_only(), user=alice).status_code == 403 + assert get(get_metrics_view_db_only(), user=bob).status_code == 403 organization.auditor_role.members.add(bob) - assert get(reverse('api:metrics_view'), user=bob).status_code == 403 + assert get(get_metrics_view_db_only(), user=bob).status_code == 403 Role.singleton('system_auditor').members.add(bob) bob.is_system_auditor = True - assert get(reverse('api:metrics_view'), user=bob).status_code == 200 + assert get(get_metrics_view_db_only(), user=bob).status_code == 200 @pytest.mark.django_db def test_metrics_http_methods(get, post, patch, put, options, admin): - assert get(reverse('api:metrics_view'), user=admin).status_code == 200 - assert put(reverse('api:metrics_view'), user=admin).status_code == 405 - assert patch(reverse('api:metrics_view'), user=admin).status_code == 405 - assert post(reverse('api:metrics_view'), user=admin).status_code == 405 - assert options(reverse('api:metrics_view'), user=admin).status_code == 200 + assert get(get_metrics_view_db_only(), user=admin).status_code == 200 + assert put(get_metrics_view_db_only(), user=admin).status_code == 405 + assert patch(get_metrics_view_db_only(), user=admin).status_code == 405 + assert post(get_metrics_view_db_only(), user=admin).status_code == 405 + assert options(get_metrics_view_db_only(), user=admin).status_code == 200 diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py index fa53c65aa9..84bfff2d18 100644 --- a/awx/main/tests/functional/api/test_settings.py +++ b/awx/main/tests/functional/api/test_settings.py @@ -33,16 +33,14 @@ def test_jobs_settings(get, put, patch, delete, admin): response = get(url, user=admin, expect=200) data = dict(response.data.items()) put(url, user=admin, data=data, expect=200) - patch(url, user=admin, 
data={'AWX_PROOT_HIDE_PATHS': ['/home']}, expect=200) + patch(url, user=admin, data={'AWX_ISOLATION_SHOW_PATHS': ['/home']}, expect=200) response = get(url, user=admin, expect=200) - assert response.data['AWX_PROOT_HIDE_PATHS'] == ['/home'] - data.pop('AWX_PROOT_HIDE_PATHS') - data.pop('AWX_PROOT_SHOW_PATHS') + assert response.data['AWX_ISOLATION_SHOW_PATHS'] == ['/home'] + data.pop('AWX_ISOLATION_SHOW_PATHS') data.pop('AWX_ANSIBLE_CALLBACK_PLUGINS') put(url, user=admin, data=data, expect=200) response = get(url, user=admin, expect=200) - assert response.data['AWX_PROOT_HIDE_PATHS'] == [] - assert response.data['AWX_PROOT_SHOW_PATHS'] == [] + assert response.data['AWX_ISOLATION_SHOW_PATHS'] == [] assert response.data['AWX_ANSIBLE_CALLBACK_PLUGINS'] == [] diff --git a/awx/main/tests/unit/models/test_ha.py b/awx/main/tests/unit/models/test_ha.py index 83530f8ded..52d5fdc16c 100644 --- a/awx/main/tests/unit/models/test_ha.py +++ b/awx/main/tests/unit/models/test_ha.py @@ -3,13 +3,12 @@ from unittest import mock from unittest.mock import Mock from awx.main.models import ( - Job, InstanceGroup, ) def T(impact): - j = mock.Mock(Job()) + j = mock.Mock(spec_set=['task_impact']) j.task_impact = impact return j diff --git a/awx/main/tests/unit/scheduler/test_kubernetes.py b/awx/main/tests/unit/scheduler/test_kubernetes.py index dc794ab65e..1f51401fe4 100644 --- a/awx/main/tests/unit/scheduler/test_kubernetes.py +++ b/awx/main/tests/unit/scheduler/test_kubernetes.py @@ -1,5 +1,4 @@ import pytest -from unittest import mock from django.conf import settings from awx.main.models import ( @@ -14,7 +13,7 @@ from awx.main.scheduler.kubernetes import PodManager @pytest.fixture def container_group(): - instance_group = mock.Mock(InstanceGroup(name='container-group')) + instance_group = InstanceGroup(name='container-group', id=1) return instance_group diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 01e02b67d7..26df22c4f2 100644 --- 
a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -725,7 +725,6 @@ class TestIsolatedExecution(TestJobExecution): extra_vars = json.loads(extra_vars) assert extra_vars['dest'] == '/tmp' assert extra_vars['src'] == private_data - assert extra_vars['proot_temp_dir'].startswith('/tmp/awx_proot_') def test_systemctl_failure(self): # If systemctl fails, read the contents of `artifacts/systemctl_logs` diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 9ad45d68c6..8ad4a9f485 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -44,7 +44,6 @@ __all__ = [ 'underscore_to_camelcase', 'memoize', 'memoize_delete', - 'get_ansible_version', 'get_licenser', 'get_awx_http_client_headers', 'get_awx_version', @@ -69,9 +68,6 @@ __all__ = [ 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity', - 'wrap_args_with_proot', - 'build_proot_temp_dir', - 'check_proot_installed', 'model_to_dict', 'NullablePromptPseudoField', 'model_instance_diff', @@ -195,20 +191,6 @@ def memoize_delete(function_name): return cache.delete(function_name) -@memoize() -def get_ansible_version(): - """ - Return Ansible version installed. - Ansible path needs to be provided to account for custom virtual environments - """ - try: - proc = subprocess.Popen(['ansible', '--version'], stdout=subprocess.PIPE) - result = smart_str(proc.communicate()[0]) - return result.split('\n')[0].replace('ansible', '').strip() - except Exception: - return 'unknown' - - def get_awx_version(): """ Return AWX version as reported by setuptools. @@ -842,94 +824,6 @@ def set_environ(**environ): os.environ.update(old_environ) -@memoize() -def check_proot_installed(): - """ - Check that proot is installed. 
- """ - from django.conf import settings - - cmd = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--version'] - try: - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - proc.communicate() - return bool(proc.returncode == 0) - except (OSError, ValueError) as e: - if isinstance(e, ValueError) or getattr(e, 'errno', 1) != 2: # ENOENT, no such file or directory - logger.exception('bwrap unavailable for unexpected reason.') - return False - - -def build_proot_temp_dir(): - """ - Create a temporary directory for proot to use. - """ - from django.conf import settings - - path = tempfile.mkdtemp(prefix='awx_proot_', dir=settings.AWX_PROOT_BASE_PATH) - os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - return path - - -def wrap_args_with_proot(args, cwd, **kwargs): - """ - Wrap existing command line with proot to restrict access to: - - AWX_PROOT_BASE_PATH (generally, /tmp) (except for own /tmp files) - For non-isolated nodes: - - /etc/tower (to prevent obtaining db info or secret key) - - /var/lib/awx (except for current project) - - /var/log/tower - - /var/log/supervisor - """ - from django.conf import settings - - cwd = os.path.realpath(cwd) - new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc'] - hide_paths = [settings.AWX_PROOT_BASE_PATH] - if not kwargs.get('isolated'): - hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh', settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT]) - hide_paths.extend(getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or []) - for path in sorted(set(hide_paths)): - if not os.path.exists(path): - continue - path = os.path.realpath(path) - if os.path.isdir(path): - new_path = tempfile.mkdtemp(dir=kwargs['proot_temp_dir']) - os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - else: - handle, new_path = tempfile.mkstemp(dir=kwargs['proot_temp_dir']) - os.close(handle) - os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR) 
- new_args.extend(['--bind', '%s' % (new_path,), '%s' % (path,)]) - if kwargs.get('isolated'): - show_paths = [kwargs['private_data_dir']] - elif 'private_data_dir' in kwargs: - show_paths = [cwd, kwargs['private_data_dir']] - else: - show_paths = [cwd] - for venv in (settings.ANSIBLE_VENV_PATH, settings.AWX_VENV_PATH, kwargs.get('proot_custom_virtualenv')): - if venv: - new_args.extend(['--ro-bind', venv, venv]) - show_paths.extend(getattr(settings, 'AWX_PROOT_SHOW_PATHS', None) or []) - show_paths.extend(kwargs.get('proot_show_paths', [])) - for path in sorted(set(show_paths)): - if not os.path.exists(path): - continue - path = os.path.realpath(path) - new_args.extend(['--bind', '%s' % (path,), '%s' % (path,)]) - if kwargs.get('isolated'): - if '/bin/ansible-playbook' in ' '.join(args): - # playbook runs should cwd to the SCM checkout dir - new_args.extend(['--chdir', os.path.join(kwargs['private_data_dir'], 'project')]) - else: - # ad-hoc runs should cwd to the root of the private data dir - new_args.extend(['--chdir', kwargs['private_data_dir']]) - else: - new_args.extend(['--chdir', cwd]) - new_args.extend(args) - return new_args - - def get_pk_from_dict(_dict, key): """ Helper for obtaining a pk from user data dict or None if not present. 
diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index 19deb234b5..ef761159ed 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -5,6 +5,7 @@ import logging import sys import traceback +from datetime import datetime # Django from django.conf import settings @@ -34,7 +35,8 @@ class RSysLogHandler(logging.handlers.SysLogHandler): # because the alternative is blocking the # socket.send() in the Python process, which we definitely don't # want to do) - msg = f'{record.asctime} ERROR rsyslogd was unresponsive: ' + dt = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + msg = f'{dt} ERROR rsyslogd was unresponsive: ' exc = traceback.format_exc() try: msg += exc.splitlines()[-1] diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index e2ee9fc431..184ae06122 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -15,7 +15,7 @@ from awx.main.analytics.broadcast_websocket import ( BroadcastWebsocketStats, BroadcastWebsocketStatsManager, ) - +import awx.main.analytics.subsystem_metrics as s_metrics logger = logging.getLogger('awx.main.wsbroadcast') @@ -68,6 +68,7 @@ class WebsocketTask: self.protocol = protocol self.verify_ssl = verify_ssl self.channel_layer = None + self.subsystem_metrics = s_metrics.Metrics() async def run_loop(self, websocket: aiohttp.ClientWebSocketResponse): raise RuntimeError("Implement me") @@ -144,9 +145,10 @@ class BroadcastWebsocketTask(WebsocketTask): logmsg = "{} {}".format(logmsg, payload) logger.warn(logmsg) continue - (group, message) = unwrap_broadcast_msg(payload) - + if group == "metrics": + self.subsystem_metrics.store_metrics(message) + continue await self.channel_layer.group_send(group, {"type": "internal.message", "text": message}) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 194d363a6d..2daa33d4b3 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -224,6 +224,15 @@ JOB_EVENT_MAX_QUEUE_SIZE = 10000 # The number of job 
events to migrate per-transaction when moving from int -> bigint JOB_EVENT_MIGRATION_CHUNK_SIZE = 1000000 +# Histogram buckets for the callback_receiver_batch_events_insert_db metric +SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS = [10, 50, 150, 350, 650, 2000] + +# Interval in seconds for sending local metrics to other nodes +SUBSYSTEM_METRICS_INTERVAL_SEND_METRICS = 3 + +# Interval in seconds for saving local metrics to redis +SUBSYSTEM_METRICS_INTERVAL_SAVE_TO_REDIS = 2 + # The maximum allowed jobs to start on a given task manager cycle START_TASK_LIMIT = 100 @@ -427,6 +436,7 @@ CELERYBEAT_SCHEDULE = { 'gather_analytics': {'task': 'awx.main.tasks.gather_analytics', 'schedule': timedelta(minutes=5)}, 'task_manager': {'task': 'awx.main.scheduler.tasks.run_task_manager', 'schedule': timedelta(seconds=20), 'options': {'expires': 20}}, 'k8s_reaper': {'task': 'awx.main.tasks.awx_k8s_reaper', 'schedule': timedelta(seconds=60), 'options': {'expires': 50}}, + 'send_subsystem_metrics': {'task': 'awx.main.analytics.analytics_tasks.send_subsystem_metrics', 'schedule': timedelta(seconds=20)}, # 'isolated_heartbeat': set up at the end of production.py and development.py } @@ -569,26 +579,15 @@ AWX_SHOW_PLAYBOOK_LINKS = False # Applies to any galaxy server GALAXY_IGNORE_CERTS = False -# Enable bubblewrap support for running jobs (playbook runs only). +# Additional paths to show for jobs using process isolation. # Note: This setting may be overridden by database settings. -AWX_PROOT_ENABLED = True - -# Command/path to bubblewrap. -AWX_PROOT_CMD = 'bwrap' - -# Additional paths to hide from jobs using bubblewrap. -# Note: This setting may be overridden by database settings. -AWX_PROOT_HIDE_PATHS = [] - -# Additional paths to show for jobs using bubbelwrap. -# Note: This setting may be overridden by database settings. 
-AWX_PROOT_SHOW_PATHS = [] +AWX_ISOLATION_SHOW_PATHS = [] # The directory in which Tower will create new temporary directories for job # execution and isolation (such as credential files and custom # inventory scripts). # Note: This setting may be overridden by database settings. -AWX_PROOT_BASE_PATH = "/tmp" +AWX_ISOLATION_BASE_PATH = "/tmp" # Disable resource profiling by default AWX_RESOURCE_PROFILING_ENABLED = False diff --git a/awx/settings/development.py b/awx/settings/development.py index e2a42fef67..66dc12f50f 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -67,10 +67,6 @@ CALLBACK_QUEUE = "callback_tasks" # Note: This setting may be overridden by database settings. AWX_ROLES_ENABLED = True -# Enable PROOT for tower-qa integration tests. -# Note: This setting may be overridden by database settings. -AWX_PROOT_ENABLED = True - AWX_ISOLATED_USERNAME = 'root' AWX_ISOLATED_CHECK_INTERVAL = 1 AWX_ISOLATED_PERIODIC_CHECK = 30 diff --git a/awx/ui_next/src/api/mixins/Relaunch.mixin.js b/awx/ui_next/src/api/mixins/Relaunch.mixin.js deleted file mode 100644 index 06594c6dd3..0000000000 --- a/awx/ui_next/src/api/mixins/Relaunch.mixin.js +++ /dev/null @@ -1,12 +0,0 @@ -const RelaunchMixin = parent => - class extends parent { - relaunch(id, data) { - return this.http.post(`${this.baseUrl}${id}/relaunch/`, data); - } - - readRelaunch(id) { - return this.http.get(`${this.baseUrl}${id}/relaunch/`); - } - }; - -export default RelaunchMixin; diff --git a/awx/ui_next/src/api/mixins/Runnable.mixin.js b/awx/ui_next/src/api/mixins/Runnable.mixin.js new file mode 100644 index 0000000000..ba8aac8681 --- /dev/null +++ b/awx/ui_next/src/api/mixins/Runnable.mixin.js @@ -0,0 +1,48 @@ +const Runnable = parent => + class extends parent { + jobEventSlug = '/events/'; + + cancel(id) { + const endpoint = `${this.baseUrl}${id}/cancel/`; + + return this.http.post(endpoint); + } + + launchUpdate(id, data) { + const endpoint = `${this.baseUrl}${id}/update/`; + + 
return this.http.post(endpoint, data); + } + + readLaunchUpdate(id) { + const endpoint = `${this.baseUrl}${id}/update/`; + + return this.http.get(endpoint); + } + + readEvents(id, params = {}) { + const endpoint = `${this.baseUrl}${id}${this.jobEventSlug}`; + + return this.http.get(endpoint, { params }); + } + + readEventOptions(id) { + const endpoint = `${this.baseUrl}${id}${this.jobEventSlug}`; + + return this.http.options(endpoint); + } + + readRelaunch(id) { + const endpoint = `${this.baseUrl}${id}/relaunch/`; + + return this.http.get(endpoint); + } + + relaunch(id, data) { + const endpoint = `${this.baseUrl}${id}/relaunch/`; + + return this.http.post(endpoint, data); + } + }; + +export default Runnable; diff --git a/awx/ui_next/src/api/models/AdHocCommands.js b/awx/ui_next/src/api/models/AdHocCommands.js index 4879b81b32..2db8e7ddf8 100644 --- a/awx/ui_next/src/api/models/AdHocCommands.js +++ b/awx/ui_next/src/api/models/AdHocCommands.js @@ -1,11 +1,15 @@ import Base from '../Base'; -import RelaunchMixin from '../mixins/Relaunch.mixin'; +import RunnableMixin from '../mixins/Runnable.mixin'; -class AdHocCommands extends RelaunchMixin(Base) { +class AdHocCommands extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/ad_hoc_commands/'; } + + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); + } } export default AdHocCommands; diff --git a/awx/ui_next/src/api/models/Inventories.js b/awx/ui_next/src/api/models/Inventories.js index c9d774e002..bf049f911b 100644 --- a/awx/ui_next/src/api/models/Inventories.js +++ b/awx/ui_next/src/api/models/Inventories.js @@ -78,6 +78,12 @@ class Inventories extends InstanceGroupsMixin(Base) { }); } + updateSources(inventoryId) { + return this.http.get( + `${this.baseUrl}${inventoryId}/update_inventory_sources/` + ); + } + async readSourceDetail(inventoryId, sourceId) { const { data: { results }, diff --git a/awx/ui_next/src/api/models/InventorySources.js 
b/awx/ui_next/src/api/models/InventorySources.js index baa2a85cb0..479978db13 100644 --- a/awx/ui_next/src/api/models/InventorySources.js +++ b/awx/ui_next/src/api/models/InventorySources.js @@ -22,6 +22,14 @@ class InventorySources extends LaunchUpdateMixin( }); } + readGroups(id) { + return this.http.get(`${this.baseUrl}${id}/groups/`); + } + + readHosts(id) { + return this.http.get(`${this.baseUrl}${id}/hosts/`); + } + destroyGroups(id) { return this.http.delete(`${this.baseUrl}${id}/groups/`); } diff --git a/awx/ui_next/src/api/models/InventoryUpdates.js b/awx/ui_next/src/api/models/InventoryUpdates.js index 1700c7b26b..0d917b0aeb 100644 --- a/awx/ui_next/src/api/models/InventoryUpdates.js +++ b/awx/ui_next/src/api/models/InventoryUpdates.js @@ -1,7 +1,7 @@ import Base from '../Base'; -import LaunchUpdateMixin from '../mixins/LaunchUpdate.mixin'; +import RunnableMixin from '../mixins/Runnable.mixin'; -class InventoryUpdates extends LaunchUpdateMixin(Base) { +class InventoryUpdates extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/inventory_updates/'; @@ -11,5 +11,9 @@ class InventoryUpdates extends LaunchUpdateMixin(Base) { createSyncCancel(sourceId) { return this.http.post(`${this.baseUrl}${sourceId}/cancel/`); } + + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); + } } export default InventoryUpdates; diff --git a/awx/ui_next/src/api/models/Jobs.js b/awx/ui_next/src/api/models/Jobs.js index 026ae671f0..ae3b94cc31 100644 --- a/awx/ui_next/src/api/models/Jobs.js +++ b/awx/ui_next/src/api/models/Jobs.js @@ -1,67 +1,23 @@ import Base from '../Base'; -import RelaunchMixin from '../mixins/Relaunch.mixin'; +import RunnableMixin from '../mixins/Runnable.mixin'; -const getBaseURL = type => { - switch (type) { - case 'playbook': - case 'job': - return '/jobs/'; - case 'project': - case 'project_update': - return '/project_updates/'; - case 'management': - case 'management_job': - return 
'/system_jobs/'; - case 'inventory': - case 'inventory_update': - return '/inventory_updates/'; - case 'command': - case 'ad_hoc_command': - return '/ad_hoc_commands/'; - case 'workflow': - case 'workflow_job': - return '/workflow_jobs/'; - default: - throw new Error('Unable to find matching job type'); - } -}; - -class Jobs extends RelaunchMixin(Base) { +class Jobs extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/jobs/'; + this.jobEventSlug = '/job_events/'; } - cancel(id, type) { - return this.http.post(`/api/v2${getBaseURL(type)}${id}/cancel/`); + cancel(id) { + return this.http.post(`${this.baseUrl}${id}/cancel/`); } - readCredentials(id, type) { - return this.http.get(`/api/v2${getBaseURL(type)}${id}/credentials/`); + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); } - readDetail(id, type) { - return this.http.get(`/api/v2${getBaseURL(type)}${id}/`); - } - - readEvents(id, type = 'playbook', params = {}) { - let endpoint; - if (type === 'playbook') { - endpoint = `/api/v2${getBaseURL(type)}${id}/job_events/`; - } else { - endpoint = `/api/v2${getBaseURL(type)}${id}/events/`; - } - return this.http.get(endpoint, { params }); - } - - readEventOptions(id, type = 'playbook') { - let endpoint; - if (type === 'playbook') { - endpoint = `/api/v2${getBaseURL(type)}${id}/job_events/`; - } else { - endpoint = `/api/v2${getBaseURL(type)}${id}/events/`; - } - return this.http.options(endpoint); + readDetail(id) { + return this.http.get(`${this.baseUrl}${id}/`); } } diff --git a/awx/ui_next/src/api/models/Metrics.js b/awx/ui_next/src/api/models/Metrics.js new file mode 100644 index 0000000000..e808d26662 --- /dev/null +++ b/awx/ui_next/src/api/models/Metrics.js @@ -0,0 +1,9 @@ +import Base from '../Base'; + +class Metrics extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/inventories/'; + } +} +export default Metrics; diff --git a/awx/ui_next/src/api/models/ProjectUpdates.js 
b/awx/ui_next/src/api/models/ProjectUpdates.js index 46d0633f0d..3925ae95e9 100644 --- a/awx/ui_next/src/api/models/ProjectUpdates.js +++ b/awx/ui_next/src/api/models/ProjectUpdates.js @@ -1,10 +1,15 @@ import Base from '../Base'; +import RunnableMixin from '../mixins/Runnable.mixin'; -class ProjectUpdates extends Base { +class ProjectUpdates extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/project_updates/'; } + + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); + } } export default ProjectUpdates; diff --git a/awx/ui_next/src/api/models/SystemJobs.js b/awx/ui_next/src/api/models/SystemJobs.js index d7b6ec1750..8365f6f65b 100644 --- a/awx/ui_next/src/api/models/SystemJobs.js +++ b/awx/ui_next/src/api/models/SystemJobs.js @@ -1,10 +1,16 @@ import Base from '../Base'; -class SystemJobs extends Base { +import RunnableMixin from '../mixins/Runnable.mixin'; + +class SystemJobs extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/system_jobs/'; } + + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); + } } export default SystemJobs; diff --git a/awx/ui_next/src/api/models/WorkflowJobs.js b/awx/ui_next/src/api/models/WorkflowJobs.js index 87e336e8f5..f2799973b0 100644 --- a/awx/ui_next/src/api/models/WorkflowJobs.js +++ b/awx/ui_next/src/api/models/WorkflowJobs.js @@ -1,7 +1,7 @@ import Base from '../Base'; -import RelaunchMixin from '../mixins/Relaunch.mixin'; +import RunnableMixin from '../mixins/Runnable.mixin'; -class WorkflowJobs extends RelaunchMixin(Base) { +class WorkflowJobs extends RunnableMixin(Base) { constructor(http) { super(http); this.baseUrl = '/api/v2/workflow_jobs/'; @@ -10,6 +10,10 @@ class WorkflowJobs extends RelaunchMixin(Base) { readNodes(id, params) { return this.http.get(`${this.baseUrl}${id}/workflow_nodes/`, { params }); } + + readCredentials(id) { + return this.http.get(`${this.baseUrl}${id}/credentials/`); 
+ } } export default WorkflowJobs; diff --git a/awx/ui_next/src/components/About/About.jsx b/awx/ui_next/src/components/About/About.jsx index f68f75b613..db6fbd782d 100644 --- a/awx/ui_next/src/components/About/About.jsx +++ b/awx/ui_next/src/components/About/About.jsx @@ -2,17 +2,12 @@ import React from 'react'; import PropTypes from 'prop-types'; import { withI18n } from '@lingui/react'; import { t } from '@lingui/macro'; -import { - AboutModal, - TextContent, - TextList, - TextListItem, -} from '@patternfly/react-core'; +import { AboutModal } from '@patternfly/react-core'; import { BrandName } from '../../variables'; import brandLogoImg from './brand-logo.svg'; -function About({ ansible_version, version, isOpen, onClose, i18n }) { +function About({ version, isOpen, onClose, i18n }) { const createSpeechBubble = () => { let text = `${BrandName} ${version}`; let top = ''; @@ -52,27 +47,17 @@ function About({ ansible_version, version, isOpen, onClose, i18n }) { || || `} - - - - {i18n._(t`Ansible Version`)} - - {ansible_version} - - ); } About.propTypes = { - ansible_version: PropTypes.string, isOpen: PropTypes.bool, onClose: PropTypes.func.isRequired, version: PropTypes.string, }; About.defaultProps = { - ansible_version: null, isOpen: false, version: null, }; diff --git a/awx/ui_next/src/components/AddRole/AddResourceRole.jsx b/awx/ui_next/src/components/AddRole/AddResourceRole.jsx index 2b4fbccd9d..94db3339ac 100644 --- a/awx/ui_next/src/components/AddRole/AddResourceRole.jsx +++ b/awx/ui_next/src/components/AddRole/AddResourceRole.jsx @@ -54,8 +54,9 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) { ); if (selectedIndex > -1) { - selectedRoleRows.splice(selectedIndex, 1); - setSelectedRoleRows(selectedRoleRows); + setSelectedRoleRows( + selectedRoleRows.filter((r, index) => index !== selectedIndex) + ); } else { setSelectedRoleRows([...selectedRoleRows, role]); } diff --git a/awx/ui_next/src/components/AppContainer/AppContainer.jsx 
b/awx/ui_next/src/components/AppContainer/AppContainer.jsx index 102d61ac26..6c4016ac9b 100644 --- a/awx/ui_next/src/components/AppContainer/AppContainer.jsx +++ b/awx/ui_next/src/components/AppContainer/AppContainer.jsx @@ -204,7 +204,6 @@ function AppContainer({ i18n, navRouteConfig = [], children }) { {isReady && {children}} ', () => { - const ansible_version = '111'; const version = '222'; beforeEach(() => { ConfigAPI.read.mockResolvedValue({ data: { - ansible_version, version, }, }); @@ -93,7 +91,6 @@ describe('', () => { // check about modal content const content = await waitForElement(wrapper, aboutModalContent); - expect(content.find('dd').text()).toContain(ansible_version); expect(content.find('pre').text()).toContain(`< AWX ${version} >`); // close about modal diff --git a/awx/ui_next/src/components/DeleteButton/DeleteButton.jsx b/awx/ui_next/src/components/DeleteButton/DeleteButton.jsx index 68963bcf12..783360ab59 100644 --- a/awx/ui_next/src/components/DeleteButton/DeleteButton.jsx +++ b/awx/ui_next/src/components/DeleteButton/DeleteButton.jsx @@ -2,9 +2,20 @@ import React, { useState } from 'react'; import PropTypes from 'prop-types'; import { withI18n } from '@lingui/react'; import { t } from '@lingui/macro'; -import { Button } from '@patternfly/react-core'; +import styled from 'styled-components'; +import { Button, Badge, Alert, Tooltip } from '@patternfly/react-core'; import AlertModal from '../AlertModal'; +import { getRelatedResourceDeleteCounts } from '../../util/getRelatedResourceDeleteDetails'; +import ErrorDetail from '../ErrorDetail'; +const WarningMessage = styled(Alert)` + margin-top: 10px; +`; +const Label = styled.span` + && { + margin-right: 10px; + } +`; function DeleteButton({ onConfirm, modalTitle, @@ -14,33 +25,91 @@ function DeleteButton({ children, isDisabled, ouiaId, + deleteMessage, + deleteDetailsRequests, + disabledTooltip, }) { const [isOpen, setIsOpen] = useState(false); + const [deleteMessageError, setDeleteMessageError] = 
useState(); + const [deleteDetails, setDeleteDetails] = useState({}); + const [isLoading, setIsLoading] = useState(false); + const toggleModal = async isModalOpen => { + setIsLoading(true); + if (deleteDetailsRequests?.length && isModalOpen) { + const { results, error } = await getRelatedResourceDeleteCounts( + deleteDetailsRequests + ); + if (error) { + setDeleteMessageError(error); + } else { + setDeleteDetails(results); + } + } + setIsLoading(false); + setIsOpen(isModalOpen); + }; + + if (deleteMessageError) { + return ( + { + toggleModal(false); + setDeleteMessageError(); + }} + > + + + ); + } return ( <> - + {disabledTooltip ? ( + +
+ +
+
+ ) : ( + + )} setIsOpen(false)} + onClose={() => toggleModal(false)} actions={[ , @@ -49,7 +118,7 @@ function DeleteButton({ key="cancel" variant="link" aria-label={i18n._(t`Cancel`)} - onClick={() => setIsOpen(false)} + onClick={() => toggleModal(false)} > {i18n._(t`Cancel`)} , @@ -58,6 +127,23 @@ function DeleteButton({ {i18n._(t`Are you sure you want to delete:`)}
{name} + {Object.values(deleteDetails).length > 0 && ( + +
{deleteMessage}
+
+ {Object.entries(deleteDetails).map(([key, value]) => ( +
+ {value} +
+ ))} + + } + /> + )}
); diff --git a/awx/ui_next/src/components/DeleteButton/DeleteButton.test.jsx b/awx/ui_next/src/components/DeleteButton/DeleteButton.test.jsx new file mode 100644 index 0000000000..966fd9b74b --- /dev/null +++ b/awx/ui_next/src/components/DeleteButton/DeleteButton.test.jsx @@ -0,0 +1,112 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { + mountWithContexts, + waitForElement, +} from '../../../testUtils/enzymeHelpers'; +import { CredentialsAPI } from '../../api'; +import DeleteButton from './DeleteButton'; + +jest.mock('../../api'); + +describe('', () => { + test('should render button', () => { + const wrapper = mountWithContexts( + {}} name="Foo" /> + ); + expect(wrapper.find('button')).toHaveLength(1); + }); + + test('should open confirmation modal', async () => { + let wrapper; + await act(async () => { + wrapper = mountWithContexts( + {}} + name="Foo" + deleteDetailsRequests={[ + { + label: 'job', + request: CredentialsAPI.read.mockResolvedValue({ + data: { count: 1 }, + }), + }, + ]} + deleteMessage="Delete this?" 
+ warningMessage="Are you sure to want to delete this" + /> + ); + }); + + await act(async () => { + wrapper.find('button').prop('onClick')(); + }); + + await waitForElement(wrapper, 'Modal', el => el.length > 0); + expect(wrapper.find('Modal')).toHaveLength(1); + + expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1); + }); + + test('should invoke onConfirm prop', async () => { + const onConfirm = jest.fn(); + const wrapper = mountWithContexts( + + ); + await act(async () => wrapper.find('button').simulate('click')); + wrapper.update(); + await act(async () => + wrapper + .find('ModalBoxFooter button[aria-label="Confirm Delete"]') + .simulate('click') + ); + wrapper.update(); + expect(onConfirm).toHaveBeenCalled(); + }); + + test('should show delete details error', async () => { + const onConfirm = jest.fn(); + let wrapper; + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await act(async () => wrapper.find('button').simulate('click')); + wrapper.update(); + + expect(wrapper.find('AlertModal[title="Error!"]')).toHaveLength(1); + }); +}); diff --git a/awx/ui_next/src/components/JobList/JobList.jsx b/awx/ui_next/src/components/JobList/JobList.jsx index 5c331fd8a5..c92096ad68 100644 --- a/awx/ui_next/src/components/JobList/JobList.jsx +++ b/awx/ui_next/src/components/JobList/JobList.jsx @@ -13,20 +13,12 @@ import useRequest, { useDeleteItems, useDismissableError, } from '../../util/useRequest'; -import isJobRunning from '../../util/jobs'; +import { isJobRunning, getJobModel } from '../../util/jobs'; import { getQSConfig, parseQueryString } from '../../util/qs'; import JobListItem from './JobListItem'; import JobListCancelButton from './JobListCancelButton'; import useWsJobs from './useWsJobs'; -import { - AdHocCommandsAPI, - InventoryUpdatesAPI, - JobsAPI, - ProjectUpdatesAPI, - SystemJobsAPI, - UnifiedJobsAPI, - WorkflowJobsAPI, -} from '../../api'; +import { UnifiedJobsAPI } from '../../api'; function JobList({ i18n, 
defaultParams, showTypeColumn = false }) { const qsConfig = getQSConfig( @@ -104,7 +96,7 @@ function JobList({ i18n, defaultParams, showTypeColumn = false }) { return Promise.all( selected.map(job => { if (isJobRunning(job.status)) { - return JobsAPI.cancel(job.id, job.type); + return getJobModel(job.type).cancel(job.id); } return Promise.resolve(); }) @@ -127,22 +119,7 @@ function JobList({ i18n, defaultParams, showTypeColumn = false }) { useCallback(() => { return Promise.all( selected.map(({ type, id }) => { - switch (type) { - case 'job': - return JobsAPI.destroy(id); - case 'ad_hoc_command': - return AdHocCommandsAPI.destroy(id); - case 'system_job': - return SystemJobsAPI.destroy(id); - case 'project_update': - return ProjectUpdatesAPI.destroy(id); - case 'inventory_update': - return InventoryUpdatesAPI.destroy(id); - case 'workflow_job': - return WorkflowJobsAPI.destroy(id); - default: - return null; - } + return getJobModel(type).destroy(id); }) ); }, [selected]), diff --git a/awx/ui_next/src/components/JobList/JobList.test.jsx b/awx/ui_next/src/components/JobList/JobList.test.jsx index 87f74abfeb..45451de8dd 100644 --- a/awx/ui_next/src/components/JobList/JobList.test.jsx +++ b/awx/ui_next/src/components/JobList/JobList.test.jsx @@ -319,13 +319,12 @@ describe('', () => { wrapper.find('JobListCancelButton').invoke('onCancel')(); }); - expect(JobsAPI.cancel).toHaveBeenCalledTimes(6); - expect(JobsAPI.cancel).toHaveBeenCalledWith(1, 'project_update'); - expect(JobsAPI.cancel).toHaveBeenCalledWith(2, 'job'); - expect(JobsAPI.cancel).toHaveBeenCalledWith(3, 'inventory_update'); - expect(JobsAPI.cancel).toHaveBeenCalledWith(4, 'workflow_job'); - expect(JobsAPI.cancel).toHaveBeenCalledWith(5, 'system_job'); - expect(JobsAPI.cancel).toHaveBeenCalledWith(6, 'ad_hoc_command'); + expect(ProjectUpdatesAPI.cancel).toHaveBeenCalledWith(1); + expect(JobsAPI.cancel).toHaveBeenCalledWith(2); + expect(InventoryUpdatesAPI.cancel).toHaveBeenCalledWith(3); + 
expect(WorkflowJobsAPI.cancel).toHaveBeenCalledWith(4); + expect(SystemJobsAPI.cancel).toHaveBeenCalledWith(5); + expect(AdHocCommandsAPI.cancel).toHaveBeenCalledWith(6); jest.restoreAllMocks(); }); diff --git a/awx/ui_next/src/components/JobList/JobListCancelButton.jsx b/awx/ui_next/src/components/JobList/JobListCancelButton.jsx index 6f008552b7..efad12993b 100644 --- a/awx/ui_next/src/components/JobList/JobListCancelButton.jsx +++ b/awx/ui_next/src/components/JobList/JobListCancelButton.jsx @@ -4,7 +4,7 @@ import { t } from '@lingui/macro'; import { arrayOf, func } from 'prop-types'; import { Button, DropdownItem, Tooltip } from '@patternfly/react-core'; import { KebabifiedContext } from '../../contexts/Kebabified'; -import isJobRunning from '../../util/jobs'; +import { isJobRunning } from '../../util/jobs'; import AlertModal from '../AlertModal'; import { Job } from '../../types'; diff --git a/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx index 4647d5809e..b3134cb527 100644 --- a/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx +++ b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx @@ -25,6 +25,7 @@ function ExecutionEnvironmentLookup({ globallyAvailable, i18n, isDefaultEnvironment, + isGlobalDefaultEnvironment, isDisabled, onBlur, onChange, @@ -154,17 +155,26 @@ function ExecutionEnvironmentLookup({ ); + const renderLabel = ( + globalDefaultEnvironment, + defaultExecutionEnvironment + ) => { + if (globalDefaultEnvironment) { + return i18n._(t`Global Default Execution Environment`); + } + if (defaultExecutionEnvironment) { + return i18n._(t`Default Execution Environment`); + } + return i18n._(t`Execution Environment`); + }; + return ( } > - {isDisabled ? ( + {tooltip ? 
( {renderLookup()} ) : ( renderLookup() @@ -180,6 +190,7 @@ ExecutionEnvironmentLookup.propTypes = { popoverContent: string, onChange: func.isRequired, isDefaultEnvironment: bool, + isGlobalDefaultEnvironment: bool, projectId: oneOfType([number, string]), organizationId: oneOfType([number, string]), }; @@ -187,6 +198,7 @@ ExecutionEnvironmentLookup.propTypes = { ExecutionEnvironmentLookup.defaultProps = { popoverContent: '', isDefaultEnvironment: false, + isGlobalDefaultEnvironment: false, value: null, projectId: null, organizationId: null, diff --git a/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.jsx b/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.jsx index 4874ab9f64..a45a0365de 100644 --- a/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.jsx +++ b/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.jsx @@ -10,16 +10,31 @@ import { checkPropTypes, } from 'prop-types'; import styled from 'styled-components'; -import { Alert, Button, DropdownItem, Tooltip } from '@patternfly/react-core'; +import { + Alert, + Badge, + Button, + DropdownItem, + Tooltip, +} from '@patternfly/react-core'; import { withI18n } from '@lingui/react'; import { t } from '@lingui/macro'; import AlertModal from '../AlertModal'; import { KebabifiedContext } from '../../contexts/Kebabified'; +import { getRelatedResourceDeleteCounts } from '../../util/getRelatedResourceDeleteDetails'; + +import ErrorDetail from '../ErrorDetail'; const WarningMessage = styled(Alert)` margin-top: 10px; `; +const Label = styled.span` + && { + margin-right: 10px; + } +`; + const requiredField = props => { const { name, username, image } = props; if (!name && !username && !image) { @@ -77,20 +92,43 @@ function ToolbarDeleteButton({ pluralizedItemName, errorMessage, onDelete, + deleteDetailsRequests, warningMessage, + deleteMessage, i18n, cannotDelete, }) { const { isKebabified, onKebabModalChange } = useContext(KebabifiedContext); const [isModalOpen, 
setIsModalOpen] = useState(false); + const [deleteDetails, setDeleteDetails] = useState(null); + const [isLoading, setIsLoading] = useState(false); + const [deleteMessageError, setDeleteMessageError] = useState(); const handleDelete = () => { onDelete(); toggleModal(); }; - const toggleModal = () => { - setIsModalOpen(!isModalOpen); + const toggleModal = async isOpen => { + setIsLoading(true); + setDeleteDetails(null); + if ( + isOpen && + itemsToDelete.length === 1 && + deleteDetailsRequests?.length > 0 + ) { + const { results, error } = await getRelatedResourceDeleteCounts( + deleteDetailsRequests + ); + + if (error) { + setDeleteMessageError(error); + } else { + setDeleteDetails(results); + } + } + setIsLoading(false); + setIsModalOpen(isOpen); }; useEffect(() => { @@ -126,27 +164,84 @@ function ToolbarDeleteButton({ const isDisabled = itemsToDelete.length === 0 || itemsToDelete.some(cannotDelete); - // NOTE: Once PF supports tooltips on disabled elements, - // we can delete the extra
around the below. - // See: https://github.com/patternfly/patternfly-react/issues/1894 + const buildDeleteWarning = () => { + const deleteMessages = []; + if (warningMessage) { + deleteMessages.push(warningMessage); + } + if (deleteMessage) { + if ( + itemsToDelete[0]?.type !== 'inventory' && + (itemsToDelete.length > 1 || deleteDetails) + ) { + deleteMessages.push(deleteMessage); + } else if (deleteDetails || itemsToDelete.length > 1) { + deleteMessages.push(deleteMessage); + } + } + return ( +
+ {deleteMessages.map(message => ( +
+ {message} +
+ ))} + {deleteDetails && + Object.entries(deleteDetails).map(([key, value]) => ( +
+ + {value} +
+ ))} +
+ ); + }; + + if (deleteMessageError) { + return ( + { + toggleModal(false); + setDeleteMessageError(); + }} + > + + + ); + } + const shouldShowDeleteWarning = + warningMessage || + (itemsToDelete.length === 1 && deleteDetails) || + (itemsToDelete.length > 1 && deleteMessage); + return ( <> {isKebabified ? ( - - {i18n._(t`Delete`)} - + + { + toggleModal(true); + }} + > + {i18n._(t`Delete`)} + + ) : (
)} + {isModalOpen && ( toggleModal(false)} actions={[ , @@ -186,8 +286,12 @@ function ToolbarDeleteButton({
))} - {warningMessage && ( - + {shouldShowDeleteWarning && ( + )}
)} diff --git a/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.test.jsx b/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.test.jsx index 487f2c17f0..e4366a16eb 100644 --- a/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.test.jsx +++ b/awx/ui_next/src/components/PaginatedDataList/ToolbarDeleteButton.test.jsx @@ -1,7 +1,14 @@ import React from 'react'; -import { mountWithContexts } from '../../../testUtils/enzymeHelpers'; +import { act } from 'react-dom/test-utils'; +import { + mountWithContexts, + waitForElement, +} from '../../../testUtils/enzymeHelpers'; +import { CredentialsAPI } from '../../api'; import ToolbarDeleteButton from './ToolbarDeleteButton'; +jest.mock('../../api'); + const itemA = { id: 1, name: 'Foo', @@ -19,27 +26,180 @@ const itemC = { }; describe('', () => { + let deleteDetailsRequests; + let wrapper; + beforeEach(() => { + deleteDetailsRequests = [ + { + label: 'Workflow Job Template Node', + request: CredentialsAPI.read.mockResolvedValue({ data: { count: 1 } }), + }, + ]; + }); + + afterEach(() => { + jest.clearAllMocks(); + wrapper.unmount(); + }); test('should render button', () => { - const wrapper = mountWithContexts( + wrapper = mountWithContexts( {}} itemsToDelete={[]} /> ); expect(wrapper.find('button')).toHaveLength(1); expect(wrapper.find('ToolbarDeleteButton')).toMatchSnapshot(); }); - test('should open confirmation modal', () => { - const wrapper = mountWithContexts( - {}} itemsToDelete={[itemA]} /> - ); + test('should open confirmation modal', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + itemsToDelete={[itemA]} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage="Delete this?" 
+ warningMessage="Are you sure to want to delete this" + /> + ); + }); + expect(wrapper.find('Modal')).toHaveLength(0); - wrapper.find('button').simulate('click'); - wrapper.update(); + await act(async () => { + wrapper.find('button').prop('onClick')(); + }); + await waitForElement(wrapper, 'Modal', el => el.length > 0); + expect(CredentialsAPI.read).toBeCalled(); expect(wrapper.find('Modal')).toHaveLength(1); + expect( + wrapper.find('div[aria-label="Workflow Job Template Node: 1"]') + ).toHaveLength(1); + expect( + wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled') + ).toBe(false); + expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1); + }); + + test('should open confirmation with enabled delete button modal', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + itemsToDelete={[ + { + name: 'foo', + id: 1, + type: 'credential_type', + summary_fields: { user_capabilities: { delete: true } }, + }, + { + name: 'bar', + id: 2, + type: 'credential_type', + summary_fields: { user_capabilities: { delete: true } }, + }, + ]} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage="Delete this?" 
+ warningMessage="Are you sure to want to delete this" + /> + ); + }); + + expect(wrapper.find('Modal')).toHaveLength(0); + await act(async () => { + wrapper.find('button').prop('onClick')(); + }); + await waitForElement(wrapper, 'Modal', el => el.length > 0); + expect(CredentialsAPI.read).not.toBeCalled(); + expect(wrapper.find('Modal')).toHaveLength(1); + expect( + wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled') + ).toBe(false); + }); + + test('should disable confirm delete button', async () => { + const request = [ + { + label: 'Workflow Job Template Node', + request: CredentialsAPI.read.mockResolvedValue({ data: { count: 3 } }), + }, + ]; + await act(async () => { + wrapper = mountWithContexts( + {}} + itemsToDelete={[ + { + name: 'foo', + id: 1, + type: 'credential_type', + summary_fields: { user_capabilities: { delete: true } }, + }, + ]} + deleteDetailsRequests={request} + deleteMessage="Delete this?" + warningMessage="Are you sure to want to delete this" + /> + ); + }); + + expect(wrapper.find('Modal')).toHaveLength(0); + await act(async () => { + wrapper.find('button').prop('onClick')(); + }); + await waitForElement(wrapper, 'Modal', el => el.length > 0); + expect(CredentialsAPI.read).toBeCalled(); + expect(wrapper.find('Modal')).toHaveLength(1); + + expect( + wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled') + ).toBe(true); + expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1); + }); + + test('should open delete error modal', async () => { + const request = [ + { + label: 'Workflow Job Template Node', + request: CredentialsAPI.read.mockRejectedValue( + new Error({ + response: { + config: { + method: 'get', + url: '/api/v2/credentals', + }, + data: 'An error occurred', + status: 403, + }, + }) + ), + }, + ]; + + await act(async () => { + wrapper = mountWithContexts( + {}} + itemsToDelete={[itemA]} + deleteDetailsRequests={request} + deleteMessage="Delete this?" 
+ warningMessage="Are you sure to want to delete this" + /> + ); + }); + + expect(wrapper.find('Modal')).toHaveLength(0); + await act(async () => wrapper.find('button').simulate('click')); + await waitForElement(wrapper, 'Modal', el => el.length > 0); + expect(CredentialsAPI.read).toBeCalled(); + + wrapper.update(); + + expect(wrapper.find('AlertModal[title="Error!"]')).toHaveLength(1); }); test('should invoke onDelete prop', () => { const onDelete = jest.fn(); - const wrapper = mountWithContexts( + wrapper = mountWithContexts( ); wrapper.find('button').simulate('click'); @@ -53,14 +213,14 @@ describe('', () => { }); test('should disable button when no delete permissions', () => { - const wrapper = mountWithContexts( + wrapper = mountWithContexts( {}} itemsToDelete={[itemB]} /> ); expect(wrapper.find('button[disabled]')).toHaveLength(1); }); test('should render tooltip', () => { - const wrapper = mountWithContexts( + wrapper = mountWithContexts( {}} itemsToDelete={[itemA]} /> ); expect(wrapper.find('Tooltip')).toHaveLength(1); @@ -68,7 +228,7 @@ describe('', () => { }); test('should render tooltip for username', () => { - const wrapper = mountWithContexts( + wrapper = mountWithContexts( {}} itemsToDelete={[itemC]} /> ); expect(wrapper.find('Tooltip')).toHaveLength(1); diff --git a/awx/ui_next/src/components/PaginatedDataList/__snapshots__/ToolbarDeleteButton.test.jsx.snap b/awx/ui_next/src/components/PaginatedDataList/__snapshots__/ToolbarDeleteButton.test.jsx.snap index fc24195951..eb410530a8 100644 --- a/awx/ui_next/src/components/PaginatedDataList/__snapshots__/ToolbarDeleteButton.test.jsx.snap +++ b/awx/ui_next/src/components/PaginatedDataList/__snapshots__/ToolbarDeleteButton.test.jsx.snap @@ -75,6 +75,7 @@ exports[` should render button 1`] = ` + )} + {!isJobRunning(job.status) && + job?.summary_fields?.user_capabilities?.delete && ( + + {i18n._(t`Delete`)} + + )} + {showCancelModal && isJobRunning(job.status) && ( + setShowCancelModal(false)} + 
title={i18n._(t`Cancel Job`)} + label={i18n._(t`Cancel Job`)} + actions={[ + , + , + ]} + > + {i18n._( + t`Are you sure you want to submit the request to cancel this job?` + )} + + )} + {dismissableCancelError && ( + + + + )} {errorMsg && ( ', () => { wrapper.update(); const modal = wrapper.find('Modal'); expect(modal.length).toBe(1); - modal.find('button[aria-label="Delete"]').simulate('click'); + modal.find('button[aria-label="Confirm Delete"]').simulate('click'); expect(JobsAPI.destroy).toHaveBeenCalledTimes(1); }); @@ -138,7 +138,7 @@ describe('', () => { const modal = wrapper.find('Modal'); expect(modal.length).toBe(1); await act(async () => { - modal.find('button[aria-label="Delete"]').simulate('click'); + modal.find('button[aria-label="Confirm Delete"]').simulate('click'); }); wrapper.update(); diff --git a/awx/ui_next/src/screens/Job/JobOutput/JobOutput.jsx b/awx/ui_next/src/screens/Job/JobOutput/JobOutput.jsx index 4533dcc585..293986b779 100644 --- a/awx/ui_next/src/screens/Job/JobOutput/JobOutput.jsx +++ b/awx/ui_next/src/screens/Job/JobOutput/JobOutput.jsx @@ -37,7 +37,7 @@ import PageControls from './PageControls'; import HostEventModal from './HostEventModal'; import { HostStatusBar, OutputToolbar } from './shared'; import getRowRangePageSize from './shared/jobOutputUtils'; -import isJobRunning from '../../../util/jobs'; +import { getJobModel, isJobRunning } from '../../../util/jobs'; import useRequest, { useDismissableError } from '../../../util/useRequest'; import { encodeNonDefaultQueryString, @@ -47,14 +47,6 @@ import { removeParams, getQSConfig, } from '../../../util/qs'; -import { - JobsAPI, - ProjectUpdatesAPI, - SystemJobsAPI, - WorkflowJobsAPI, - InventoriesAPI, - AdHocCommandsAPI, -} from '../../../api'; const QS_CONFIG = getQSConfig('job_output', { order_by: 'start_line', @@ -280,12 +272,7 @@ const cache = new CellMeasurerCache({ defaultHeight: 25, }); -function JobOutput({ - job, - type, - eventRelatedSearchableKeys, - eventSearchableKeys, 
-}) { +function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { const location = useLocation(); const listRef = useRef(null); const isMounted = useRef(false); @@ -348,8 +335,8 @@ function JobOutput({ request: cancelJob, } = useRequest( useCallback(async () => { - await JobsAPI.cancel(job.id, type); - }, [job.id, type]), + await getJobModel(job.type).cancel(job.id); + }, [job.id, job.type]), {} ); @@ -364,27 +351,10 @@ function JobOutput({ error: deleteError, } = useRequest( useCallback(async () => { - switch (job.type) { - case 'project_update': - await ProjectUpdatesAPI.destroy(job.id); - break; - case 'system_job': - await SystemJobsAPI.destroy(job.id); - break; - case 'workflow_job': - await WorkflowJobsAPI.destroy(job.id); - break; - case 'ad_hoc_command': - await AdHocCommandsAPI.destroy(job.id); - break; - case 'inventory_update': - await InventoriesAPI.destroy(job.id); - break; - default: - await JobsAPI.destroy(job.id); - } + await getJobModel(job.type).destroy(job.id); + history.push('/jobs'); - }, [job, history]) + }, [job.type, job.id, history]) ); const { @@ -417,7 +387,7 @@ function JobOutput({ try { const { data: { results: fetchedEvents = [], count }, - } = await JobsAPI.readEvents(job.id, type, { + } = await getJobModel(job.type).readEvents(job.id, { page: 1, page_size: 50, ...parseQueryString(QS_CONFIG, location.search), @@ -557,31 +527,33 @@ function JobOutput({ ...parseQueryString(QS_CONFIG, location.search), }; - return JobsAPI.readEvents(job.id, type, params).then(response => { - if (isMounted.current) { - const newResults = {}; - let newResultsCssMap = {}; - response.data.results.forEach((jobEvent, index) => { - newResults[firstIndex + index] = jobEvent; - const { lineCssMap } = getLineTextHtml(jobEvent); - newResultsCssMap = { ...newResultsCssMap, ...lineCssMap }; - }); - setResults(prevResults => ({ - ...prevResults, - ...newResults, - })); - setCssMap(prevCssMap => ({ - ...prevCssMap, - ...newResultsCssMap, - })); - 
setCurrentlyLoading(prevCurrentlyLoading => - prevCurrentlyLoading.filter(n => !loadRange.includes(n)) - ); - loadRange.forEach(n => { - cache.clear(n); - }); - } - }); + return getJobModel(job.type) + .readEvents(job.id, params) + .then(response => { + if (isMounted.current) { + const newResults = {}; + let newResultsCssMap = {}; + response.data.results.forEach((jobEvent, index) => { + newResults[firstIndex + index] = jobEvent; + const { lineCssMap } = getLineTextHtml(jobEvent); + newResultsCssMap = { ...newResultsCssMap, ...lineCssMap }; + }); + setResults(prevResults => ({ + ...prevResults, + ...newResults, + })); + setCssMap(prevCssMap => ({ + ...prevCssMap, + ...newResultsCssMap, + })); + setCurrentlyLoading(prevCurrentlyLoading => + prevCurrentlyLoading.filter(n => !loadRange.includes(n)) + ); + loadRange.forEach(n => { + cache.clear(n); + }); + } + }); }; const scrollToRow = rowIndex => { diff --git a/awx/ui_next/src/screens/Job/JobOutput/JobOutput.test.jsx b/awx/ui_next/src/screens/Job/JobOutput/JobOutput.test.jsx index ef2fa35190..72354fe32a 100644 --- a/awx/ui_next/src/screens/Job/JobOutput/JobOutput.test.jsx +++ b/awx/ui_next/src/screens/Job/JobOutput/JobOutput.test.jsx @@ -188,9 +188,19 @@ describe('', () => { wrapper = mountWithContexts(); }); await waitForElement(wrapper, 'JobEvent', el => el.length > 0); - await act(async () => { - wrapper.find('DeleteButton').invoke('onConfirm')(); - }); + await act(async () => + wrapper.find('button[aria-label="Delete"]').simulate('click') + ); + await waitForElement( + wrapper, + 'Modal', + el => el.props().isOpen === true && el.props().title === 'Delete Job' + ); + await act(async () => + wrapper + .find('Modal button[aria-label="Confirm Delete"]') + .simulate('click') + ); expect(JobsAPI.destroy).toHaveBeenCalledTimes(1); }); @@ -268,7 +278,7 @@ describe('', () => { wrapper.find(searchBtn).simulate('click'); }); wrapper.update(); - expect(JobsAPI.readEvents).toHaveBeenCalledWith(2, undefined, { + 
expect(JobsAPI.readEvents).toHaveBeenCalledWith(2, { order_by: 'start_line', page: 1, page_size: 50, diff --git a/awx/ui_next/src/screens/Job/JobTypeRedirect.jsx b/awx/ui_next/src/screens/Job/JobTypeRedirect.jsx index dbf2256fdc..22b70c7f17 100644 --- a/awx/ui_next/src/screens/Job/JobTypeRedirect.jsx +++ b/awx/ui_next/src/screens/Job/JobTypeRedirect.jsx @@ -55,8 +55,8 @@ function JobTypeRedirect({ id, path, view, i18n }) { ); } - const type = JOB_TYPE_URL_SEGMENTS[job.type]; - return ; + const typeSegment = JOB_TYPE_URL_SEGMENTS[job.type]; + return ; } JobTypeRedirect.defaultProps = { diff --git a/awx/ui_next/src/screens/Job/Jobs.jsx b/awx/ui_next/src/screens/Job/Jobs.jsx index 318729407a..f75d560d70 100644 --- a/awx/ui_next/src/screens/Job/Jobs.jsx +++ b/awx/ui_next/src/screens/Job/Jobs.jsx @@ -21,12 +21,12 @@ function Jobs({ i18n }) { return; } - const type = JOB_TYPE_URL_SEGMENTS[job.type]; + const typeSegment = JOB_TYPE_URL_SEGMENTS[job.type]; setBreadcrumbConfig({ '/jobs': i18n._(t`Jobs`), - [`/jobs/${type}/${job.id}`]: `${job.name}`, - [`/jobs/${type}/${job.id}/output`]: i18n._(t`Output`), - [`/jobs/${type}/${job.id}/details`]: i18n._(t`Details`), + [`/jobs/${typeSegment}/${job.id}`]: `${job.name}`, + [`/jobs/${typeSegment}/${job.id}/output`]: i18n._(t`Output`), + [`/jobs/${typeSegment}/${job.id}/details`]: i18n._(t`Details`), }); }, [i18n] @@ -53,7 +53,7 @@ function Jobs({ i18n }) { - + diff --git a/awx/ui_next/src/screens/Job/shared/data.job.json b/awx/ui_next/src/screens/Job/shared/data.job.json index 614e7d6aad..8b2d31a197 100644 --- a/awx/ui_next/src/screens/Job/shared/data.job.json +++ b/awx/ui_next/src/screens/Job/shared/data.job.json @@ -154,7 +154,6 @@ "ANSIBLE_INVENTORY_UNPARSED_FAILED": "True", "ANSIBLE_PARAMIKO_RECORD_HOST_KEYS": "False", "ANSIBLE_VENV_PATH": "/var/lib/awx/venv/ansible", - "PROOT_TMP_DIR": "/tmp", "AWX_PRIVATE_DATA_DIR": "/tmp/awx_2_a4b1afiw", "ANSIBLE_COLLECTIONS_PATHS": "/tmp/collections", "PYTHONPATH": 
"/var/lib/awx/venv/ansible/lib/python2.7/site-packages:/awx_devel/awx/lib:", diff --git a/awx/ui_next/src/screens/Job/useWsJob.js b/awx/ui_next/src/screens/Job/useWsJob.js index ace2cf2ce6..e9461888d7 100644 --- a/awx/ui_next/src/screens/Job/useWsJob.js +++ b/awx/ui_next/src/screens/Job/useWsJob.js @@ -1,10 +1,8 @@ import { useState, useEffect } from 'react'; -import { useParams } from 'react-router-dom'; import useWebsocket from '../../util/useWebsocket'; -import { JobsAPI } from '../../api'; +import { getJobModel } from '../../util/jobs'; export default function useWsJob(initialJob) { - const { type } = useParams(); const [job, setJob] = useState(initialJob); const lastMessage = useWebsocket({ jobs: ['status_changed'], @@ -18,7 +16,7 @@ export default function useWsJob(initialJob) { useEffect( function parseWsMessage() { async function fetchJob() { - const { data } = await JobsAPI.readDetail(job.id, type); + const { data } = await getJobModel(job.type).readDetail(job.id); setJob(data); } diff --git a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx index db282620a5..9369eaebfb 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx @@ -20,6 +20,7 @@ import ErrorDetail from '../../../components/ErrorDetail'; import useRequest, { useDismissableError } from '../../../util/useRequest'; import { useConfig } from '../../../contexts/Config'; import ExecutionEnvironmentDetail from '../../../components/ExecutionEnvironmentDetail'; +import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; function OrganizationDetail({ i18n, organization }) { const { @@ -71,6 +72,11 @@ function OrganizationDetail({ i18n, organization }) { const { error, dismissError } = useDismissableError(deleteError); + const 
deleteDetailsRequests = relatedResourceDeleteRequests.organization( + organization, + i18n + ); + if (hasContentLoading) { return ; } @@ -157,6 +163,10 @@ function OrganizationDetail({ i18n, organization }) { modalTitle={i18n._(t`Delete Organization`)} onConfirm={deleteOrganization} isDisabled={isLoading} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + t`This organization is currently being used by other resources. Are you sure you want to delete it?` + )} > {i18n._(t`Delete`)}
diff --git a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.test.jsx index a33ddc1c61..d91b38c85d 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.test.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.test.jsx @@ -1,7 +1,7 @@ import React from 'react'; import { act } from 'react-dom/test-utils'; -import { OrganizationsAPI } from '../../../api'; +import { OrganizationsAPI, CredentialsAPI } from '../../../api'; import { mountWithContexts, waitForElement, @@ -44,6 +44,8 @@ describe('', () => { }; beforeEach(() => { + CredentialsAPI.read.mockResolvedValue({ data: { count: 0 } }); + OrganizationsAPI.readInstanceGroups.mockResolvedValue(mockInstanceGroups); }); @@ -64,6 +66,20 @@ describe('', () => { expect(OrganizationsAPI.readInstanceGroups).toHaveBeenCalledTimes(1); }); + test('should have proper number of delete detail requests', async () => { + let component; + await act(async () => { + component = mountWithContexts( + + ); + }); + await waitForElement(component, 'ContentLoading', el => el.length === 0); + + expect( + component.find('DeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(7); + }); + test('should render the expected instance group', async () => { let component; await act(async () => { diff --git a/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.jsx b/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.jsx index 03b062a83d..d5dce4dcac 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.jsx @@ -19,6 +19,7 @@ import PaginatedTable, { } from '../../../components/PaginatedTable'; import { getQSConfig, parseQueryString } from '../../../util/qs'; import OrganizationListItem from './OrganizationListItem'; +import 
{ relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; const QS_CONFIG = getQSConfig('organization', { page: 1, @@ -116,6 +117,10 @@ function OrganizationsList({ i18n }) { setSelected(selected.concat(row)); } }; + const deleteDetailsRequests = relatedResourceDeleteRequests.organization( + selected[0], + i18n + ); return ( <> @@ -173,6 +178,11 @@ function OrganizationsList({ i18n }) { onDelete={handleOrgDelete} itemsToDelete={selected} pluralizedItemName={i18n._(t`Organizations`)} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + '{numItemsToDelete, plural, one {This organization is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these organizations could impact other resources that rely on them. Are you sure you want to delete anyway?}}', + { numItemsToDelete: selected.length } + )} />, ]} /> diff --git a/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.test.jsx index 34e9b93e33..02c6c65b0e 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.test.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationList/OrganizationList.test.jsx @@ -1,7 +1,7 @@ import React from 'react'; import { act } from 'react-dom/test-utils'; -import { OrganizationsAPI } from '../../../api'; +import { OrganizationsAPI, CredentialsAPI } from '../../../api'; import { mountWithContexts, waitForElement, @@ -70,6 +70,7 @@ const mockOrganizations = { describe('', () => { let wrapper; beforeEach(() => { + CredentialsAPI.read.mockResolvedValue({ data: { count: 0 } }); OrganizationsAPI.read.mockResolvedValue(mockOrganizations); OrganizationsAPI.readOptions.mockResolvedValue({ data: { @@ -90,6 +91,20 @@ describe('', () => { }); }); + test('should have proper number of delete detail requests', async () => { + await act(async () => { + wrapper = mountWithContexts(); + 
}); + await waitForElement( + wrapper, + 'OrganizationsList', + el => el.find('ContentLoading').length === 0 + ); + expect( + wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(7); + }); + test('Items are rendered after loading', async () => { await act(async () => { wrapper = mountWithContexts(); diff --git a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx index d00ab5da57..f8bcd351b7 100644 --- a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx +++ b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx @@ -20,6 +20,7 @@ import CredentialChip from '../../../components/CredentialChip'; import { ProjectsAPI } from '../../../api'; import { toTitleCase } from '../../../util/strings'; import useRequest, { useDismissableError } from '../../../util/useRequest'; +import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; import ProjectSyncButton from '../shared/ProjectSyncButton'; function ProjectDetail({ project, i18n }) { @@ -52,7 +53,10 @@ function ProjectDetail({ project, i18n }) { ); const { error, dismissError } = useDismissableError(deleteError); - + const deleteDetailsRequests = relatedResourceDeleteRequests.project( + project, + i18n + ); let optionsList = ''; if ( scm_clean || @@ -171,6 +175,10 @@ function ProjectDetail({ project, i18n }) { modalTitle={i18n._(t`Delete Project`)} onConfirm={deleteProject} isDisabled={isLoading} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + t`This project is currently being used by other resources. 
Are you sure you want to delete it?` + )} > {i18n._(t`Delete`)} diff --git a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.test.jsx b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.test.jsx index 36caa48a12..6f65e6b7f7 100644 --- a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.test.jsx +++ b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.test.jsx @@ -5,7 +5,12 @@ import { mountWithContexts, waitForElement, } from '../../../../testUtils/enzymeHelpers'; -import { ProjectsAPI } from '../../../api'; +import { + ProjectsAPI, + JobTemplatesAPI, + WorkflowJobTemplatesAPI, + InventorySourcesAPI, +} from '../../../api'; import ProjectDetail from './ProjectDetail'; jest.mock('../../../api'); @@ -147,6 +152,27 @@ describe('', () => { expect(wrapper.find('Detail[label="Options"]').length).toBe(0); }); + test('should have proper number of delete detail requests', () => { + JobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } }); + WorkflowJobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } }); + InventorySourcesAPI.read.mockResolvedValue({ data: { count: 0 } }); + const mockOptions = { + scm_type: '', + scm_clean: false, + scm_delete_on_update: false, + scm_update_on_launch: false, + allow_override: false, + created: '', + modified: '', + }; + const wrapper = mountWithContexts( + + ); + expect( + wrapper.find('DeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(3); + }); + test('should render with missing summary fields', async () => { const wrapper = mountWithContexts( diff --git a/awx/ui_next/src/screens/Project/ProjectList/ProjectList.jsx b/awx/ui_next/src/screens/Project/ProjectList/ProjectList.jsx index acb473a34d..205628ca27 100644 --- a/awx/ui_next/src/screens/Project/ProjectList/ProjectList.jsx +++ b/awx/ui_next/src/screens/Project/ProjectList/ProjectList.jsx @@ -18,6 +18,7 @@ import PaginatedTable, { HeaderCell, } from '../../../components/PaginatedTable'; import useWsProjects from 
'./useWsProjects'; +import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; import { getQSConfig, parseQueryString } from '../../../util/qs'; import ProjectListItem from './ProjectListItem'; @@ -116,6 +117,11 @@ function ProjectList({ i18n }) { } }; + const deleteDetailsRequests = relatedResourceDeleteRequests.project( + selected[0], + i18n + ); + return ( @@ -194,6 +200,11 @@ function ProjectList({ i18n }) { onDelete={handleProjectDelete} itemsToDelete={selected} pluralizedItemName={i18n._(t`Projects`)} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + '{numItemsToDelete, plural, one {This project is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these projects could impact other resources that rely on them. Are you sure you want to delete anyway?}}', + { numItemsToDelete: selected.length } + )} />, ]} /> diff --git a/awx/ui_next/src/screens/Project/ProjectList/ProjectList.test.jsx b/awx/ui_next/src/screens/Project/ProjectList/ProjectList.test.jsx index 5a6945d892..6b6f913e79 100644 --- a/awx/ui_next/src/screens/Project/ProjectList/ProjectList.test.jsx +++ b/awx/ui_next/src/screens/Project/ProjectList/ProjectList.test.jsx @@ -1,7 +1,15 @@ import React from 'react'; import { act } from 'react-dom/test-utils'; -import { ProjectsAPI } from '../../../api'; -import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import { + ProjectsAPI, + JobTemplatesAPI, + WorkflowJobTemplatesAPI, + InventorySourcesAPI, +} from '../../../api'; +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; import ProjectList from './ProjectList'; jest.mock('../../../api'); @@ -83,6 +91,9 @@ const mockProjects = [ describe('', () => { beforeEach(() => { + JobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } }); + WorkflowJobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } }); + 
InventorySourcesAPI.read.mockResolvedValue({ data: { count: 0 } }); ProjectsAPI.read.mockResolvedValue({ data: { count: mockProjects.length, @@ -138,6 +149,17 @@ describe('', () => { ).toEqual(true); }); + test('should have proper number of delete detail requests', async () => { + let wrapper; + await act(async () => { + wrapper = mountWithContexts(); + }); + wrapper.update(); + expect( + wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(3); + }); + test('should select all', async () => { let wrapper; await act(async () => { @@ -177,10 +199,11 @@ describe('', () => { .at(2) .invoke('onSelect')(); }); - wrapper.update(); - expect(wrapper.find('ToolbarDeleteButton button').prop('disabled')).toEqual( - true + waitForElement( + wrapper, + 'ToolbarDeleteButton button', + el => el.prop('disabled') === true ); }); diff --git a/awx/ui_next/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.jsx b/awx/ui_next/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.jsx index 02ec0b8118..715bd649e3 100644 --- a/awx/ui_next/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.jsx +++ b/awx/ui_next/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.jsx @@ -50,7 +50,6 @@ describe('', () => { }); test('should render expected details', () => { - assertDetail(wrapper, 'Enable job isolation', 'On'); assertDetail(wrapper, 'Job execution path', '/tmp'); assertDetail(wrapper, 'Isolated status check interval', '1 seconds'); assertDetail(wrapper, 'Isolated launch timeout', '600 seconds'); @@ -81,7 +80,6 @@ describe('', () => { 'Ansible Modules Allowed for Ad Hoc Jobs', '[\n "command"\n]' ); - assertVariableDetail(wrapper, 'Paths to hide from isolated jobs', '[]'); assertVariableDetail(wrapper, 'Paths to expose to isolated jobs', '[]'); assertVariableDetail(wrapper, 'Extra Environment Variables', '{}'); assertVariableDetail(wrapper, 'Ansible Callback Plugins', '[]'); diff --git a/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/JobsEdit.jsx 
b/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/JobsEdit.jsx index 143c805a01..3e5d287727 100644 --- a/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/JobsEdit.jsx +++ b/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/JobsEdit.jsx @@ -70,8 +70,7 @@ function JobsEdit() { await submitForm({ ...form, AD_HOC_COMMANDS: formatJson(form.AD_HOC_COMMANDS), - AWX_PROOT_SHOW_PATHS: formatJson(form.AWX_PROOT_SHOW_PATHS), - AWX_PROOT_HIDE_PATHS: formatJson(form.AWX_PROOT_HIDE_PATHS), + AWX_ISOLATION_SHOW_PATHS: formatJson(form.AWX_ISOLATION_SHOW_PATHS), AWX_ANSIBLE_CALLBACK_PLUGINS: formatJson( form.AWX_ANSIBLE_CALLBACK_PLUGINS ), @@ -116,8 +115,8 @@ function JobsEdit() {
- - {submitError && } diff --git a/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json b/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json index 70c73869d7..8770514f75 100644 --- a/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json +++ b/awx/ui_next/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json @@ -27,10 +27,8 @@ "AWX_ISOLATED_CONNECTION_TIMEOUT": 10, "AWX_ISOLATED_HOST_KEY_CHECKING": false, "AWX_ISOLATED_LAUNCH_TIMEOUT": 600, - "AWX_PROOT_BASE_PATH": "/tmp", - "AWX_PROOT_ENABLED": true, - "AWX_PROOT_HIDE_PATHS": [], - "AWX_PROOT_SHOW_PATHS": [], + "AWX_ISOLATION_BASE_PATH": "/tmp", + "AWX_ISOLATION_SHOW_PATHS": [], "AWX_RESOURCE_PROFILING_CPU_POLL_INTERVAL": 0.25, "AWX_RESOURCE_PROFILING_ENABLED": false, "AWX_RESOURCE_PROFILING_MEMORY_POLL_INTERVAL": 0.25, @@ -45,4 +43,4 @@ "MAX_FORKS": 200, "PROJECT_UPDATE_VVV": false, "SCHEDULE_MAX_JOBS": 10 -} \ No newline at end of file +} diff --git a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.jsx b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.jsx index 02f37b2d96..54eac90e9f 100644 --- a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.jsx +++ b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.jsx @@ -9,7 +9,7 @@ import ContentError from '../../../../components/ContentError'; import ContentLoading from '../../../../components/ContentLoading'; import { DetailList } from '../../../../components/DetailList'; import RoutedTabs from '../../../../components/RoutedTabs'; -import { SettingsAPI } from '../../../../api'; +import { SettingsAPI, ExecutionEnvironmentsAPI } from '../../../../api'; import useRequest from '../../../../util/useRequest'; import { useConfig } from '../../../../contexts/Config'; import { useSettings } from '../../../../contexts/Settings'; @@ -23,7 +23,15 @@ function MiscSystemDetail({ i18n }) { const { 
isLoading, error, request, result: system } = useRequest( useCallback(async () => { const { data } = await SettingsAPI.readCategory('all'); - + let DEFAULT_EXECUTION_ENVIRONMENT = ''; + if (data.DEFAULT_EXECUTION_ENVIRONMENT) { + const { + data: { name }, + } = await ExecutionEnvironmentsAPI.readDetail( + data.DEFAULT_EXECUTION_ENVIRONMENT + ); + DEFAULT_EXECUTION_ENVIRONMENT = name; + } const { OAUTH2_PROVIDER: { ACCESS_TOKEN_EXPIRE_SECONDS, @@ -49,19 +57,17 @@ function MiscSystemDetail({ i18n }) { 'SESSION_COOKIE_AGE', 'TOWER_URL_BASE' ); - const systemData = { ...pluckedSystemData, ACCESS_TOKEN_EXPIRE_SECONDS, REFRESH_TOKEN_EXPIRE_SECONDS, AUTHORIZATION_CODE_EXPIRE_SECONDS, + DEFAULT_EXECUTION_ENVIRONMENT, }; - const { OAUTH2_PROVIDER: OAUTH2_PROVIDER_OPTIONS, ...options } = allOptions; - const systemOptions = { ...options, ACCESS_TOKEN_EXPIRE_SECONDS: { @@ -80,7 +86,6 @@ function MiscSystemDetail({ i18n }) { label: i18n._(t`Authorization Code Expiration`), }, }; - const mergedData = {}; Object.keys(systemData).forEach(key => { mergedData[key] = systemOptions[key]; diff --git a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.test.jsx b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.test.jsx index aa8b2e334d..6fbebb6ab8 100644 --- a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.test.jsx +++ b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemDetail/MiscSystemDetail.test.jsx @@ -5,7 +5,7 @@ import { waitForElement, } from '../../../../../testUtils/enzymeHelpers'; import { SettingsProvider } from '../../../../contexts/Settings'; -import { SettingsAPI } from '../../../../api'; +import { SettingsAPI, ExecutionEnvironmentsAPI } from '../../../../api'; import { assertDetail, assertVariableDetail, @@ -14,13 +14,14 @@ import mockAllOptions from '../../shared/data.allSettingOptions.json'; import MiscSystemDetail from './MiscSystemDetail'; 
jest.mock('../../../../api/models/Settings'); +jest.mock('../../../../api/models/ExecutionEnvironments'); + SettingsAPI.readCategory.mockResolvedValue({ data: { ALLOW_OAUTH2_FOR_EXTERNAL_USERS: false, AUTH_BASIC_ENABLED: true, AUTOMATION_ANALYTICS_GATHER_INTERVAL: 14400, AUTOMATION_ANALYTICS_URL: 'https://example.com', - CUSTOM_VENV_PATHS: [], INSIGHTS_TRACKING_STATE: false, LOGIN_REDIRECT_OVERRIDE: 'https://redirect.com', MANAGE_ORGANIZATION_AUTH: true, @@ -36,6 +37,16 @@ SettingsAPI.readCategory.mockResolvedValue({ SESSIONS_PER_USER: -1, SESSION_COOKIE_AGE: 30000000000, TOWER_URL_BASE: 'https://towerhost', + DEFAULT_EXECUTION_ENVIRONMENT: 1, + }, +}); + +ExecutionEnvironmentsAPI.readDetail.mockResolvedValue({ + data: { + id: 1, + name: 'Foo', + image: 'quay.io/ansible/awx-ee', + pull: 'missing', }, }); @@ -110,6 +121,33 @@ describe('', () => { assertDetail(wrapper, 'Red Hat customer username', 'mock name'); assertDetail(wrapper, 'Refresh Token Expiration', '3 seconds'); assertVariableDetail(wrapper, 'Remote Host Headers', '[]'); + assertDetail(wrapper, 'Global default execution environment', 'Foo'); + }); + + test('should render execution environment as not configured', async () => { + ExecutionEnvironmentsAPI.readDetail.mockResolvedValue({ + data: {}, + }); + let newWrapper; + await act(async () => { + newWrapper = mountWithContexts( + + + + ); + }); + await waitForElement(newWrapper, 'ContentLoading', el => el.length === 0); + + assertDetail( + newWrapper, + 'Global default execution environment', + 'Not configured' + ); }); test('should hide edit button from non-superusers', async () => { diff --git a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemEdit/MiscSystemEdit.jsx b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemEdit/MiscSystemEdit.jsx index bb19b52f21..5411326eb0 100644 --- a/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemEdit/MiscSystemEdit.jsx +++ b/awx/ui_next/src/screens/Setting/MiscSystem/MiscSystemEdit/MiscSystemEdit.jsx @@ -9,6 
+9,7 @@ import ContentError from '../../../../components/ContentError'; import ContentLoading from '../../../../components/ContentLoading'; import { FormSubmitError } from '../../../../components/FormField'; import { FormColumnLayout } from '../../../../components/FormLayout'; +import { ExecutionEnvironmentLookup } from '../../../../components/Lookup'; import { useSettings } from '../../../../contexts/Settings'; import { BooleanField, @@ -20,7 +21,7 @@ import { } from '../../shared'; import useModal from '../../../../util/useModal'; import useRequest from '../../../../util/useRequest'; -import { SettingsAPI } from '../../../../api'; +import { SettingsAPI, ExecutionEnvironmentsAPI } from '../../../../api'; import { pluck, formatJson } from '../../shared/settingUtils'; function MiscSystemEdit({ i18n }) { @@ -44,7 +45,6 @@ function MiscSystemEdit({ i18n }) { 'AUTH_BASIC_ENABLED', 'AUTOMATION_ANALYTICS_GATHER_INTERVAL', 'AUTOMATION_ANALYTICS_URL', - 'CUSTOM_VENV_PATHS', 'INSIGHTS_TRACKING_STATE', 'LOGIN_REDIRECT_OVERRIDE', 'MANAGE_ORGANIZATION_AUTH', @@ -55,7 +55,8 @@ function MiscSystemEdit({ i18n }) { 'REMOTE_HOST_HEADERS', 'SESSIONS_PER_USER', 'SESSION_COOKIE_AGE', - 'TOWER_URL_BASE' + 'TOWER_URL_BASE', + 'DEFAULT_EXECUTION_ENVIRONMENT' ); const systemData = { @@ -128,6 +129,7 @@ function MiscSystemEdit({ i18n }) { AUTHORIZATION_CODE_EXPIRE_SECONDS, ...formData } = form; + await submitForm({ ...formData, REMOTE_HOST_HEADERS: formatJson(formData.REMOTE_HOST_HEADERS), @@ -136,6 +138,8 @@ function MiscSystemEdit({ i18n }) { REFRESH_TOKEN_EXPIRE_SECONDS, AUTHORIZATION_CODE_EXPIRE_SECONDS, }, + DEFAULT_EXECUTION_ENVIRONMENT: + formData.DEFAULT_EXECUTION_ENVIRONMENT?.id || null, }); }; @@ -178,16 +182,73 @@ function MiscSystemEdit({ i18n }) { return acc; }, {}); + const executionEnvironmentId = + system?.DEFAULT_EXECUTION_ENVIRONMENT?.value || null; + + const { + isLoading: isLoadingExecutionEnvironment, + error: errorExecutionEnvironment, + request: 
fetchExecutionEnvironment, + result: executionEnvironment, + } = useRequest( + useCallback(async () => { + if (!executionEnvironmentId) { + return ''; + } + const { data } = await ExecutionEnvironmentsAPI.readDetail( + executionEnvironmentId + ); + return data; + }, [executionEnvironmentId]) + ); + + useEffect(() => { + fetchExecutionEnvironment(); + }, [fetchExecutionEnvironment]); + return ( - {isLoading && } - {!isLoading && error && } - {!isLoading && system && ( - + {(isLoading || isLoadingExecutionEnvironment) && } + {!(isLoading || isLoadingExecutionEnvironment) && error && ( + + )} + {!(isLoading || isLoadingExecutionEnvironment) && system && ( + {formik => { return ( + + formik.setFieldTouched('DEFAULT_EXECUTION_ENVIRONMENT') + } + value={formik.values.DEFAULT_EXECUTION_ENVIRONMENT} + onChange={value => + formik.setFieldValue( + 'DEFAULT_EXECUTION_ENVIRONMENT', + value + ) + } + popoverContent={i18n._( + t`The Execution Environment to be used when one has not been configured for a job template.` + )} + isGlobalDefaultEnvironment + /> ', () => { let wrapper; let history; @@ -42,10 +83,40 @@ describe('', () => { await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); }); - test('initially renders without crashing', () => { + test('initially renders without crashing', async () => { expect(wrapper.find('MiscSystemEdit').length).toBe(1); }); + test('save button should call updateAll', async () => { + expect(wrapper.find('MiscSystemEdit').length).toBe(1); + + wrapper.find('ExecutionEnvironmentLookup').invoke('onChange')({ + id: 1, + name: 'Foo', + }); + wrapper.update(); + await act(async () => { + wrapper.find('button[aria-label="Save"]').simulate('click'); + }); + wrapper.update(); + expect(SettingsAPI.updateAll).toHaveBeenCalledWith(systemData); + }); + + test('should remove execution environment', async () => { + expect(wrapper.find('MiscSystemEdit').length).toBe(1); + + wrapper.find('ExecutionEnvironmentLookup').invoke('onChange')(null); + 
wrapper.update(); + await act(async () => { + wrapper.find('button[aria-label="Save"]').simulate('click'); + }); + + expect(SettingsAPI.updateAll).toHaveBeenCalledWith({ + ...systemData, + DEFAULT_EXECUTION_ENVIRONMENT: null, + }); + }); + test('should successfully send default values to api on form revert all', async () => { expect(SettingsAPI.updateAll).toHaveBeenCalledTimes(0); expect(wrapper.find('RevertAllAlert')).toHaveLength(0); diff --git a/awx/ui_next/src/screens/Setting/shared/SettingDetail.jsx b/awx/ui_next/src/screens/Setting/shared/SettingDetail.jsx index d58c89e721..fc6498b67e 100644 --- a/awx/ui_next/src/screens/Setting/shared/SettingDetail.jsx +++ b/awx/ui_next/src/screens/Setting/shared/SettingDetail.jsx @@ -88,6 +88,8 @@ export default withI18n()( ); break; case 'choice': + case 'field': + case 'string': detail = ( ); break; - case 'string': - detail = ( - - ); - break; default: detail = null; } diff --git a/awx/ui_next/src/screens/Setting/shared/data.allSettingOptions.json b/awx/ui_next/src/screens/Setting/shared/data.allSettingOptions.json index 758e267ed3..ac885ced92 100644 --- a/awx/ui_next/src/screens/Setting/shared/data.allSettingOptions.json +++ b/awx/ui_next/src/screens/Setting/shared/data.allSettingOptions.json @@ -166,15 +166,7 @@ ] ] }, - "AWX_PROOT_ENABLED": { - "type": "boolean", - "label": "Enable job isolation", - "help_text": "Isolates an Ansible job from protected parts of the system to prevent exposing sensitive information.", - "category": "Jobs", - "category_slug": "jobs", - "defined_in_file": false - }, - "AWX_PROOT_BASE_PATH": { + "AWX_ISOLATION_BASE_PATH": { "type": "string", "label": "Job execution path", "help_text": "The directory in which Tower will create new temporary directories for job execution and isolation (such as credential files and custom inventory scripts).", @@ -182,18 +174,7 @@ "category_slug": "jobs", "defined_in_file": false }, - "AWX_PROOT_HIDE_PATHS": { - "type": "list", - "label": "Paths to hide from 
isolated jobs", - "help_text": "Additional paths to hide from isolated processes. Enter one path per line.", - "category": "Jobs", - "category_slug": "jobs", - "defined_in_file": false, - "child": { - "type": "string" - } - }, - "AWX_PROOT_SHOW_PATHS": { + "AWX_ISOLATION_SHOW_PATHS": { "type": "list", "label": "Paths to expose to isolated jobs", "help_text": "List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.", @@ -2963,7 +2944,15 @@ "child": { "type": "field" } - } + }, + "DEFAULT_EXECUTION_ENVIRONMENT": { + "type": "field", + "label": "Global default execution environment", + "help_text": "The Execution Environment to be used when one has not been configured for a job template.", + "category": "System", + "category_slug": "system", + "defined_in_file": false + } }, "PUT": { "ACTIVITY_STREAM_ENABLED": { @@ -3158,16 +3147,7 @@ ] ] }, - "AWX_PROOT_ENABLED": { - "type": "boolean", - "required": true, - "label": "Enable job isolation", - "help_text": "Isolates an Ansible job from protected parts of the system to prevent exposing sensitive information.", - "category": "Jobs", - "category_slug": "jobs", - "default": true - }, - "AWX_PROOT_BASE_PATH": { + "AWX_ISOLATION_BASE_PATH": { "type": "string", "required": true, "label": "Job execution path", @@ -3176,21 +3156,7 @@ "category_slug": "jobs", "default": "/tmp" }, - "AWX_PROOT_HIDE_PATHS": { - "type": "list", - "required": false, - "label": "Paths to hide from isolated jobs", - "help_text": "Additional paths to hide from isolated processes. 
Enter one path per line.", - "category": "Jobs", - "category_slug": "jobs", - "default": [], - "child": { - "type": "string", - "required": true, - "read_only": false - } - }, - "AWX_PROOT_SHOW_PATHS": { + "AWX_ISOLATION_SHOW_PATHS": { "type": "list", "required": false, "label": "Paths to expose to isolated jobs", @@ -7091,6 +7057,15 @@ "read_only": false } }, + "DEFAULT_EXECUTION_ENVIRONMENT": { + "type": "field", + "required": false, + "label": "Global default execution environment", + "help_text": "The Execution Environment to be used when one has not been configured for a job template.", + "category": "System", + "category_slug": "system", + "default": null + }, "SOCIAL_AUTH_SAML_TEAM_ATTR": { "type": "nested object", "required": false, diff --git a/awx/ui_next/src/screens/Setting/shared/data.allSettings.json b/awx/ui_next/src/screens/Setting/shared/data.allSettings.json index 2567289cf7..57b810615c 100644 --- a/awx/ui_next/src/screens/Setting/shared/data.allSettings.json +++ b/awx/ui_next/src/screens/Setting/shared/data.allSettings.json @@ -34,10 +34,8 @@ "win_user" ], "ALLOW_JINJA_IN_EXTRA_VARS":"template", - "AWX_PROOT_ENABLED":true, - "AWX_PROOT_BASE_PATH":"/tmp", - "AWX_PROOT_HIDE_PATHS":[], - "AWX_PROOT_SHOW_PATHS":[], + "AWX_ISOLATION_BASE_PATH":"/tmp", + "AWX_ISOLATION_SHOW_PATHS":[], "AWX_ISOLATED_CHECK_INTERVAL":1, "AWX_ISOLATED_LAUNCH_TIMEOUT":600, "AWX_ISOLATED_CONNECTION_TIMEOUT":10, @@ -305,5 +303,6 @@ "applications":{"fields":["name"],"adj_list":[["organization","organizations"]]}, "users":{"fields":["username"],"adj_list":[]}, "instances":{"fields":["hostname"],"adj_list":[]} - } -} \ No newline at end of file + }, + "DEFAULT_EXECUTION_ENVIRONMENT": 1 +} diff --git a/awx/ui_next/src/screens/Setting/shared/data.jobSettings.json b/awx/ui_next/src/screens/Setting/shared/data.jobSettings.json index 190a346560..d3910ded77 100644 --- a/awx/ui_next/src/screens/Setting/shared/data.jobSettings.json +++ 
b/awx/ui_next/src/screens/Setting/shared/data.jobSettings.json @@ -4,10 +4,8 @@ "command" ], "ALLOW_JINJA_IN_EXTRA_VARS": "template", - "AWX_PROOT_ENABLED": true, - "AWX_PROOT_BASE_PATH": "/tmp", - "AWX_PROOT_HIDE_PATHS": [], - "AWX_PROOT_SHOW_PATHS": [], + "AWX_ISOLATION_BASE_PATH": "/tmp", + "AWX_ISOLATION_SHOW_PATHS": [], "AWX_ISOLATED_CHECK_INTERVAL": 1, "AWX_ISOLATED_LAUNCH_TIMEOUT": 600, "AWX_ISOLATED_CONNECTION_TIMEOUT": 10, @@ -34,4 +32,4 @@ "DEFAULT_PROJECT_UPDATE_TIMEOUT": 0, "ANSIBLE_FACT_CACHE_TIMEOUT": 0, "MAX_FORKS": 200 -} \ No newline at end of file +} diff --git a/awx/ui_next/src/screens/Team/TeamRoles/TeamRolesList.jsx b/awx/ui_next/src/screens/Team/TeamRoles/TeamRolesList.jsx index af3089df35..8c55e2f687 100644 --- a/awx/ui_next/src/screens/Team/TeamRoles/TeamRolesList.jsx +++ b/awx/ui_next/src/screens/Team/TeamRoles/TeamRolesList.jsx @@ -35,7 +35,7 @@ function TeamRolesList({ i18n, me, team }) { const { isLoading, request: fetchRoles, - contentError, + error: contentError, result: { roleCount, roles, diff --git a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx index abd0078571..27276ec543 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx @@ -31,6 +31,7 @@ import { VariablesDetail } from '../../../components/CodeEditor'; import { JobTemplatesAPI } from '../../../api'; import useRequest, { useDismissableError } from '../../../util/useRequest'; import ExecutionEnvironmentDetail from '../../../components/ExecutionEnvironmentDetail'; +import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; function JobTemplateDetail({ i18n, template }) { const { @@ -96,6 +97,10 @@ function JobTemplateDetail({ i18n, template }) { const { error, dismissError } = useDismissableError(deleteError); + const 
deleteDetailsRequests = relatedResourceDeleteRequests.template( + template, + i18n + ); const canLaunch = summary_fields.user_capabilities && summary_fields.user_capabilities.start; const verbosityOptions = [ @@ -401,6 +406,10 @@ function JobTemplateDetail({ i18n, template }) { modalTitle={i18n._(t`Delete Job Template`)} onConfirm={deleteJobTemplate} isDisabled={isDeleteLoading} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + t`This job template is currently being used by other resources. Are you sure you want to delete it?` + )} > {i18n._(t`Delete`)} diff --git a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.test.jsx b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.test.jsx index 775dc5f50a..ccd698da10 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.test.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.test.jsx @@ -5,7 +5,7 @@ import { waitForElement, } from '../../../../testUtils/enzymeHelpers'; import JobTemplateDetail from './JobTemplateDetail'; -import { JobTemplatesAPI } from '../../../api'; +import { JobTemplatesAPI, WorkflowJobTemplateNodesAPI } from '../../../api'; import mockTemplate from '../shared/data.job_template.json'; jest.mock('../../../api'); @@ -25,6 +25,7 @@ describe('', () => { beforeEach(async () => { JobTemplatesAPI.readInstanceGroups.mockResolvedValue(mockInstanceGroups); + WorkflowJobTemplateNodesAPI.read.mockResolvedValue({ data: { count: 0 } }); await act(async () => { wrapper = mountWithContexts( @@ -56,6 +57,23 @@ describe('', () => { ); }); + test('should have proper number of delete detail requests', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + expect( + wrapper.find('DeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(1); + }); + test('should request instance groups from api', async () => { 
expect(JobTemplatesAPI.readInstanceGroups).toHaveBeenCalledTimes(1); }); diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx index 7d326d649d..91e5016e89 100644 --- a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx @@ -27,6 +27,7 @@ import ErrorDetail from '../../../components/ErrorDetail'; import { LaunchButton } from '../../../components/LaunchButton'; import Sparkline from '../../../components/Sparkline'; import { toTitleCase } from '../../../util/strings'; +import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails'; import useRequest, { useDismissableError } from '../../../util/useRequest'; function WorkflowJobTemplateDetail({ template, i18n }) { @@ -102,6 +103,11 @@ function WorkflowJobTemplateDetail({ template, i18n }) { type: 'workflow_job', })); + const deleteDetailsRequests = relatedResourceDeleteRequests.template( + template, + i18n + ); + return ( @@ -241,6 +247,10 @@ function WorkflowJobTemplateDetail({ template, i18n }) { modalTitle={i18n._(t`Delete Workflow Job Template`)} onConfirm={deleteWorkflowJobTemplate} isDisabled={isLoading} + deleteDetailsRequests={deleteDetailsRequests} + deleteMessage={i18n._( + t`This workflow job template is currently being used by other resources. 
Are you sure you want to delete it?` + )} > {i18n._(t`Delete`)} diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.test.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.test.jsx index 0c1be1e8e9..71b2eb1158 100644 --- a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.test.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.test.jsx @@ -5,6 +5,9 @@ import { act } from 'react-dom/test-utils'; import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; import WorkflowJobTemplateDetail from './WorkflowJobTemplateDetail'; +import { WorkflowJobTemplateNodesAPI } from '../../../api'; + +jest.mock('../../../api'); describe('', () => { let wrapper; @@ -50,6 +53,7 @@ describe('', () => { }; beforeEach(async () => { + WorkflowJobTemplateNodesAPI.read.mockResolvedValue({ data: { count: 0 } }); history = createMemoryHistory({ initialEntries: ['/templates/workflow_job_template/1/details'], }); @@ -86,6 +90,7 @@ describe('', () => { afterEach(() => { wrapper.unmount(); + jest.clearAllMocks(); }); test('renders successfully', () => { @@ -163,6 +168,12 @@ describe('', () => { ).toBe('Demo EE'); }); + test('should have proper number of delete detail requests', async () => { + expect( + wrapper.find('DeleteButton').prop('deleteDetailsRequests') + ).toHaveLength(1); + }); + test('link out resource have the correct url', () => { const inventory = wrapper.find('Detail[label="Inventory"]').find('Link'); const organization = wrapper diff --git a/awx/ui_next/src/util/getRelatedResouceDeleteDetails.test.js b/awx/ui_next/src/util/getRelatedResouceDeleteDetails.test.js new file mode 100644 index 0000000000..32f795704a --- /dev/null +++ b/awx/ui_next/src/util/getRelatedResouceDeleteDetails.test.js @@ -0,0 +1,144 @@ +import { + getRelatedResourceDeleteCounts, + relatedResourceDeleteRequests, +} from 
'./getRelatedResourceDeleteDetails'; +import { + InventoriesAPI, + InventorySourcesAPI, + JobTemplatesAPI, + ProjectsAPI, + WorkflowJobTemplatesAPI, + WorkflowJobTemplateNodesAPI, + CredentialsAPI, +} from '../api'; + +jest.mock('../api/models/Credentials'); +jest.mock('../api/models/Inventories'); +jest.mock('../api/models/InventorySources'); +jest.mock('../api/models/JobTemplates'); +jest.mock('../api/models/Projects'); +jest.mock('../api/models/WorkflowJobTemplates'); +jest.mock('../api/models/WorkflowJobTemplateNodes'); + +const i18n = { + _: key => { + if (key.values) { + Object.entries(key.values).forEach(([k, v]) => { + key.id = key.id.replace(new RegExp(`\\{${k}\\}`), v); + }); + } + return key.id; + }, +}; + +describe('delete details', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + test('should call api for credentials list', () => { + getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.credential({ id: 1 }, i18n) + ); + expect(InventoriesAPI.read).toBeCalledWith({ + insights_credential: 1, + }); + expect(InventorySourcesAPI.read).toBeCalledWith({ + credentials__id: 1, + }); + expect(JobTemplatesAPI.read).toBeCalledWith({ credentials: 1 }); + expect(ProjectsAPI.read).toBeCalledWith({ credentials: 1 }); + }); + + test('should call api for projects list', () => { + getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.project({ id: 1 }, i18n) + ); + expect(WorkflowJobTemplateNodesAPI.read).toBeCalledWith({ + unified_job_template: 1, + }); + expect(InventorySourcesAPI.read).toBeCalledWith({ + source_project: 1, + }); + expect(JobTemplatesAPI.read).toBeCalledWith({ project: 1 }); + }); + + test('should call api for templates list', () => { + getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.template({ id: 1 }, i18n) + ); + expect(WorkflowJobTemplateNodesAPI.read).toBeCalledWith({ + unified_job_template: 1, + }); + }); + + test('should call api for credential type list', () => { + 
getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.credentialType({ id: 1 }, i18n) + ); + expect(CredentialsAPI.read).toBeCalledWith({ + credential_type__id: 1, + }); + }); + + test('should call api for inventory list', () => { + getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.inventory({ id: 1 }, i18n) + ); + expect(JobTemplatesAPI.read).toBeCalledWith({ inventory: 1 }); + expect(WorkflowJobTemplatesAPI.read).toBeCalledWith({ + inventory: 1, + }); + }); + + test('should call api for inventory source list', async () => { + InventoriesAPI.updateSources.mockResolvedValue({ + data: [{ inventory_source: 2 }], + }); + await getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.inventorySource(1, i18n) + ); + expect(InventoriesAPI.updateSources).toBeCalledWith(1); + expect(WorkflowJobTemplateNodesAPI.read).toBeCalledWith({ + unified_job_template: 2, + }); + }); + + test('should call api for organization list', async () => { + getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.organization({ id: 1 }, i18n) + ); + expect(CredentialsAPI.read).toBeCalledWith({ organization: 1 }); + }); + + test('should call return error for inventory source list', async () => { + InventoriesAPI.updateSources.mockRejectedValue({ + response: { + config: { + method: 'post', + url: '/api/v2/inventories/1/ad_hoc_commands', + }, + data: 'An error occurred', + status: 403, + }, + }); + const { error } = await getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.inventorySource(1, i18n) + ); + + expect(InventoriesAPI.updateSources).toBeCalledWith(1); + expect(error).toBeDefined(); + }); + + test('should return proper results', async () => { + JobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } }); + ProjectsAPI.read.mockResolvedValue({ data: { count: 2 } }); + InventoriesAPI.read.mockResolvedValue({ data: { count: 3 } }); + InventorySourcesAPI.read.mockResolvedValue({ data: { count: 0 } }); + + const { results } = await 
getRelatedResourceDeleteCounts( + relatedResourceDeleteRequests.credential({ id: 1 }, i18n) + ); + expect(results).toEqual({ Projects: 2, Inventories: 3 }); + }); +}); diff --git a/awx/ui_next/src/util/getRelatedResourceDeleteDetails.js b/awx/ui_next/src/util/getRelatedResourceDeleteDetails.js new file mode 100644 index 0000000000..c4d9c12ad1 --- /dev/null +++ b/awx/ui_next/src/util/getRelatedResourceDeleteDetails.js @@ -0,0 +1,301 @@ +import { t } from '@lingui/macro'; + +import { + UnifiedJobTemplatesAPI, + CredentialsAPI, + InventoriesAPI, + InventorySourcesAPI, + JobTemplatesAPI, + ProjectsAPI, + WorkflowJobTemplateNodesAPI, + WorkflowJobTemplatesAPI, + CredentialInputSourcesAPI, + TeamsAPI, + NotificationTemplatesAPI, + ExecutionEnvironmentsAPI, + ApplicationsAPI, + OrganizationsAPI, +} from '../api'; + +export async function getRelatedResourceDeleteCounts(requests) { + const results = {}; + let error = null; + let hasCount = false; + + try { + await Promise.all( + requests.map(async ({ request, label }) => { + const { + data: { count }, + } = await request(); + + if (count > 0) { + results[label] = count; + hasCount = true; + } + }) + ); + } catch (err) { + error = err; + } + + return { + results: hasCount && results, + error, + }; +} + +export const relatedResourceDeleteRequests = { + credential: (selected, i18n) => [ + { + request: () => + JobTemplatesAPI.read({ + credentials: selected.id, + }), + label: i18n._(t`Job Templates`), + }, + { + request: () => ProjectsAPI.read({ credentials: selected.id }), + label: i18n._(t`Projects`), + }, + { + request: () => + InventoriesAPI.read({ + insights_credential: selected.id, + }), + label: i18n._(t`Inventories`), + }, + { + request: () => + InventorySourcesAPI.read({ + credentials__id: selected.id, + }), + label: i18n._(t`Inventory Sources`), + }, + { + request: () => + CredentialInputSourcesAPI.read({ + source_credential: selected.id, + }), + label: i18n._(t`Credential`), + }, + { + request: () => + 
ExecutionEnvironmentsAPI.read({ + credential: selected.id, + }), + label: i18n._(t`Execution Environments`), + }, + ], + + credentialType: (selected, i18n) => [ + { + request: async () => + CredentialsAPI.read({ + credential_type__id: selected.id, + }), + label: i18n._(t`Credentials`), + }, + ], + + inventory: (selected, i18n) => [ + { + request: async () => + JobTemplatesAPI.read({ + inventory: selected.id, + }), + label: i18n._(t`Job Templates`), + }, + { + request: () => WorkflowJobTemplatesAPI.read({ inventory: selected.id }), + label: i18n._(t`Workflow Job Template`), + }, + ], + + inventorySource: (inventoryId, i18n, inventorySource) => [ + { + request: async () => { + try { + const { data } = await InventoriesAPI.updateSources(inventoryId); + + const results = await Promise.all( + data.map(async datum => + WorkflowJobTemplateNodesAPI.read({ + unified_job_template: datum.inventory_source, + }) + ) + ); + const total = results.reduce( + ({ data: { count: acc } }, { data: { count: cur } }) => acc + cur, + { data: { count: 0 } } + ); + + return { data: { count: total } }; + } catch (err) { + throw new Error(err); + } + }, + label: i18n._(t`Workflow Job Template Nodes`), + }, + { + request: async () => InventorySourcesAPI.readGroups(inventorySource.id), + label: i18n._(t`Groups`), + }, + { + request: async () => InventorySourcesAPI.readHosts(inventorySource.id), + label: i18n._(t`Hosts`), + }, + ], + + project: (selected, i18n) => [ + { + request: () => + JobTemplatesAPI.read({ + project: selected.id, + }), + label: i18n._(t`Job Templates`), + }, + { + request: () => + WorkflowJobTemplateNodesAPI.read({ + unified_job_template: selected.id, + }), + label: i18n._(t`Workflow Job Templates`), + }, + { + request: () => + InventorySourcesAPI.read({ + source_project: selected.id, + }), + label: i18n._(t`Inventory Sources`), + }, + ], + + template: (selected, i18n) => [ + { + request: async () => + WorkflowJobTemplateNodesAPI.read({ + unified_job_template: selected.id, + 
}), + label: [i18n._(t`Workflow Job Template Nodes`)], + }, + ], + + organization: (selected, i18n) => [ + { + request: async () => + CredentialsAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Credential`), + }, + { + request: async () => + TeamsAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Teams`), + }, + { + request: async () => + NotificationTemplatesAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Notification Templates`), + }, + { + request: () => + ExecutionEnvironmentsAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Execution Environments`), + }, + { + request: async () => + ProjectsAPI.read({ + organization: selected.id, + }), + label: [i18n._(t`Projects`)], + }, + { + request: () => + InventoriesAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Inventories`), + }, + { + request: () => + ApplicationsAPI.read({ + organization: selected.id, + }), + label: i18n._(t`Applications`), + }, + ], + executionEnvironment: (selected, i18n) => [ + { + request: async () => + UnifiedJobTemplatesAPI.read({ + execution_environment: selected.id, + }), + label: [i18n._(t`Templates`)], + }, + { + request: async () => + ProjectsAPI.read({ + default_environment: selected.id, + }), + label: [i18n._(t`Projects`)], + }, + { + request: async () => + OrganizationsAPI.read({ + default_environment: selected.id, + }), + label: [i18n._(t`Organizations`)], + }, + { + request: async () => { + try { + const { + data: { results }, + } = await InventorySourcesAPI.read({ + execution_environment: selected.id, + }); + + const responses = await Promise.all( + results.map(result => + WorkflowJobTemplateNodesAPI.read({ + unified_job_template: result.id, + }) + ) + ); + + const total = responses.reduce( + ({ data: { count: acc } }, { data: { count: cur } }) => acc + cur, + { data: { count: 0 } } + ); + return { data: { count: total } }; + } catch (err) { + throw new Error(err); + } + }, + label: [i18n._(t`Workflow Job 
Template Nodes`)], + }, + ], + instanceGroup: (selected, i18n) => [ + { + request: () => OrganizationsAPI.read({ instance_groups: selected.id }), + label: i18n._(t`Organizations`), + }, + { + request: () => InventoriesAPI.read({ instance_groups: selected.id }), + label: i18n._(t`Inventories`), + }, + { + request: () => + UnifiedJobTemplatesAPI.read({ instance_groups: selected.id }), + label: i18n._(t`Templates`), + }, + ], +}; diff --git a/awx/ui_next/src/util/jobs.js b/awx/ui_next/src/util/jobs.js index e4129388a5..de227ffc59 100644 --- a/awx/ui_next/src/util/jobs.js +++ b/awx/ui_next/src/util/jobs.js @@ -1,3 +1,22 @@ -export default function isJobRunning(status) { +import { + JobsAPI, + ProjectUpdatesAPI, + SystemJobsAPI, + WorkflowJobsAPI, + InventoryUpdatesAPI, + AdHocCommandsAPI, +} from '../api'; + +export function isJobRunning(status) { return ['new', 'pending', 'waiting', 'running'].includes(status); } + +export function getJobModel(type) { + if (type === 'ad_hoc_command') return AdHocCommandsAPI; + if (type === 'inventory_update') return InventoryUpdatesAPI; + if (type === 'project_update') return ProjectUpdatesAPI; + if (type === 'system_job') return SystemJobsAPI; + if (type === 'workflow_job') return WorkflowJobsAPI; + + return JobsAPI; +} diff --git a/awx/ui_next/src/util/jobs.test.js b/awx/ui_next/src/util/jobs.test.js index 953b06ba17..6dcab23166 100644 --- a/awx/ui_next/src/util/jobs.test.js +++ b/awx/ui_next/src/util/jobs.test.js @@ -1,4 +1,4 @@ -import isJobRunning from './jobs'; +import { getJobModel, isJobRunning } from './jobs'; describe('isJobRunning', () => { test('should return true for new', () => { @@ -23,3 +23,23 @@ describe('isJobRunning', () => { expect(isJobRunning('failed')).toBe(false); }); }); + +describe('getJobModel', () => { + test('should return valid job model in all cases', () => { + const baseUrls = []; + [ + 'ad_hoc_command', + 'inventory_update', + 'project_update', + 'system_job', + 'workflow_job', + 'job', + 'default', + 
].forEach(type => { + expect(getJobModel(type)).toHaveProperty('http'); + expect(getJobModel(type).jobEventSlug).toBeDefined(); + baseUrls.push(getJobModel(type).baseUrl); + }); + expect(new Set(baseUrls).size).toBe(baseUrls.length - 1); + }); +}); diff --git a/awx_collection/plugins/module_utils/tower_api.py b/awx_collection/plugins/module_utils/tower_api.py index 1ef663fb26..f6c63b08de 100644 --- a/awx_collection/plugins/module_utils/tower_api.py +++ b/awx_collection/plugins/module_utils/tower_api.py @@ -7,6 +7,7 @@ from ansible.module_utils.urls import Request, SSLValidationError, ConnectionErr from ansible.module_utils.six import PY2 from ansible.module_utils.six.moves.urllib.error import HTTPError from ansible.module_utils.six.moves.http_cookiejar import CookieJar +from distutils.version import LooseVersion as Version import time from json import loads, dumps @@ -259,10 +260,22 @@ class TowerAPIModule(TowerModule): tower_type = response.info().getheader('X-API-Product-Name', None) tower_version = response.info().getheader('X-API-Product-Version', None) + parsed_collection_version = Version(self._COLLECTION_VERSION).version + parsed_tower_version = Version(tower_version).version + if tower_type == 'AWX': + collection_compare_ver = parsed_collection_version[0] + tower_compare_ver = parsed_tower_version[0] + else: + collection_compare_ver = "{}.{}".format(parsed_collection_version[0], parsed_collection_version[1]) + tower_compare_ver = '{}.{}'.format(parsed_tower_version[0], parsed_tower_version[1]) + if self._COLLECTION_TYPE not in self.collection_to_version or self.collection_to_version[self._COLLECTION_TYPE] != tower_type: self.warn("You are using the {0} version of this collection but connecting to {1}".format(self._COLLECTION_TYPE, tower_type)) - elif self._COLLECTION_VERSION != tower_version: - self.warn("You are running collection version {0} but connecting to tower version {1}".format(self._COLLECTION_VERSION, tower_version)) + elif collection_compare_ver 
!= tower_compare_ver: + self.warn( + "You are running collection version {0} but connecting to {2} version {1}".format(self._COLLECTION_VERSION, tower_version, tower_type) + ) + self.version_checked = True response_body = '' diff --git a/awx_collection/plugins/modules/tower_settings.py b/awx_collection/plugins/modules/tower_settings.py index 4e2f91dd14..18531a28d7 100644 --- a/awx_collection/plugins/modules/tower_settings.py +++ b/awx_collection/plugins/modules/tower_settings.py @@ -41,15 +41,15 @@ extends_documentation_fragment: awx.awx.auth ''' EXAMPLES = ''' -- name: Set the value of AWX_PROOT_BASE_PATH +- name: Set the value of AWX_ISOLATION_BASE_PATH tower_settings: - name: AWX_PROOT_BASE_PATH + name: AWX_ISOLATION_BASE_PATH value: "/tmp" register: testing_settings -- name: Set the value of AWX_PROOT_SHOW_PATHS +- name: Set the value of AWX_ISOLATION_SHOW_PATHS tower_settings: - name: "AWX_PROOT_SHOW_PATHS" + name: "AWX_ISOLATION_SHOW_PATHS" value: "'/var/lib/awx/projects/', '/tmp'" register: testing_settings diff --git a/awx_collection/test/awx/test_module_utils.py b/awx_collection/test/awx/test_module_utils.py index 473bfe9457..89bd44154e 100644 --- a/awx_collection/test/awx/test_module_utils.py +++ b/awx_collection/test/awx/test_module_utils.py @@ -9,9 +9,18 @@ from awx.main.models import Organization, Team, Project, Inventory from requests.models import Response from unittest import mock +awx_name = 'AWX' +tower_name = 'Red Hat Ansible Tower' +ping_version = '1.2.3' -def getheader(self, header_name, default): - mock_headers = {'X-API-Product-Name': 'not-junk', 'X-API-Product-Version': '1.2.3'} + +def getTowerheader(self, header_name, default): + mock_headers = {'X-API-Product-Name': tower_name, 'X-API-Product-Version': ping_version} + return mock_headers.get(header_name, default) + + +def getAWXheader(self, header_name, default): + mock_headers = {'X-API-Product-Name': awx_name, 'X-API-Product-Version': ping_version} return mock_headers.get(header_name, 
default) @@ -23,9 +32,17 @@ def status(self): return 200 -def mock_ping_response(self, method, url, **kwargs): +def mock_tower_ping_response(self, method, url, **kwargs): r = Response() - r.getheader = getheader.__get__(r) + r.getheader = getTowerheader.__get__(r) + r.read = read.__get__(r) + r.status = status.__get__(r) + return r + + +def mock_awx_ping_response(self, method, url, **kwargs): + r = Response() + r.getheader = getAWXheader.__get__(r) r.read = read.__get__(r) r.status = status.__get__(r) return r @@ -36,13 +53,62 @@ def test_version_warning(collection_import, silence_warning): cli_data = {'ANSIBLE_MODULE_ARGS': {}} testargs = ['module_file2.py', json.dumps(cli_data)] with mock.patch.object(sys, 'argv', testargs): - with mock.patch('ansible.module_utils.urls.Request.open', new=mock_ping_response): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_awx_ping_response): + my_module = TowerAPIModule(argument_spec=dict()) + my_module._COLLECTION_VERSION = "2.0.0" + my_module._COLLECTION_TYPE = "awx" + my_module.get_endpoint('ping') + silence_warning.assert_called_once_with( + 'You are running collection version {} but connecting to {} version {}'.format(my_module._COLLECTION_VERSION, awx_name, ping_version) + ) + + +def test_version_warning_strictness_awx(collection_import, silence_warning): + TowerAPIModule = collection_import('plugins.module_utils.tower_api').TowerAPIModule + cli_data = {'ANSIBLE_MODULE_ARGS': {}} + testargs = ['module_file2.py', json.dumps(cli_data)] + # Compare 1.0.0 to 1.2.3 (major matches) + with mock.patch.object(sys, 'argv', testargs): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_awx_ping_response): my_module = TowerAPIModule(argument_spec=dict()) my_module._COLLECTION_VERSION = "1.0.0" - my_module._COLLECTION_TYPE = "not-junk" - my_module.collection_to_version['not-junk'] = 'not-junk' + my_module._COLLECTION_TYPE = "awx" my_module.get_endpoint('ping') - 
silence_warning.assert_called_once_with('You are running collection version 1.0.0 but connecting to tower version 1.2.3') + silence_warning.assert_not_called() + + # Compare 1.2.0 to 1.2.3 (major matches minor does not count) + with mock.patch.object(sys, 'argv', testargs): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_awx_ping_response): + my_module = TowerAPIModule(argument_spec=dict()) + my_module._COLLECTION_VERSION = "1.2.0" + my_module._COLLECTION_TYPE = "awx" + my_module.get_endpoint('ping') + silence_warning.assert_not_called() + + +def test_version_warning_strictness_tower(collection_import, silence_warning): + TowerAPIModule = collection_import('plugins.module_utils.tower_api').TowerAPIModule + cli_data = {'ANSIBLE_MODULE_ARGS': {}} + testargs = ['module_file2.py', json.dumps(cli_data)] + # Compare 1.2.0 to 1.2.3 (major/minor matches) + with mock.patch.object(sys, 'argv', testargs): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_tower_ping_response): + my_module = TowerAPIModule(argument_spec=dict()) + my_module._COLLECTION_VERSION = "1.2.0" + my_module._COLLECTION_TYPE = "tower" + my_module.get_endpoint('ping') + silence_warning.assert_not_called() + + # Compare 1.0.0 to 1.2.3 (major/minor fail to match) + with mock.patch.object(sys, 'argv', testargs): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_tower_ping_response): + my_module = TowerAPIModule(argument_spec=dict()) + my_module._COLLECTION_VERSION = "1.0.0" + my_module._COLLECTION_TYPE = "tower" + my_module.get_endpoint('ping') + silence_warning.assert_called_once_with( + 'You are running collection version {} but connecting to {} version {}'.format(my_module._COLLECTION_VERSION, tower_name, ping_version) + ) def test_type_warning(collection_import, silence_warning): @@ -50,13 +116,12 @@ def test_type_warning(collection_import, silence_warning): cli_data = {'ANSIBLE_MODULE_ARGS': {}} testargs = ['module_file2.py', 
json.dumps(cli_data)] with mock.patch.object(sys, 'argv', testargs): - with mock.patch('ansible.module_utils.urls.Request.open', new=mock_ping_response): + with mock.patch('ansible.module_utils.urls.Request.open', new=mock_awx_ping_response): my_module = TowerAPIModule(argument_spec={}) - my_module._COLLECTION_VERSION = "1.2.3" - my_module._COLLECTION_TYPE = "junk" - my_module.collection_to_version['junk'] = 'junk' + my_module._COLLECTION_VERSION = ping_version + my_module._COLLECTION_TYPE = "tower" my_module.get_endpoint('ping') - silence_warning.assert_called_once_with('You are using the junk version of this collection but connecting to not-junk') + silence_warning.assert_called_once_with('You are using the {} version of this collection but connecting to {}'.format(my_module._COLLECTION_TYPE, awx_name)) def test_duplicate_config(collection_import, silence_warning): diff --git a/awx_collection/tests/integration/targets/tower_project_manual/tasks/create_project_dir.yml b/awx_collection/tests/integration/targets/tower_project_manual/tasks/create_project_dir.yml index 7f5b3b49c2..807c604dd9 100644 --- a/awx_collection/tests/integration/targets/tower_project_manual/tasks/create_project_dir.yml +++ b/awx_collection/tests/integration/targets/tower_project_manual/tasks/create_project_dir.yml @@ -1,25 +1,9 @@ --- -- name: get tower host variable - shell: tower-cli config host | cut -d ' ' -f2 - register: host - -- name: get tower username variable - shell: tower-cli config username | cut -d ' ' -f2 - register: username - -- name: get tower password variable - shell: tower-cli config password | cut -d ' ' -f2 - register: password - -- name: Fetch project_base_dir - uri: - url: "{{ host.stdout }}/api/v2/config/" - user: "{{ username.stdout }}" - password: "{{ password.stdout }}" - validate_certs: false - return_content: true - force_basic_auth: true - register: awx_config +- name: Load the UI settings + set_fact: + project_base_dir: "{{ tower_settings.project_base_dir }}" + 
vars: + tower_settings: "{{ lookup('awx.awx.tower_api', 'config/') }}" - tower_inventory: name: localhost @@ -43,16 +27,29 @@ -----END EC PRIVATE KEY----- organization: Default -- name: Disable bubblewrap - command: tower-cli setting modify AWX_PROOT_ENABLED false - - block: + - name: Add a path to a setting + tower_settings: + name: AWX_ISOLATION_SHOW_PATHS + value: "[{{ project_base_dir }}]" + - name: Create a directory for manual project - vars: - project_base_dir: "{{ awx_config.json.project_base_dir }}" - command: tower-cli ad_hoc launch --wait --inventory localhost - --credential dummy --module-name command - --module-args "mkdir -p {{ project_base_dir }}/{{ project_dir_name }}" + tower_ad_hoc_command: + credential: dummy + inventory: localhost + job_type: run + module_args: "mkdir -p {{ project_base_dir }}/{{ project_dir_name }}" + module_name: command + wait: true + always: - - name: enable bubblewrap - command: tower-cli setting modify AWX_PROOT_ENABLED true + - name: Delete path from setting + tower_settings: + name: AWX_ISOLATION_SHOW_PATHS + value: [] + + - name: Delete dummy credential + tower_credential: + name: dummy + kind: ssh + state: absent diff --git a/awx_collection/tests/integration/targets/tower_settings/tasks/main.yml b/awx_collection/tests/integration/targets/tower_settings/tasks/main.yml index 8a42f5768e..f6dcd002fd 100644 --- a/awx_collection/tests/integration/targets/tower_settings/tasks/main.yml +++ b/awx_collection/tests/integration/targets/tower_settings/tasks/main.yml @@ -1,13 +1,13 @@ --- -- name: Set the value of AWX_PROOT_SHOW_PATHS to a baseline +- name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline tower_settings: - name: AWX_PROOT_SHOW_PATHS + name: AWX_ISOLATION_SHOW_PATHS value: '["/var/lib/awx/projects/"]' -- name: Set the value of AWX_PROOT_SHOW_PATHS to get an error back from Tower +- name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from Tower tower_settings: settings: - AWX_PROOT_SHOW_PATHS: 
+ AWX_ISOLATION_SHOW_PATHS: 'not': 'a valid' 'tower': 'setting' register: result @@ -17,9 +17,9 @@ that: - "result is failed" -- name: Set the value of AWX_PROOT_SHOW_PATHS +- name: Set the value of AWX_ISOLATION_SHOW_PATHS tower_settings: - name: AWX_PROOT_SHOW_PATHS + name: AWX_ISOLATION_SHOW_PATHS value: '["/var/lib/awx/projects/", "/tmp"]' register: result @@ -27,9 +27,9 @@ that: - "result is changed" -- name: Attempt to set the value of AWX_PROOT_BASE_PATH to what it already is +- name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is tower_settings: - name: AWX_PROOT_BASE_PATH + name: AWX_ISOLATION_BASE_PATH value: /tmp register: result @@ -42,7 +42,7 @@ - name: Apply a single setting via settings tower_settings: - name: AWX_PROOT_SHOW_PATHS + name: AWX_ISOLATION_SHOW_PATHS value: '["/var/lib/awx/projects/", "/var/tmp"]' register: result @@ -53,8 +53,8 @@ - name: Apply multiple setting via settings with no change tower_settings: settings: - AWX_PROOT_BASE_PATH: /tmp - AWX_PROOT_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"] + AWX_ISOLATION_BASE_PATH: /tmp + AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"] register: result - debug: @@ -67,8 +67,8 @@ - name: Apply multiple setting via settings with change tower_settings: settings: - AWX_PROOT_BASE_PATH: /tmp - AWX_PROOT_SHOW_PATHS: [] + AWX_ISOLATION_BASE_PATH: /tmp + AWX_ISOLATION_SHOW_PATHS: [] register: result - assert: @@ -77,7 +77,7 @@ - name: Handle an omit value tower_settings: - name: AWX_PROOT_BASE_PATH + name: AWX_ISOLATION_BASE_PATH value: '{{ junk_var | default(omit) }}' register: result ignore_errors: true diff --git a/awx_collection/tests/sanity/ignore-2.10.txt b/awx_collection/tests/sanity/ignore-2.10.txt index 8b5f90b44d..a0e41bd790 100644 --- a/awx_collection/tests/sanity/ignore-2.10.txt +++ b/awx_collection/tests/sanity/ignore-2.10.txt @@ -13,3 +13,4 @@ plugins/module_utils/tower_legacy.py pylint:super-with-arguments 
plugins/module_utils/tower_module.py pylint:super-with-arguments plugins/module_utils/tower_module.py pylint:raise-missing-from test/awx/conftest.py pylint:raise-missing-from +plugins/module_utils/tower_api.py pep8:E203 diff --git a/awx_collection/tests/sanity/ignore-2.11.txt b/awx_collection/tests/sanity/ignore-2.11.txt new file mode 100644 index 0000000000..88c3a3f195 --- /dev/null +++ b/awx_collection/tests/sanity/ignore-2.11.txt @@ -0,0 +1 @@ +plugins/module_utils/tower_api.py pep8:E203 diff --git a/awx_collection/tests/sanity/ignore-2.9.txt b/awx_collection/tests/sanity/ignore-2.9.txt index f6ee755ea0..8394f83e57 100644 --- a/awx_collection/tests/sanity/ignore-2.9.txt +++ b/awx_collection/tests/sanity/ignore-2.9.txt @@ -13,3 +13,4 @@ plugins/module_utils/tower_legacy.py pylint:super-with-arguments plugins/module_utils/tower_module.py pylint:super-with-arguments plugins/module_utils/tower_module.py pylint:raise-missing-from test/awx/conftest.py pylint:raise-missing-from +plugins/module_utils/tower_api.py pep8:E203 diff --git a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 index ed02006c3d..274df392b5 100644 --- a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 +++ b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 @@ -127,7 +127,7 @@ py.test awx_collection/test/awx/ ## Running Integration Tests -The integration tests require a virtualenv with `ansible` >= 2.9 and `tower_cli`. +The integration tests require a virtualenv with `ansible` >= 2.9 and `awxkit`. The collection must first be installed, which can be done using `make install_collection`. You also need a configuration file, as described in the running section. 
diff --git a/awxkit/setup.py b/awxkit/setup.py index 23f4e161cb..657fce0a84 100644 --- a/awxkit/setup.py +++ b/awxkit/setup.py @@ -67,7 +67,7 @@ setup( 'PyYAML', 'requests', ], - python_requires=">=3.6", + python_requires=">=3.8", extras_require={'formatting': ['jq'], 'websockets': ['websocket-client==0.57.0'], 'crypto': ['cryptography']}, license='Apache 2.0', classifiers=[ @@ -79,7 +79,7 @@ setup( 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', - 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.8', 'Topic :: System :: Software Distribution', 'Topic :: System :: Systems Administration', ], diff --git a/awxkit/tox.ini b/awxkit/tox.ini index 42c72bd608..73c9493e1d 100644 --- a/awxkit/tox.ini +++ b/awxkit/tox.ini @@ -8,7 +8,7 @@ skip_missing_interpreters = true # skipsdist = true [testenv] -basepython = python3.6 +basepython = python3.8 passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH setenv = PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:. diff --git a/docs/clustering.md b/docs/clustering.md index c77968191d..d13da47ae7 100644 --- a/docs/clustering.md +++ b/docs/clustering.md @@ -103,7 +103,7 @@ When a job is scheduled to run on an "isolated" instance: - a static inventory file - pexpect passwords - environment variables - - the `ansible`/`ansible-playbook` command invocation, _i.e._, `bwrap ... ansible-playbook -i /path/to/inventory /path/to/playbook.yml -e ...` + - the `ansible`/`ansible-playbook` command invocation, _i.e._, `ansible-playbook -i /path/to/inventory /path/to/playbook.yml -e ...` * Once the metadata has been `rsync`ed to the isolated host, the "controller instance" starts a process on the "isolated" instance which consumes the metadata and starts running `ansible`/`ansible-playbook`. As the playbook runs, job artifacts (such as `stdout` and job events) are written to disk on the "isolated" instance. 
diff --git a/docs/development/minikube.md b/docs/development/minikube.md index 93ad0d20a1..73bb85e2ae 100644 --- a/docs/development/minikube.md +++ b/docs/development/minikube.md @@ -66,7 +66,7 @@ In the root of awx-operator: ``` $ ansible-playbook ansible/instantiate-awx-deployment.yml \ -e development_mode=yes \ - -e tower_image=gcr.io/ansible-tower-engineering/awx_kube_devel:devel \ + -e tower_image=quay.io/awx/awx_kube_devel:devel \ -e tower_image_pull_policy=Always \ -e tower_ingress_type=ingress ``` @@ -81,7 +81,7 @@ In the root of the AWX repo: ``` $ make awx-kube-dev-build -$ docker push gcr.io/ansible-tower-engineering/awx_kube_devel:${COMPOSE_TAG} +$ docker push quay.io/awx/awx_kube_devel:${COMPOSE_TAG} ``` In the root of awx-operator: @@ -89,7 +89,7 @@ In the root of awx-operator: ``` $ ansible-playbook ansible/instantiate-awx-deployment.yml \ -e development_mode=yes \ - -e tower_image=gcr.io/ansible-tower-engineering/awx_kube_devel:${COMPOSE_TAG} \ + -e tower_image=quay.io/awx/awx_kube_devel:${COMPOSE_TAG} \ -e tower_image_pull_policy=Always \ -e tower_ingress_type=ingress ``` diff --git a/docs/licenses/distro.txt b/docs/licenses/distro.txt new file mode 100644 index 0000000000..e06d208186 --- /dev/null +++ b/docs/licenses/distro.txt @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/docs/licenses/idna-ssl.txt b/docs/licenses/idna-ssl.txt deleted file mode 100644 index 13ff0bb0c7..0000000000 --- a/docs/licenses/idna-ssl.txt +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License - -Copyright (c) 2018 aio-libs team https://github.com/aio-libs/ -Copyright (c) 2017 Ocean S. A. https://ocean.io/ - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/docs/licenses/importlib-metadata.txt b/docs/licenses/importlib-metadata.txt deleted file mode 100644 index be7e092b0b..0000000000 --- a/docs/licenses/importlib-metadata.txt +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Jason R. Coombs, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/docs/licenses/importlib-resources.txt b/docs/licenses/importlib-resources.txt deleted file mode 100644 index 378b991a4d..0000000000 --- a/docs/licenses/importlib-resources.txt +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Brett Cannon, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/docs/licenses/typing-extensions.txt b/docs/licenses/typing-extensions.txt deleted file mode 100644 index 583f9f6e61..0000000000 --- a/docs/licenses/typing-extensions.txt +++ /dev/null @@ -1,254 +0,0 @@ -A. 
HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. 
The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. 
Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. 
By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. 
CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. 
By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/docs/licenses/zipp.txt b/docs/licenses/zipp.txt deleted file mode 100644 index 5e795a61f3..0000000000 --- a/docs/licenses/zipp.txt +++ /dev/null @@ -1,7 +0,0 @@ -Copyright Jason R. 
Coombs - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/process_isolation.md b/docs/process_isolation.md deleted file mode 100644 index c8fe21b0ad..0000000000 --- a/docs/process_isolation.md +++ /dev/null @@ -1,50 +0,0 @@ -## Process Isolation Overview - -In older versions of Ansible Tower, we used a system called `proot` to isolate Tower job processes from the rest of the system. - -Tower version 3.1 and later switched to using `bubblewrap`, which is a much lighter-weight and maintained process isolation system. - -Tower 3.5 and later uses the process isolation feature in Ansible runner to achieve process isolation. 
- - -### Activating Process Isolation - -`bubblewrap` is enabled by default; it can be turned off via Tower Config or from a Tower settings file: - - AWX_PROOT_ENABLED = False - -Process isolation, when enabled, will be used for the following Job Types: - -* Job Templates - Launching jobs from regular job templates -* Ad-hoc Commands - Launching ad-hoc commands against one or more hosts in inventory - - -### Tunables - -Process Isolation will, by default, hide the following directories from the tasks mentioned above: - -* `/etc/tower` - To prevent exposing Tower configuration -* `/var/lib/awx` - With the exception of the current project being used (for regular job templates) -* `/var/log` -* `/tmp` (or whatever the system `temp dir` is) - With the exception of the processes's own temp files - -If there is other information on the system that is sensitive and should be hidden, it can be added via the Tower Configuration Screen -or by updating the following entry in a tower settings file: - - AWX_PROOT_HIDE_PATHS = ['/list/of/', '/paths'] - -If there are any directories that should specifically be exposed that can be set in a similar way: - - AWX_PROOT_SHOW_PATHS = ['/list/of/', '/paths'] - -By default, the system will use the system's `tmp dir` (`/tmp` by default) as its staging area. This can be changed via the following setting: - - AWX_PROOT_BASE_PATH = "/opt/tmp" - - -### Project Folder Isolation - -Starting in AWX versions above 6.0.0, the project folder will be copied for each job run. -This allows playbooks to make local changes to the source tree for convenience, -such as creating temporary files, without the possibility of interference with -other jobs. diff --git a/docs/tasks.md b/docs/tasks.md index 13d7f969b6..f2e29ec777 100644 --- a/docs/tasks.md +++ b/docs/tasks.md @@ -187,7 +187,7 @@ This task spawns an `ansible` process, which then runs a command using Ansible. - Build a dictionary of passwords for the SSH private key, SSH user and sudo/su. 
- Build an environment dictionary for Ansible. - Build a command line argument list for running Ansible, optionally using `ssh-agent` for public/private key authentication. -- Return whether the task should use `bwrap`. +- Return whether the task should use process isolation. For more information on ad hoc commands, read the [Running Ad Hoc Commands section](https://docs.ansible.com/ansible-tower/latest/html/userguide/inventories.html#running-ad-hoc-commands) of the Inventories page of the Ansible Tower User Guide. diff --git a/pytest.ini b/pytest.ini index fc407b5f17..d5d7273433 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,7 +1,7 @@ [pytest] DJANGO_SETTINGS_MODULE = awx.settings.development -python_paths = /var/lib/awx/venv/tower/lib/python3.6/site-packages -site_dirs = /var/lib/awx/venv/tower/lib/python3.6/site-packages +python_paths = /var/lib/awx/venv/tower/lib/python3.8/site-packages +site_dirs = /var/lib/awx/venv/tower/lib/python3.8/site-packages python_files = *.py addopts = --reuse-db --nomigrations --tb=native markers = diff --git a/requirements/requirements.in b/requirements/requirements.in index f54094ae72..1970b215fb 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -8,6 +8,7 @@ channels channels-redis>=3.1.0 # https://github.com/django/channels_redis/issues/212 cryptography<3.0.0 daphne +distro django==2.2.16 # see UPGRADE BLOCKERs django-auth-ldap django-cors-headers diff --git a/requirements/requirements.txt b/requirements/requirements.txt index ba3cfe3087..9450d4f879 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -83,6 +83,8 @@ defusedxml==0.6.0 # social-auth-core dictdiffer==0.8.1 # via openshift +distro==1.5.0 + # via -r /awx_devel/requirements/requirements.in django-auth-ldap==2.1.0 # via -r /awx_devel/requirements/requirements.in django-cors-headers==3.2.1 @@ -146,22 +148,12 @@ hyperlink==20.0.1 # via # autobahn # twisted -idna-ssl==1.1.0 - # via aiohttp idna==2.9 # via # 
hyperlink - # idna-ssl # requests # twisted # yarl -importlib-metadata==1.5.0 - # via - # importlib-resources - # irc - # jsonschema -importlib-resources==1.4.0 - # via jaraco.text incremental==17.5.0 # via twisted irc==18.0.0 @@ -389,8 +381,6 @@ twisted[tls]==20.3.0 # daphne txaio==20.12.1 # via autobahn -typing-extensions==3.7.4.1 - # via aiohttp urllib3==1.25.8 # via # kubernetes @@ -407,10 +397,6 @@ xmlsec==1.3.3 # via python3-saml yarl==1.4.2 # via aiohttp -zipp==3.1.0 - # via - # importlib-metadata - # importlib-resources zope.interface==5.0.0 # via twisted diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index df8cc1cb13..2c16fad0c5 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -1,7 +1,6 @@ django-debug-toolbar==1.11 django-rest-swagger pprofile -ipython==5.2.1 unittest2 black pytest diff --git a/requirements/updater.sh b/requirements/updater.sh index 2911aaf008..cca8b4a8ad 100755 --- a/requirements/updater.sh +++ b/requirements/updater.sh @@ -14,11 +14,11 @@ _cleanup() { generate_requirements() { venv="`pwd`/venv" echo $venv - /usr/bin/python3.6 -m venv "${venv}" + /usr/bin/python3.8 -m venv "${venv}" # shellcheck disable=SC1090 source ${venv}/bin/activate - ${venv}/bin/python3.6 -m pip install -U pip pip-tools + ${venv}/bin/python3.8 -m pip install -U pip pip-tools ${pip_compile} --output-file requirements.txt "${requirements_in}" "${requirements_git}" # consider the git requirements for purposes of resolving deps diff --git a/tools/ansible/roles/dockerfile/files/settings.py b/tools/ansible/roles/dockerfile/files/settings.py index 507e9b4d7b..c2abeb2df2 100644 --- a/tools/ansible/roles/dockerfile/files/settings.py +++ b/tools/ansible/roles/dockerfile/files/settings.py @@ -22,10 +22,6 @@ SECRET_KEY = get_secret() ALLOWED_HOSTS = ['*'] -# Container environments don't like chroots -AWX_PROOT_ENABLED = False - - CLUSTER_HOST_ID = "awx" SYSTEM_UUID = 
'00000000-0000-0000-0000-000000000000' diff --git a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 index df076ec5c0..8feb7e90dd 100644 --- a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 +++ b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 @@ -34,16 +34,16 @@ RUN dnf -y update && \ patch \ @postgresql:12 \ postgresql-devel \ - python3-devel \ - python3-pip \ - python3-psycopg2 \ - python3-setuptools \ + python38-devel \ + python38-pip \ + python38-psycopg2 \ + python38-setuptools \ swig \ unzip \ xmlsec1-devel \ xmlsec1-openssl-devel -RUN python3 -m ensurepip && pip3 install "virtualenv < 20" +RUN python3.8 -m ensurepip && pip3 install "virtualenv < 20" # Install & build requirements ADD Makefile /tmp/Makefile @@ -94,9 +94,9 @@ RUN dnf -y update && \ @postgresql:12 \ python3-devel \ python3-libselinux \ - python3-pip \ - python3-psycopg2 \ - python3-setuptools \ + python38-pip \ + python38-psycopg2 \ + python38-setuptools \ rsync \ subversion \ sudo \ @@ -119,7 +119,7 @@ RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master RUN curl -L -o /usr/bin/tini https://github.com/krallin/tini/releases/download/v0.19.0/tini-{{ tini_architecture | default('amd64') }} && \ chmod +x /usr/bin/tini -RUN python3 -m ensurepip && pip3 install "virtualenv < 20" supervisor {% if build_dev|bool %}black{% endif %} +RUN python3.8 -m ensurepip && pip3 install "virtualenv < 20" supervisor {% if build_dev|bool %}black{% endif %} RUN rm -rf /root/.cache && rm -rf /tmp/* @@ -228,8 +228,8 @@ RUN for dir in \ RUN for dir in \ /var/lib/awx/venv \ /var/lib/awx/venv/awx/bin \ - /var/lib/awx/venv/awx/lib/python3.6 \ - /var/lib/awx/venv/awx/lib/python3.6/site-packages \ + /var/lib/awx/venv/awx/lib/python3.8 \ + /var/lib/awx/venv/awx/lib/python3.8/site-packages \ /var/lib/awx/projects \ /var/lib/awx/rsyslog \ /var/run/awx-rsyslog \ @@ -238,7 +238,7 @@ RUN for dir in \ do mkdir -m 0775 -p $dir 
; chmod g+rw $dir ; chgrp root $dir ; done && \ for file in \ /var/run/nginx.pid \ - /var/lib/awx/venv/awx/lib/python3.6/site-packages/awx.egg-link ; \ + /var/lib/awx/venv/awx/lib/python3.8/site-packages/awx.egg-link ; \ do touch $file ; chmod g+rw $file ; done {% endif %} diff --git a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 index 9810e7a7cf..3b32df378e 100644 --- a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 +++ b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 @@ -87,8 +87,11 @@ services: - "awx_db:/var/lib/postgresql/data" volumes: awx_db: + name: tools_awx_db {% for i in range(cluster_node_count|int) -%} {% set container_postfix = loop.index %} receptor_{{ container_postfix }}: + name: tools_receptor_{{ container_postfix }} redis_socket_{{ container_postfix }}: + name: tools_redis_socket_{{ container_postfix }} {% endfor -%}