Merge pull request #62 from ansible/devel

Rebase
Sean Sullivan 2021-03-28 22:19:59 -05:00 committed by GitHub
commit 0b6acdbcc9
152 changed files with 3116 additions and 1332 deletions


@ -2,6 +2,12 @@
This is a list of high-level changes for each release of AWX. A full list of commits can be found at `https://github.com/ansible/awx/releases/tag/<version>`.
# 19.0.0 (TBD)
* AWX now runs on Python 3.8 (https://github.com/ansible/awx/pull/8778/)
* Added support for Execution Environments to the Activity Stream (https://github.com/ansible/awx/issues/9308)
* Fixed a bug that improperly formatted OpenSSH keys specified in custom Credential Types (https://github.com/ansible/awx/issues/9361)
# 18.0.0 (March 23, 2021)
**IMPORTANT INSTALL AND UPGRADE NOTES**


@ -1,4 +1,4 @@
PYTHON ?= python3
PYTHON ?= python3.8
PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
OFFICIAL ?= no
@ -23,7 +23,7 @@ VENV_BASE ?= /var/lib/awx/venv/
SCL_PREFIX ?=
CELERY_SCHEDULE_FILE ?= /var/lib/awx/beat.db
DEV_DOCKER_TAG_BASE ?= gcr.io/ansible-tower-engineering
DEV_DOCKER_TAG_BASE ?= quay.io/awx
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
# Python packages to install only from source (not from binary wheels)
@ -125,7 +125,7 @@ virtualenv_awx:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/awx" ]; then \
virtualenv -p $(PYTHON) $(VENV_BASE)/awx; \
$(PYTHON) -m venv $(VENV_BASE)/awx; \
$(VENV_BASE)/awx/bin/pip install $(PIP_OPTIONS) $(VENV_BOOTSTRAP); \
fi; \
fi
@ -164,7 +164,7 @@ version_file:
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
python -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \
$(PYTHON) -c "import awx; print(awx.__version__)" > /var/lib/awx/.awx_version; \
# Do any one-time init tasks.
comma := ,
@ -272,12 +272,12 @@ reports:
mkdir -p $@
black: reports
command -v black >/dev/null 2>&1 || { echo "could not find black on your PATH, you may need to \`pip install black\`, or set AWX_IGNORE_BLACK=1" && exit 1; }
(set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report)
@command -v black >/dev/null 2>&1 || { echo "could not find black on your PATH, you may need to \`pip install black\`, or set AWX_IGNORE_BLACK=1" && exit 1; }
@(set -o pipefail && $@ $(BLACK_ARGS) awx awxkit awx_collection | tee reports/$@.report)
.git/hooks/pre-commit:
echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit
chmod +x .git/hooks/pre-commit
@echo "[ -z \$$AWX_IGNORE_BLACK ] && (black --check \`git diff --cached --name-only --diff-filter=AM | grep -E '\.py$\'\` || (echo 'To fix this, run \`make black\` to auto-format your code prior to commit, or set AWX_IGNORE_BLACK=1' && exit 1))" > .git/hooks/pre-commit
@chmod +x .git/hooks/pre-commit
genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
@ -292,7 +292,7 @@ swagger: reports
check: black
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/setup.py egg_info_dev
cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/python$(PYTHON_VERSION)/site-packages/awx.egg-link
TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests


@ -129,6 +129,18 @@ class PrometheusJSONRenderer(renderers.JSONRenderer):
parsed_metrics = text_string_to_metric_families(data)
data = {}
for family in parsed_metrics:
data[family.name] = {}
data[family.name]['help_text'] = family.documentation
data[family.name]['type'] = family.type
data[family.name]['samples'] = []
for sample in family.samples:
data[sample[0]] = {"labels": sample[1], "value": sample[2]}
sample_dict = {"labels": sample[1], "value": sample[2]}
if family.type == 'histogram':
if sample[0].endswith("_sum"):
sample_dict['sample_type'] = "sum"
elif sample[0].endswith("_count"):
sample_dict['sample_type'] = "count"
elif sample[0].endswith("_bucket"):
sample_dict['sample_type'] = "bucket"
data[family.name]['samples'].append(sample_dict)
return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
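The JSON shape follows directly from the loop above. A minimal sketch of the rendered output for one hypothetical histogram family (the metric name and help text come from the subsystem metrics list added later in this commit; the values are illustrative only):
# Illustrative only: one parsed family, as the renderer above would emit it.
example_output = {
    'callback_receiver_batch_events_insert_db': {
        'help_text': 'Number of events batch inserted into database',
        'type': 'histogram',
        'samples': [
            {'labels': {'le': '10', 'node': 'awx-1'}, 'value': 4.0, 'sample_type': 'bucket'},
            {'labels': {'node': 'awx-1'}, 'value': 12.0, 'sample_type': 'count'},
            {'labels': {'node': 'awx-1'}, 'value': 37.0, 'sample_type': 'sum'},
        ],
    }
}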


@ -0,0 +1 @@
Query params to filter the response, e.g., ?subsystemonly=1&metric=callback_receiver_events_insert_db&node=awx-1
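A minimal client-side sketch of these filters (the /api/v2/metrics/ path and the token header are assumptions for illustration; they are not part of this diff):
import requests

# Fetch only the subsystem metrics, filtered to one metric family on one node.
response = requests.get(
    'https://awx.example.org/api/v2/metrics/',           # assumed URL of the metrics view
    params={
        'subsystemonly': '1',                             # skip the database-driven metrics
        'metric': 'callback_receiver_events_insert_db',   # only this metric family
        'node': 'awx-1',                                   # only this instance
    },
    headers={'Authorization': 'Bearer <token>'},           # placeholder credentials
)
print(response.text)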


@ -3043,6 +3043,8 @@ class WorkflowJobTemplateNodeCreateApproval(RetrieveAPIView):
return Response(data, status=status.HTTP_201_CREATED)
def check_permissions(self, request):
if not request.user.is_authenticated:
raise PermissionDenied()
obj = self.get_object().workflow_job_template
if request.method == 'POST':
if not request.user.can_access(models.WorkflowJobTemplate, 'change', obj, request.data):


@ -14,6 +14,7 @@ from rest_framework.exceptions import PermissionDenied
# AWX
# from awx.main.analytics import collectors
import awx.main.analytics.subsystem_metrics as s_metrics
from awx.main.analytics.metrics import metrics
from awx.api import renderers
@ -33,5 +34,10 @@ class MetricsView(APIView):
def get(self, request):
''' Show Metrics Details '''
if request.user.is_superuser or request.user.is_system_auditor:
return Response(metrics().decode('UTF-8'))
metrics_to_show = ''
if not request.query_params.get('subsystemonly', "0") == "1":
metrics_to_show += metrics().decode('UTF-8')
if not request.query_params.get('dbonly', "0") == "1":
metrics_to_show += s_metrics.metrics(request)
return Response(metrics_to_show)
raise PermissionDenied()


@ -24,7 +24,7 @@ from awx.api.generics import APIView
from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, to_python_boolean
from awx.main.utils import get_awx_version, get_custom_venv_choices, to_python_boolean
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
@ -279,7 +279,6 @@ class ApiV2ConfigView(APIView):
time_zone=settings.TIME_ZONE,
license_info=license_data,
version=get_awx_version(),
ansible_version=get_ansible_version(),
eula=render_to_string("eula.md") if license_data.get('license_type', 'UNLICENSED') != 'open' else '',
analytics_status=pendo_state,
analytics_collectors=all_collectors(),


@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from awx.conf.migrations import _rename_setting
def rename_proot_settings(apps, schema_editor):
_rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_BASE_PATH', new_key='AWX_ISOLATION_BASE_PATH')
_rename_setting.rename_setting(apps, schema_editor, old_key='AWX_PROOT_SHOW_PATHS', new_key='AWX_ISOLATION_SHOW_PATHS')
class Migration(migrations.Migration):
dependencies = [('conf', '0008_subscriptions')]
operations = [migrations.RunPython(rename_proot_settings)]


@ -0,0 +1,14 @@
# Python
import logging
# AWX
from awx.main.analytics.subsystem_metrics import Metrics
from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename
logger = logging.getLogger('awx.main.scheduler')
@task(queue=get_local_queuename)
def send_subsystem_metrics():
Metrics().send_metrics()


@ -2,6 +2,7 @@ import io
import os
import os.path
import platform
import distro
from django.db import connection
from django.db.models import Count
@ -10,7 +11,7 @@ from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from awx.conf.license import get_license
from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, camelcase_to_underscore
from awx.main.utils import get_awx_version, get_custom_venv_choices, camelcase_to_underscore
from awx.main import models
from django.contrib.sessions.models import Session
from awx.main.analytics import register
@ -32,7 +33,7 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
'''
@register('config', '1.2', description=_('General platform configuration.'))
@register('config', '1.3', description=_('General platform configuration.'))
def config(since, **kwargs):
license_info = get_license()
install_type = 'traditional'
@ -43,7 +44,7 @@ def config(since, **kwargs):
return {
'platform': {
'system': platform.system(),
'dist': platform.dist(),
'dist': distro.linux_distribution(),
'release': platform.release(),
'type': install_type,
},
@ -51,7 +52,6 @@ def config(since, **kwargs):
'instance_uuid': settings.SYSTEM_UUID,
'tower_url_base': settings.TOWER_URL_BASE,
'tower_version': get_awx_version(),
'ansible_version': get_ansible_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'),
'free_instances': license_info.get('free_instances', 0),
'total_licensed_instances': license_info.get('instance_count', 0),


@ -1,8 +1,8 @@
from django.conf import settings
from prometheus_client import REGISTRY, PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, Gauge, Info, generate_latest
from prometheus_client import PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, CollectorRegistry, Gauge, Info, generate_latest
from awx.conf.license import get_license
from awx.main.utils import get_awx_version, get_ansible_version
from awx.main.utils import get_awx_version
from awx.main.analytics.collectors import (
counts,
instance_info,
@ -11,115 +11,123 @@ from awx.main.analytics.collectors import (
)
REGISTRY.unregister(PROCESS_COLLECTOR)
REGISTRY.unregister(PLATFORM_COLLECTOR)
REGISTRY.unregister(GC_COLLECTOR)
SYSTEM_INFO = Info('awx_system', 'AWX System Information')
ORG_COUNT = Gauge('awx_organizations_total', 'Number of organizations')
USER_COUNT = Gauge('awx_users_total', 'Number of users')
TEAM_COUNT = Gauge('awx_teams_total', 'Number of teams')
INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories')
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects')
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates')
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates')
HOST_COUNT = Gauge(
'awx_hosts_total',
'Number of hosts',
[
'type',
],
)
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules')
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of inventory scripts')
USER_SESSIONS = Gauge(
'awx_sessions_total',
'Number of sessions',
[
'type',
],
)
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs')
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system')
PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system')
STATUS = Gauge(
'awx_status_total',
'Status of Job launched',
[
'status',
],
)
INSTANCE_CAPACITY = Gauge(
'awx_instance_capacity',
'Capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_CPU = Gauge(
'awx_instance_cpu',
'CPU cores on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_MEMORY = Gauge(
'awx_instance_memory',
'RAM (Kb) on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_INFO = Info(
'awx_instance',
'Info about each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_LAUNCH_TYPE = Gauge(
'awx_instance_launch_type_total',
'Type of Job launched',
[
'node',
'launch_type',
],
)
INSTANCE_STATUS = Gauge(
'awx_instance_status_total',
'Status of Job launched',
[
'node',
'status',
],
)
INSTANCE_CONSUMED_CAPACITY = Gauge(
'awx_instance_consumed_capacity',
'Consumed capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_REMAINING_CAPACITY = Gauge(
'awx_instance_remaining_capacity',
'Remaining capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')
def metrics():
REGISTRY = CollectorRegistry()
SYSTEM_INFO = Info('awx_system', 'AWX System Information', registry=REGISTRY)
ORG_COUNT = Gauge('awx_organizations_total', 'Number of organizations', registry=REGISTRY)
USER_COUNT = Gauge('awx_users_total', 'Number of users', registry=REGISTRY)
TEAM_COUNT = Gauge('awx_teams_total', 'Number of teams', registry=REGISTRY)
INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories', registry=REGISTRY)
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects', registry=REGISTRY)
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates', registry=REGISTRY)
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates', registry=REGISTRY)
HOST_COUNT = Gauge(
'awx_hosts_total',
'Number of hosts',
[
'type',
],
registry=REGISTRY,
)
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules', registry=REGISTRY)
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of inventory scripts', registry=REGISTRY)
USER_SESSIONS = Gauge(
'awx_sessions_total',
'Number of sessions',
[
'type',
],
registry=REGISTRY,
)
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs', registry=REGISTRY)
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system', registry=REGISTRY)
PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system', registry=REGISTRY)
STATUS = Gauge(
'awx_status_total',
'Status of Job launched',
[
'status',
],
registry=REGISTRY,
)
INSTANCE_CAPACITY = Gauge(
'awx_instance_capacity',
'Capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
INSTANCE_CPU = Gauge(
'awx_instance_cpu',
'CPU cores on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
INSTANCE_MEMORY = Gauge(
'awx_instance_memory',
'RAM (Kb) on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
INSTANCE_INFO = Info(
'awx_instance',
'Info about each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
INSTANCE_LAUNCH_TYPE = Gauge(
'awx_instance_launch_type_total',
'Type of Job launched',
[
'node',
'launch_type',
],
registry=REGISTRY,
)
INSTANCE_STATUS = Gauge(
'awx_instance_status_total',
'Status of Job launched',
[
'node',
'status',
],
registry=REGISTRY,
)
INSTANCE_CONSUMED_CAPACITY = Gauge(
'awx_instance_consumed_capacity',
'Consumed capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
INSTANCE_REMAINING_CAPACITY = Gauge(
'awx_instance_remaining_capacity',
'Remaining capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
registry=REGISTRY,
)
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license', registry=REGISTRY)
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license', registry=REGISTRY)
license_info = get_license()
SYSTEM_INFO.info(
{
@ -127,7 +135,6 @@ def metrics():
'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
'tower_url_base': settings.TOWER_URL_BASE,
'tower_version': get_awx_version(),
'ansible_version': get_ansible_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'),
'license_expiry': str(license_info.get('time_remaining', 0)),
'pendo_tracking': settings.PENDO_TRACKING_STATE,
@ -197,7 +204,7 @@ def metrics():
for status, value in statuses.items():
INSTANCE_STATUS.labels(node=node, status=status).set(value)
return generate_latest()
return generate_latest(registry=REGISTRY)
__all__ = ['metrics']


@ -0,0 +1,304 @@
import redis
import json
import time
import logging
from django.conf import settings
from django.apps import apps
from awx.main.consumers import emit_channel_notification
root_key = 'awx_metrics'
logger = logging.getLogger('awx.main.wsbroadcast')
class BaseM:
def __init__(self, field, help_text):
self.field = field
self.help_text = help_text
self.current_value = 0
def clear_value(self, conn):
conn.hset(root_key, self.field, 0)
self.current_value = 0
def inc(self, value):
self.current_value += value
def set(self, value):
self.current_value = value
def decode(self, conn):
value = conn.hget(root_key, self.field)
return self.decode_value(value)
def to_prometheus(self, instance_data):
output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} gauge\n"
for instance in instance_data:
output_text += f'{self.field}{{node="{instance}"}} {instance_data[instance][self.field]}\n'
return output_text
class FloatM(BaseM):
def decode_value(self, value):
if value is not None:
return float(value)
else:
return 0.0
def store_value(self, conn):
conn.hincrbyfloat(root_key, self.field, self.current_value)
self.current_value = 0
class IntM(BaseM):
def decode_value(self, value):
if value is not None:
return int(value)
else:
return 0
def store_value(self, conn):
conn.hincrby(root_key, self.field, self.current_value)
self.current_value = 0
class SetIntM(BaseM):
def decode_value(self, value):
if value is not None:
return int(value)
else:
return 0
def store_value(self, conn):
# do not set value if it has not changed since last time this was called
if self.current_value is not None:
conn.hset(root_key, self.field, self.current_value)
self.current_value = None
class SetFloatM(SetIntM):
def decode_value(self, value):
if value is not None:
return float(value)
else:
return 0
class HistogramM(BaseM):
def __init__(self, field, help_text, buckets):
self.buckets = buckets
self.buckets_to_keys = {}
for b in buckets:
self.buckets_to_keys[b] = IntM(field + '_' + str(b), '')
self.inf = IntM(field + '_inf', '')
self.sum = IntM(field + '_sum', '')
super(HistogramM, self).__init__(field, help_text)
def clear_value(self, conn):
conn.hset(root_key, self.field, 0)
self.inf.clear_value(conn)
self.sum.clear_value(conn)
for b in self.buckets_to_keys.values():
b.clear_value(conn)
super(HistogramM, self).clear_value(conn)
def observe(self, value):
for b in self.buckets:
if value <= b:
self.buckets_to_keys[b].inc(1)
break
self.sum.inc(value)
self.inf.inc(1)
def decode(self, conn):
values = {'counts': []}
for b in self.buckets_to_keys:
values['counts'].append(self.buckets_to_keys[b].decode(conn))
values['sum'] = self.sum.decode(conn)
values['inf'] = self.inf.decode(conn)
return values
def store_value(self, conn):
for b in self.buckets:
self.buckets_to_keys[b].store_value(conn)
self.sum.store_value(conn)
self.inf.store_value(conn)
def to_prometheus(self, instance_data):
output_text = f"# HELP {self.field} {self.help_text}\n# TYPE {self.field} histogram\n"
for instance in instance_data:
for i, b in enumerate(self.buckets):
output_text += f'{self.field}_bucket{{le="{b}",node="{instance}"}} {sum(instance_data[instance][self.field]["counts"][0:i+1])}\n'
output_text += f'{self.field}_bucket{{le="+Inf",node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
output_text += f'{self.field}_count{{node="{instance}"}} {instance_data[instance][self.field]["inf"]}\n'
output_text += f'{self.field}_sum{{node="{instance}"}} {instance_data[instance][self.field]["sum"]}\n'
return output_text
class Metrics:
def __init__(self, auto_pipe_execute=True):
self.pipe = redis.Redis.from_url(settings.BROKER_URL).pipeline()
self.conn = redis.Redis.from_url(settings.BROKER_URL)
self.last_pipe_execute = time.time()
# track if metrics have been modified since last saved to redis
# start with True so that we get an initial save to redis
self.metrics_have_changed = True
self.pipe_execute_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SAVE_TO_REDIS
self.send_metrics_interval = settings.SUBSYSTEM_METRICS_INTERVAL_SEND_METRICS
# auto pipe execute will commit transaction of metric data to redis
# at a regular interval (pipe_execute_interval). If set to False,
# the calling function should call .pipe_execute() explicitly
self.auto_pipe_execute = auto_pipe_execute
Instance = apps.get_model('main', 'Instance')
self.instance_name = Instance.objects.me().hostname
# metric name, help_text
METRICSLIST = [
SetIntM('callback_receiver_events_queue_size_redis', 'Current number of events in redis queue'),
IntM('callback_receiver_events_popped_redis', 'Number of events popped from redis'),
IntM('callback_receiver_events_in_memory', 'Current number of events in memory (in transfer from redis to db)'),
IntM('callback_receiver_batch_events_errors', 'Number of times batch insertion failed'),
FloatM('callback_receiver_events_insert_db_seconds', 'Time spent saving events to database'),
IntM('callback_receiver_events_insert_db', 'Number of events batch inserted into database'),
HistogramM(
'callback_receiver_batch_events_insert_db', 'Number of events batch inserted into database', settings.SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS
),
FloatM('subsystem_metrics_pipe_execute_seconds', 'Time spent saving metrics to redis'),
IntM('subsystem_metrics_pipe_execute_calls', 'Number of calls to pipe_execute'),
FloatM('subsystem_metrics_send_metrics_seconds', 'Time spent sending metrics to other nodes'),
]
# turn metric list into dictionary with the metric name as a key
self.METRICS = {}
for m in METRICSLIST:
self.METRICS[m.field] = m
# track last time metrics were sent to other nodes
self.previous_send_metrics = SetFloatM('send_metrics_time', 'Timestamp of previous send_metrics call')
def clear_values(self):
for m in self.METRICS.values():
m.clear_value(self.conn)
self.metrics_have_changed = True
self.conn.delete(root_key + "_lock")
def inc(self, field, value):
if value != 0:
self.METRICS[field].inc(value)
self.metrics_have_changed = True
if self.auto_pipe_execute is True and self.should_pipe_execute() is True:
self.pipe_execute()
def set(self, field, value):
self.METRICS[field].set(value)
self.metrics_have_changed = True
if self.auto_pipe_execute is True and self.should_pipe_execute() is True:
self.pipe_execute()
def observe(self, field, value):
self.METRICS[field].observe(value)
self.metrics_have_changed = True
if self.auto_pipe_execute is True and self.should_pipe_execute() is True:
self.pipe_execute()
def serialize_local_metrics(self):
data = self.load_local_metrics()
return json.dumps(data)
def load_local_metrics(self):
# generate python dictionary of key values from metrics stored in redis
data = {}
for field in self.METRICS:
data[field] = self.METRICS[field].decode(self.conn)
return data
def store_metrics(self, data_json):
# called when receiving metrics from other instances
data = json.loads(data_json)
if self.instance_name != data['instance']:
logger.debug(f"{self.instance_name} received subsystem metrics from {data['instance']}")
self.conn.set(root_key + "_instance_" + data['instance'], data['metrics'])
def should_pipe_execute(self):
if self.metrics_have_changed is False:
return False
if time.time() - self.last_pipe_execute > self.pipe_execute_interval:
return True
else:
return False
def pipe_execute(self):
if self.metrics_have_changed is True:
duration_to_save = time.perf_counter()
for m in self.METRICS:
self.METRICS[m].store_value(self.pipe)
self.pipe.execute()
self.last_pipe_execute = time.time()
self.metrics_have_changed = False
duration_to_save = time.perf_counter() - duration_to_save
self.METRICS['subsystem_metrics_pipe_execute_seconds'].inc(duration_to_save)
self.METRICS['subsystem_metrics_pipe_execute_calls'].inc(1)
duration_to_save = time.perf_counter()
self.send_metrics()
duration_to_save = time.perf_counter() - duration_to_save
self.METRICS['subsystem_metrics_send_metrics_seconds'].inc(duration_to_save)
def send_metrics(self):
# more than one thread could be calling this at the same time, so we should
# acquire the redis lock before sending metrics
lock = self.conn.lock(root_key + '_lock', thread_local=False)
if not lock.acquire(blocking=False):
return
try:
current_time = time.time()
if current_time - self.previous_send_metrics.decode(self.conn) > self.send_metrics_interval:
payload = {
'instance': self.instance_name,
'metrics': self.serialize_local_metrics(),
}
# store a local copy as well
self.store_metrics(json.dumps(payload))
emit_channel_notification("metrics", payload)
self.previous_send_metrics.set(current_time)
self.previous_send_metrics.store_value(self.conn)
finally:
lock.release()
def load_other_metrics(self, request):
# data received from other nodes are stored in their own keys
# e.g., awx_metrics_instance_awx-1, awx_metrics_instance_awx-2
# this method looks for keys with "_instance_" in the name and loads the data
# also filters data based on request query params
# if additional filtering is added, update metrics_view.md
instances_filter = request.query_params.getlist("node")
# get a sorted list of instance names
instance_names = [self.instance_name]
for m in self.conn.scan_iter(root_key + '_instance_*'):
instance_names.append(m.decode('UTF-8').split('_instance_')[1])
instance_names.sort()
# load data, including data from this local instance
instance_data = {}
for instance in instance_names:
if len(instances_filter) == 0 or instance in instances_filter:
instance_data_from_redis = self.conn.get(root_key + '_instance_' + instance)
# data from other instances may not be available. That is OK.
if instance_data_from_redis:
instance_data[instance] = json.loads(instance_data_from_redis.decode('UTF-8'))
return instance_data
def generate_metrics(self, request):
# takes the api request, filters, and generates prometheus data
# if additional filtering is added, update metrics_view.md
instance_data = self.load_other_metrics(request)
metrics_filter = request.query_params.getlist("metric")
output_text = ''
if instance_data:
for field in self.METRICS:
if len(metrics_filter) == 0 or field in metrics_filter:
output_text += self.METRICS[field].to_prometheus(instance_data)
return output_text
def metrics(request):
m = Metrics()
return m.generate_metrics(request)
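Producer-side usage mirrors what the callback receiver changes below do. A minimal sketch, assuming it runs inside an AWX process where Django settings and the local Instance record are available:
import awx.main.analytics.subsystem_metrics as s_metrics

# Buffer metrics locally and flush to redis manually, as the callback receiver does.
m = s_metrics.Metrics(auto_pipe_execute=False)
m.inc('callback_receiver_events_popped_redis', 1)             # counter-style increment
m.observe('callback_receiver_batch_events_insert_db', 250)    # histogram observation
if m.should_pipe_execute():
    m.pipe_execute()  # commits buffered values to redis and broadcasts them if the send interval has elapsed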


@ -186,7 +186,7 @@ register(
default=None,
queryset=ExecutionEnvironment.objects.all(),
label=_('Global default execution environment'),
help_text=_('.'),
help_text=_('The Execution Environment to be used when one has not been configured for a job template.'),
category=_('System'),
category_slug='system',
)
@ -233,16 +233,7 @@ register(
)
register(
'AWX_PROOT_ENABLED',
field_class=fields.BooleanField,
label=_('Enable job isolation'),
help_text=_('Isolates an Ansible job from protected parts of the system to prevent exposing sensitive information.'),
category=_('Jobs'),
category_slug='jobs',
)
register(
'AWX_PROOT_BASE_PATH',
'AWX_ISOLATION_BASE_PATH',
field_class=fields.CharField,
label=_('Job execution path'),
help_text=_(
@ -255,17 +246,7 @@ register(
)
register(
'AWX_PROOT_HIDE_PATHS',
field_class=fields.StringListField,
required=False,
label=_('Paths to hide from isolated jobs'),
help_text=_('Additional paths to hide from isolated processes. Enter one path per line.'),
category=_('Jobs'),
category_slug='jobs',
)
register(
'AWX_PROOT_SHOW_PATHS',
'AWX_ISOLATION_SHOW_PATHS',
field_class=fields.StringListField,
required=False,
label=_('Paths to expose to isolated jobs'),


@ -52,7 +52,6 @@ ENV_BLOCKLIST = frozenset(
'VIRTUAL_ENV',
'PATH',
'PYTHONPATH',
'PROOT_TMP_DIR',
'JOB_ID',
'INVENTORY_ID',
'INVENTORY_SOURCE_ID',


@ -13,7 +13,6 @@ from channels.generic.websocket import AsyncJsonWebsocketConsumer
from channels.layers import get_channel_layer
from channels.db import database_sync_to_async
logger = logging.getLogger('awx.main.consumers')
XRF_KEY = '_auth_user_xrf'


@ -20,7 +20,7 @@ from awx.main.models import JobEvent, AdHocCommandEvent, ProjectUpdateEvent, Inv
from awx.main.tasks import handle_success_and_failure_notifications
from awx.main.models.events import emit_event_detail
from awx.main.utils.profiling import AWXProfiler
import awx.main.analytics.subsystem_metrics as s_metrics
from .base import BaseWorker
logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@ -46,16 +46,22 @@ class CallbackBrokerWorker(BaseWorker):
self.buff = {}
self.pid = os.getpid()
self.redis = redis.Redis.from_url(settings.BROKER_URL)
self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False)
self.queue_pop = 0
self.queue_name = settings.CALLBACK_QUEUE
self.prof = AWXProfiler("CallbackBrokerWorker")
for key in self.redis.keys('awx_callback_receiver_statistics_*'):
self.redis.delete(key)
def read(self, queue):
try:
res = self.redis.blpop(settings.CALLBACK_QUEUE, timeout=1)
res = self.redis.blpop(self.queue_name, timeout=1)
if res is None:
return {'event': 'FLUSH'}
self.total += 1
self.queue_pop += 1
self.subsystem_metrics.inc('callback_receiver_events_popped_redis', 1)
self.subsystem_metrics.inc('callback_receiver_events_in_memory', 1)
return json.loads(res[1])
except redis.exceptions.RedisError:
logger.exception("encountered an error communicating with redis")
@ -64,8 +70,19 @@ class CallbackBrokerWorker(BaseWorker):
logger.exception("failed to decode JSON message from redis")
finally:
self.record_statistics()
self.record_read_metrics()
return {'event': 'FLUSH'}
def record_read_metrics(self):
if self.queue_pop == 0:
return
if self.subsystem_metrics.should_pipe_execute() is True:
queue_size = self.redis.llen(self.queue_name)
self.subsystem_metrics.set('callback_receiver_events_queue_size_redis', queue_size)
self.subsystem_metrics.pipe_execute()
self.queue_pop = 0
def record_statistics(self):
# buffer stat recording to once per (by default) 5s
if time.time() - self.last_stats > settings.JOB_EVENT_STATISTICS_INTERVAL:
@ -99,27 +116,44 @@ class CallbackBrokerWorker(BaseWorker):
def flush(self, force=False):
now = tz_now()
if force or (time.time() - self.last_flush) > settings.JOB_EVENT_BUFFER_SECONDS or any([len(events) >= 1000 for events in self.buff.values()]):
bulk_events_saved = 0
singular_events_saved = 0
metrics_events_batch_save_errors = 0
for cls, events in self.buff.items():
logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
for e in events:
if not e.created:
e.created = now
e.modified = now
duration_to_save = time.perf_counter()
try:
cls.objects.bulk_create(events)
bulk_events_saved += len(events)
except Exception:
# if an exception occurs, we should re-attempt to save the
# events one-by-one, because something in the list is
# broken/stale
metrics_events_batch_save_errors += 1
for e in events:
try:
e.save()
singular_events_saved += 1
except Exception:
logger.exception('Database Error Saving Job Event')
duration_to_save = time.perf_counter() - duration_to_save
for e in events:
emit_event_detail(e)
self.buff = {}
self.last_flush = time.time()
# only update metrics if we saved events
if (bulk_events_saved + singular_events_saved) > 0:
self.subsystem_metrics.inc('callback_receiver_batch_events_errors', metrics_events_batch_save_errors)
self.subsystem_metrics.inc('callback_receiver_events_insert_db_seconds', duration_to_save)
self.subsystem_metrics.inc('callback_receiver_events_insert_db', bulk_events_saved + singular_events_saved)
self.subsystem_metrics.observe('callback_receiver_batch_events_insert_db', bulk_events_saved)
self.subsystem_metrics.inc('callback_receiver_events_in_memory', -(bulk_events_saved + singular_events_saved))
if self.subsystem_metrics.should_pipe_execute() is True:
self.subsystem_metrics.pipe_execute()
def perform_work(self, body):
try:
@ -169,6 +203,7 @@ class CallbackBrokerWorker(BaseWorker):
except Exception:
logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier))
finally:
self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1)
GuidMiddleware.set_guid('')
return


@ -135,7 +135,7 @@ class IsolatedManager(object):
extravars = {
'src': self.private_data_dir,
'dest': settings.AWX_PROOT_BASE_PATH,
'dest': settings.AWX_ISOLATION_BASE_PATH,
'ident': self.ident,
'job_id': self.instance.id,
}
@ -304,7 +304,7 @@ class IsolatedManager(object):
if not len(instance_qs):
return
try:
private_data_dir = tempfile.mkdtemp(prefix='awx_iso_heartbeat_', dir=settings.AWX_PROOT_BASE_PATH)
private_data_dir = tempfile.mkdtemp(prefix='awx_iso_heartbeat_', dir=settings.AWX_ISOLATION_BASE_PATH)
self.runner_params = self.build_runner_params([instance.hostname for instance in instance_qs])
self.runner_params['private_data_dir'] = private_data_dir
self.runner_params['forks'] = len(instance_qs)


@ -69,8 +69,6 @@ class AnsibleInventoryLoader(object):
def __init__(self, source, venv_path=None, verbosity=0):
self.source = source
self.verbosity = verbosity
# TODO: remove once proot has been removed
self.tmp_private_dir = None
if venv_path:
self.venv_path = venv_path
else:


@ -25,7 +25,7 @@ class Command(BaseCommand):
raise CommandError("--hostname is a required argument")
try:
path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_ISOLATION_BASE_PATH)
ssh_key = None
if all([getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True, getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)]):
ssh_key = settings.AWX_ISOLATED_PRIVATE_KEY


@ -466,10 +466,14 @@ class CredentialType(CommonModelNameNotUnique):
if len(value):
namespace[field_name] = value
# default missing boolean fields to False
for field in self.inputs.get('fields', []):
# default missing boolean fields to False
if field['type'] == 'boolean' and field['id'] not in credential.inputs.keys():
namespace[field['id']] = safe_namespace[field['id']] = False
# make sure private keys end with a \n
if field.get('format') == 'ssh_private_key':
if field['id'] in namespace and not namespace[field['id']].endswith('\n'):
namespace[field['id']] += '\n'
file_tmpls = self.injectors.get('file', {})
# If any file templates are provided, render the files and update the


@ -8,7 +8,7 @@ import redis
# Django
from django.conf import settings
import awx.main.analytics.subsystem_metrics as s_metrics
__all__ = ['CallbackQueueDispatcher']
@ -28,6 +28,7 @@ class CallbackQueueDispatcher(object):
self.queue = getattr(settings, 'CALLBACK_QUEUE', '')
self.logger = logging.getLogger('awx.main.queue.CallbackQueueDispatcher')
self.connection = redis.Redis.from_url(settings.BROKER_URL)
self.subsystem_metrics = s_metrics.Metrics()
def dispatch(self, obj):
self.connection.rpush(self.queue, json.dumps(obj, cls=AnsibleJSONEncoder))


@ -107,6 +107,7 @@ from awx.main.consumers import emit_channel_notification
from awx.main import analytics
from awx.conf import settings_registry
from awx.conf.license import get_license
from awx.main.analytics.subsystem_metrics import Metrics
from rest_framework.exceptions import PermissionDenied
@ -170,6 +171,7 @@ def dispatch_startup():
cluster_node_heartbeat()
if Instance.objects.me().is_controller():
awx_isolated_heartbeat()
Metrics().clear_values()
# Update Tower's rsyslog.conf file based on logging settings in the db
reconfigure_rsyslog()
@ -841,7 +843,6 @@ class BaseTask(object):
model = None
event_model = None
abstract = True
proot_show_paths = []
def __init__(self):
self.cleanup_paths = []
@ -908,9 +909,9 @@ class BaseTask(object):
if pull:
params['container_options'].append(f'--pull={pull}')
if settings.AWX_PROOT_SHOW_PATHS:
if settings.AWX_ISOLATION_SHOW_PATHS:
params['container_volume_mounts'] = []
for this_path in settings.AWX_PROOT_SHOW_PATHS:
for this_path in settings.AWX_ISOLATION_SHOW_PATHS:
params['container_volume_mounts'].append(f'{this_path}:{this_path}:Z')
return params
@ -924,7 +925,7 @@ class BaseTask(object):
"""
Create a temporary directory for job-related files.
"""
pdd_wrapper_path = tempfile.mkdtemp(prefix=f'pdd_wrapper_{instance.pk}_', dir=settings.AWX_PROOT_BASE_PATH)
pdd_wrapper_path = tempfile.mkdtemp(prefix=f'pdd_wrapper_{instance.pk}_', dir=settings.AWX_ISOLATION_BASE_PATH)
os.chmod(pdd_wrapper_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
if settings.AWX_CLEANUP_PATHS:
self.cleanup_paths.append(pdd_wrapper_path)
@ -1090,12 +1091,6 @@ class BaseTask(object):
"""
return False
def should_use_proot(self, instance):
"""
Return whether this task should use proot.
"""
return False
def build_inventory(self, instance, private_data_dir):
script_params = dict(hostvars=True, towervars=True)
if hasattr(instance, 'job_slice_number'):
@ -1371,8 +1366,8 @@ class BaseTask(object):
status = self.instance.status
raise RuntimeError('not starting %s task' % self.instance.status)
if not os.path.exists(settings.AWX_PROOT_BASE_PATH):
raise RuntimeError('AWX_PROOT_BASE_PATH=%s does not exist' % settings.AWX_PROOT_BASE_PATH)
if not os.path.exists(settings.AWX_ISOLATION_BASE_PATH):
raise RuntimeError('AWX_ISOLATION_BASE_PATH=%s does not exist' % settings.AWX_ISOLATION_BASE_PATH)
# store a record of the venv used at runtime
if hasattr(self.instance, 'custom_virtualenv'):
@ -1598,8 +1593,7 @@ class RunJob(BaseTask):
env['ANSIBLE_CALLBACK_PLUGINS'] = ':'.join(settings.AWX_ANSIBLE_CALLBACK_PLUGINS)
env['AWX_HOST'] = settings.TOWER_URL_BASE
# Create a directory for ControlPath sockets that is unique to each
# job and visible inside the proot environment (when enabled).
# Create a directory for ControlPath sockets that is unique to each job
cp_dir = os.path.join(private_data_dir, 'cp')
if not os.path.exists(cp_dir):
os.mkdir(cp_dir, 0o700)
@ -1768,14 +1762,6 @@ class RunJob(BaseTask):
"""
return settings.AWX_RESOURCE_PROFILING_ENABLED
def should_use_proot(self, job):
"""
Return whether this task should use proot.
"""
if job.is_container_group_task:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def build_execution_environment_params(self, instance):
if settings.IS_K8S:
return {}
@ -1929,10 +1915,6 @@ class RunProjectUpdate(BaseTask):
event_model = ProjectUpdateEvent
event_data_key = 'project_update_id'
@property
def proot_show_paths(self):
return [settings.PROJECTS_ROOT]
def __init__(self, *args, job_private_data_dir=None, **kwargs):
super(RunProjectUpdate, self).__init__(*args, **kwargs)
self.playbook_new_revision = None
@ -1990,7 +1972,7 @@ class RunProjectUpdate(BaseTask):
env['DISPLAY'] = '' # Prevent stupid password popup when running tests.
# give ansible a hint about the intended tmpdir to work around issues
# like https://github.com/ansible/ansible/issues/30064
env['TMP'] = settings.AWX_PROOT_BASE_PATH
env['TMP'] = settings.AWX_ISOLATION_BASE_PATH
env['PROJECT_UPDATE_ID'] = str(project_update.pk)
if settings.GALAXY_IGNORE_CERTS:
env['ANSIBLE_GALAXY_IGNORE'] = True
@ -2124,7 +2106,7 @@ class RunProjectUpdate(BaseTask):
d = super(RunProjectUpdate, self).get_password_prompts(passwords)
d[r'Username for.*:\s*?$'] = 'scm_username'
d[r'Password for.*:\s*?$'] = 'scm_password'
d['Password:\s*?$'] = 'scm_password' # noqa
d[r'Password:\s*?$'] = 'scm_password'
d[r'\S+?@\S+?\'s\s+?password:\s*?$'] = 'scm_password'
d[r'Enter passphrase for .*:\s*?$'] = 'scm_key_unlock'
d[r'Bad passphrase, try again for .*:\s*?$'] = ''
@ -2394,12 +2376,6 @@ class RunProjectUpdate(BaseTask):
if status == 'successful' and instance.launch_type != 'sync':
self._update_dependent_inventories(instance, dependent_inventory_sources)
def should_use_proot(self, project_update):
"""
Return whether this task should use proot.
"""
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def build_execution_environment_params(self, instance):
if settings.IS_K8S:
return {}
@ -2790,7 +2766,7 @@ class RunAdHocCommand(BaseTask):
env['ANSIBLE_SFTP_BATCH_MODE'] = 'False'
# Create a directory for ControlPath sockets that is unique to each
# ad hoc command and visible inside the proot environment (when enabled).
# ad hoc command
cp_dir = os.path.join(private_data_dir, 'cp')
if not os.path.exists(cp_dir):
os.mkdir(cp_dir, 0o700)
@ -2894,14 +2870,6 @@ class RunAdHocCommand(BaseTask):
d[r'Password:\s*?$'] = 'ssh_password'
return d
def should_use_proot(self, ad_hoc_command):
"""
Return whether this task should use proot.
"""
if ad_hoc_command.is_container_group_task:
return False
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def final_run_hook(self, adhoc_job, status, private_data_dir, fact_modification_times, isolated_manager_instance=None):
super(RunAdHocCommand, self).final_run_hook(adhoc_job, status, private_data_dir, fact_modification_times)
if isolated_manager_instance:


@ -56,24 +56,28 @@ def test_metrics_counts(organization_factory, job_template_factory, workflow_job
assert EXPECTED_VALUES[name] == value
def get_metrics_view_db_only():
return reverse('api:metrics_view') + '?dbonly=1'
@pytest.mark.django_db
def test_metrics_permissions(get, admin, org_admin, alice, bob, organization):
assert get(reverse('api:metrics_view'), user=admin).status_code == 200
assert get(reverse('api:metrics_view'), user=org_admin).status_code == 403
assert get(reverse('api:metrics_view'), user=alice).status_code == 403
assert get(reverse('api:metrics_view'), user=bob).status_code == 403
assert get(get_metrics_view_db_only(), user=admin).status_code == 200
assert get(get_metrics_view_db_only(), user=org_admin).status_code == 403
assert get(get_metrics_view_db_only(), user=alice).status_code == 403
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
organization.auditor_role.members.add(bob)
assert get(reverse('api:metrics_view'), user=bob).status_code == 403
assert get(get_metrics_view_db_only(), user=bob).status_code == 403
Role.singleton('system_auditor').members.add(bob)
bob.is_system_auditor = True
assert get(reverse('api:metrics_view'), user=bob).status_code == 200
assert get(get_metrics_view_db_only(), user=bob).status_code == 200
@pytest.mark.django_db
def test_metrics_http_methods(get, post, patch, put, options, admin):
assert get(reverse('api:metrics_view'), user=admin).status_code == 200
assert put(reverse('api:metrics_view'), user=admin).status_code == 405
assert patch(reverse('api:metrics_view'), user=admin).status_code == 405
assert post(reverse('api:metrics_view'), user=admin).status_code == 405
assert options(reverse('api:metrics_view'), user=admin).status_code == 200
assert get(get_metrics_view_db_only(), user=admin).status_code == 200
assert put(get_metrics_view_db_only(), user=admin).status_code == 405
assert patch(get_metrics_view_db_only(), user=admin).status_code == 405
assert post(get_metrics_view_db_only(), user=admin).status_code == 405
assert options(get_metrics_view_db_only(), user=admin).status_code == 200


@ -33,16 +33,14 @@ def test_jobs_settings(get, put, patch, delete, admin):
response = get(url, user=admin, expect=200)
data = dict(response.data.items())
put(url, user=admin, data=data, expect=200)
patch(url, user=admin, data={'AWX_PROOT_HIDE_PATHS': ['/home']}, expect=200)
patch(url, user=admin, data={'AWX_ISOLATION_SHOW_PATHS': ['/home']}, expect=200)
response = get(url, user=admin, expect=200)
assert response.data['AWX_PROOT_HIDE_PATHS'] == ['/home']
data.pop('AWX_PROOT_HIDE_PATHS')
data.pop('AWX_PROOT_SHOW_PATHS')
assert response.data['AWX_ISOLATION_SHOW_PATHS'] == ['/home']
data.pop('AWX_ISOLATION_SHOW_PATHS')
data.pop('AWX_ANSIBLE_CALLBACK_PLUGINS')
put(url, user=admin, data=data, expect=200)
response = get(url, user=admin, expect=200)
assert response.data['AWX_PROOT_HIDE_PATHS'] == []
assert response.data['AWX_PROOT_SHOW_PATHS'] == []
assert response.data['AWX_ISOLATION_SHOW_PATHS'] == []
assert response.data['AWX_ANSIBLE_CALLBACK_PLUGINS'] == []


@ -3,13 +3,12 @@ from unittest import mock
from unittest.mock import Mock
from awx.main.models import (
Job,
InstanceGroup,
)
def T(impact):
j = mock.Mock(Job())
j = mock.Mock(spec_set=['task_impact'])
j.task_impact = impact
return j


@ -1,5 +1,4 @@
import pytest
from unittest import mock
from django.conf import settings
from awx.main.models import (
@ -14,7 +13,7 @@ from awx.main.scheduler.kubernetes import PodManager
@pytest.fixture
def container_group():
instance_group = mock.Mock(InstanceGroup(name='container-group'))
instance_group = InstanceGroup(name='container-group', id=1)
return instance_group


@ -725,7 +725,6 @@ class TestIsolatedExecution(TestJobExecution):
extra_vars = json.loads(extra_vars)
assert extra_vars['dest'] == '/tmp'
assert extra_vars['src'] == private_data
assert extra_vars['proot_temp_dir'].startswith('/tmp/awx_proot_')
def test_systemctl_failure(self):
# If systemctl fails, read the contents of `artifacts/systemctl_logs`


@ -44,7 +44,6 @@ __all__ = [
'underscore_to_camelcase',
'memoize',
'memoize_delete',
'get_ansible_version',
'get_licenser',
'get_awx_http_client_headers',
'get_awx_version',
@ -69,9 +68,6 @@ __all__ = [
'get_system_task_capacity',
'get_cpu_capacity',
'get_mem_capacity',
'wrap_args_with_proot',
'build_proot_temp_dir',
'check_proot_installed',
'model_to_dict',
'NullablePromptPseudoField',
'model_instance_diff',
@ -195,20 +191,6 @@ def memoize_delete(function_name):
return cache.delete(function_name)
@memoize()
def get_ansible_version():
"""
Return Ansible version installed.
Ansible path needs to be provided to account for custom virtual environments
"""
try:
proc = subprocess.Popen(['ansible', '--version'], stdout=subprocess.PIPE)
result = smart_str(proc.communicate()[0])
return result.split('\n')[0].replace('ansible', '').strip()
except Exception:
return 'unknown'
def get_awx_version():
"""
Return AWX version as reported by setuptools.
@ -842,94 +824,6 @@ def set_environ(**environ):
os.environ.update(old_environ)
@memoize()
def check_proot_installed():
"""
Check that proot is installed.
"""
from django.conf import settings
cmd = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--version']
try:
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
proc.communicate()
return bool(proc.returncode == 0)
except (OSError, ValueError) as e:
if isinstance(e, ValueError) or getattr(e, 'errno', 1) != 2: # ENOENT, no such file or directory
logger.exception('bwrap unavailable for unexpected reason.')
return False
def build_proot_temp_dir():
"""
Create a temporary directory for proot to use.
"""
from django.conf import settings
path = tempfile.mkdtemp(prefix='awx_proot_', dir=settings.AWX_PROOT_BASE_PATH)
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
return path
def wrap_args_with_proot(args, cwd, **kwargs):
"""
Wrap existing command line with proot to restrict access to:
- AWX_PROOT_BASE_PATH (generally, /tmp) (except for own /tmp files)
For non-isolated nodes:
- /etc/tower (to prevent obtaining db info or secret key)
- /var/lib/awx (except for current project)
- /var/log/tower
- /var/log/supervisor
"""
from django.conf import settings
cwd = os.path.realpath(cwd)
new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/', '--proc', '/proc']
hide_paths = [settings.AWX_PROOT_BASE_PATH]
if not kwargs.get('isolated'):
hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log', '/etc/ssh', settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
hide_paths.extend(getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [])
for path in sorted(set(hide_paths)):
if not os.path.exists(path):
continue
path = os.path.realpath(path)
if os.path.isdir(path):
new_path = tempfile.mkdtemp(dir=kwargs['proot_temp_dir'])
os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
else:
handle, new_path = tempfile.mkstemp(dir=kwargs['proot_temp_dir'])
os.close(handle)
os.chmod(new_path, stat.S_IRUSR | stat.S_IWUSR)
new_args.extend(['--bind', '%s' % (new_path,), '%s' % (path,)])
if kwargs.get('isolated'):
show_paths = [kwargs['private_data_dir']]
elif 'private_data_dir' in kwargs:
show_paths = [cwd, kwargs['private_data_dir']]
else:
show_paths = [cwd]
for venv in (settings.ANSIBLE_VENV_PATH, settings.AWX_VENV_PATH, kwargs.get('proot_custom_virtualenv')):
if venv:
new_args.extend(['--ro-bind', venv, venv])
show_paths.extend(getattr(settings, 'AWX_PROOT_SHOW_PATHS', None) or [])
show_paths.extend(kwargs.get('proot_show_paths', []))
for path in sorted(set(show_paths)):
if not os.path.exists(path):
continue
path = os.path.realpath(path)
new_args.extend(['--bind', '%s' % (path,), '%s' % (path,)])
if kwargs.get('isolated'):
if '/bin/ansible-playbook' in ' '.join(args):
# playbook runs should cwd to the SCM checkout dir
new_args.extend(['--chdir', os.path.join(kwargs['private_data_dir'], 'project')])
else:
# ad-hoc runs should cwd to the root of the private data dir
new_args.extend(['--chdir', kwargs['private_data_dir']])
else:
new_args.extend(['--chdir', cwd])
new_args.extend(args)
return new_args
def get_pk_from_dict(_dict, key):
"""
Helper for obtaining a pk from user data dict or None if not present.


@ -5,6 +5,7 @@
import logging
import sys
import traceback
from datetime import datetime
# Django
from django.conf import settings
@ -34,7 +35,8 @@ class RSysLogHandler(logging.handlers.SysLogHandler):
# because the alternative is blocking the
# socket.send() in the Python process, which we definitely don't
# want to do)
msg = f'{record.asctime} ERROR rsyslogd was unresponsive: '
dt = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
msg = f'{dt} ERROR rsyslogd was unresponsive: '
exc = traceback.format_exc()
try:
msg += exc.splitlines()[-1]


@ -15,7 +15,7 @@ from awx.main.analytics.broadcast_websocket import (
BroadcastWebsocketStats,
BroadcastWebsocketStatsManager,
)
import awx.main.analytics.subsystem_metrics as s_metrics
logger = logging.getLogger('awx.main.wsbroadcast')
@ -68,6 +68,7 @@ class WebsocketTask:
self.protocol = protocol
self.verify_ssl = verify_ssl
self.channel_layer = None
self.subsystem_metrics = s_metrics.Metrics()
async def run_loop(self, websocket: aiohttp.ClientWebSocketResponse):
raise RuntimeError("Implement me")
@ -144,9 +145,10 @@ class BroadcastWebsocketTask(WebsocketTask):
logmsg = "{} {}".format(logmsg, payload)
logger.warn(logmsg)
continue
(group, message) = unwrap_broadcast_msg(payload)
if group == "metrics":
self.subsystem_metrics.store_metrics(message)
continue
await self.channel_layer.group_send(group, {"type": "internal.message", "text": message})


@ -224,6 +224,15 @@ JOB_EVENT_MAX_QUEUE_SIZE = 10000
# The number of job events to migrate per-transaction when moving from int -> bigint
JOB_EVENT_MIGRATION_CHUNK_SIZE = 1000000
# Histogram buckets for the callback_receiver_batch_events_insert_db metric
SUBSYSTEM_METRICS_BATCH_INSERT_BUCKETS = [10, 50, 150, 350, 650, 2000]
# Interval in seconds for sending local metrics to other nodes
SUBSYSTEM_METRICS_INTERVAL_SEND_METRICS = 3
# Interval in seconds for saving local metrics to redis
SUBSYSTEM_METRICS_INTERVAL_SAVE_TO_REDIS = 2
# The maximum allowed jobs to start on a given task manager cycle
START_TASK_LIMIT = 100
@ -427,6 +436,7 @@ CELERYBEAT_SCHEDULE = {
'gather_analytics': {'task': 'awx.main.tasks.gather_analytics', 'schedule': timedelta(minutes=5)},
'task_manager': {'task': 'awx.main.scheduler.tasks.run_task_manager', 'schedule': timedelta(seconds=20), 'options': {'expires': 20}},
'k8s_reaper': {'task': 'awx.main.tasks.awx_k8s_reaper', 'schedule': timedelta(seconds=60), 'options': {'expires': 50}},
'send_subsystem_metrics': {'task': 'awx.main.analytics.analytics_tasks.send_subsystem_metrics', 'schedule': timedelta(seconds=20)},
# 'isolated_heartbeat': set up at the end of production.py and development.py
}
@ -569,26 +579,15 @@ AWX_SHOW_PLAYBOOK_LINKS = False
# Applies to any galaxy server
GALAXY_IGNORE_CERTS = False
# Enable bubblewrap support for running jobs (playbook runs only).
# Additional paths to show for jobs using process isolation.
# Note: This setting may be overridden by database settings.
AWX_PROOT_ENABLED = True
# Command/path to bubblewrap.
AWX_PROOT_CMD = 'bwrap'
# Additional paths to hide from jobs using bubblewrap.
# Note: This setting may be overridden by database settings.
AWX_PROOT_HIDE_PATHS = []
# Additional paths to show for jobs using bubblewrap.
# Note: This setting may be overridden by database settings.
AWX_PROOT_SHOW_PATHS = []
AWX_ISOLATION_SHOW_PATHS = []
# The directory in which Tower will create new temporary directories for job
# execution and isolation (such as credential files and custom
# inventory scripts).
# Note: This setting may be overridden by database settings.
AWX_PROOT_BASE_PATH = "/tmp"
AWX_ISOLATION_BASE_PATH = "/tmp"
# Disable resource profiling by default
AWX_RESOURCE_PROFILING_ENABLED = False


@ -67,10 +67,6 @@ CALLBACK_QUEUE = "callback_tasks"
# Note: This setting may be overridden by database settings.
AWX_ROLES_ENABLED = True
# Enable PROOT for tower-qa integration tests.
# Note: This setting may be overridden by database settings.
AWX_PROOT_ENABLED = True
AWX_ISOLATED_USERNAME = 'root'
AWX_ISOLATED_CHECK_INTERVAL = 1
AWX_ISOLATED_PERIODIC_CHECK = 30


@ -1,12 +0,0 @@
const RelaunchMixin = parent =>
class extends parent {
relaunch(id, data) {
return this.http.post(`${this.baseUrl}${id}/relaunch/`, data);
}
readRelaunch(id) {
return this.http.get(`${this.baseUrl}${id}/relaunch/`);
}
};
export default RelaunchMixin;


@ -0,0 +1,48 @@
const Runnable = parent =>
class extends parent {
jobEventSlug = '/events/';
cancel(id) {
const endpoint = `${this.baseUrl}${id}/cancel/`;
return this.http.post(endpoint);
}
launchUpdate(id, data) {
const endpoint = `${this.baseUrl}${id}/update/`;
return this.http.post(endpoint, data);
}
readLaunchUpdate(id) {
const endpoint = `${this.baseUrl}${id}/update/`;
return this.http.get(endpoint);
}
readEvents(id, params = {}) {
const endpoint = `${this.baseUrl}${id}${this.jobEventSlug}`;
return this.http.get(endpoint, { params });
}
readEventOptions(id) {
const endpoint = `${this.baseUrl}${id}${this.jobEventSlug}`;
return this.http.options(endpoint);
}
readRelaunch(id) {
const endpoint = `${this.baseUrl}${id}/relaunch/`;
return this.http.get(endpoint);
}
relaunch(id, data) {
const endpoint = `${this.baseUrl}${id}/relaunch/`;
return this.http.post(endpoint, data);
}
};
export default Runnable;


@ -1,11 +1,15 @@
import Base from '../Base';
import RelaunchMixin from '../mixins/Relaunch.mixin';
import RunnableMixin from '../mixins/Runnable.mixin';
class AdHocCommands extends RelaunchMixin(Base) {
class AdHocCommands extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/ad_hoc_commands/';
}
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
}
export default AdHocCommands;


@ -78,6 +78,12 @@ class Inventories extends InstanceGroupsMixin(Base) {
});
}
updateSources(inventoryId) {
return this.http.get(
`${this.baseUrl}${inventoryId}/update_inventory_sources/`
);
}
async readSourceDetail(inventoryId, sourceId) {
const {
data: { results },


@ -22,6 +22,14 @@ class InventorySources extends LaunchUpdateMixin(
});
}
readGroups(id) {
return this.http.get(`${this.baseUrl}${id}/groups/`);
}
readHosts(id) {
return this.http.get(`${this.baseUrl}${id}/hosts/`);
}
destroyGroups(id) {
return this.http.delete(`${this.baseUrl}${id}/groups/`);
}


@ -1,7 +1,7 @@
import Base from '../Base';
import LaunchUpdateMixin from '../mixins/LaunchUpdate.mixin';
import RunnableMixin from '../mixins/Runnable.mixin';
class InventoryUpdates extends LaunchUpdateMixin(Base) {
class InventoryUpdates extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/inventory_updates/';
@ -11,5 +11,9 @@ class InventoryUpdates extends LaunchUpdateMixin(Base) {
createSyncCancel(sourceId) {
return this.http.post(`${this.baseUrl}${sourceId}/cancel/`);
}
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
}
export default InventoryUpdates;


@ -1,67 +1,23 @@
import Base from '../Base';
import RelaunchMixin from '../mixins/Relaunch.mixin';
import RunnableMixin from '../mixins/Runnable.mixin';
const getBaseURL = type => {
switch (type) {
case 'playbook':
case 'job':
return '/jobs/';
case 'project':
case 'project_update':
return '/project_updates/';
case 'management':
case 'management_job':
return '/system_jobs/';
case 'inventory':
case 'inventory_update':
return '/inventory_updates/';
case 'command':
case 'ad_hoc_command':
return '/ad_hoc_commands/';
case 'workflow':
case 'workflow_job':
return '/workflow_jobs/';
default:
throw new Error('Unable to find matching job type');
}
};
class Jobs extends RelaunchMixin(Base) {
class Jobs extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/jobs/';
this.jobEventSlug = '/job_events/';
}
cancel(id, type) {
return this.http.post(`/api/v2${getBaseURL(type)}${id}/cancel/`);
cancel(id) {
return this.http.post(`${this.baseUrl}${id}/cancel/`);
}
readCredentials(id, type) {
return this.http.get(`/api/v2${getBaseURL(type)}${id}/credentials/`);
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
readDetail(id, type) {
return this.http.get(`/api/v2${getBaseURL(type)}${id}/`);
}
readEvents(id, type = 'playbook', params = {}) {
let endpoint;
if (type === 'playbook') {
endpoint = `/api/v2${getBaseURL(type)}${id}/job_events/`;
} else {
endpoint = `/api/v2${getBaseURL(type)}${id}/events/`;
}
return this.http.get(endpoint, { params });
}
readEventOptions(id, type = 'playbook') {
let endpoint;
if (type === 'playbook') {
endpoint = `/api/v2${getBaseURL(type)}${id}/job_events/`;
} else {
endpoint = `/api/v2${getBaseURL(type)}${id}/events/`;
}
return this.http.options(endpoint);
readDetail(id) {
return this.http.get(`${this.baseUrl}${id}/`);
}
}

View File

@ -0,0 +1,9 @@
import Base from '../Base';
class Metrics extends Base {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/metrics/';
}
}
export default Metrics;

View File

@ -1,10 +1,15 @@
import Base from '../Base';
import RunnableMixin from '../mixins/Runnable.mixin';
class ProjectUpdates extends Base {
class ProjectUpdates extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/project_updates/';
}
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
}
export default ProjectUpdates;

View File

@ -1,10 +1,16 @@
import Base from '../Base';
class SystemJobs extends Base {
import RunnableMixin from '../mixins/Runnable.mixin';
class SystemJobs extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/system_jobs/';
}
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
}
export default SystemJobs;

View File

@ -1,7 +1,7 @@
import Base from '../Base';
import RelaunchMixin from '../mixins/Relaunch.mixin';
import RunnableMixin from '../mixins/Runnable.mixin';
class WorkflowJobs extends RelaunchMixin(Base) {
class WorkflowJobs extends RunnableMixin(Base) {
constructor(http) {
super(http);
this.baseUrl = '/api/v2/workflow_jobs/';
@ -10,6 +10,10 @@ class WorkflowJobs extends RelaunchMixin(Base) {
readNodes(id, params) {
return this.http.get(`${this.baseUrl}${id}/workflow_nodes/`, { params });
}
readCredentials(id) {
return this.http.get(`${this.baseUrl}${id}/credentials/`);
}
}
export default WorkflowJobs;

View File

@ -2,17 +2,12 @@ import React from 'react';
import PropTypes from 'prop-types';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import {
AboutModal,
TextContent,
TextList,
TextListItem,
} from '@patternfly/react-core';
import { AboutModal } from '@patternfly/react-core';
import { BrandName } from '../../variables';
import brandLogoImg from './brand-logo.svg';
function About({ ansible_version, version, isOpen, onClose, i18n }) {
function About({ version, isOpen, onClose, i18n }) {
const createSpeechBubble = () => {
let text = `${BrandName} ${version}`;
let top = '';
@ -52,27 +47,17 @@ function About({ ansible_version, version, isOpen, onClose, i18n }) {
|| ||
`}
</pre>
<TextContent>
<TextList component="dl">
<TextListItem component="dt">
{i18n._(t`Ansible Version`)}
</TextListItem>
<TextListItem component="dd">{ansible_version}</TextListItem>
</TextList>
</TextContent>
</AboutModal>
);
}
About.propTypes = {
ansible_version: PropTypes.string,
isOpen: PropTypes.bool,
onClose: PropTypes.func.isRequired,
version: PropTypes.string,
};
About.defaultProps = {
ansible_version: null,
isOpen: false,
version: null,
};

View File

@ -54,8 +54,9 @@ function AddResourceRole({ onSave, onClose, roles, i18n, resource, onError }) {
);
if (selectedIndex > -1) {
selectedRoleRows.splice(selectedIndex, 1);
setSelectedRoleRows(selectedRoleRows);
setSelectedRoleRows(
selectedRoleRows.filter((r, index) => index !== selectedIndex)
);
} else {
setSelectedRoleRows([...selectedRoleRows, role]);
}

View File

@ -204,7 +204,6 @@ function AppContainer({ i18n, navRouteConfig = [], children }) {
{isReady && <ConfigProvider value={config}>{children}</ConfigProvider>}
</Page>
<About
ansible_version={config?.ansible_version}
version={config?.version}
isOpen={isAboutModalOpen}
onClose={handleAboutModalClose}

View File

@ -10,13 +10,11 @@ import AppContainer from './AppContainer';
jest.mock('../../api');
describe('<AppContainer />', () => {
const ansible_version = '111';
const version = '222';
beforeEach(() => {
ConfigAPI.read.mockResolvedValue({
data: {
ansible_version,
version,
},
});
@ -93,7 +91,6 @@ describe('<AppContainer />', () => {
// check about modal content
const content = await waitForElement(wrapper, aboutModalContent);
expect(content.find('dd').text()).toContain(ansible_version);
expect(content.find('pre').text()).toContain(`< AWX ${version} >`);
// close about modal

View File

@ -2,9 +2,20 @@ import React, { useState } from 'react';
import PropTypes from 'prop-types';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Button } from '@patternfly/react-core';
import styled from 'styled-components';
import { Button, Badge, Alert, Tooltip } from '@patternfly/react-core';
import AlertModal from '../AlertModal';
import { getRelatedResourceDeleteCounts } from '../../util/getRelatedResourceDeleteDetails';
import ErrorDetail from '../ErrorDetail';
const WarningMessage = styled(Alert)`
margin-top: 10px;
`;
const Label = styled.span`
&& {
margin-right: 10px;
}
`;
function DeleteButton({
onConfirm,
modalTitle,
@ -14,33 +25,91 @@ function DeleteButton({
children,
isDisabled,
ouiaId,
deleteMessage,
deleteDetailsRequests,
disabledTooltip,
}) {
const [isOpen, setIsOpen] = useState(false);
const [deleteMessageError, setDeleteMessageError] = useState();
const [deleteDetails, setDeleteDetails] = useState({});
const [isLoading, setIsLoading] = useState(false);
const toggleModal = async isModalOpen => {
setIsLoading(true);
if (deleteDetailsRequests?.length && isModalOpen) {
const { results, error } = await getRelatedResourceDeleteCounts(
deleteDetailsRequests
);
if (error) {
setDeleteMessageError(error);
} else {
setDeleteDetails(results);
}
}
setIsLoading(false);
setIsOpen(isModalOpen);
};
if (deleteMessageError) {
return (
<AlertModal
isOpen={deleteMessageError}
title={i18n._(t`Error!`)}
onClose={() => {
toggleModal(false);
setDeleteMessageError();
}}
>
<ErrorDetail error={deleteMessageError} />
</AlertModal>
);
}
return (
<>
<Button
variant={variant || 'secondary'}
aria-label={i18n._(t`Delete`)}
isDisabled={isDisabled}
onClick={() => setIsOpen(true)}
ouiaId={ouiaId}
>
{children || i18n._(t`Delete`)}
</Button>
{disabledTooltip ? (
<Tooltip content={disabledTooltip} position="top">
<div>
<Button
isLoading={isLoading}
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
variant={variant || 'secondary'}
aria-label={i18n._(t`Delete`)}
isDisabled={isDisabled}
onClick={() => toggleModal(true)}
ouiaId={ouiaId}
>
{children || i18n._(t`Delete`)}
</Button>
</div>
</Tooltip>
) : (
<Button
isLoading={isLoading}
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
variant={variant || 'secondary'}
aria-label={i18n._(t`Delete`)}
isDisabled={isDisabled}
onClick={() => toggleModal(true)}
>
{children || i18n._(t`Delete`)}
</Button>
)}
<AlertModal
isOpen={isOpen}
title={modalTitle}
variant="danger"
onClose={() => setIsOpen(false)}
onClose={() => toggleModal(false)}
actions={[
<Button
ouiaId="delete-modal-confirm"
key="delete"
variant="danger"
aria-label={i18n._(t`Delete`)}
aria-label={i18n._(t`Confirm Delete`)}
isDisabled={isDisabled}
onClick={onConfirm}
onClick={() => {
onConfirm();
toggleModal(false);
}}
>
{i18n._(t`Delete`)}
</Button>,
@ -49,7 +118,7 @@ function DeleteButton({
key="cancel"
variant="link"
aria-label={i18n._(t`Cancel`)}
onClick={() => setIsOpen(false)}
onClick={() => toggleModal(false)}
>
{i18n._(t`Cancel`)}
</Button>,
@ -58,6 +127,23 @@ function DeleteButton({
{i18n._(t`Are you sure you want to delete:`)}
<br />
<strong>{name}</strong>
{Object.values(deleteDetails).length > 0 && (
<WarningMessage
variant="warning"
isInline
title={
<div>
<div aria-label={deleteMessage}>{deleteMessage}</div>
<br />
{Object.entries(deleteDetails).map(([key, value]) => (
<div aria-label={`${key}: ${value}`} key={key}>
<Label>{key}</Label> <Badge>{value}</Badge>
</div>
))}
</div>
}
/>
)}
</AlertModal>
</>
);
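A minimal sketch of how a detail view wires up the extended DeleteButton; it mirrors the CredentialDetail change further down in this diff, with `credential`, `deleteCredential`, and `i18n` assumed to be in scope.

// Illustrative only; prop names match this changeset.
const deleteDetailsRequests = relatedResourceDeleteRequests.credential(credential, i18n);

<DeleteButton
  name={credential.name}
  modalTitle={i18n._(t`Delete Credential`)}
  onConfirm={deleteCredential}
  deleteDetailsRequests={deleteDetailsRequests} // entries of shape { label, request }, request resolving to { data: { count } }
  deleteMessage={i18n._(
    t`This credential is currently being used by other resources. Are you sure you want to delete it?`
  )}
>
  {i18n._(t`Delete`)}
</DeleteButton>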

View File

@ -0,0 +1,112 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import {
mountWithContexts,
waitForElement,
} from '../../../testUtils/enzymeHelpers';
import { CredentialsAPI } from '../../api';
import DeleteButton from './DeleteButton';
jest.mock('../../api');
describe('<DeleteButton />', () => {
test('should render button', () => {
const wrapper = mountWithContexts(
<DeleteButton onConfirm={() => {}} name="Foo" />
);
expect(wrapper.find('button')).toHaveLength(1);
});
test('should open confirmation modal', async () => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<DeleteButton
onConfirm={() => {}}
name="Foo"
deleteDetailsRequests={[
{
label: 'job',
request: CredentialsAPI.read.mockResolvedValue({
data: { count: 1 },
}),
},
]}
deleteMessage="Delete this?"
warningMessage="Are you sure you want to delete this"
/>
);
});
await act(async () => {
wrapper.find('button').prop('onClick')();
});
await waitForElement(wrapper, 'Modal', el => el.length > 0);
expect(wrapper.find('Modal')).toHaveLength(1);
expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1);
});
test('should invoke onConfirm prop', async () => {
const onConfirm = jest.fn();
const wrapper = mountWithContexts(
<DeleteButton
onConfirm={onConfirm}
itemsToDelete="foo"
deleteDetailsRequests={[
{
label: 'job',
request: CredentialsAPI.read.mockResolvedValue({
data: { count: 1 },
}),
},
]}
deleteMessage="Delete this?"
/>
);
await act(async () => wrapper.find('button').simulate('click'));
wrapper.update();
await act(async () =>
wrapper
.find('ModalBoxFooter button[aria-label="Confirm Delete"]')
.simulate('click')
);
wrapper.update();
expect(onConfirm).toHaveBeenCalled();
});
test('should show delete details error', async () => {
const onConfirm = jest.fn();
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<DeleteButton
onConfirm={onConfirm}
itemsToDelete="foo"
deleteDetailsRequests={[
{
label: 'job',
request: CredentialsAPI.read.mockRejectedValue(
new Error({
response: {
config: {
method: 'get',
url: '/api/v2/credentials',
},
data: 'An error occurred',
status: 403,
},
})
),
},
]}
/>
);
});
await act(async () => wrapper.find('button').simulate('click'));
wrapper.update();
expect(wrapper.find('AlertModal[title="Error!"]')).toHaveLength(1);
});
});

View File

@ -13,20 +13,12 @@ import useRequest, {
useDeleteItems,
useDismissableError,
} from '../../util/useRequest';
import isJobRunning from '../../util/jobs';
import { isJobRunning, getJobModel } from '../../util/jobs';
import { getQSConfig, parseQueryString } from '../../util/qs';
import JobListItem from './JobListItem';
import JobListCancelButton from './JobListCancelButton';
import useWsJobs from './useWsJobs';
import {
AdHocCommandsAPI,
InventoryUpdatesAPI,
JobsAPI,
ProjectUpdatesAPI,
SystemJobsAPI,
UnifiedJobsAPI,
WorkflowJobsAPI,
} from '../../api';
import { UnifiedJobsAPI } from '../../api';
function JobList({ i18n, defaultParams, showTypeColumn = false }) {
const qsConfig = getQSConfig(
@ -104,7 +96,7 @@ function JobList({ i18n, defaultParams, showTypeColumn = false }) {
return Promise.all(
selected.map(job => {
if (isJobRunning(job.status)) {
return JobsAPI.cancel(job.id, job.type);
return getJobModel(job.type).cancel(job.id);
}
return Promise.resolve();
})
@ -127,22 +119,7 @@ function JobList({ i18n, defaultParams, showTypeColumn = false }) {
useCallback(() => {
return Promise.all(
selected.map(({ type, id }) => {
switch (type) {
case 'job':
return JobsAPI.destroy(id);
case 'ad_hoc_command':
return AdHocCommandsAPI.destroy(id);
case 'system_job':
return SystemJobsAPI.destroy(id);
case 'project_update':
return ProjectUpdatesAPI.destroy(id);
case 'inventory_update':
return InventoryUpdatesAPI.destroy(id);
case 'workflow_job':
return WorkflowJobsAPI.destroy(id);
default:
return null;
}
return getJobModel(type).destroy(id);
})
);
}, [selected]),
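getJobModel is imported from ../../util/jobs but its body is not part of this hunk; the sketch below shows the type-to-model mapping it presumably performs, inferred from the switch statement removed above and from the updated JobList tests. The default branch is a guess.

// Presumed shape of getJobModel (util/jobs); not shown in this diff.
import {
  AdHocCommandsAPI,
  InventoryUpdatesAPI,
  JobsAPI,
  ProjectUpdatesAPI,
  SystemJobsAPI,
  WorkflowJobsAPI,
} from '../api';

export function getJobModel(type) {
  switch (type) {
    case 'ad_hoc_command':
      return AdHocCommandsAPI;
    case 'inventory_update':
      return InventoryUpdatesAPI;
    case 'project_update':
      return ProjectUpdatesAPI;
    case 'system_job':
      return SystemJobsAPI;
    case 'workflow_job':
      return WorkflowJobsAPI;
    case 'job':
    default:
      return JobsAPI;
  }
}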

View File

@ -319,13 +319,12 @@ describe('<JobList />', () => {
wrapper.find('JobListCancelButton').invoke('onCancel')();
});
expect(JobsAPI.cancel).toHaveBeenCalledTimes(6);
expect(JobsAPI.cancel).toHaveBeenCalledWith(1, 'project_update');
expect(JobsAPI.cancel).toHaveBeenCalledWith(2, 'job');
expect(JobsAPI.cancel).toHaveBeenCalledWith(3, 'inventory_update');
expect(JobsAPI.cancel).toHaveBeenCalledWith(4, 'workflow_job');
expect(JobsAPI.cancel).toHaveBeenCalledWith(5, 'system_job');
expect(JobsAPI.cancel).toHaveBeenCalledWith(6, 'ad_hoc_command');
expect(ProjectUpdatesAPI.cancel).toHaveBeenCalledWith(1);
expect(JobsAPI.cancel).toHaveBeenCalledWith(2);
expect(InventoryUpdatesAPI.cancel).toHaveBeenCalledWith(3);
expect(WorkflowJobsAPI.cancel).toHaveBeenCalledWith(4);
expect(SystemJobsAPI.cancel).toHaveBeenCalledWith(5);
expect(AdHocCommandsAPI.cancel).toHaveBeenCalledWith(6);
jest.restoreAllMocks();
});

View File

@ -4,7 +4,7 @@ import { t } from '@lingui/macro';
import { arrayOf, func } from 'prop-types';
import { Button, DropdownItem, Tooltip } from '@patternfly/react-core';
import { KebabifiedContext } from '../../contexts/Kebabified';
import isJobRunning from '../../util/jobs';
import { isJobRunning } from '../../util/jobs';
import AlertModal from '../AlertModal';
import { Job } from '../../types';

View File

@ -25,6 +25,7 @@ function ExecutionEnvironmentLookup({
globallyAvailable,
i18n,
isDefaultEnvironment,
isGlobalDefaultEnvironment,
isDisabled,
onBlur,
onChange,
@ -154,17 +155,26 @@ function ExecutionEnvironmentLookup({
</>
);
const renderLabel = (
globalDefaultEnvironment,
defaultExecutionEnvironment
) => {
if (globalDefaultEnvironment) {
return i18n._(t`Global Default Execution Environment`);
}
if (defaultExecutionEnvironment) {
return i18n._(t`Default Execution Environment`);
}
return i18n._(t`Execution Environment`);
};
return (
<FormGroup
fieldId="execution-environment-lookup"
label={
isDefaultEnvironment
? i18n._(t`Default Execution Environment`)
: i18n._(t`Execution Environment`)
}
label={renderLabel(isGlobalDefaultEnvironment, isDefaultEnvironment)}
labelIcon={popoverContent && <Popover content={popoverContent} />}
>
{isDisabled ? (
{tooltip ? (
<Tooltip content={tooltip}>{renderLookup()}</Tooltip>
) : (
renderLookup()
@ -180,6 +190,7 @@ ExecutionEnvironmentLookup.propTypes = {
popoverContent: string,
onChange: func.isRequired,
isDefaultEnvironment: bool,
isGlobalDefaultEnvironment: bool,
projectId: oneOfType([number, string]),
organizationId: oneOfType([number, string]),
};
@ -187,6 +198,7 @@ ExecutionEnvironmentLookup.propTypes = {
ExecutionEnvironmentLookup.defaultProps = {
popoverContent: '',
isDefaultEnvironment: false,
isGlobalDefaultEnvironment: false,
value: null,
projectId: null,
organizationId: null,

View File

@ -10,16 +10,31 @@ import {
checkPropTypes,
} from 'prop-types';
import styled from 'styled-components';
import { Alert, Button, DropdownItem, Tooltip } from '@patternfly/react-core';
import {
Alert,
Badge,
Button,
DropdownItem,
Tooltip,
} from '@patternfly/react-core';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import AlertModal from '../AlertModal';
import { KebabifiedContext } from '../../contexts/Kebabified';
import { getRelatedResourceDeleteCounts } from '../../util/getRelatedResourceDeleteDetails';
import ErrorDetail from '../ErrorDetail';
const WarningMessage = styled(Alert)`
margin-top: 10px;
`;
const Label = styled.span`
&& {
margin-right: 10px;
}
`;
const requiredField = props => {
const { name, username, image } = props;
if (!name && !username && !image) {
@ -77,20 +92,43 @@ function ToolbarDeleteButton({
pluralizedItemName,
errorMessage,
onDelete,
deleteDetailsRequests,
warningMessage,
deleteMessage,
i18n,
cannotDelete,
}) {
const { isKebabified, onKebabModalChange } = useContext(KebabifiedContext);
const [isModalOpen, setIsModalOpen] = useState(false);
const [deleteDetails, setDeleteDetails] = useState(null);
const [isLoading, setIsLoading] = useState(false);
const [deleteMessageError, setDeleteMessageError] = useState();
const handleDelete = () => {
onDelete();
toggleModal();
};
const toggleModal = () => {
setIsModalOpen(!isModalOpen);
const toggleModal = async isOpen => {
setIsLoading(true);
setDeleteDetails(null);
if (
isOpen &&
itemsToDelete.length === 1 &&
deleteDetailsRequests?.length > 0
) {
const { results, error } = await getRelatedResourceDeleteCounts(
deleteDetailsRequests
);
if (error) {
setDeleteMessageError(error);
} else {
setDeleteDetails(results);
}
}
setIsLoading(false);
setIsModalOpen(isOpen);
};
useEffect(() => {
@ -126,27 +164,84 @@ function ToolbarDeleteButton({
const isDisabled =
itemsToDelete.length === 0 || itemsToDelete.some(cannotDelete);
// NOTE: Once PF supports tooltips on disabled elements,
// we can delete the extra <div> around the <DeleteButton> below.
// See: https://github.com/patternfly/patternfly-react/issues/1894
const buildDeleteWarning = () => {
const deleteMessages = [];
if (warningMessage) {
deleteMessages.push(warningMessage);
}
if (deleteMessage) {
if (
itemsToDelete[0]?.type !== 'inventory' &&
(itemsToDelete.length > 1 || deleteDetails)
) {
deleteMessages.push(deleteMessage);
} else if (deleteDetails || itemsToDelete.length > 1) {
deleteMessages.push(deleteMessage);
}
}
return (
<div>
{deleteMessages.map(message => (
<div aria-label={message} key={message}>
{message}
</div>
))}
{deleteDetails &&
Object.entries(deleteDetails).map(([key, value]) => (
<div key={key} aria-label={`${key}: ${value}`}>
<Label>{key}</Label>
<Badge>{value}</Badge>
</div>
))}
</div>
);
};
if (deleteMessageError) {
return (
<AlertModal
isOpen={deleteMessageError}
title={i18n._(t`Error!`)}
onClose={() => {
toggleModal(false);
setDeleteMessageError();
}}
>
<ErrorDetail error={deleteMessageError} />
</AlertModal>
);
}
const shouldShowDeleteWarning =
warningMessage ||
(itemsToDelete.length === 1 && deleteDetails) ||
(itemsToDelete.length > 1 && deleteMessage);
return (
<>
{isKebabified ? (
<DropdownItem
key="add"
isDisabled={isDisabled}
component="button"
onClick={toggleModal}
>
{i18n._(t`Delete`)}
</DropdownItem>
<Tooltip content={renderTooltip()} position="top">
<DropdownItem
key="add"
isDisabled={isDisabled}
isLoading={isLoading}
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
component="button"
onClick={() => {
toggleModal(true);
}}
>
{i18n._(t`Delete`)}
</DropdownItem>
</Tooltip>
) : (
<Tooltip content={renderTooltip()} position="top">
<div>
<Button
variant="secondary"
isLoading={isLoading}
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
aria-label={i18n._(t`Delete`)}
onClick={toggleModal}
onClick={() => toggleModal(true)}
isDisabled={isDisabled}
>
{i18n._(t`Delete`)}
@ -154,17 +249,22 @@ function ToolbarDeleteButton({
</div>
</Tooltip>
)}
{isModalOpen && (
<AlertModal
variant="danger"
title={modalTitle}
isOpen={isModalOpen}
onClose={toggleModal}
onClose={() => toggleModal(false)}
actions={[
<Button
ouiaId="delete-modal-confirm"
key="delete"
variant="danger"
aria-label={i18n._(t`confirm delete`)}
isDisabled={Boolean(
deleteDetails && itemsToDelete[0]?.type === 'credential_type'
)}
onClick={handleDelete}
>
{i18n._(t`Delete`)}
@ -173,7 +273,7 @@ function ToolbarDeleteButton({
key="cancel"
variant="link"
aria-label={i18n._(t`cancel delete`)}
onClick={toggleModal}
onClick={() => toggleModal(false)}
>
{i18n._(t`Cancel`)}
</Button>,
@ -186,8 +286,12 @@ function ToolbarDeleteButton({
<br />
</span>
))}
{warningMessage && (
<WarningMessage variant="warning" isInline title={warningMessage} />
{shouldShowDeleteWarning && (
<WarningMessage
variant="warning"
isInline
title={buildDeleteWarning()}
/>
)}
</AlertModal>
)}
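getRelatedResourceDeleteCounts lives in util/getRelatedResourceDeleteDetails and is not shown in this diff; from the call sites and tests in this changeset it takes an array of { label, request } entries and resolves to { results, error }, where results maps each label to its count. A rough sketch under those assumptions (returning null when nothing is in use is inferred from the truthiness checks above):

// Sketch only; the shipped implementation is in util/getRelatedResourceDeleteDetails.js.
export async function getRelatedResourceDeleteCounts(requests) {
  const results = {};
  let error = null;
  try {
    await Promise.all(
      requests.map(async ({ label, request }) => {
        const {
          data: { count },
        } = await request();
        if (count > 0) {
          results[label] = count; // e.g. { 'Workflow Job Template Node': 1 }
        }
      })
    );
  } catch (err) {
    error = err;
  }
  return { results: Object.keys(results).length ? results : null, error };
}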

View File

@ -1,7 +1,14 @@
import React from 'react';
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
import { act } from 'react-dom/test-utils';
import {
mountWithContexts,
waitForElement,
} from '../../../testUtils/enzymeHelpers';
import { CredentialsAPI } from '../../api';
import ToolbarDeleteButton from './ToolbarDeleteButton';
jest.mock('../../api');
const itemA = {
id: 1,
name: 'Foo',
@ -19,27 +26,180 @@ const itemC = {
};
describe('<ToolbarDeleteButton />', () => {
let deleteDetailsRequests;
let wrapper;
beforeEach(() => {
deleteDetailsRequests = [
{
label: 'Workflow Job Template Node',
request: CredentialsAPI.read.mockResolvedValue({ data: { count: 1 } }),
},
];
});
afterEach(() => {
jest.clearAllMocks();
wrapper.unmount();
});
test('should render button', () => {
const wrapper = mountWithContexts(
wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={() => {}} itemsToDelete={[]} />
);
expect(wrapper.find('button')).toHaveLength(1);
expect(wrapper.find('ToolbarDeleteButton')).toMatchSnapshot();
});
test('should open confirmation modal', () => {
const wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={() => {}} itemsToDelete={[itemA]} />
);
test('should open confirmation modal', async () => {
await act(async () => {
wrapper = mountWithContexts(
<ToolbarDeleteButton
onDelete={() => {}}
itemsToDelete={[itemA]}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage="Delete this?"
warningMessage="Are you sure you want to delete this"
/>
);
});
expect(wrapper.find('Modal')).toHaveLength(0);
wrapper.find('button').simulate('click');
wrapper.update();
await act(async () => {
wrapper.find('button').prop('onClick')();
});
await waitForElement(wrapper, 'Modal', el => el.length > 0);
expect(CredentialsAPI.read).toBeCalled();
expect(wrapper.find('Modal')).toHaveLength(1);
expect(
wrapper.find('div[aria-label="Workflow Job Template Node: 1"]')
).toHaveLength(1);
expect(
wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled')
).toBe(false);
expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1);
});
test('should open confirmation modal with enabled delete button', async () => {
await act(async () => {
wrapper = mountWithContexts(
<ToolbarDeleteButton
onDelete={() => {}}
itemsToDelete={[
{
name: 'foo',
id: 1,
type: 'credential_type',
summary_fields: { user_capabilities: { delete: true } },
},
{
name: 'bar',
id: 2,
type: 'credential_type',
summary_fields: { user_capabilities: { delete: true } },
},
]}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage="Delete this?"
warningMessage="Are you sure you want to delete this"
/>
);
});
expect(wrapper.find('Modal')).toHaveLength(0);
await act(async () => {
wrapper.find('button').prop('onClick')();
});
await waitForElement(wrapper, 'Modal', el => el.length > 0);
expect(CredentialsAPI.read).not.toBeCalled();
expect(wrapper.find('Modal')).toHaveLength(1);
expect(
wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled')
).toBe(false);
});
test('should disable confirm delete button', async () => {
const request = [
{
label: 'Workflow Job Template Node',
request: CredentialsAPI.read.mockResolvedValue({ data: { count: 3 } }),
},
];
await act(async () => {
wrapper = mountWithContexts(
<ToolbarDeleteButton
onDelete={() => {}}
itemsToDelete={[
{
name: 'foo',
id: 1,
type: 'credential_type',
summary_fields: { user_capabilities: { delete: true } },
},
]}
deleteDetailsRequests={request}
deleteMessage="Delete this?"
warningMessage="Are you sure you want to delete this"
/>
);
});
expect(wrapper.find('Modal')).toHaveLength(0);
await act(async () => {
wrapper.find('button').prop('onClick')();
});
await waitForElement(wrapper, 'Modal', el => el.length > 0);
expect(CredentialsAPI.read).toBeCalled();
expect(wrapper.find('Modal')).toHaveLength(1);
expect(
wrapper.find('Button[aria-label="confirm delete"]').prop('isDisabled')
).toBe(true);
expect(wrapper.find('div[aria-label="Delete this?"]')).toHaveLength(1);
});
test('should open delete error modal', async () => {
const request = [
{
label: 'Workflow Job Template Node',
request: CredentialsAPI.read.mockRejectedValue(
new Error({
response: {
config: {
method: 'get',
url: '/api/v2/credentials',
},
data: 'An error occurred',
status: 403,
},
})
),
},
];
await act(async () => {
wrapper = mountWithContexts(
<ToolbarDeleteButton
onDelete={() => {}}
itemsToDelete={[itemA]}
deleteDetailsRequests={request}
deleteMessage="Delete this?"
warningMessage="Are you sure you want to delete this"
/>
);
});
expect(wrapper.find('Modal')).toHaveLength(0);
await act(async () => wrapper.find('button').simulate('click'));
await waitForElement(wrapper, 'Modal', el => el.length > 0);
expect(CredentialsAPI.read).toBeCalled();
wrapper.update();
expect(wrapper.find('AlertModal[title="Error!"]')).toHaveLength(1);
});
test('should invoke onDelete prop', () => {
const onDelete = jest.fn();
const wrapper = mountWithContexts(
wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={onDelete} itemsToDelete={[itemA]} />
);
wrapper.find('button').simulate('click');
@ -53,14 +213,14 @@ describe('<ToolbarDeleteButton />', () => {
});
test('should disable button when no delete permissions', () => {
const wrapper = mountWithContexts(
wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={() => {}} itemsToDelete={[itemB]} />
);
expect(wrapper.find('button[disabled]')).toHaveLength(1);
});
test('should render tooltip', () => {
const wrapper = mountWithContexts(
wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={() => {}} itemsToDelete={[itemA]} />
);
expect(wrapper.find('Tooltip')).toHaveLength(1);
@ -68,7 +228,7 @@ describe('<ToolbarDeleteButton />', () => {
});
test('should render tooltip for username', () => {
const wrapper = mountWithContexts(
wrapper = mountWithContexts(
<ToolbarDeleteButton onDelete={() => {}} itemsToDelete={[itemC]} />
);
expect(wrapper.find('Tooltip')).toHaveLength(1);

View File

@ -75,6 +75,7 @@ exports[`<ToolbarDeleteButton /> should render button 1`] = `
<Button
aria-label="Delete"
isDisabled={true}
isLoading={false}
onClick={[Function]}
variant="secondary"
>
@ -93,13 +94,14 @@ exports[`<ToolbarDeleteButton /> should render button 1`] = `
<Button
aria-label="Delete"
isDisabled={true}
isLoading={false}
onClick={[Function]}
variant="secondary"
>
<button
aria-disabled={true}
aria-label="Delete"
className="pf-c-button pf-m-secondary pf-m-disabled"
className="pf-c-button pf-m-secondary pf-m-disabled pf-m-progress"
data-ouia-component-id="OUIA-Generated-Button-secondary-1"
data-ouia-component-type="PF4/Button"
data-ouia-safe={true}

View File

@ -18,6 +18,7 @@ import { getQSConfig, parseQueryString } from '../../util/qs';
import useWsTemplates from '../../util/useWsTemplates';
import AddDropDownButton from '../AddDropDownButton';
import TemplateListItem from './TemplateListItem';
import { relatedResourceDeleteRequests } from '../../util/getRelatedResourceDeleteDetails';
function TemplateList({ defaultParams, i18n }) {
// The type value in const qsConfig below does not have a space between job_template and
@ -168,6 +169,11 @@ function TemplateList({ defaultParams, i18n }) {
<AddDropDownButton key="add" dropdownItems={addDropDownButton} />
);
const deleteDetailsRequests = relatedResourceDeleteRequests.template(
selected[0],
i18n
);
return (
<Fragment>
<Card>
@ -236,6 +242,11 @@ function TemplateList({ defaultParams, i18n }) {
onDelete={handleTemplateDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Templates`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This template is currently being used by some workflow nodes. Are you sure you want to delete it?} other {Deleting these templates could impact some workflow nodes that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>
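The deleteMessage strings added throughout this changeset use lingui's ICU plural syntax: i18n._() receives a message containing a {numItemsToDelete, plural, one {...} other {...}} selector plus a values object, and renders the branch matching the count. A tiny illustration, where pluralMessage stands in for the string literal above:

// Branch selection for the ICU plural message; the values object supplies numItemsToDelete.
i18n._(pluralMessage, { numItemsToDelete: 1 }); // renders the "one" branch
i18n._(pluralMessage, { numItemsToDelete: 3 }); // renders the "other" branch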

View File

@ -22,6 +22,7 @@ import ErrorDetail from '../../../components/ErrorDetail';
import { CredentialsAPI, CredentialTypesAPI } from '../../../api';
import { Credential } from '../../../types';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const PluginInputMetadata = styled(CodeEditor)`
grid-column: 1 / -1;
@ -183,6 +184,11 @@ function CredentialDetail({ i18n, credential }) {
fetchDetails();
}, [fetchDetails]);
const deleteDetailsRequests = relatedResourceDeleteRequests.credential(
credential,
i18n
);
if (hasContentLoading) {
return <ContentLoading />;
}
@ -270,9 +276,14 @@ function CredentialDetail({ i18n, credential }) {
{user_capabilities.delete && (
<DeleteButton
name={name}
itemToDelete={credential}
modalTitle={i18n._(t`Delete Credential`)}
onConfirm={deleteCredential}
isLoading={isLoading}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This credential is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -65,6 +65,12 @@ describe('<CredentialDetail />', () => {
expect(wrapper.find('CredentialDetail').length).toBe(1);
});
test('should have proper number of delete detail requests', () => {
expect(
wrapper.find('DeleteButton').prop('deleteDetailsRequests')
).toHaveLength(6);
});
test('should render details', () => {
expectDetailToMatch(wrapper, 'Name', mockCredential.name);
expectDetailToMatch(wrapper, 'Description', mockCredential.description);

View File

@ -1,6 +1,6 @@
import React, { useCallback, useEffect, useState } from 'react';
import { useHistory, useParams } from 'react-router-dom';
import { object } from 'prop-types';
import PropTypes from 'prop-types';
import { CardBody } from '../../../components/Card';
import {
CredentialsAPI,
@ -197,8 +197,8 @@ function CredentialEdit({ credential }) {
);
}
CredentialEdit.proptype = {
inventory: object.isRequired,
CredentialEdit.propTypes = {
credential: PropTypes.objectOf(PropTypes.object).isRequired,
};
export { CredentialEdit as _CredentialEdit };

View File

@ -1,9 +1,10 @@
import React, { useState, useEffect, useCallback } from 'react';
import React, { useEffect, useCallback } from 'react';
import { useLocation } from 'react-router-dom';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Card, PageSection } from '@patternfly/react-core';
import { CredentialsAPI } from '../../../api';
import useSelected from '../../../util/useSelected';
import AlertModal from '../../../components/AlertModal';
import ErrorDetail from '../../../components/ErrorDetail';
import DataListToolbar from '../../../components/DataListToolbar';
@ -18,6 +19,7 @@ import PaginatedTable, {
import useRequest, { useDeleteItems } from '../../../util/useRequest';
import { getQSConfig, parseQueryString } from '../../../util/qs';
import CredentialListItem from './CredentialListItem';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const QS_CONFIG = getQSConfig('credential', {
page: 1,
@ -26,9 +28,7 @@ const QS_CONFIG = getQSConfig('credential', {
});
function CredentialList({ i18n }) {
const [selected, setSelected] = useState([]);
const location = useLocation();
const {
result: {
credentials,
@ -77,8 +77,10 @@ function CredentialList({ i18n }) {
fetchCredentials();
}, [fetchCredentials]);
const isAllSelected =
selected.length > 0 && selected.length === credentials.length;
const { selected, isAllSelected, handleSelect, setSelected } = useSelected(
credentials
);
const {
isLoading: isDeleteLoading,
deleteItems: deleteCredentials,
@ -100,21 +102,12 @@ function CredentialList({ i18n }) {
setSelected([]);
};
const handleSelectAll = isSelected => {
setSelected(isSelected ? [...credentials] : []);
};
const handleSelect = row => {
if (selected.some(s => s.id === row.id)) {
setSelected(selected.filter(s => s.id !== row.id));
} else {
setSelected(selected.concat(row));
}
};
const canAdd =
actions && Object.prototype.hasOwnProperty.call(actions, 'POST');
const deleteDetailsRequests = relatedResourceDeleteRequests.credential(
selected[0],
i18n
);
return (
<PageSection>
<Card>
@ -169,7 +162,9 @@ function CredentialList({ i18n }) {
{...props}
showSelectAll
isAllSelected={isAllSelected}
onSelectAll={handleSelectAll}
onSelectAll={isSelected =>
setSelected(isSelected ? [...credentials] : [])
}
qsConfig={QS_CONFIG}
additionalControls={[
...(canAdd
@ -180,6 +175,11 @@ function CredentialList({ i18n }) {
onDelete={handleDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Credentials`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This credential is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these credentials could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>
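CredentialList now takes its selection state from the useSelected hook (util/useSelected) instead of managing it locally; the hook itself is not shown in this diff. A minimal sketch of the interface the list relies on, reconstructed from the destructured names and the handler logic removed above:

// Presumed interface of useSelected; illustrative, not the shipped implementation.
import { useState } from 'react';

export default function useSelected(list = []) {
  const [selected, setSelected] = useState([]);
  const isAllSelected = selected.length > 0 && selected.length === list.length;

  const handleSelect = row => {
    if (selected.some(s => s.id === row.id)) {
      setSelected(selected.filter(s => s.id !== row.id));
    } else {
      setSelected(selected.concat(row));
    }
  };

  return { selected, isAllSelected, handleSelect, setSelected };
}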

View File

@ -40,6 +40,12 @@ describe('<CredentialList />', () => {
expect(wrapper.find('CredentialList').length).toBe(1);
});
test('should have proper number of delete detail requests', () => {
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(6);
});
test('should fetch credentials from api and render them in the list', () => {
expect(CredentialsAPI.read).toHaveBeenCalled();
expect(wrapper.find('CredentialListItem').length).toBe(5);

View File

@ -1,4 +1,4 @@
import React, { useCallback } from 'react';
import React, { useCallback, useEffect } from 'react';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { Link, useHistory } from 'react-router-dom';
@ -16,6 +16,11 @@ import {
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { CredentialTypesAPI } from '../../../api';
import { jsonToYaml } from '../../../util/yaml';
import {
relatedResourceDeleteRequests,
getRelatedResourceDeleteCounts,
} from '../../../util/getRelatedResourceDeleteDetails';
import ErrorDetail from '../../../components/ErrorDetail';
function CredentialTypeDetails({ credentialType, i18n }) {
const { id, name, description, injectors, inputs } = credentialType;
@ -32,7 +37,35 @@ function CredentialTypeDetails({ credentialType, i18n }) {
}, [id, history])
);
const { error, dismissError } = useDismissableError(deleteError);
const {
result: { isDeleteDisabled },
error: deleteDetailsError,
request: fetchDeleteDetails,
} = useRequest(
useCallback(async () => {
const {
results: deleteDetails,
error,
} = await getRelatedResourceDeleteCounts(
relatedResourceDeleteRequests.credentialType(credentialType, i18n)
);
if (error) {
throw new Error(error);
}
if (deleteDetails) {
return { isDeleteDisabled: true };
}
return { isDeleteDisabled: false };
}, [credentialType, i18n]),
{ isDeleteDisabled: false }
);
useEffect(() => {
fetchDeleteDetails();
}, [fetchDeleteDetails]);
const { error, dismissError } = useDismissableError(
deleteError || deleteDetailsError
);
return (
<CardBody>
@ -82,7 +115,13 @@ function CredentialTypeDetails({ credentialType, i18n }) {
name={name}
modalTitle={i18n._(t`Delete credential type`)}
onConfirm={deleteCredentialType}
isDisabled={isLoading}
isDisabled={isLoading || isDeleteDisabled}
disabledTooltip={
isDeleteDisabled &&
i18n._(
t`This credential type is currently being used by some credentials and cannot be deleted`
)
}
>
{i18n._(t`Delete`)}
</DeleteButton>
@ -95,7 +134,9 @@ function CredentialTypeDetails({ credentialType, i18n }) {
onClose={dismissError}
title={i18n._(t`Error`)}
variant="error"
/>
>
<ErrorDetail error={error} />
</AlertModal>
)}
</CardBody>
);

View File

@ -3,7 +3,7 @@ import { act } from 'react-dom/test-utils';
import { createMemoryHistory } from 'history';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import { CredentialTypesAPI } from '../../../api';
import { CredentialTypesAPI, CredentialsAPI } from '../../../api';
import { jsonToYaml } from '../../../util/yaml';
import CredentialTypeDetails from './CredentialTypeDetails';
@ -66,6 +66,10 @@ function expectDetailToMatch(wrapper, label, value) {
describe('<CredentialTypeDetails/>', () => {
let wrapper;
afterEach(() => {
wrapper.unmount();
jest.clearAllMocks();
});
test('should render details properly', async () => {
await act(async () => {
wrapper = mountWithContexts(
@ -92,6 +96,38 @@ describe('<CredentialTypeDetails/>', () => {
);
});
test('should disable delete and show proper tooltip', async () => {
CredentialsAPI.read.mockResolvedValue({ data: { count: 15 } });
await act(async () => {
wrapper = mountWithContexts(
<CredentialTypeDetails credentialType={credentialTypeData} />
);
});
wrapper.update();
expect(wrapper.find('DeleteButton').prop('disabledTooltip')).toBe(
'This credential type is currently being used by some credentials and cannot be deleted'
);
expect(wrapper.find('Button[aria-label="Delete"]').prop('isDisabled')).toBe(
true
);
expect(wrapper.find('Tooltip').length).toBe(1);
expect(wrapper.find('Tooltip').prop('content')).toBe(
'This credential type is currently being used by some credentials and cannot be deleted'
);
});
test('should throw error', async () => {
CredentialsAPI.read.mockRejectedValue(new Error('error'));
await act(async () => {
wrapper = mountWithContexts(
<CredentialTypeDetails credentialType={credentialTypeData} />
);
});
wrapper.update();
expect(wrapper.find('ErrorDetail').length).toBe(1);
});
test('expected api call is made for delete', async () => {
const history = createMemoryHistory({
initialEntries: ['/credential_types/42/details'],

View File

@ -19,7 +19,7 @@ import PaginatedTable, {
import ErrorDetail from '../../../components/ErrorDetail';
import AlertModal from '../../../components/AlertModal';
import DatalistToolbar from '../../../components/DataListToolbar';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
import CredentialTypeListItem from './CredentialTypeListItem';
const QS_CONFIG = getQSConfig('credential-type', {
@ -106,6 +106,11 @@ function CredentialTypeList({ i18n }) {
const canAdd = actions && actions.POST;
const deleteDetailsRequests = relatedResourceDeleteRequests.credentialType(
selected[0],
i18n
);
return (
<>
<PageSection>
@ -162,6 +167,11 @@ function CredentialTypeList({ i18n }) {
onDelete={handleDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Credential Types`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This credential type is currently being used by some credentials and cannot be deleted.} other {Credential types that are being used by credentials cannot be deleted. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>

View File

@ -6,10 +6,11 @@ import {
waitForElement,
} from '../../../../testUtils/enzymeHelpers';
import { CredentialTypesAPI } from '../../../api';
import { CredentialTypesAPI, CredentialsAPI } from '../../../api';
import CredentialTypeList from './CredentialTypeList';
jest.mock('../../../api/models/CredentialTypes');
jest.mock('../../../api/models/Credentials');
const credentialTypes = {
data: {
@ -49,6 +50,12 @@ describe('<CredentialTypeList', () => {
await waitForElement(wrapper, 'CredentialTypeList', el => el.length > 0);
});
test('should have proper number of delete detail requests', () => {
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(1);
});
test('should have data fetched and render 2 rows', async () => {
CredentialTypesAPI.read.mockResolvedValue(credentialTypes);
CredentialTypesAPI.readOptions.mockResolvedValue(options);
@ -65,6 +72,7 @@ describe('<CredentialTypeList', () => {
test('should delete item successfully', async () => {
CredentialTypesAPI.read.mockResolvedValue(credentialTypes);
CredentialTypesAPI.readOptions.mockResolvedValue(options);
CredentialsAPI.read.mockResolvedValue({ data: { count: 0 } });
await act(async () => {
wrapper = mountWithContexts(<CredentialTypeList />);

View File

@ -15,6 +15,7 @@ import {
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { toTitleCase } from '../../../util/strings';
import { ExecutionEnvironmentsAPI } from '../../../api';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
const history = useHistory();
@ -41,7 +42,10 @@ function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
);
const { error, dismissError } = useDismissableError(deleteError);
const deleteDetailsRequests = relatedResourceDeleteRequests.executionEnvironment(
executionEnvironment,
i18n
);
return (
<CardBody>
<DetailList>
@ -120,6 +124,10 @@ function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) {
onConfirm={deleteExecutionEnvironment}
isDisabled={isLoading}
ouiaId="delete-button"
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This execution environment is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -175,4 +175,22 @@ describe('<ExecutionEnvironmentDetails/>', () => {
expect(wrapper.find('Button[aria-label="Delete"]')).toHaveLength(0);
});
test('should have proper number of delete detail requests', async () => {
const history = createMemoryHistory({
initialEntries: ['/execution_environments/42/details'],
});
await act(async () => {
wrapper = mountWithContexts(
<ExecutionEnvironmentDetails
executionEnvironment={executionEnvironment}
/>,
{
context: { router: { history } },
}
);
});
expect(
wrapper.find('DeleteButton').prop('deleteDetailsRequests')
).toHaveLength(4);
});
});

View File

@ -6,10 +6,22 @@ import {
waitForElement,
} from '../../../../testUtils/enzymeHelpers';
import { ExecutionEnvironmentsAPI } from '../../../api';
import {
ExecutionEnvironmentsAPI,
InventorySourcesAPI,
WorkflowJobTemplateNodesAPI,
OrganizationsAPI,
ProjectsAPI,
UnifiedJobTemplatesAPI,
} from '../../../api';
import ExecutionEnvironmentList from './ExecutionEnvironmentList';
jest.mock('../../../api/models/ExecutionEnvironments');
jest.mock('../../../api/models/UnifiedJobTemplates');
jest.mock('../../../api/models/Projects');
jest.mock('../../../api/models/Organizations');
jest.mock('../../../api/models/InventorySources');
jest.mock('../../../api/models/WorkflowJobTemplateNodes');
const executionEnvironments = {
data: {
@ -43,6 +55,16 @@ describe('<ExecutionEnvironmentList/>', () => {
beforeEach(() => {
ExecutionEnvironmentsAPI.read.mockResolvedValue(executionEnvironments);
ExecutionEnvironmentsAPI.readOptions.mockResolvedValue(options);
InventorySourcesAPI.read.mockResolvedValue({
data: { results: [{ id: 10000000 }] },
});
WorkflowJobTemplateNodesAPI.read.mockResolvedValue({ data: { count: 0 } });
OrganizationsAPI.read.mockResolvedValue({ data: { count: 0 } });
UnifiedJobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } });
ProjectsAPI.read.mockResolvedValue({ data: { count: 0 } });
});
afterEach(() => {
@ -144,6 +166,11 @@ describe('<ExecutionEnvironmentList/>', () => {
);
wrapper.update();
await waitForElement(
wrapper,
'Button[aria-label="confirm delete"]',
el => el.length > 0
);
await act(async () =>
wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')()
);
@ -185,4 +212,17 @@ describe('<ExecutionEnvironmentList/>', () => {
waitForElement(wrapper, 'ExecutionEnvironmentList', el => el.length > 0);
expect(wrapper.find('ToolbarAddButton').length).toBe(0);
});
test('should have proper number of delete detail requests', async () => {
ExecutionEnvironmentsAPI.read.mockResolvedValue(executionEnvironments);
ExecutionEnvironmentsAPI.readOptions.mockResolvedValue({
data: { actions: { POST: false } },
});
await act(async () => {
wrapper = mountWithContexts(<ExecutionEnvironmentList />);
});
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(4);
});
});

View File

@ -19,7 +19,7 @@ import PaginatedTable, {
import ErrorDetail from '../../../components/ErrorDetail';
import AlertModal from '../../../components/AlertModal';
import DatalistToolbar from '../../../components/DataListToolbar';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
import ExecutionEnvironmentsListItem from './ExecutionEnvironmentListItem';
const QS_CONFIG = getQSConfig('execution_environments', {
@ -105,7 +105,10 @@ function ExecutionEnvironmentList({ i18n }) {
};
const canAdd = actions && actions.POST;
const deleteDetailsRequests = relatedResourceDeleteRequests.executionEnvironment(
selected[0],
i18n
);
return (
<>
<PageSection>
@ -181,6 +184,11 @@ function ExecutionEnvironmentList({ i18n }) {
onDelete={handleDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Execution Environments`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This execution environment is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these execution environments could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>

View File

@ -101,7 +101,6 @@
"VIRTUAL_ENV": "/var/lib/awx/venv/ansible",
"INVENTORY_ID": "1",
"MAX_EVENT_RES": "700000",
"PROOT_TMP_DIR": "/tmp",
"ANSIBLE_LIBRARY": "/awx_devel/awx/plugins/library",
"SDB_NOTIFY_HOST": "docker.for.mac.host.internal",
"AWX_GROUP_QUEUES": "tower",

View File

@ -16,6 +16,7 @@ import {
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { jsonToYaml, isJsonString } from '../../../util/yaml';
import { InstanceGroupsAPI } from '../../../api';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
function ContainerGroupDetails({ instanceGroup, i18n }) {
const { id, name } = instanceGroup;
@ -34,7 +35,10 @@ function ContainerGroupDetails({ instanceGroup, i18n }) {
);
const { error, dismissError } = useDismissableError(deleteError);
const deleteDetailsRequests = relatedResourceDeleteRequests.instanceGroup(
instanceGroup,
i18n
);
return (
<CardBody>
<DetailList>
@ -101,6 +105,10 @@ function ContainerGroupDetails({ instanceGroup, i18n }) {
modalTitle={i18n._(t`Delete instance group`)}
onConfirm={deleteInstanceGroup}
isDisabled={isLoading}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This container group is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -16,6 +16,7 @@ import {
} from '../../../components/DetailList';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { InstanceGroupsAPI } from '../../../api';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const Unavailable = styled.span`
color: var(--pf-global--danger-color--200);
@ -38,7 +39,10 @@ function InstanceGroupDetails({ instanceGroup, i18n }) {
);
const { error, dismissError } = useDismissableError(deleteError);
const deleteDetailsRequests = relatedResourceDeleteRequests.instanceGroup(
instanceGroup,
i18n
);
const verifyInstanceGroup = item => {
if (item.is_isolated) {
return (
@ -142,6 +146,10 @@ function InstanceGroupDetails({ instanceGroup, i18n }) {
modalTitle={i18n._(t`Delete instance group`)}
onConfirm={deleteInstanceGroup}
isDisabled={isLoading}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This instance group is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -17,7 +17,7 @@ import ErrorDetail from '../../../components/ErrorDetail';
import AlertModal from '../../../components/AlertModal';
import DatalistToolbar from '../../../components/DataListToolbar';
import AddDropDownButton from '../../../components/AddDropDownButton';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
import InstanceGroupListItem from './InstanceGroupListItem';
const QS_CONFIG = getQSConfig('instance-group', {
@ -186,7 +186,10 @@ function InstanceGroupList({ i18n }) {
? `${match.url}/container_group/${item.id}/details`
: `${match.url}/${item.id}/details`;
};
const deleteDetailsRequests = relatedResourceDeleteRequests.instanceGroup(
selected[0],
i18n
);
return (
<>
<PageSection>
@ -218,6 +221,11 @@ function InstanceGroupList({ i18n }) {
itemsToDelete={modifiedSelected}
pluralizedItemName={i18n._(t`Instance Groups`)}
errorMessage={errorMessageDelete}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This instance group is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these instance groups could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>

View File

@ -6,10 +6,18 @@ import {
waitForElement,
} from '../../../../testUtils/enzymeHelpers';
import { InstanceGroupsAPI } from '../../../api';
import {
InstanceGroupsAPI,
OrganizationsAPI,
InventoriesAPI,
UnifiedJobTemplatesAPI,
} from '../../../api';
import InstanceGroupList from './InstanceGroupList';
jest.mock('../../../api/models/InstanceGroups');
jest.mock('../../../api/models/Organizations');
jest.mock('../../../api/models/Inventories');
jest.mock('../../../api/models/UnifiedJobTemplates');
const instanceGroups = {
data: {
@ -44,6 +52,9 @@ const instanceGroups = {
};
const options = { data: { actions: { POST: true } } };
OrganizationsAPI.read.mockResolvedValue({ data: { count: 0 } });
InventoriesAPI.read.mockResolvedValue({ data: { count: 0 } });
UnifiedJobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } });
describe('<InstanceGroupList />', () => {
let wrapper;

View File

@ -19,6 +19,7 @@ import ChipGroup from '../../../components/ChipGroup';
import { InventoriesAPI } from '../../../api';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { Inventory } from '../../../types';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
function InventoryDetail({ inventory, i18n }) {
const history = useHistory();
@ -54,6 +55,11 @@ function InventoryDetail({ inventory, i18n }) {
user_capabilities: userCapabilities,
} = inventory.summary_fields;
const deleteDetailsRequests = relatedResourceDeleteRequests.inventory(
inventory,
i18n
);
if (isLoading) {
return <ContentLoading />;
}
@ -126,6 +132,10 @@ function InventoryDetail({ inventory, i18n }) {
name={inventory.name}
modalTitle={i18n._(t`Delete Inventory`)}
onConfirm={deleteInventory}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This inventory is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -105,6 +105,18 @@ describe('<InventoryDetail />', () => {
expect(dates.at(1).prop('date')).toEqual(mockInventory.modified);
});
test('should have proper number of delete detail requests', async () => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<InventoryDetail inventory={mockInventory} />
);
});
expect(
wrapper.find('DeleteButton').prop('deleteDetailsRequests')
).toHaveLength(2);
});
test('should load instance groups', async () => {
InventoriesAPI.readInstanceGroups.mockResolvedValue({
data: {

View File

@ -17,6 +17,7 @@ import { getQSConfig, parseQueryString } from '../../../util/qs';
import useWsInventories from './useWsInventories';
import AddDropDownButton from '../../../components/AddDropDownButton';
import InventoryListItem from './InventoryListItem';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const QS_CONFIG = getQSConfig('inventory', {
page: 1,
@ -126,6 +127,12 @@ function InventoryList({ i18n }) {
}
}
};
const deleteDetailsRequests = relatedResourceDeleteRequests.inventory(
selected[0],
i18n
);
const addInventory = i18n._(t`Add inventory`);
const addSmartInventory = i18n._(t`Add smart inventory`);
const addButton = (
@ -216,6 +223,11 @@ function InventoryList({ i18n }) {
'{numItemsToDelete, plural, one {The inventory will be in a pending status until the final delete is processed.} other {The inventories will be in a pending status until the final delete is processed.}}',
{ numItemsToDelete: selected.length }
)}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This inventory is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these inventories could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
deleteDetailsRequests={deleteDetailsRequests}
/>,
]}
/>

View File

@ -1,11 +1,17 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { InventoriesAPI } from '../../../api';
import {
InventoriesAPI,
JobTemplatesAPI,
WorkflowJobTemplatesAPI,
} from '../../../api';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import InventoryList from './InventoryList';
jest.mock('../../../api');
jest.mock('../../../api/models/Inventories');
jest.mock('../../../api/models/JobTemplates');
jest.mock('../../../api/models/WorkflowJobTemplates');
const mockInventories = [
{
@ -136,6 +142,8 @@ describe('<InventoryList />', () => {
},
},
});
JobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } });
WorkflowJobTemplatesAPI.read.mockResolvedValue({ data: { count: 0 } });
debug = global.console.debug; // eslint-disable-line prefer-destructuring
global.console.debug = () => {};
});
@ -155,6 +163,16 @@ describe('<InventoryList />', () => {
expect(wrapper.find('InventoryListItem')).toHaveLength(3);
});
test('should have proper number of delete detail requests', async () => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(<InventoryList />);
});
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(2);
});
test('should select inventory when checked', async () => {
let wrapper;
await act(async () => {

View File

@ -22,6 +22,7 @@ import ErrorDetail from '../../../components/ErrorDetail';
import Popover from '../../../components/Popover';
import useRequest from '../../../util/useRequest';
import { InventorySourcesAPI } from '../../../api';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
function InventorySourceDetail({ inventorySource, i18n }) {
const {
@ -96,6 +97,12 @@ function InventorySourceDetail({ inventorySource, i18n }) {
}
};
const deleteDetailsRequests = relatedResourceDeleteRequests.inventorySource(
inventorySource.inventory,
i18n,
inventorySource
);
const VERBOSITY = {
0: i18n._(t`0 (Warning)`),
1: i18n._(t`1 (Info)`),
@ -281,6 +288,10 @@ function InventorySourceDetail({ inventorySource, i18n }) {
name={name}
modalTitle={i18n._(t`Delete inventory source`)}
onConfirm={handleDelete}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This inventory source is currently being used by other resources that rely on it. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -7,9 +7,20 @@ import {
} from '../../../../testUtils/enzymeHelpers';
import InventorySourceDetail from './InventorySourceDetail';
import mockInvSource from '../shared/data.inventory_source.json';
import { InventorySourcesAPI } from '../../../api';
import {
InventorySourcesAPI,
InventoriesAPI,
WorkflowJobTemplateNodesAPI,
} from '../../../api';
jest.mock('../../../api/models/InventorySources');
jest.mock('../../../api/models/Inventories');
jest.mock('../../../api/models/WorkflowJobTemplateNodes');
InventoriesAPI.updateSources.mockResolvedValue({
data: [{ inventory_source: 1 }],
});
WorkflowJobTemplateNodesAPI.read.mockResolvedValue({ data: { count: 0 } });
InventorySourcesAPI.readOptions.mockResolvedValue({
data: {
actions: {
@ -101,6 +112,17 @@ describe('InventorySourceDetail', () => {
expect(wrapper.find('InventorySourceSyncButton')).toHaveLength(1);
});
test('should have proper number of delete detail requests', async () => {
await act(async () => {
wrapper = mountWithContexts(
<InventorySourceDetail inventorySource={mockInvSource} />
);
});
expect(
wrapper.find('DeleteButton').prop('deleteDetailsRequests')
).toHaveLength(3);
});
test('should hide expected action buttons for users without permissions', async () => {
const userCapabilities = {
edit: false,

View File

@ -20,6 +20,7 @@ import AlertModal from '../../../components/AlertModal/AlertModal';
import ErrorDetail from '../../../components/ErrorDetail/ErrorDetail';
import InventorySourceListItem from './InventorySourceListItem';
import useWsInventorySources from './useWsInventorySources';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const QS_CONFIG = getQSConfig('inventory', {
not__source: '',
@ -142,6 +143,12 @@ function InventorySourceList({ i18n }) {
sourceChoicesOptions &&
Object.prototype.hasOwnProperty.call(sourceChoicesOptions, 'POST');
const listUrl = `/inventories/${inventoryType}/${id}/sources/`;
const deleteDetailsRequests = relatedResourceDeleteRequests.inventorySource(
id,
i18n,
selected[0]
);
return (
<>
<PaginatedDataList
@ -174,6 +181,11 @@ function InventorySourceList({ i18n }) {
onDelete={handleDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Inventory Sources`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This inventory source is currently being used by other resources that rely on it. Are you sure you want to delete it?} other {Deleting these inventory sources could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
...(canSyncSources
? [

View File

@ -2,7 +2,11 @@ import React from 'react';
import { Route } from 'react-router-dom';
import { createMemoryHistory } from 'history';
import { act } from 'react-dom/test-utils';
import { InventoriesAPI, InventorySourcesAPI } from '../../../api';
import {
InventoriesAPI,
InventorySourcesAPI,
WorkflowJobTemplateNodesAPI,
} from '../../../api';
import {
mountWithContexts,
waitForElement,
@ -13,6 +17,7 @@ import InventorySourceList from './InventorySourceList';
jest.mock('../../../api/models/InventorySources');
jest.mock('../../../api/models/Inventories');
jest.mock('../../../api/models/InventoryUpdates');
jest.mock('../../../api/models/WorkflowJobTemplateNodes');
const sources = {
data: {
@ -61,6 +66,12 @@ describe('<InventorySourceList />', () => {
debug = global.console.debug; // eslint-disable-line prefer-destructuring
global.console.debug = () => {};
InventoriesAPI.readSources.mockResolvedValue(sources);
InventoriesAPI.updateSources.mockResolvedValue({
data: [{ inventory_source: 1 }],
});
InventorySourcesAPI.readGroups.mockResolvedValue({ data: { count: 0 } });
InventorySourcesAPI.readHosts.mockResolvedValue({ data: { count: 0 } });
WorkflowJobTemplateNodesAPI.read.mockResolvedValue({ data: { count: 0 } });
InventorySourcesAPI.readOptions.mockResolvedValue({
data: {
actions: {
@ -119,6 +130,12 @@ describe('<InventorySourceList />', () => {
expect(InventorySourcesAPI.readOptions).toHaveBeenCalled();
});
test('should have proper number of delete detail requests', async () => {
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(3);
});
test('source data should render properly', async () => {
await waitForElement(wrapper, 'InventorySourceList', el => el.length > 0);
expect(

View File

@ -122,20 +122,14 @@ const SCMSubForm = ({ autoPopulateProject, i18n }) => {
onSelect={(event, value) => {
setIsOpen(false);
value = value.trim();
if (!value.endsWith('/')) {
value += '/';
}
sourcePathHelpers.setValue(value);
}}
aria-label={i18n._(t`Select source path`)}
placeholder={i18n._(t`Select source path`)}
createText={i18n._(t`Set source path to`)}
isCreatable
onCreateOption={value => {
value = value.trim();
if (!value.endsWith('/')) {
value += '/';
}
setSourcePath([...sourcePath, value]);
}}
>

View File

@ -98,7 +98,7 @@ describe('<SCMSubForm />', () => {
});
wrapper.update();
expect(wrapper.find('Select#source_path').prop('selections')).toEqual(
'bar/'
'bar'
);
await act(async () => {
@ -138,7 +138,7 @@ describe('<SCMSubForm />', () => {
customWrapper.find('Select').invoke('onSelect')({}, 'newPath');
});
customWrapper.update();
expect(customWrapper.find('Select').prop('selections')).toBe('newPath/');
expect(customWrapper.find('Select').prop('selections')).toBe('newPath');
});
test('Update on project update should be disabled', async () => {
const customInitialValues = {

View File

@ -101,7 +101,6 @@
"VIRTUAL_ENV": "/var/lib/awx/venv/ansible",
"INVENTORY_ID": "1",
"MAX_EVENT_RES": "700000",
"PROOT_TMP_DIR": "/tmp",
"ANSIBLE_LIBRARY": "/awx_devel/awx/plugins/library",
"SDB_NOTIFY_HOST": "docker.for.mac.host.internal",
"AWX_GROUP_QUEUES": "tower",

View File

@ -12,20 +12,32 @@ import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
import { CaretLeftIcon } from '@patternfly/react-icons';
import { Card, PageSection } from '@patternfly/react-core';
import { JobsAPI } from '../../api';
import ContentError from '../../components/ContentError';
import ContentLoading from '../../components/ContentLoading';
import RoutedTabs from '../../components/RoutedTabs';
import useRequest from '../../util/useRequest';
import { getJobModel } from '../../util/jobs';
import JobDetail from './JobDetail';
import JobOutput from './JobOutput';
import { WorkflowOutput } from './WorkflowOutput';
import useWsJob from './useWsJob';
// maps the displayed url segments to actual api types
export const JOB_URL_SEGMENT_MAP = {
playbook: 'job',
project: 'project_update',
management: 'system_job',
inventory: 'inventory_update',
command: 'ad_hoc_command',
workflow: 'workflow_job',
};
function Job({ i18n, setBreadcrumb }) {
const { id, type } = useParams();
const { id, typeSegment } = useParams();
const match = useRouteMatch();
const type = JOB_URL_SEGMENT_MAP[typeSegment];
const {
isLoading,
error,
@ -34,12 +46,11 @@ function Job({ i18n, setBreadcrumb }) {
} = useRequest(
useCallback(async () => {
let eventOptions = {};
const { data: jobDetailData } = await JobsAPI.readDetail(id, type);
if (jobDetailData.type !== 'workflow_job') {
const { data: jobEventOptions } = await JobsAPI.readEventOptions(
id,
const { data: jobDetailData } = await getJobModel(type).readDetail(id);
if (type !== 'workflow_job') {
const { data: jobEventOptions } = await getJobModel(
type
);
).readEventOptions(id);
eventOptions = jobEventOptions;
}
if (
@ -49,7 +60,7 @@ function Job({ i18n, setBreadcrumb }) {
) {
const {
data: { results },
} = await JobsAPI.readCredentials(jobDetailData.id, type);
} = await getJobModel(type).readCredentials(jobDetailData.id);
jobDetailData.summary_fields.credentials = results;
}
@ -125,37 +136,37 @@ function Job({ i18n, setBreadcrumb }) {
<Card>
<RoutedTabs tabsArray={tabsArray} />
<Switch>
<Redirect from="/jobs/:type/:id" to="/jobs/:type/:id/output" exact />
{job &&
job.type === 'workflow_job' && [
<Route key="workflow-details" path="/jobs/workflow/:id/details">
<JobDetail type={match.params.type} job={job} />
</Route>,
<Route key="workflow-output" path="/jobs/workflow/:id/output">
<Redirect
from="/jobs/:typeSegment/:id"
to="/jobs/:typeSegment/:id/output"
exact
/>
{job && [
<Route
key={job.type === 'workflow_job' ? 'workflow-details' : 'details'}
path="/jobs/:typeSegment/:id/details"
>
<JobDetail job={job} />
</Route>,
<Route key="output" path="/jobs/:typeSegment/:id/output">
{job.type === 'workflow_job' ? (
<WorkflowOutput job={job} />
</Route>,
]}
{job &&
job.type !== 'workflow_job' && [
<Route key="details" path="/jobs/:type/:id/details">
<JobDetail type={type} job={job} />
</Route>,
<Route key="output" path="/jobs/:type/:id/output">
) : (
<JobOutput
type={type}
job={job}
eventRelatedSearchableKeys={eventRelatedSearchableKeys}
eventSearchableKeys={eventSearchableKeys}
/>
</Route>,
<Route key="not-found" path="*">
<ContentError isNotFound>
<Link to={`/jobs/${type}/${id}/details`}>
{i18n._(t`View Job Details`)}
</Link>
</ContentError>
</Route>,
]}
)}
</Route>,
<Route key="not-found" path="*">
<ContentError isNotFound>
<Link to={`/jobs/${typeSegment}/${id}/details`}>
{i18n._(t`View Job Details`)}
</Link>
</ContentError>
</Route>,
]}
</Switch>
</Card>
</PageSection>
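The getJobModel helper used throughout this refactor lives in util/jobs and is not shown in this diff. A minimal sketch of what it presumably does — look up the API model class for a given job type so the per-type switch statements removed below can be dropped — could look like this; the exact mapping and the isJobRunning status list are assumptions:

import {
  AdHocCommandsAPI,
  InventoryUpdatesAPI,
  JobsAPI,
  ProjectUpdatesAPI,
  SystemJobsAPI,
  WorkflowJobsAPI,
} from '../api';

// Map API job types to their model classes so callers can write
// getJobModel(job.type).destroy(job.id) instead of switching on the type.
const JOB_TYPE_MODELS = {
  job: JobsAPI,
  project_update: ProjectUpdatesAPI,
  system_job: SystemJobsAPI,
  inventory_update: InventoryUpdatesAPI,
  ad_hoc_command: AdHocCommandsAPI,
  workflow_job: WorkflowJobsAPI,
};

export function getJobModel(type) {
  return JOB_TYPE_MODELS[type] || JobsAPI;
}

// Assumed helper: treat any non-terminal status as "running".
export function isJobRunning(status) {
  return ['new', 'pending', 'waiting', 'running'].includes(status);
}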

View File

@ -1,5 +1,5 @@
import 'styled-components/macro';
import React, { useState } from 'react';
import React, { useCallback, useState } from 'react';
import { Link, useHistory } from 'react-router-dom';
import { withI18n } from '@lingui/react';
import { t } from '@lingui/macro';
@ -25,17 +25,11 @@ import {
} from '../../../components/LaunchButton';
import StatusIcon from '../../../components/StatusIcon';
import ExecutionEnvironmentDetail from '../../../components/ExecutionEnvironmentDetail';
import { getJobModel, isJobRunning } from '../../../util/jobs';
import { toTitleCase } from '../../../util/strings';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { formatDateString } from '../../../util/dates';
import { Job } from '../../../types';
import {
JobsAPI,
ProjectUpdatesAPI,
SystemJobsAPI,
WorkflowJobsAPI,
InventoriesAPI,
AdHocCommandsAPI,
} from '../../../api';
const VariablesInput = styled(_VariablesInput)`
.pf-c-form__label {
@ -77,6 +71,24 @@ function JobDetail({ job, i18n }) {
const [errorMsg, setErrorMsg] = useState();
const history = useHistory();
const [showCancelModal, setShowCancelModal] = useState(false);
const {
error: cancelError,
isLoading: isCancelling,
request: cancelJob,
} = useRequest(
useCallback(async () => {
await getJobModel(job.type).cancel(job.id, job.type);
}, [job.id, job.type]),
{}
);
const {
error: dismissableCancelError,
dismissError: dismissCancelError,
} = useDismissableError(cancelError);
const jobTypes = {
project_update: i18n._(t`Source Control Update`),
inventory_update: i18n._(t`Inventory Sync`),
@ -91,25 +103,7 @@ function JobDetail({ job, i18n }) {
const deleteJob = async () => {
try {
switch (job.type) {
case 'project_update':
await ProjectUpdatesAPI.destroy(job.id);
break;
case 'system_job':
await SystemJobsAPI.destroy(job.id);
break;
case 'workflow_job':
await WorkflowJobsAPI.destroy(job.id);
break;
case 'ad_hoc_command':
await AdHocCommandsAPI.destroy(job.id);
break;
case 'inventory_update':
await InventoriesAPI.destroy(job.id);
break;
default:
await JobsAPI.destroy(job.id);
}
await getJobModel(job.type).destroy(job.id);
history.push('/jobs');
} catch (err) {
setErrorMsg(err);
@ -410,16 +404,75 @@ function JobDetail({ job, i18n }) {
)}
</LaunchButton>
))}
{job.summary_fields.user_capabilities.delete && (
<DeleteButton
name={job.name}
modalTitle={i18n._(t`Delete Job`)}
onConfirm={deleteJob}
>
{i18n._(t`Delete`)}
</DeleteButton>
)}
{isJobRunning(job.status) &&
job?.summary_fields?.user_capabilities?.start && (
<Button
variant="secondary"
aria-label={i18n._(t`Cancel`)}
isDisabled={isCancelling}
onClick={() => setShowCancelModal(true)}
ouiaId="job-detail-cancel-button"
>
{i18n._(t`Cancel`)}
</Button>
)}
{!isJobRunning(job.status) &&
job?.summary_fields?.user_capabilities?.delete && (
<DeleteButton
name={job.name}
modalTitle={i18n._(t`Delete Job`)}
onConfirm={deleteJob}
ouiaId="job-detail-delete-button"
>
{i18n._(t`Delete`)}
</DeleteButton>
)}
</CardActionsRow>
{showCancelModal && isJobRunning(job.status) && (
<AlertModal
isOpen={showCancelModal}
variant="danger"
onClose={() => setShowCancelModal(false)}
title={i18n._(t`Cancel Job`)}
label={i18n._(t`Cancel Job`)}
actions={[
<Button
id="cancel-job-confirm-button"
key="delete"
variant="danger"
isDisabled={isCancelling}
aria-label={i18n._(t`Cancel job`)}
onClick={cancelJob}
>
{i18n._(t`Cancel job`)}
</Button>,
<Button
id="cancel-job-return-button"
key="cancel"
variant="secondary"
aria-label={i18n._(t`Return`)}
onClick={() => setShowCancelModal(false)}
>
{i18n._(t`Return`)}
</Button>,
]}
>
{i18n._(
t`Are you sure you want to submit the request to cancel this job?`
)}
</AlertModal>
)}
{dismissableCancelError && (
<AlertModal
isOpen={dismissableCancelError}
variant="danger"
onClose={dismissCancelError}
title={i18n._(t`Job Cancel Error`)}
label={i18n._(t`Job Cancel Error`)}
>
<ErrorDetail error={dismissableCancelError} />
</AlertModal>
)}
{errorMsg && (
<AlertModal
isOpen={errorMsg}

View File

@ -116,7 +116,7 @@ describe('<JobDetail />', () => {
wrapper.update();
const modal = wrapper.find('Modal');
expect(modal.length).toBe(1);
modal.find('button[aria-label="Delete"]').simulate('click');
modal.find('button[aria-label="Confirm Delete"]').simulate('click');
expect(JobsAPI.destroy).toHaveBeenCalledTimes(1);
});
@ -138,7 +138,7 @@ describe('<JobDetail />', () => {
const modal = wrapper.find('Modal');
expect(modal.length).toBe(1);
await act(async () => {
modal.find('button[aria-label="Delete"]').simulate('click');
modal.find('button[aria-label="Confirm Delete"]').simulate('click');
});
wrapper.update();

View File

@ -37,7 +37,7 @@ import PageControls from './PageControls';
import HostEventModal from './HostEventModal';
import { HostStatusBar, OutputToolbar } from './shared';
import getRowRangePageSize from './shared/jobOutputUtils';
import isJobRunning from '../../../util/jobs';
import { getJobModel, isJobRunning } from '../../../util/jobs';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import {
encodeNonDefaultQueryString,
@ -47,14 +47,6 @@ import {
removeParams,
getQSConfig,
} from '../../../util/qs';
import {
JobsAPI,
ProjectUpdatesAPI,
SystemJobsAPI,
WorkflowJobsAPI,
InventoriesAPI,
AdHocCommandsAPI,
} from '../../../api';
const QS_CONFIG = getQSConfig('job_output', {
order_by: 'start_line',
@ -280,12 +272,7 @@ const cache = new CellMeasurerCache({
defaultHeight: 25,
});
function JobOutput({
job,
type,
eventRelatedSearchableKeys,
eventSearchableKeys,
}) {
function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) {
const location = useLocation();
const listRef = useRef(null);
const isMounted = useRef(false);
@ -348,8 +335,8 @@ function JobOutput({
request: cancelJob,
} = useRequest(
useCallback(async () => {
await JobsAPI.cancel(job.id, type);
}, [job.id, type]),
await getJobModel(job.type).cancel(job.id);
}, [job.id, job.type]),
{}
);
@ -364,27 +351,10 @@ function JobOutput({
error: deleteError,
} = useRequest(
useCallback(async () => {
switch (job.type) {
case 'project_update':
await ProjectUpdatesAPI.destroy(job.id);
break;
case 'system_job':
await SystemJobsAPI.destroy(job.id);
break;
case 'workflow_job':
await WorkflowJobsAPI.destroy(job.id);
break;
case 'ad_hoc_command':
await AdHocCommandsAPI.destroy(job.id);
break;
case 'inventory_update':
await InventoriesAPI.destroy(job.id);
break;
default:
await JobsAPI.destroy(job.id);
}
await getJobModel(job.type).destroy(job.id);
history.push('/jobs');
}, [job, history])
}, [job.type, job.id, history])
);
const {
@ -417,7 +387,7 @@ function JobOutput({
try {
const {
data: { results: fetchedEvents = [], count },
} = await JobsAPI.readEvents(job.id, type, {
} = await getJobModel(job.type).readEvents(job.id, {
page: 1,
page_size: 50,
...parseQueryString(QS_CONFIG, location.search),
@ -557,31 +527,33 @@ function JobOutput({
...parseQueryString(QS_CONFIG, location.search),
};
return JobsAPI.readEvents(job.id, type, params).then(response => {
if (isMounted.current) {
const newResults = {};
let newResultsCssMap = {};
response.data.results.forEach((jobEvent, index) => {
newResults[firstIndex + index] = jobEvent;
const { lineCssMap } = getLineTextHtml(jobEvent);
newResultsCssMap = { ...newResultsCssMap, ...lineCssMap };
});
setResults(prevResults => ({
...prevResults,
...newResults,
}));
setCssMap(prevCssMap => ({
...prevCssMap,
...newResultsCssMap,
}));
setCurrentlyLoading(prevCurrentlyLoading =>
prevCurrentlyLoading.filter(n => !loadRange.includes(n))
);
loadRange.forEach(n => {
cache.clear(n);
});
}
});
return getJobModel(job.type)
.readEvents(job.id, params)
.then(response => {
if (isMounted.current) {
const newResults = {};
let newResultsCssMap = {};
response.data.results.forEach((jobEvent, index) => {
newResults[firstIndex + index] = jobEvent;
const { lineCssMap } = getLineTextHtml(jobEvent);
newResultsCssMap = { ...newResultsCssMap, ...lineCssMap };
});
setResults(prevResults => ({
...prevResults,
...newResults,
}));
setCssMap(prevCssMap => ({
...prevCssMap,
...newResultsCssMap,
}));
setCurrentlyLoading(prevCurrentlyLoading =>
prevCurrentlyLoading.filter(n => !loadRange.includes(n))
);
loadRange.forEach(n => {
cache.clear(n);
});
}
});
};
const scrollToRow = rowIndex => {

View File

@ -188,9 +188,19 @@ describe('<JobOutput />', () => {
wrapper = mountWithContexts(<JobOutput job={mockJob} />);
});
await waitForElement(wrapper, 'JobEvent', el => el.length > 0);
await act(async () => {
wrapper.find('DeleteButton').invoke('onConfirm')();
});
await act(async () =>
wrapper.find('button[aria-label="Delete"]').simulate('click')
);
await waitForElement(
wrapper,
'Modal',
el => el.props().isOpen === true && el.props().title === 'Delete Job'
);
await act(async () =>
wrapper
.find('Modal button[aria-label="Confirm Delete"]')
.simulate('click')
);
expect(JobsAPI.destroy).toHaveBeenCalledTimes(1);
});
@ -268,7 +278,7 @@ describe('<JobOutput />', () => {
wrapper.find(searchBtn).simulate('click');
});
wrapper.update();
expect(JobsAPI.readEvents).toHaveBeenCalledWith(2, undefined, {
expect(JobsAPI.readEvents).toHaveBeenCalledWith(2, {
order_by: 'start_line',
page: 1,
page_size: 50,

View File

@ -55,8 +55,8 @@ function JobTypeRedirect({ id, path, view, i18n }) {
</PageSection>
);
}
const type = JOB_TYPE_URL_SEGMENTS[job.type];
return <Redirect from={path} to={`/jobs/${type}/${job.id}/${view}`} />;
const typeSegment = JOB_TYPE_URL_SEGMENTS[job.type];
return <Redirect from={path} to={`/jobs/${typeSegment}/${job.id}/${view}`} />;
}
JobTypeRedirect.defaultProps = {

View File

@ -21,12 +21,12 @@ function Jobs({ i18n }) {
return;
}
const type = JOB_TYPE_URL_SEGMENTS[job.type];
const typeSegment = JOB_TYPE_URL_SEGMENTS[job.type];
setBreadcrumbConfig({
'/jobs': i18n._(t`Jobs`),
[`/jobs/${type}/${job.id}`]: `${job.name}`,
[`/jobs/${type}/${job.id}/output`]: i18n._(t`Output`),
[`/jobs/${type}/${job.id}/details`]: i18n._(t`Details`),
[`/jobs/${typeSegment}/${job.id}`]: `${job.name}`,
[`/jobs/${typeSegment}/${job.id}/output`]: i18n._(t`Output`),
[`/jobs/${typeSegment}/${job.id}/details`]: i18n._(t`Details`),
});
},
[i18n]
@ -53,7 +53,7 @@ function Jobs({ i18n }) {
<Route path={`${match.path}/:id/output`}>
<TypeRedirect view="output" />
</Route>
<Route path={`${match.path}/:type/:id`}>
<Route path={`${match.path}/:typeSegment/:id`}>
<Job setBreadcrumb={buildBreadcrumbConfig} />
</Route>
<Route path={`${match.path}/:id`}>
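JOB_TYPE_URL_SEGMENTS, used here and in JobTypeRedirect above, is defined elsewhere in the codebase; presumably it is the inverse of the JOB_URL_SEGMENT_MAP added in Job.jsx, translating API job types back into the URL segments used for routes and breadcrumbs, roughly:

// Presumed shape of JOB_TYPE_URL_SEGMENTS (its defining module is not shown
// in this diff): API job type -> displayed URL segment.
export const JOB_TYPE_URL_SEGMENTS = {
  job: 'playbook',
  project_update: 'project',
  system_job: 'management',
  inventory_update: 'inventory',
  ad_hoc_command: 'command',
  workflow_job: 'workflow',
};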

View File

@ -154,7 +154,6 @@
"ANSIBLE_INVENTORY_UNPARSED_FAILED": "True",
"ANSIBLE_PARAMIKO_RECORD_HOST_KEYS": "False",
"ANSIBLE_VENV_PATH": "/var/lib/awx/venv/ansible",
"PROOT_TMP_DIR": "/tmp",
"AWX_PRIVATE_DATA_DIR": "/tmp/awx_2_a4b1afiw",
"ANSIBLE_COLLECTIONS_PATHS": "/tmp/collections",
"PYTHONPATH": "/var/lib/awx/venv/ansible/lib/python2.7/site-packages:/awx_devel/awx/lib:",

View File

@ -1,10 +1,8 @@
import { useState, useEffect } from 'react';
import { useParams } from 'react-router-dom';
import useWebsocket from '../../util/useWebsocket';
import { JobsAPI } from '../../api';
import { getJobModel } from '../../util/jobs';
export default function useWsJob(initialJob) {
const { type } = useParams();
const [job, setJob] = useState(initialJob);
const lastMessage = useWebsocket({
jobs: ['status_changed'],
@ -18,7 +16,7 @@ export default function useWsJob(initialJob) {
useEffect(
function parseWsMessage() {
async function fetchJob() {
const { data } = await JobsAPI.readDetail(job.id, type);
const { data } = await getJobModel(job.type).readDetail(job.id);
setJob(data);
}

View File

@ -20,6 +20,7 @@ import ErrorDetail from '../../../components/ErrorDetail';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { useConfig } from '../../../contexts/Config';
import ExecutionEnvironmentDetail from '../../../components/ExecutionEnvironmentDetail';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
function OrganizationDetail({ i18n, organization }) {
const {
@ -71,6 +72,11 @@ function OrganizationDetail({ i18n, organization }) {
const { error, dismissError } = useDismissableError(deleteError);
const deleteDetailsRequests = relatedResourceDeleteRequests.organization(
organization,
i18n
);
if (hasContentLoading) {
return <ContentLoading />;
}
@ -157,6 +163,10 @@ function OrganizationDetail({ i18n, organization }) {
modalTitle={i18n._(t`Delete Organization`)}
onConfirm={deleteOrganization}
isDisabled={isLoading}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This organization is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

View File

@ -1,7 +1,7 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { OrganizationsAPI } from '../../../api';
import { OrganizationsAPI, CredentialsAPI } from '../../../api';
import {
mountWithContexts,
waitForElement,
@ -44,6 +44,8 @@ describe('<OrganizationDetail />', () => {
};
beforeEach(() => {
CredentialsAPI.read.mockResolvedValue({ data: { count: 0 } });
OrganizationsAPI.readInstanceGroups.mockResolvedValue(mockInstanceGroups);
});
@ -64,6 +66,20 @@ describe('<OrganizationDetail />', () => {
expect(OrganizationsAPI.readInstanceGroups).toHaveBeenCalledTimes(1);
});
test('should have proper number of delete detail requests', async () => {
let component;
await act(async () => {
component = mountWithContexts(
<OrganizationDetail organization={mockOrganization} />
);
});
await waitForElement(component, 'ContentLoading', el => el.length === 0);
expect(
component.find('DeleteButton').prop('deleteDetailsRequests')
).toHaveLength(7);
});
test('should render the expected instance group', async () => {
let component;
await act(async () => {

View File

@ -19,6 +19,7 @@ import PaginatedTable, {
} from '../../../components/PaginatedTable';
import { getQSConfig, parseQueryString } from '../../../util/qs';
import OrganizationListItem from './OrganizationListItem';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
const QS_CONFIG = getQSConfig('organization', {
page: 1,
@ -116,6 +117,10 @@ function OrganizationsList({ i18n }) {
setSelected(selected.concat(row));
}
};
const deleteDetailsRequests = relatedResourceDeleteRequests.organization(
selected[0],
i18n
);
return (
<>
@ -173,6 +178,11 @@ function OrganizationsList({ i18n }) {
onDelete={handleOrgDelete}
itemsToDelete={selected}
pluralizedItemName={i18n._(t`Organizations`)}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
'{numItemsToDelete, plural, one {This organization is currently being used by other resources. Are you sure you want to delete it?} other {Deleting these organizations could impact other resources that rely on them. Are you sure you want to delete anyway?}}',
{ numItemsToDelete: selected.length }
)}
/>,
]}
/>

View File

@ -1,7 +1,7 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { OrganizationsAPI } from '../../../api';
import { OrganizationsAPI, CredentialsAPI } from '../../../api';
import {
mountWithContexts,
waitForElement,
@ -70,6 +70,7 @@ const mockOrganizations = {
describe('<OrganizationsList />', () => {
let wrapper;
beforeEach(() => {
CredentialsAPI.read.mockResolvedValue({ data: { count: 0 } });
OrganizationsAPI.read.mockResolvedValue(mockOrganizations);
OrganizationsAPI.readOptions.mockResolvedValue({
data: {
@ -90,6 +91,20 @@ describe('<OrganizationsList />', () => {
});
});
test('should have proper number of delete detail requests', async () => {
await act(async () => {
wrapper = mountWithContexts(<OrganizationsList />);
});
await waitForElement(
wrapper,
'OrganizationsList',
el => el.find('ContentLoading').length === 0
);
expect(
wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
).toHaveLength(7);
});
test('Items are rendered after loading', async () => {
await act(async () => {
wrapper = mountWithContexts(<OrganizationsList />);

View File

@ -20,6 +20,7 @@ import CredentialChip from '../../../components/CredentialChip';
import { ProjectsAPI } from '../../../api';
import { toTitleCase } from '../../../util/strings';
import useRequest, { useDismissableError } from '../../../util/useRequest';
import { relatedResourceDeleteRequests } from '../../../util/getRelatedResourceDeleteDetails';
import ProjectSyncButton from '../shared/ProjectSyncButton';
function ProjectDetail({ project, i18n }) {
@ -52,7 +53,10 @@ function ProjectDetail({ project, i18n }) {
);
const { error, dismissError } = useDismissableError(deleteError);
const deleteDetailsRequests = relatedResourceDeleteRequests.project(
project,
i18n
);
let optionsList = '';
if (
scm_clean ||
@ -171,6 +175,10 @@ function ProjectDetail({ project, i18n }) {
modalTitle={i18n._(t`Delete Project`)}
onConfirm={deleteProject}
isDisabled={isLoading}
deleteDetailsRequests={deleteDetailsRequests}
deleteMessage={i18n._(
t`This project is currently being used by other resources. Are you sure you want to delete it?`
)}
>
{i18n._(t`Delete`)}
</DeleteButton>

Some files were not shown because too many files have changed in this diff.